From 79b0dfe0fe67106827f7a05914c278fed243dbef Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 19 Sep 2025 14:52:36 -0500 Subject: [PATCH 01/39] Add simulations to the data store --- ...2de483ddcb4e5af564ae1540be844a26d8262.json | 20 ++ Cargo.lock | 1 + crates/datastore/Cargo.toml | 1 + .../1758262637_create_simulations_table.sql | 29 ++ crates/datastore/src/postgres.rs | 238 +++++++++++- crates/datastore/src/traits.rs | 20 +- crates/datastore/tests/datastore.rs | 340 +++++++++++++++++- ui/src/db/relations.ts | 13 + ui/src/db/schema.ts | 51 +++ 9 files changed, 709 insertions(+), 4 deletions(-) create mode 100644 .sqlx/query-4ec98cc82ef5dcab381aa633ccb2de483ddcb4e5af564ae1540be844a26d8262.json create mode 100644 crates/datastore/migrations/1758262637_create_simulations_table.sql diff --git a/.sqlx/query-4ec98cc82ef5dcab381aa633ccb2de483ddcb4e5af564ae1540be844a26d8262.json b/.sqlx/query-4ec98cc82ef5dcab381aa633ccb2de483ddcb4e5af564ae1540be844a26d8262.json new file mode 100644 index 0000000..e63b4df --- /dev/null +++ b/.sqlx/query-4ec98cc82ef5dcab381aa633ccb2de483ddcb4e5af564ae1540be844a26d8262.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO simulations (\n id, bundle_id, block_number, block_hash, execution_time_us, \n gas_used, state_diff, created_at, updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Uuid", + "Int8", + "Bpchar", + "Int8", + "Int8", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "4ec98cc82ef5dcab381aa633ccb2de483ddcb4e5af564ae1540be844a26d8262" +} diff --git a/Cargo.lock b/Cargo.lock index 359e0c9..604a6ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7075,6 +7075,7 @@ dependencies = [ "async-trait", "eyre", "op-alloy-consensus 0.20.0", + "serde_json", "sqlx", "testcontainers", "testcontainers-modules", diff --git a/crates/datastore/Cargo.toml b/crates/datastore/Cargo.toml index a77c22c..8c6b15f 100644 --- a/crates/datastore/Cargo.toml +++ b/crates/datastore/Cargo.toml @@ -15,6 +15,7 @@ alloy-consensus.workspace = true op-alloy-consensus.workspace = true eyre.workspace = true tracing.workspace = true +serde_json.workspace = true [dev-dependencies] testcontainers.workspace = true diff --git a/crates/datastore/migrations/1758262637_create_simulations_table.sql b/crates/datastore/migrations/1758262637_create_simulations_table.sql new file mode 100644 index 0000000..a424086 --- /dev/null +++ b/crates/datastore/migrations/1758262637_create_simulations_table.sql @@ -0,0 +1,29 @@ +-- Create simulations table +CREATE TABLE IF NOT EXISTS simulations ( + id UUID PRIMARY KEY, + bundle_id UUID NOT NULL REFERENCES bundles(id) ON DELETE CASCADE, + + block_number BIGINT NOT NULL, + block_hash CHAR(66) NOT NULL, + execution_time_us BIGINT NOT NULL, + gas_used BIGINT NOT NULL, + + -- State diff mapping accounts to storage slots to values + -- Structure: { "account_address": { "slot": "value", ... }, ... 
+    --   }
+    state_diff JSONB NOT NULL,
+
+    created_at TIMESTAMPTZ NOT NULL,
+    updated_at TIMESTAMPTZ NOT NULL,
+
+    -- Unique constraint: one simulation per bundle per block hash
+    UNIQUE(bundle_id, block_hash)
+);
+
+-- Index for efficient bundle lookups
+CREATE INDEX IF NOT EXISTS idx_simulations_bundle_id ON simulations(bundle_id);
+
+-- Index for block number queries
+CREATE INDEX IF NOT EXISTS idx_simulations_block_number ON simulations(block_number);
+
+-- Index for block hash queries
+CREATE INDEX IF NOT EXISTS idx_simulations_block_hash ON simulations(block_hash);
diff --git a/crates/datastore/src/postgres.rs b/crates/datastore/src/postgres.rs
index 0fea3b4..69353d6 100644
--- a/crates/datastore/src/postgres.rs
+++ b/crates/datastore/src/postgres.rs
@@ -3,11 +3,13 @@ use alloy_consensus::Transaction;
 use alloy_consensus::private::alloy_eips::Decodable2718;
 use alloy_consensus::transaction::SignerRecoverable;
 use alloy_primitives::hex::{FromHex, ToHexExt};
-use alloy_primitives::{Address, TxHash};
+use alloy_primitives::{Address, StorageKey, StorageValue, TxHash};
 use alloy_rpc_types_mev::EthSendBundle;
 use anyhow::Result;
 use op_alloy_consensus::OpTxEnvelope;
+use serde_json::{self, Value};
 use sqlx::PgPool;
+use std::collections::HashMap;
 use tracing::info;
 use uuid::Uuid;
@@ -36,6 +38,17 @@ struct BundleRow {
     state: BundleState,
 }
 
+#[derive(sqlx::FromRow, Debug)]
+struct SimulationRow {
+    id: Uuid,
+    bundle_id: Uuid,
+    block_number: i64,
+    block_hash: String,
+    execution_time_us: i64,
+    gas_used: i64,
+    state_diff: Value,
+}
+
 /// Filter criteria for selecting bundles
 #[derive(Debug, Clone, Default)]
 pub struct BundleFilter {
@@ -75,6 +88,51 @@ pub struct BundleWithMetadata {
     pub state: BundleState,
 }
 
+/// Bundle with its latest simulation
+#[derive(Debug, Clone)]
+pub struct BundleWithLatestSimulation {
+    pub bundle_with_metadata: BundleWithMetadata,
+    pub latest_simulation: Simulation,
+}
+
+/// State diff type: maps account addresses to storage slot mappings
+pub type StateDiff = HashMap<Address, HashMap<StorageKey, StorageValue>>;
+
+/// Simulation data
+#[derive(Debug, Clone)]
+pub struct Simulation {
+    pub id: Uuid,
+    pub bundle_id: Uuid,
+    pub block_number: u64,
+    pub block_hash: String,
+    pub execution_time_us: u64,
+    pub gas_used: u64,
+    pub state_diff: StateDiff,
+}
+
+/// Filter criteria for selecting simulations
+#[derive(Debug, Clone, Default)]
+pub struct SimulationFilter {
+    pub bundle_id: Option<Uuid>,
+    pub block_number: Option<u64>,
+}
+
+impl SimulationFilter {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn for_bundle(mut self, bundle_id: Uuid) -> Self {
+        self.bundle_id = Some(bundle_id);
+        self
+    }
+
+    pub fn for_block(mut self, block_number: u64) -> Self {
+        self.block_number = Some(block_number);
+        self
+    }
+}
+
 /// PostgreSQL implementation of the BundleDatastore trait
 #[derive(Debug, Clone)]
 pub struct PostgresDatastore {
@@ -154,6 +212,20 @@ impl PostgresDatastore {
         })
     }
 
+    fn row_to_simulation(&self, row: SimulationRow) -> Result<Simulation> {
+        let state_diff: StateDiff = serde_json::from_value(row.state_diff)?;
+
+        Ok(Simulation {
+            id: row.id,
+            bundle_id: row.bundle_id,
+            block_number: row.block_number as u64,
+            block_hash: row.block_hash,
+            execution_time_us: row.execution_time_us as u64,
+            gas_used: row.gas_used as u64,
+            state_diff,
+        })
+    }
+
     fn extract_bundle_metadata(
         &self,
         bundle: &EthSendBundle,
@@ -331,4 +403,168 @@ impl BundleDatastore for PostgresDatastore {
             .await?;
         Ok(())
     }
+
+    async fn insert_simulation(
+        &self,
+        bundle_id: Uuid,
+        block_number: u64,
+        block_hash: String,
+        execution_time_us: u64,
+        gas_used: u64,
+        state_diff: StateDiff,
+    ) -> Result<Uuid> {
+        let id = Uuid::new_v4();
+        let state_diff_json = serde_json::to_value(&state_diff)?;
+
+        sqlx::query!(
+            r#"
+            INSERT INTO simulations (
+                id, bundle_id, block_number, block_hash, execution_time_us,
+                gas_used, state_diff, created_at, updated_at
+            )
+            VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())
+            "#,
+            id,
+            bundle_id,
+            block_number as i64,
+            block_hash,
+            execution_time_us as i64,
+            gas_used as i64,
+            state_diff_json
+        )
+        .execute(&self.pool)
+        .await?;
+
+        Ok(id)
+    }
+
+    async fn get_simulation(&self, id: Uuid) -> Result<Option<Simulation>> {
+        let result = sqlx::query_as::<_, SimulationRow>(
+            r#"
+            SELECT id, bundle_id, block_number, block_hash, execution_time_us,
+                   gas_used, state_diff
+            FROM simulations
+            WHERE id = $1
+            "#,
+        )
+        .bind(id)
+        .fetch_optional(&self.pool)
+        .await?;
+
+        match result {
+            Some(row) => {
+                let simulation = self.row_to_simulation(row)?;
+                Ok(Some(simulation))
+            }
+            None => Ok(None),
+        }
+    }
+
+    async fn select_bundles_with_latest_simulation(&self, filter: BundleFilter) -> Result<Vec<BundleWithLatestSimulation>> {
+        let base_fee = filter.base_fee.unwrap_or(0);
+        let block_number = filter.block_number.unwrap_or(0) as i64;
+
+        let (min_ts, max_ts) = if let Some(timestamp) = filter.timestamp {
+            (timestamp as i64, timestamp as i64)
+        } else {
+            // If not specified, set the parameters to be the whole range
+            (i64::MAX, 0i64)
+        };
+
+        let query = r#"
+            WITH latest_simulations AS (
+                SELECT
+                    s.id as sim_id,
+                    s.bundle_id,
+                    s.block_number as sim_block_number,
+                    s.block_hash,
+                    s.execution_time_us,
+                    s.gas_used,
+                    s.state_diff,
+                    ROW_NUMBER() OVER (PARTITION BY s.bundle_id ORDER BY s.block_number DESC) as rn
+                FROM simulations s
+            )
+            SELECT
+                b.senders, b.minimum_base_fee, b.txn_hashes, b.txs,
+                b.reverting_tx_hashes, b.dropping_tx_hashes,
+                b.block_number, b.min_timestamp, b.max_timestamp, b."state",
+                ls.sim_id, ls.bundle_id as sim_bundle_id, ls.sim_block_number,
+                ls.block_hash, ls.execution_time_us, ls.gas_used, ls.state_diff
+            FROM bundles b
+            INNER JOIN latest_simulations ls ON b.id = ls.bundle_id AND ls.rn = 1
+            WHERE b.minimum_base_fee >= $1
+              AND (b.block_number = $2 OR b.block_number IS NULL OR b.block_number = 0 OR $2 = 0)
+              AND (b.min_timestamp <= $3 OR b.min_timestamp IS NULL)
+              AND (b.max_timestamp >= $4 OR b.max_timestamp IS NULL)
+            ORDER BY b.minimum_base_fee DESC
+        "#;
+
+        #[derive(sqlx::FromRow)]
+        struct BundleWithSimulationRow {
+            // Bundle fields
+            senders: Option<Vec<String>>,
+            minimum_base_fee: Option<i64>,
+            txn_hashes: Option<Vec<String>>,
+            txs: Vec<String>,
+            reverting_tx_hashes: Option<Vec<String>>,
+            dropping_tx_hashes: Option<Vec<String>>,
+            block_number: Option<i64>,
+            min_timestamp: Option<i64>,
+            max_timestamp: Option<i64>,
+            state: BundleState,
+            // Simulation fields
+            sim_id: Uuid,
+            sim_bundle_id: Uuid,
+            sim_block_number: i64,
+            block_hash: String,
+            execution_time_us: i64,
+            gas_used: i64,
+            state_diff: Value,
+        }
+
+        let rows = sqlx::query_as::<_, BundleWithSimulationRow>(query)
+            .bind(base_fee)
+            .bind(block_number)
+            .bind(min_ts)
+            .bind(max_ts)
+            .fetch_all(&self.pool)
+            .await?;
+
+        let mut results = Vec::new();
+        for row in rows {
+            // Convert bundle part
+            let bundle_row = BundleRow {
+                senders: row.senders,
+                minimum_base_fee: row.minimum_base_fee,
+                txn_hashes: row.txn_hashes,
+                txs: row.txs,
+                reverting_tx_hashes: row.reverting_tx_hashes,
+                dropping_tx_hashes: row.dropping_tx_hashes,
+                block_number: row.block_number,
+                min_timestamp: row.min_timestamp,
+                max_timestamp: row.max_timestamp,
+                state: row.state,
+            };
+            let bundle_with_metadata = self.row_to_bundle_with_metadata(bundle_row)?;
+
+            // Convert simulation part
+            let simulation_row = SimulationRow {
+                id: row.sim_id,
+                bundle_id: row.sim_bundle_id,
+                block_number: row.sim_block_number,
+                block_hash: row.block_hash,
+                execution_time_us: row.execution_time_us,
+                gas_used: row.gas_used,
+                state_diff: row.state_diff,
+            };
+            let simulation = self.row_to_simulation(simulation_row)?;
+
+            results.push(BundleWithLatestSimulation {
+                bundle_with_metadata,
+                latest_simulation: simulation,
+            });
+        }
+
+        Ok(results)
+    }
 }
diff --git a/crates/datastore/src/traits.rs b/crates/datastore/src/traits.rs
index e5e58f2..1432360 100644
--- a/crates/datastore/src/traits.rs
+++ b/crates/datastore/src/traits.rs
@@ -1,4 +1,4 @@
-use crate::postgres::{BundleFilter, BundleWithMetadata};
+use crate::postgres::{BundleFilter, BundleWithMetadata, BundleWithLatestSimulation, Simulation, StateDiff};
 use alloy_primitives::TxHash;
 use alloy_rpc_types_mev::EthSendBundle;
 use anyhow::Result;
@@ -24,4 +24,22 @@ pub trait BundleDatastore: Send + Sync {
 
     /// Remove a bundle by ID
     async fn remove_bundle(&self, id: Uuid) -> Result<()>;
+
+    /// Insert a new simulation result
+    async fn insert_simulation(
+        &self,
+        bundle_id: Uuid,
+        block_number: u64,
+        block_hash: String,
+        execution_time_us: u64,
+        gas_used: u64,
+        state_diff: StateDiff,
+    ) -> Result<Uuid>;
+
+    /// Get a simulation by its ID
+    async fn get_simulation(&self, id: Uuid) -> Result<Option<Simulation>>;
+
+    /// Select bundles with their latest simulation
+    /// Only bundles that have at least one simulation are returned
+    async fn select_bundles_with_latest_simulation(&self, filter: BundleFilter) -> Result<Vec<BundleWithLatestSimulation>>;
 }
diff --git a/crates/datastore/tests/datastore.rs b/crates/datastore/tests/datastore.rs
index cf2a4af..74d53f0 100644
--- a/crates/datastore/tests/datastore.rs
+++ b/crates/datastore/tests/datastore.rs
@@ -1,12 +1,16 @@
-use alloy_primitives::{Address, Bytes, TxHash, address, b256, bytes};
+use alloy_primitives::{
+    Address, Bytes, StorageKey, StorageValue, TxHash, U256, address, b256, bytes,
+};
 use alloy_rpc_types_mev::EthSendBundle;
 use sqlx::PgPool;
+use std::collections::HashMap;
 use testcontainers_modules::{
     postgres,
     testcontainers::{ContainerAsync, runners::AsyncRunner},
 };
-use tips_datastore::postgres::{BundleFilter, BundleState};
+use tips_datastore::postgres::{BundleFilter, BundleState, StateDiff};
 use tips_datastore::{BundleDatastore, PostgresDatastore};
+use uuid::Uuid;
 
 struct TestHarness {
     _postgres_instance: ContainerAsync<postgres::Postgres>,
@@ -73,6 +77,41 @@ fn create_test_bundle(
     })
 }
 
+fn create_test_state_diff() -> StateDiff {
+    let mut state_diff = HashMap::new();
+
+    // Create test account addresses
+    let account1: Address = "0x742d35cc6635c0532925a3b8d40b33dd33ad7309".parse().unwrap();
+    let account2: Address = "0x24ae36512421f1d9f6e074f00ff5b8393f5dd925".parse().unwrap();
+
+    // Create storage mappings for account1
+    let mut account1_storage = HashMap::new();
+    account1_storage.insert(
+        StorageKey::ZERO,
+        StorageValue::from(U256::from(1)),
+    );
+    account1_storage.insert(
+        StorageKey::from(U256::from(1)),
+        StorageValue::from(U256::from(2)),
+    );
+
+    // Create storage mappings for account2
+    let mut account2_storage = HashMap::new();
+    account2_storage.insert(
+        StorageKey::from(U256::from(3)),
+        StorageValue::from(U256::from(4)),
+    );
+
+    state_diff.insert(account1, account1_storage);
+    state_diff.insert(account2, account2_storage);
+
+    state_diff
+}
+
+fn create_empty_state_diff() -> StateDiff {
+    HashMap::new()
+}
+
 #[tokio::test]
 async fn insert_and_get() -> 
eyre::Result<()> { let harness = setup_datastore().await?; @@ -301,3 +340,300 @@ async fn cancel_bundle_workflow() -> eyre::Result<()> { Ok(()) } + +#[tokio::test] +async fn insert_and_get_simulation() -> eyre::Result<()> { + let harness = setup_datastore().await?; + + // First create a bundle to link the simulation to + let test_bundle = create_test_bundle(12345, Some(1640995200), Some(1640995260))?; + let bundle_id = harness.data_store.insert_bundle(test_bundle).await + .map_err(|e| eyre::eyre!(e))?; + + // Create simulation data + let block_number = 18500000u64; + let block_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef".to_string(); + let execution_time_us = 250000u64; + let gas_used = 21000u64; + let state_diff = create_test_state_diff(); + + // Insert simulation + let simulation_id = harness.data_store.insert_simulation( + bundle_id, + block_number, + block_hash.clone(), + execution_time_us, + gas_used, + state_diff.clone(), + ).await.map_err(|e| eyre::eyre!(e))?; + + // Retrieve simulation + let retrieved_simulation = harness.data_store.get_simulation(simulation_id).await + .map_err(|e| eyre::eyre!(e))?; + assert!(retrieved_simulation.is_some(), "Simulation should be found"); + + let simulation = retrieved_simulation.unwrap(); + assert_eq!(simulation.id, simulation_id); + assert_eq!(simulation.bundle_id, bundle_id); + assert_eq!(simulation.block_number, block_number); + assert_eq!(simulation.block_hash, block_hash); + assert_eq!(simulation.execution_time_us, execution_time_us); + assert_eq!(simulation.gas_used, gas_used); + assert_eq!(simulation.state_diff.len(), state_diff.len()); + + // Verify state diff content + for (account, expected_storage) in &state_diff { + let actual_storage = simulation.state_diff.get(account) + .expect("Account should exist in state diff"); + assert_eq!(actual_storage.len(), expected_storage.len()); + for (slot, expected_value) in expected_storage { + let actual_value = actual_storage.get(slot) + .expect("Storage slot should exist"); + assert_eq!(actual_value, expected_value); + } + } + + Ok(()) +} + +#[tokio::test] +async fn simulation_with_empty_state_diff() -> eyre::Result<()> { + let harness = setup_datastore().await?; + + // Create a bundle + let test_bundle = create_test_bundle(12345, None, None)?; + let bundle_id = harness.data_store.insert_bundle(test_bundle).await + .map_err(|e| eyre::eyre!(e))?; + + // Create simulation with empty state diff + let simulation_id = harness.data_store.insert_simulation( + bundle_id, + 18500000, + "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890".to_string(), + 100000, + 15000, + create_empty_state_diff(), + ).await.map_err(|e| eyre::eyre!(e))?; + + // Retrieve and verify + let simulation = harness.data_store.get_simulation(simulation_id).await + .map_err(|e| eyre::eyre!(e))? 
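+        // get_simulation returns Ok(None) when no row matches, so expect() only fires if the insert above was lost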
+ .expect("Simulation should exist"); + + assert!(simulation.state_diff.is_empty(), "State diff should be empty"); + + Ok(()) +} + +#[tokio::test] +async fn multiple_simulations_latest_selection() -> eyre::Result<()> { + let harness = setup_datastore().await?; + + // Create a single bundle + let test_bundle = create_test_bundle(12345, Some(1000), Some(2000))?; + let bundle_id = harness.data_store.insert_bundle(test_bundle).await + .map_err(|e| eyre::eyre!(e))?; + + // Insert multiple simulations with sequential block numbers + let base_block = 18500000u64; + let mut simulation_ids = Vec::new(); + + for i in 0..5 { + let block_number = base_block + i; + let block_hash = format!("0x{:064x}", block_number); // Create unique block hash + let execution_time = 100000 + (i * 10000); // Varying execution times + let gas_used = 21000 + (i * 1000); // Varying gas usage + + let simulation_id = harness.data_store.insert_simulation( + bundle_id, + block_number, + block_hash, + execution_time, + gas_used, + if i % 2 == 0 { create_test_state_diff() } else { create_empty_state_diff() }, + ).await.map_err(|e| eyre::eyre!(e))?; + + simulation_ids.push((simulation_id, block_number, execution_time, gas_used)); + } + + // Query for bundles with latest simulation + let results = harness.data_store.select_bundles_with_latest_simulation( + BundleFilter::new() + ).await.map_err(|e| eyre::eyre!(e))?; + + // Should return exactly one bundle + assert_eq!(results.len(), 1, "Should return exactly one bundle"); + + let bundle_with_sim = &results[0]; + let latest_sim = &bundle_with_sim.latest_simulation; + + // Verify it's the latest simulation (highest block number) + let expected_latest_block = base_block + 4; // Last iteration was i=4 + assert_eq!(latest_sim.block_number, expected_latest_block, "Should return simulation with highest block number"); + assert_eq!(latest_sim.bundle_id, bundle_id, "Should reference correct bundle"); + + // Verify the execution time and gas used match the latest simulation + let expected_execution_time = 100000 + (4 * 10000); // i=4 + let expected_gas_used = 21000 + (4 * 1000); // i=4 + assert_eq!(latest_sim.execution_time_us, expected_execution_time, "Execution time should match latest simulation"); + assert_eq!(latest_sim.gas_used, expected_gas_used, "Gas used should match latest simulation"); + + // Verify the latest simulation has the expected state diff (should be non-empty since i=4 is even) + assert!(!latest_sim.state_diff.is_empty(), "Latest simulation should have non-empty state diff"); + + // Verify that we can still retrieve all individual simulations + for (sim_id, block_num, exec_time, gas) in &simulation_ids { + let individual_sim = harness.data_store.get_simulation(*sim_id).await + .map_err(|e| eyre::eyre!(e))? 
+ .expect("Individual simulation should exist"); + + assert_eq!(individual_sim.block_number, *block_num); + assert_eq!(individual_sim.execution_time_us, *exec_time); + assert_eq!(individual_sim.gas_used, *gas); + } + + Ok(()) +} + +#[tokio::test] +async fn select_bundles_with_latest_simulation() -> eyre::Result<()> { + let harness = setup_datastore().await?; + + // Create three bundles + let bundle1 = create_test_bundle(100, Some(1000), Some(2000))?; + let bundle2 = create_test_bundle(200, Some(1500), Some(2500))?; + let bundle3 = create_test_bundle(300, None, None)?; + + let bundle1_id = harness.data_store.insert_bundle(bundle1).await + .map_err(|e| eyre::eyre!(e))?; + let bundle2_id = harness.data_store.insert_bundle(bundle2).await + .map_err(|e| eyre::eyre!(e))?; + let _bundle3_id = harness.data_store.insert_bundle(bundle3).await + .map_err(|e| eyre::eyre!(e))?; + + // Add multiple simulations for bundle1 (to test "latest" logic) + harness.data_store.insert_simulation( + bundle1_id, + 18500000, + "0x1111111111111111111111111111111111111111111111111111111111111111".to_string(), + 100000, + 21000, + create_test_state_diff(), + ).await.map_err(|e| eyre::eyre!(e))?; + + let latest_sim1_id = harness.data_store.insert_simulation( + bundle1_id, + 18500001, // Higher block number = later + "0x2222222222222222222222222222222222222222222222222222222222222222".to_string(), + 120000, + 22000, + create_empty_state_diff(), + ).await.map_err(|e| eyre::eyre!(e))?; + + // Add one simulation for bundle2 + let sim2_id = harness.data_store.insert_simulation( + bundle2_id, + 18500002, + "0x3333333333333333333333333333333333333333333333333333333333333333".to_string(), + 90000, + 19000, + create_test_state_diff(), + ).await.map_err(|e| eyre::eyre!(e))?; + + // Bundle3 has no simulations + + // Query bundles with latest simulation (no filter) + let results = harness.data_store.select_bundles_with_latest_simulation( + BundleFilter::new() + ).await.map_err(|e| eyre::eyre!(e))?; + + // Should return 2 bundles (bundle1 and bundle2), sorted by minimum_base_fee DESC + assert_eq!(results.len(), 2, "Should return 2 bundles that have simulations"); + + // Verify the results contain the correct bundles and latest simulations + let bundle1_result = results.iter().find(|r| r.bundle_with_metadata.bundle.block_number == 100); + let bundle2_result = results.iter().find(|r| r.bundle_with_metadata.bundle.block_number == 200); + + assert!(bundle1_result.is_some(), "Bundle1 should be in results"); + assert!(bundle2_result.is_some(), "Bundle2 should be in results"); + + let bundle1_result = bundle1_result.unwrap(); + let bundle2_result = bundle2_result.unwrap(); + + // Check that bundle1 has the latest simulation (block 18500001) + assert_eq!(bundle1_result.latest_simulation.id, latest_sim1_id); + assert_eq!(bundle1_result.latest_simulation.block_number, 18500001); + assert_eq!(bundle1_result.latest_simulation.gas_used, 22000); + + // Check that bundle2 has its simulation + assert_eq!(bundle2_result.latest_simulation.id, sim2_id); + assert_eq!(bundle2_result.latest_simulation.block_number, 18500002); + assert_eq!(bundle2_result.latest_simulation.gas_used, 19000); + + Ok(()) +} + +#[tokio::test] +async fn select_bundles_with_latest_simulation_filtered() -> eyre::Result<()> { + let harness = setup_datastore().await?; + + // Create bundles with different criteria + let bundle1 = create_test_bundle(100, Some(1000), Some(2000))?; // Valid for block 100, timestamp 1000-2000 + let bundle2 = create_test_bundle(200, Some(1500), 
Some(2500))?; // Valid for block 200, timestamp 1500-2500 + + let bundle1_id = harness.data_store.insert_bundle(bundle1).await + .map_err(|e| eyre::eyre!(e))?; + let bundle2_id = harness.data_store.insert_bundle(bundle2).await + .map_err(|e| eyre::eyre!(e))?; + + // Add simulations to both bundles + harness.data_store.insert_simulation( + bundle1_id, + 18500000, + "0x1111111111111111111111111111111111111111111111111111111111111111".to_string(), + 100000, + 21000, + create_test_state_diff(), + ).await.map_err(|e| eyre::eyre!(e))?; + + harness.data_store.insert_simulation( + bundle2_id, + 18500001, + "0x2222222222222222222222222222222222222222222222222222222222222222".to_string(), + 120000, + 22000, + create_empty_state_diff(), + ).await.map_err(|e| eyre::eyre!(e))?; + + // Test filtering by block number + let block_filter = BundleFilter::new().valid_for_block(200); + let filtered_results = harness.data_store.select_bundles_with_latest_simulation(block_filter).await + .map_err(|e| eyre::eyre!(e))?; + + assert_eq!(filtered_results.len(), 1, "Should return 1 bundle valid for block 200"); + assert_eq!(filtered_results[0].bundle_with_metadata.bundle.block_number, 200); + + // Test filtering by timestamp + let timestamp_filter = BundleFilter::new().valid_for_timestamp(1200); + let timestamp_results = harness.data_store.select_bundles_with_latest_simulation(timestamp_filter).await + .map_err(|e| eyre::eyre!(e))?; + + assert_eq!(timestamp_results.len(), 1, "Should return 1 bundle valid for timestamp 1200"); + assert_eq!(timestamp_results[0].bundle_with_metadata.bundle.block_number, 100); + + Ok(()) +} + +#[tokio::test] +async fn get_nonexistent_simulation() -> eyre::Result<()> { + let harness = setup_datastore().await?; + + // Try to get simulation that doesn't exist + let fake_id = Uuid::new_v4(); + let result = harness.data_store.get_simulation(fake_id).await + .map_err(|e| eyre::eyre!(e))?; + + assert!(result.is_none(), "Should return None for non-existent simulation"); + + Ok(()) +} diff --git a/ui/src/db/relations.ts b/ui/src/db/relations.ts index e69de29..5e273b8 100644 --- a/ui/src/db/relations.ts +++ b/ui/src/db/relations.ts @@ -0,0 +1,13 @@ +import { relations } from "drizzle-orm/relations"; +import { bundles, simulations } from "./schema"; + +export const simulationsRelations = relations(simulations, ({ one }) => ({ + bundle: one(bundles, { + fields: [simulations.bundleId], + references: [bundles.id], + }), +})); + +export const bundlesRelations = relations(bundles, ({ many }) => ({ + simulations: many(simulations), +})); diff --git a/ui/src/db/schema.ts b/ui/src/db/schema.ts index b528617..bad7ae8 100644 --- a/ui/src/db/schema.ts +++ b/ui/src/db/schema.ts @@ -1,10 +1,14 @@ import { bigint, char, + foreignKey, + index, + jsonb, pgEnum, pgTable, text, timestamp, + unique, uuid, } from "drizzle-orm/pg-core"; @@ -42,3 +46,50 @@ export const bundles = pgTable("bundles", { mode: "string", }).notNull(), }); + +export const simulations = pgTable( + "simulations", + { + id: uuid().primaryKey().notNull(), + bundleId: uuid("bundle_id").notNull(), + // You can use { mode: "bigint" } if numbers are exceeding js number limitations + blockNumber: bigint("block_number", { mode: "number" }).notNull(), + blockHash: char("block_hash", { length: 66 }).notNull(), + // You can use { mode: "bigint" } if numbers are exceeding js number limitations + executionTimeUs: bigint("execution_time_us", { mode: "number" }).notNull(), + // You can use { mode: "bigint" } if numbers are exceeding js number limitations + 
gasUsed: bigint("gas_used", { mode: "number" }).notNull(), + stateDiff: jsonb("state_diff").notNull(), + createdAt: timestamp("created_at", { + withTimezone: true, + mode: "string", + }).notNull(), + updatedAt: timestamp("updated_at", { + withTimezone: true, + mode: "string", + }).notNull(), + }, + (table) => [ + index("idx_simulations_block_hash").using( + "btree", + table.blockHash.asc().nullsLast().op("bpchar_ops"), + ), + index("idx_simulations_block_number").using( + "btree", + table.blockNumber.asc().nullsLast().op("int8_ops"), + ), + index("idx_simulations_bundle_id").using( + "btree", + table.bundleId.asc().nullsLast().op("uuid_ops"), + ), + foreignKey({ + columns: [table.bundleId], + foreignColumns: [bundles.id], + name: "simulations_bundle_id_fkey", + }).onDelete("cascade"), + unique("simulations_bundle_id_block_hash_key").on( + table.bundleId, + table.blockHash, + ), + ], +); From f4e2e9b2ae86da0a1e2ccb640fdea9418517bdab Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 19 Sep 2025 20:49:50 -0500 Subject: [PATCH 02/39] Add success and error_reason to simulations --- .../1758262637_create_simulations_table.sql | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/crates/datastore/migrations/1758262637_create_simulations_table.sql b/crates/datastore/migrations/1758262637_create_simulations_table.sql index a424086..2e646c1 100644 --- a/crates/datastore/migrations/1758262637_create_simulations_table.sql +++ b/crates/datastore/migrations/1758262637_create_simulations_table.sql @@ -5,8 +5,12 @@ CREATE TABLE IF NOT EXISTS simulations ( block_number BIGINT NOT NULL, block_hash CHAR(66) NOT NULL, - execution_time_us BIGINT NOT NULL, - gas_used BIGINT NOT NULL, + execution_time_us BIGINT, + gas_used BIGINT, + + -- Success tracking + success BOOLEAN NOT NULL DEFAULT true, + error_reason TEXT, -- State diff mapping accounts to storage slots to values -- Structure: { "account_address": { "slot": "value", ... }, ... 
} @@ -27,3 +31,6 @@ CREATE INDEX IF NOT EXISTS idx_simulations_block_number ON simulations(block_num -- Index for block hash queries CREATE INDEX IF NOT EXISTS idx_simulations_block_hash ON simulations(block_hash); + +-- Index for success field for efficient querying +CREATE INDEX IF NOT EXISTS idx_simulations_success ON simulations(success); From 376273ef5847f9b8d0dde71719bfb7e9cc6a1967 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 19 Sep 2025 20:51:54 -0500 Subject: [PATCH 03/39] Simulator draft implementation --- Cargo.toml | 5 +- crates/simulator/Cargo.toml | 55 ++++ crates/simulator/src/config.rs | 56 ++++ crates/simulator/src/engine.rs | 325 +++++++++++++++++++++ crates/simulator/src/exex.rs | 306 +++++++++++++++++++ crates/simulator/src/lib.rs | 84 ++++++ crates/simulator/src/listener.rs | 150 ++++++++++ crates/simulator/src/main.rs | 45 +++ crates/simulator/src/publisher.rs | 219 ++++++++++++++ crates/simulator/src/service.rs | 283 ++++++++++++++++++ crates/simulator/src/state.rs | 270 +++++++++++++++++ crates/simulator/src/types.rs | 165 +++++++++++ crates/simulator/tests/integration_test.rs | 84 ++++++ 13 files changed, 2046 insertions(+), 1 deletion(-) create mode 100644 crates/simulator/Cargo.toml create mode 100644 crates/simulator/src/config.rs create mode 100644 crates/simulator/src/engine.rs create mode 100644 crates/simulator/src/exex.rs create mode 100644 crates/simulator/src/lib.rs create mode 100644 crates/simulator/src/listener.rs create mode 100644 crates/simulator/src/main.rs create mode 100644 crates/simulator/src/publisher.rs create mode 100644 crates/simulator/src/service.rs create mode 100644 crates/simulator/src/state.rs create mode 100644 crates/simulator/src/types.rs create mode 100644 crates/simulator/tests/integration_test.rs diff --git a/Cargo.toml b/Cargo.toml index 4e907d1..fa2d10e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["crates/datastore", "crates/audit", "crates/ingress-rpc", "crates/maintenance", "crates/ingress-writer"] +members = ["crates/datastore", "crates/audit", "crates/ingress-rpc", "crates/maintenance", "crates/ingress-writer", "crates/simulator"] resolver = "2" [workspace.dependencies] @@ -7,6 +7,7 @@ tips-datastore = { path = "crates/datastore" } tips-audit = { path = "crates/audit" } tips-maintenance = { path = "crates/maintenance" } tips-ingress-writer = { path = "crates/ingress-writer" } +tips-simulator = { path = "crates/simulator" } # Reth @@ -59,6 +60,8 @@ aws-config = "1.1.7" aws-sdk-s3 = "1.106.0" aws-credential-types = "1.1.7" bytes = { version = "1.8.0", features = ["serde"] } +md5 = "0.7.0" +base64 = "0.22.1" # tips-ingress backon = "1.5.2" diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml new file mode 100644 index 0000000..7a466fc --- /dev/null +++ b/crates/simulator/Cargo.toml @@ -0,0 +1,55 @@ +[package] +name = "tips-simulator" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "tips-simulator" +path = "src/main.rs" + +[dependencies] +# Workspace dependencies +tips-datastore.workspace = true +tips-audit.workspace = true +tokio.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +anyhow.workspace = true +clap.workspace = true +serde.workspace = true +serde_json.workspace = true +uuid.workspace = true +async-trait.workspace = true +dotenvy.workspace = true +chrono.workspace = true +eyre.workspace = true + +# Alloy for Ethereum types +alloy-primitives.workspace = true +alloy-rpc-types.workspace = true 
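+# Kafka consumer/producer types are used by listener.rs and publisher.rs below;
+# rdkafka is assumed to be available as a workspace dependency (tips-audit already uses it).
+rdkafka.workspace = true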
+alloy-consensus.workspace = true
+alloy-rpc-types-mev.workspace = true
+op-alloy-consensus.workspace = true
+op-alloy-network.workspace = true
+
+# Reth ExEx dependencies
+reth = { git = "https://github.com/paradigmxyz/reth", features = ["exex"] }
+reth-exex = { git = "https://github.com/paradigmxyz/reth" }
+reth-provider = { git = "https://github.com/paradigmxyz/reth" }
+reth-db = { git = "https://github.com/paradigmxyz/reth" }
+reth-primitives = { git = "https://github.com/paradigmxyz/reth" }
+reth-execution-types = { git = "https://github.com/paradigmxyz/reth" }
+reth-node-api = { git = "https://github.com/paradigmxyz/reth" }
+reth-node-builder = { git = "https://github.com/paradigmxyz/reth" }
+reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth" }
+reth-cli = { git = "https://github.com/paradigmxyz/reth" }
+
+# Additional dependencies for simulation
+std-semaphore = "0.1"
+tokio-util = { version = "0.7", features = ["time"] }
+hex = "0.4"
+
+[dev-dependencies]
+tokio-test = "0.4.4"
+testcontainers.workspace = true
+testcontainers-modules.workspace = true
diff --git a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs
new file mode 100644
index 0000000..6f48cf0
--- /dev/null
+++ b/crates/simulator/src/config.rs
@@ -0,0 +1,56 @@
+use crate::types::ExExSimulationConfig;
+use clap::Parser;
+
+/// Combined configuration for reth node with simulator ExEx
+#[derive(Parser, Debug, Clone)]
+#[command(author, version, about = "Reth node with Tips Simulator ExEx")]
+pub struct SimulatorNodeConfig {
+    /// Reth node arguments
+    #[command(flatten)]
+    pub node: reth_cli::Cli,
+
+    /// PostgreSQL database connection URL for simulator
+    #[arg(long, env = "TIPS_SIMULATOR_DATABASE_URL")]
+    pub database_url: String,
+
+    /// Maximum number of concurrent simulations
+    #[arg(long, env = "TIPS_SIMULATOR_MAX_CONCURRENT", default_value = "10")]
+    pub max_concurrent_simulations: usize,
+
+    /// Timeout for individual simulations in milliseconds
+    #[arg(long, env = "TIPS_SIMULATOR_TIMEOUT_MS", default_value = "5000")]
+    pub simulation_timeout_ms: u64,
+}
+
+/// Legacy standalone ExEx config (for library use)
+#[derive(Debug, Clone)]
+pub struct SimulatorExExConfig {
+    /// PostgreSQL database connection URL
+    pub database_url: String,
+
+    /// Maximum number of concurrent simulations
+    pub max_concurrent_simulations: usize,
+
+    /// Timeout for individual simulations in milliseconds
+    pub simulation_timeout_ms: u64,
+}
+
+impl From<SimulatorNodeConfig> for ExExSimulationConfig {
+    fn from(config: SimulatorNodeConfig) -> Self {
+        Self {
+            database_url: config.database_url,
+            max_concurrent_simulations: config.max_concurrent_simulations,
+            simulation_timeout_ms: config.simulation_timeout_ms,
+        }
+    }
+}
+
+impl From<SimulatorExExConfig> for ExExSimulationConfig {
+    fn from(config: SimulatorExExConfig) -> Self {
+        Self {
+            database_url: config.database_url,
+            max_concurrent_simulations: config.max_concurrent_simulations,
+            simulation_timeout_ms: config.simulation_timeout_ms,
+        }
+    }
+}
diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs
new file mode 100644
index 0000000..1f8875b
--- /dev/null
+++ b/crates/simulator/src/engine.rs
@@ -0,0 +1,325 @@
+use crate::state::StateProvider;
+use crate::types::{SimulationError, SimulationRequest, SimulationResult};
+use alloy_consensus::private::alloy_eips::Decodable2718;
+use alloy_consensus::transaction::{SignerRecoverable, Transaction};
+use alloy_primitives::{Address, U256};
+use anyhow::Result;
+use async_trait::async_trait;
+use op_alloy_consensus::OpTxEnvelope;
+use std::collections::HashMap;
+use std::sync::Arc;
+use std::time::Instant;
+use tokio::time::{timeout, Duration};
+use tracing::{debug, error, info, warn};
+use uuid::Uuid;
+
+#[async_trait]
+pub trait SimulationEngine: Send + Sync {
+    /// Simulate a bundle execution
+    async fn simulate_bundle(&self, request: SimulationRequest) -> Result<SimulationResult>;
+}
+
+pub struct BundleSimulationEngine {
+    state_provider: Arc<dyn StateProvider>,
+    timeout: Duration,
+}
+
+/// Represents the execution context for a bundle simulation
+#[derive(Debug)]
+struct ExecutionContext {
+    /// Block number for simulation
+    block_number: u64,
+    /// Initial balances of involved accounts
+    initial_balances: HashMap<Address, U256>,
+    /// Initial nonces of involved accounts
+    initial_nonces: HashMap<Address, u64>,
+    /// Storage changes during simulation
+    storage_changes: HashMap<Address, HashMap<U256, U256>>,
+    /// Gas used so far
+    gas_used: u64,
+}
+
+impl BundleSimulationEngine {
+    pub fn new(state_provider: Arc<dyn StateProvider>, timeout_ms: u64) -> Self {
+        Self {
+            state_provider,
+            timeout: Duration::from_millis(timeout_ms),
+        }
+    }
+
+    /// Extract transaction details from raw transaction bytes
+    fn decode_transaction(&self, tx_bytes: &[u8]) -> Result<OpTxEnvelope> {
+        OpTxEnvelope::decode_2718_exact(tx_bytes)
+            .map_err(|e| anyhow::anyhow!("Failed to decode transaction: {}", e))
+    }
+
+    /// Validate that a transaction can be executed in the current context
+    async fn validate_transaction(
+        &self,
+        tx: &OpTxEnvelope,
+        context: &ExecutionContext,
+    ) -> Result<(), SimulationError> {
+        let sender = tx.recover_signer()
+            .map_err(|_| SimulationError::Unknown {
+                message: "Failed to recover transaction sender".to_string()
+            })?;
+
+        // Check nonce
+        let expected_nonce = context.initial_nonces.get(&sender)
+            .copied()
+            .unwrap_or(0);
+        let tx_nonce = tx.nonce();
+
+        if tx_nonce != expected_nonce {
+            return Err(SimulationError::InvalidNonce {
+                tx_index: 0, // TODO: Pass actual tx index
+                expected: expected_nonce,
+                actual: tx_nonce,
+            });
+        }
+
+        // Check balance for gas payment
+        let gas_fee = U256::from(tx.gas_limit()) * U256::from(tx.max_fee_per_gas());
+        let available_balance = context.initial_balances.get(&sender)
+            .copied()
+            .unwrap_or(U256::ZERO);
+
+        if available_balance < gas_fee {
+            return Err(SimulationError::InsufficientBalance {
+                tx_index: 0, // TODO: Pass actual tx index
+                required: gas_fee,
+                available: available_balance,
+            });
+        }
+
+        Ok(())
+    }
+
+    /// Simulate a single transaction execution
+    async fn simulate_transaction(
+        &self,
+        tx: &OpTxEnvelope,
+        context: &mut ExecutionContext,
+        tx_index: usize,
+    ) -> Result<(), SimulationError> {
+        // For now, this is a placeholder implementation
+        // In a full implementation, this would:
+        // 1. Create an EVM instance with the current state
+        // 2. Execute the transaction
+        // 3. Track gas usage and state changes
+        // 4. Handle reverts appropriately
+
+        debug!(
+            tx_index = tx_index,
+            tx_hash = ?tx.tx_hash(),
+            gas_limit = tx.gas_limit(),
+            "Simulating transaction"
+        );
+
+        // Validate the transaction first
+        self.validate_transaction(tx, context).await?;
+
+        // Simulate gas usage (placeholder logic)
+        let estimated_gas = std::cmp::min(tx.gas_limit(), 100_000); // Simple estimation
+        context.gas_used += estimated_gas;
+
+        // Simulate some state changes (placeholder)
+        if let Some(to) = tx.to() {
+            let storage_slot = U256::from(tx_index);
+            let new_value = U256::from(context.gas_used);
+
+            context.storage_changes
+                .entry(to)
+                .or_insert_with(HashMap::new)
+                .insert(storage_slot, new_value);
+        }
+
+        // Update nonce for sender
+        let sender = tx.recover_signer()
+            .map_err(|_| SimulationError::Unknown {
+                message: "Failed to recover sender".to_string()
+            })?;
+
+        if let Some(nonce) = context.initial_nonces.get_mut(&sender) {
+            *nonce += 1;
+        }
+
+        debug!(
+            tx_index = tx_index,
+            gas_used = estimated_gas,
+            total_gas = context.gas_used,
+            "Transaction simulation completed"
+        );
+
+        Ok(())
+    }
+
+    /// Initialize execution context by fetching initial state
+    async fn initialize_context(
+        &self,
+        request: &SimulationRequest,
+    ) -> Result<ExecutionContext> {
+        let mut initial_balances = HashMap::new();
+        let mut initial_nonces = HashMap::new();
+
+        // Extract all addresses involved in the bundle
+        let mut addresses = std::collections::HashSet::new();
+
+        for tx_bytes in &request.bundle.txs {
+            match self.decode_transaction(tx_bytes) {
+                Ok(tx) => {
+                    if let Ok(sender) = tx.recover_signer() {
+                        addresses.insert(sender);
+                    }
+                    if let Some(to) = tx.to() {
+                        addresses.insert(to);
+                    }
+                }
+                Err(e) => {
+                    warn!(error = %e, "Failed to decode transaction in bundle");
+                }
+            }
+        }
+
+        // Fetch initial state for all addresses
+        for address in addresses {
+            match self.state_provider.get_balance(address, request.block_number).await {
+                Ok(balance) => {
+                    initial_balances.insert(address, balance);
+                }
+                Err(e) => {
+                    error!(
+                        error = %e,
+                        address = %address,
+                        "Failed to fetch balance for address"
+                    );
+                }
+            }
+
+            match self.state_provider.get_nonce(address, request.block_number).await {
+                Ok(nonce) => {
+                    initial_nonces.insert(address, nonce);
+                }
+                Err(e) => {
+                    error!(
+                        error = %e,
+                        address = %address,
+                        "Failed to fetch nonce for address"
+                    );
+                }
+            }
+        }
+
+        Ok(ExecutionContext {
+            block_number: request.block_number,
+            initial_balances,
+            initial_nonces,
+            storage_changes: HashMap::new(),
+            gas_used: 0,
+        })
+    }
+
+    /// Perform the actual bundle simulation
+    async fn execute_bundle_simulation(
+        &self,
+        request: SimulationRequest,
+    ) -> Result<SimulationResult> {
+        let start_time = Instant::now();
+        let simulation_id = Uuid::new_v4();
+
+        info!(
+            bundle_id = %request.bundle_id,
+            simulation_id = %simulation_id,
+            num_transactions = request.bundle.txs.len(),
+            block_number = request.block_number,
+            "Starting bundle simulation"
+        );
+
+        // Initialize execution context
+        let mut context = self.initialize_context(&request).await
+            .map_err(|e| anyhow::anyhow!("Failed to initialize context: {}", e))?;
+
+        // Simulate each transaction in the bundle
+        for (tx_index, tx_bytes) in request.bundle.txs.iter().enumerate() {
+            let tx = self.decode_transaction(tx_bytes)
+                .map_err(|e| SimulationError::Unknown {
+                    message: format!("Failed to decode transaction {}: {}", tx_index, e)
+                })?;
+
+            if let Err(sim_error) = self.simulate_transaction(&tx, &mut context, tx_index).await {
+                let execution_time = start_time.elapsed().as_micros();
+
+                error!(
+                    bundle_id = %request.bundle_id,
+                    simulation_id = %simulation_id,
+                    tx_index = tx_index,
+                    error = %sim_error,
+                    "Bundle simulation failed"
+                );
+
+                return Ok(SimulationResult::failure(
+                    simulation_id,
+                    request.bundle_id,
+                    request.block_number,
+                    request.block_hash,
+                    execution_time,
+                    sim_error,
+                ));
+            }
+        }
+
+        let execution_time = start_time.elapsed().as_micros();
+
+        info!(
+            bundle_id = %request.bundle_id,
+            simulation_id = %simulation_id,
+            gas_used = context.gas_used,
+            execution_time_us = execution_time,
+            storage_changes = context.storage_changes.len(),
+            "Bundle simulation completed successfully"
+        );
+
+        Ok(SimulationResult::success(
+            simulation_id,
+            request.bundle_id,
+            request.block_number,
+            request.block_hash,
+            context.gas_used,
+            execution_time,
+            context.storage_changes,
+        ))
+    }
+}
+
+#[async_trait]
+impl SimulationEngine for BundleSimulationEngine {
+    async fn simulate_bundle(&self, request: SimulationRequest) -> Result<SimulationResult> {
+        match timeout(self.timeout, self.execute_bundle_simulation(request.clone())).await {
+            Ok(result) => result,
+            Err(_) => {
+                warn!(
+                    bundle_id = %request.bundle_id,
+                    timeout_ms = self.timeout.as_millis(),
+                    "Bundle simulation timed out"
+                );
+
+                Ok(SimulationResult::failure(
+                    Uuid::new_v4(),
+                    request.bundle_id,
+                    request.block_number,
+                    request.block_hash,
+                    self.timeout.as_micros(),
+                    SimulationError::Timeout,
+                ))
+            }
+        }
+    }
+}
+
+/// Create a bundle simulation engine
+pub fn create_simulation_engine(
+    state_provider: Arc<dyn StateProvider>,
+    timeout_ms: u64,
+) -> impl SimulationEngine {
+    BundleSimulationEngine::new(state_provider, timeout_ms)
+}
diff --git a/crates/simulator/src/exex.rs b/crates/simulator/src/exex.rs
new file mode 100644
index 0000000..f036ccd
--- /dev/null
+++ b/crates/simulator/src/exex.rs
@@ -0,0 +1,306 @@
+use crate::engine::SimulationEngine;
+use crate::publisher::SimulationResultPublisher;
+use crate::types::{SimulationError, SimulationRequest, SimulationResult};
+
+use alloy_primitives::{B256, U256};
+use alloy_rpc_types_mev::EthSendBundle;
+use anyhow::Result;
+use reth_exex::{ExExContext, ExExEvent, ExExNotification};
+use reth_node_api::{FullNodeComponents, NodeAddOns};
+use reth_primitives::{BlockNumber, TransactionSignedEcRecovered};
+use reth_provider::{CanonicalInMemoryState, Chain, StateProviderFactory};
+use std::sync::Arc;
+use tokio::sync::mpsc;
+use tracing::{debug, error, info, warn};
+use uuid::Uuid;
+
+/// ExEx that simulates bundles when new blocks are committed
+pub struct SimulatorExEx<Node: FullNodeComponents, AddOns: NodeAddOns<Node>> {
+    /// The execution extension context
+    ctx: ExExContext<Node>,
+    /// Simulation engine for processing bundles
+    engine: Box<dyn SimulationEngine>,
+    /// Publisher for simulation results
+    publisher: Box<dyn SimulationResultPublisher>,
+    /// Channel for receiving simulation requests
+    simulation_rx: mpsc::UnboundedReceiver<SimulationRequest>,
+    /// Sender for simulation requests
+    simulation_tx: mpsc::UnboundedSender<SimulationRequest>,
+    /// Maximum number of concurrent simulations
+    max_concurrent: usize,
+    /// Marker for the otherwise-unused AddOns type parameter
+    _add_ons: std::marker::PhantomData<AddOns>,
+}
+
+impl<Node, AddOns> SimulatorExEx<Node, AddOns>
+where
+    Node: FullNodeComponents,
+    AddOns: NodeAddOns<Node>,
+{
+    /// Create a new simulator ExEx
+    pub fn new(
+        ctx: ExExContext<Node>,
+        engine: Box<dyn SimulationEngine>,
+        publisher: Box<dyn SimulationResultPublisher>,
+        max_concurrent: usize,
+    ) -> Self {
+        let (simulation_tx, simulation_rx) = mpsc::unbounded_channel();
+
+        Self {
+            ctx,
+            engine,
+            publisher,
+            simulation_rx,
+            simulation_tx,
+            max_concurrent,
+            _add_ons: std::marker::PhantomData,
+        }
+    }
+
+    /// Main execution loop for the ExEx
+    pub async fn run(mut self) -> Result<()> {
+        info!("Starting Tips Simulator ExEx");
+
+        // Spawn the simulation worker
+        let mut simulation_handle
= { + let engine = std::mem::replace(&mut self.engine, Box::new(NoOpEngine)); + let publisher = std::mem::replace(&mut self.publisher, Box::new(NoOpPublisher)); + let mut rx = std::mem::replace(&mut self.simulation_rx, mpsc::unbounded_channel().1); + let max_concurrent = self.max_concurrent; + + tokio::spawn(async move { + Self::simulation_worker(&mut rx, engine.as_ref(), publisher.as_ref(), max_concurrent).await + }) + }; + + loop { + tokio::select! { + notification = self.ctx.notifications.recv() => { + match notification { + Some(notification) => { + if let Err(e) = self.handle_notification(notification).await { + error!(error = %e, "Failed to handle ExEx notification"); + } + } + None => { + info!("ExEx notification channel closed, shutting down"); + break; + } + } + } + result = &mut simulation_handle => { + match result { + Ok(_) => info!("Simulation worker completed"), + Err(e) => error!(error = %e, "Simulation worker failed"), + } + break; + } + } + } + + // Clean shutdown + simulation_handle.abort(); + info!("Tips Simulator ExEx shutting down"); + Ok(()) + } + + /// Handle ExEx notifications + async fn handle_notification(&mut self, notification: ExExNotification) -> Result<()> { + match notification { + ExExNotification::ChainCommitted { new } => { + info!( + block_range = ?new.range(), + num_blocks = new.blocks().len(), + "Processing committed blocks" + ); + + // Process each block in the committed chain + for block in new.blocks() { + self.process_block(block).await?; + } + + // Notify that we've processed this notification + self.ctx + .events + .send(ExExEvent::FinishedHeight(new.tip().number))?; + } + ExExNotification::ChainReorged { old: _, new } => { + warn!( + block_range = ?new.range(), + "Chain reorg detected, processing new chain" + ); + + // Process the new canonical chain + for block in new.blocks() { + self.process_block(block).await?; + } + + self.ctx + .events + .send(ExExEvent::FinishedHeight(new.tip().number))?; + } + ExExNotification::ChainReverted { old } => { + warn!( + block_range = ?old.range(), + "Chain reverted, no simulation needed" + ); + + self.ctx + .events + .send(ExExEvent::FinishedHeight(old.tip().number))?; + } + } + + Ok(()) + } + + /// Process a single block for potential bundle simulations + async fn process_block(&mut self, execution_outcome: &reth_execution_types::ExecutionOutcome) -> Result<()> { + debug!( + block_number = execution_outcome.block_number(), + "Processing block for bundle simulation" + ); + + // TODO: Extract potential bundles from the block's transactions + // For now, this is a placeholder that would need to implement logic to: + // 1. Group transactions that could be bundles + // 2. Identify MEV opportunities + // 3. 
+        //    Create simulation requests for those bundles
+
+        // This would be where we analyze transactions in the block
+        // and create simulation requests for potential bundles
+        let _block_number = execution_outcome.block_number();
+        let _block_hash = execution_outcome.block_hash();
+
+        // Placeholder: Create a mock bundle simulation request
+        // In a real implementation, this would extract actual bundles from transactions
+        self.create_mock_simulation_request().await?;
+
+        Ok(())
+    }
+
+    /// Create a mock simulation request (placeholder)
+    async fn create_mock_simulation_request(&self) -> Result<()> {
+        // This is a placeholder for bundle extraction logic
+        let bundle_id = Uuid::new_v4();
+        let mock_bundle = EthSendBundle {
+            txs: vec![], // Would contain actual transaction data
+            block_number: None,
+            min_timestamp: None,
+            max_timestamp: None,
+            reverting_tx_hashes: vec![],
+            replacement_uuid: None,
+        };
+
+        let request = SimulationRequest {
+            bundle_id,
+            bundle: mock_bundle,
+            block_number: 0, // Would be actual block number
+            block_hash: B256::ZERO, // Would be actual block hash
+        };
+
+        if let Err(e) = self.simulation_tx.send(request) {
+            warn!(error = %e, "Failed to queue simulation request");
+        }
+
+        Ok(())
+    }
+
+    /// Simulation worker that processes simulation requests
+    async fn simulation_worker(
+        queue: &mut mpsc::UnboundedReceiver<SimulationRequest>,
+        engine: &dyn SimulationEngine,
+        publisher: &dyn SimulationResultPublisher,
+        max_concurrent: usize,
+    ) -> Result<()> {
+        info!(max_concurrent = max_concurrent, "Starting ExEx simulation worker");
+
+        let semaphore = Arc::new(tokio::sync::Semaphore::new(max_concurrent));
+
+        while let Some(request) = queue.recv().await {
+            let semaphore_clone = semaphore.clone();
+            let request_clone = request.clone();
+
+            tokio::spawn(async move {
+                let _permit = match semaphore_clone.acquire().await {
+                    Ok(permit) => permit,
+                    Err(_) => {
+                        error!("Failed to acquire semaphore permit");
+                        return;
+                    }
+                };
+
+                info!(
+                    bundle_id = %request_clone.bundle_id,
+                    block_number = request_clone.block_number,
+                    "Processing ExEx simulation request"
+                );
+
+                match engine.simulate_bundle(request_clone.clone()).await {
+                    Ok(result) => {
+                        info!(
+                            bundle_id = %request_clone.bundle_id,
+                            simulation_id = %result.id,
+                            success = result.success,
+                            "ExEx simulation completed"
+                        );
+
+                        if let Err(e) = publisher.publish_result(result).await {
+                            error!(
+                                error = %e,
+                                bundle_id = %request_clone.bundle_id,
+                                "Failed to publish ExEx simulation result"
+                            );
+                        }
+                    }
+                    Err(e) => {
+                        error!(
+                            error = %e,
+                            bundle_id = %request_clone.bundle_id,
+                            "ExEx simulation failed"
+                        );
+                    }
+                }
+            });
+        }
+
+        info!("ExEx simulation worker shutting down");
+        Ok(())
+    }
+}
+
+/// No-op engine for move semantics
+struct NoOpEngine;
+
+#[async_trait::async_trait]
+impl SimulationEngine for NoOpEngine {
+    async fn simulate_bundle(&self, _request: SimulationRequest) -> Result<SimulationResult> {
+        Err(anyhow::anyhow!("NoOpEngine should never be called"))
+    }
+}
+
+/// No-op publisher for move semantics
+struct NoOpPublisher;
+
+#[async_trait::async_trait]
+impl SimulationResultPublisher for NoOpPublisher {
+    async fn publish_result(&self, _result: SimulationResult) -> Result<()> {
+        Err(anyhow::anyhow!("NoOpPublisher should never be called"))
+    }
+
+    async fn get_results_for_bundle(&self, _bundle_id: Uuid) -> Result<Vec<SimulationResult>> {
+        Err(anyhow::anyhow!("NoOpPublisher should never be called"))
+    }
+
+    async fn get_result_by_id(&self, _result_id: Uuid) -> Result<Option<SimulationResult>> {
+        Err(anyhow::anyhow!("NoOpPublisher should never be called"))
+    }
+}
+
+impl Clone for SimulationRequest {
+    fn clone(&self) -> Self {
+        Self {
+            bundle_id: self.bundle_id,
+            bundle: self.bundle.clone(),
+            block_number: self.block_number,
+            block_hash: self.block_hash,
+        }
+    }
+}
diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs
new file mode 100644
index 0000000..a386171
--- /dev/null
+++ b/crates/simulator/src/lib.rs
@@ -0,0 +1,84 @@
+pub mod config;
+pub mod engine;
+pub mod exex;
+pub mod listener;
+pub mod publisher;
+pub mod service;
+pub mod state;
+pub mod types;
+
+use anyhow::Result;
+use reth_exex::ExExContext;
+use reth_node_api::{FullNodeComponents, NodeAddOns};
+use std::sync::Arc;
+use tracing::info;
+
+pub use config::{SimulatorExExConfig, SimulatorNodeConfig};
+pub use engine::{create_simulation_engine, SimulationEngine};
+pub use exex::SimulatorExEx;
+pub use listener::{MempoolEventListener, KafkaMempoolListener};
+pub use publisher::{create_database_publisher, SimulationResultPublisher};
+pub use service::SimulatorService;
+pub use state::{create_direct_state_provider, StateProvider};
+pub use types::{SimulationResult, SimulationError, ExExSimulationConfig};
+
+/// ExEx initialization function that should be called by reth
+pub async fn init_simulator_exex<Node, AddOns>(
+    ctx: ExExContext<Node>,
+    config: ExExSimulationConfig,
+) -> Result<SimulatorExEx<Node, AddOns>>
+where
+    Node: FullNodeComponents,
+    AddOns: NodeAddOns<Node>,
+{
+    info!("Initializing Tips Simulator ExEx");
+
+    // Create database connection and publisher
+    let datastore = Arc::new(
+        tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await?
+    );
+
+    // Run database migrations
+    datastore.run_migrations().await?;
+    info!("Database migrations completed");
+
+    let publisher = Box::new(create_database_publisher(datastore));
+    info!("Database publisher initialized");
+
+    // Create state provider using reth's provider factory
+    let state_provider_factory = ctx.components.provider().clone();
+    let current_block_number = ctx.head.number;
+    let state_provider = Arc::new(create_direct_state_provider(
+        state_provider_factory,
+        current_block_number,
+    ));
+    info!(
+        current_block = current_block_number,
+        "Direct state provider initialized"
+    );
+
+    // Create simulation engine
+    let engine = Box::new(create_simulation_engine(
+        state_provider,
+        config.simulation_timeout_ms,
+    ));
+    info!(
+        timeout_ms = config.simulation_timeout_ms,
+        "Simulation engine initialized"
+    );
+
+    // Create the ExEx
+    let exex = SimulatorExEx::new(
+        ctx,
+        engine,
+        publisher,
+        config.max_concurrent_simulations,
+    );
+
+    info!(
+        max_concurrent = config.max_concurrent_simulations,
+        "Tips Simulator ExEx initialized successfully"
+    );
+
+    Ok(exex)
+}
diff --git a/crates/simulator/src/listener.rs b/crates/simulator/src/listener.rs
new file mode 100644
index 0000000..6ca2751
--- /dev/null
+++ b/crates/simulator/src/listener.rs
@@ -0,0 +1,150 @@
+use crate::types::{SimulationConfig, SimulationRequest};
+use alloy_primitives::B256;
+use anyhow::Result;
+use async_trait::async_trait;
+use rdkafka::consumer::{Consumer, StreamConsumer};
+use rdkafka::Message;
+use std::time::Duration;
+use tips_audit::{create_kafka_consumer, types::MempoolEvent};
+use tokio::sync::mpsc;
+use tracing::{debug, error, info, warn};
+
+#[async_trait]
+pub trait MempoolEventListener: Send + Sync {
+    /// Start listening for mempool events and send simulation requests
+    async fn start(&mut self, sender: mpsc::Sender<SimulationRequest>) -> Result<()>;
+    /// Stop the listener
+    async fn stop(&mut self) -> Result<()>;
+}
+
+pub struct KafkaMempoolListener {
+    consumer: StreamConsumer,
+    topic: String,
+    running: bool,
+}
+
+impl KafkaMempoolListener {
+    pub fn new(config: &SimulationConfig) -> Result<Self> {
+        let consumer = create_kafka_consumer(
+            &config.kafka_brokers.join(","),
+            &config.kafka_group_id,
+        )?;
+
+        Ok(Self {
+            consumer,
+            topic: config.kafka_topic.clone(),
+            running: false,
+        })
+    }
+
+    async fn process_event(
+        &self,
+        event: MempoolEvent,
+        sender: &mpsc::Sender<SimulationRequest>,
+        current_block: u64,
+        current_block_hash: B256,
+    ) -> Result<()> {
+        match event {
+            MempoolEvent::Created { bundle_id, bundle } | MempoolEvent::Updated { bundle_id, bundle } => {
+                debug!(
+                    bundle_id = %bundle_id,
+                    num_transactions = bundle.txs.len(),
+                    "Processing bundle for simulation"
+                );
+
+                let request = SimulationRequest {
+                    bundle_id,
+                    bundle,
+                    block_number: current_block,
+                    block_hash: current_block_hash,
+                };
+
+                if let Err(e) = sender.try_send(request) {
+                    match e {
+                        mpsc::error::TrySendError::Full(_) => {
+                            warn!(
+                                bundle_id = %bundle_id,
+                                "Simulation queue is full, dropping request"
+                            );
+                        }
+                        mpsc::error::TrySendError::Closed(_) => {
+                            error!("Simulation queue receiver has been dropped");
+                            return Err(anyhow::anyhow!("Simulation queue closed"));
+                        }
+                    }
+                }
+            }
+            // We only care about Created and Updated events for simulation
+            _ => {
+                debug!(event = ?event, "Ignoring non-creation event");
+            }
+        }
+
+        Ok(())
+    }
+
+    // TODO: This should be updated to get current block info from the state provider
+    // For now, we'll use dummy values
+    fn get_current_block_info(&self) -> (u64, B256) {
+        (0, B256::ZERO)
+    }
+}
+
+#[async_trait]
+impl MempoolEventListener for KafkaMempoolListener {
+    async fn start(&mut self, sender: mpsc::Sender<SimulationRequest>) -> Result<()> {
+        info!(topic = %self.topic, "Starting mempool listener");
+
+        self.consumer.subscribe(&[&self.topic])?;
+        self.running = true;
+
+        while self.running {
+            match self.consumer.recv().await {
+                Ok(message) => {
+                    let payload = match message.payload() {
+                        Some(payload) => payload,
+                        None => {
+                            warn!("Received message with empty payload");
+                            continue;
+                        }
+                    };
+
+                    match serde_json::from_slice::<MempoolEvent>(payload) {
+                        Ok(event) => {
+                            let (current_block, current_block_hash) = self.get_current_block_info();
+
+                            if let Err(e) = self.process_event(event, &sender, current_block, current_block_hash).await {
+                                error!(error = %e, "Failed to process mempool event");
+                            }
+                        }
+                        Err(e) => {
+                            error!(
+                                error = %e,
+                                payload_size = payload.len(),
+                                "Failed to deserialize mempool event"
+                            );
+                        }
+                    }
+                }
+                Err(e) => {
+                    error!(error = %e, "Error receiving message from Kafka");
+                    tokio::time::sleep(Duration::from_secs(1)).await;
+                }
+            }
+        }
+
+        info!("Mempool listener stopped");
+        Ok(())
+    }
+
+    async fn stop(&mut self) -> Result<()> {
+        info!("Stopping mempool listener");
+        self.running = false;
+        Ok(())
+    }
+}
+
+/// Create a mempool listener using the provided configuration
+pub fn create_mempool_listener(config: &SimulationConfig) -> Result<KafkaMempoolListener> {
+    KafkaMempoolListener::new(config)
+}
diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs
new file mode 100644
index 0000000..42146d4
--- /dev/null
+++ b/crates/simulator/src/main.rs
@@ -0,0 +1,45 @@
+use anyhow::Result;
+use clap::Parser;
+use reth_node_builder::{NodeBuilder, NodeConfig};
+use reth_node_ethereum::EthereumNode;
+use tips_simulator::{init_simulator_exex, SimulatorNodeConfig};
+use tracing::{error, info};
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    // Load environment variables
+    dotenvy::dotenv().ok();
+
+    // Parse command line arguments
command line arguments + let config = SimulatorNodeConfig::parse(); + + // Extract simulator config + let simulator_config = config.clone().into(); + + info!( + database_url = %config.database_url, + max_concurrent = config.max_concurrent_simulations, + timeout_ms = config.simulation_timeout_ms, + "Starting reth node with Tips Simulator ExEx" + ); + + // Create node builder with ExEx + let handle = NodeBuilder::new(config.node.clone()) + .node(EthereumNode::default()) + .install_exex("tips-simulator", move |ctx| async move { + // Initialize the simulator ExEx + let exex = init_simulator_exex(ctx, simulator_config).await?; + + info!("Tips Simulator ExEx installed successfully"); + + // Run the ExEx + Ok(exex.run()) + }) + .launch() + .await?; + + info!("Reth node with Tips Simulator ExEx started successfully"); + + // Wait for the node to finish + handle.wait_for_node_exit().await +} diff --git a/crates/simulator/src/publisher.rs b/crates/simulator/src/publisher.rs new file mode 100644 index 0000000..adb05cb --- /dev/null +++ b/crates/simulator/src/publisher.rs @@ -0,0 +1,219 @@ +use crate::types::SimulationResult; +use anyhow::Result; +use async_trait::async_trait; +use rdkafka::producer::FutureProducer; +use serde_json; +use std::sync::Arc; +use tips_audit::{MempoolEventPublisher, KafkaMempoolEventPublisher}; +use tips_datastore::PostgresDatastore; +use tracing::{debug, error, info, warn}; +use uuid::Uuid; + +#[async_trait] +pub trait SimulationResultPublisher: Send + Sync { + /// Store a simulation result + async fn publish_result(&self, result: SimulationResult) -> Result<()>; + + /// Get simulation results for a bundle + async fn get_results_for_bundle(&self, bundle_id: Uuid) -> Result>; + + /// Get a specific simulation result by ID + async fn get_result_by_id(&self, result_id: Uuid) -> Result>; +} + +pub struct DatabaseResultPublisher { + datastore: Arc, + kafka_publisher: Option>, +} + +impl DatabaseResultPublisher { + pub fn new( + datastore: Arc, + kafka_publisher: Option>, + ) -> Self { + Self { + datastore, + kafka_publisher, + } + } + + pub fn with_kafka( + datastore: Arc, + producer: FutureProducer, + topic: String, + ) -> Self { + let publisher = Arc::new(KafkaMempoolEventPublisher::new(producer, topic)); + Self::new(datastore, Some(publisher)) + } + + /// Convert SimulationResult to database format + fn result_to_db_format(&self, result: &SimulationResult) -> Result { + Ok(DatabaseSimulation { + id: result.id, + bundle_id: result.bundle_id, + block_number: result.block_number as i64, + block_hash: format!("0x{}", hex::encode(result.block_hash.as_slice())), + success: result.success, + gas_used: result.gas_used.map(|g| g as i64), + execution_time_us: result.execution_time_us as i64, + state_diff: serde_json::to_value(&result.state_diff)?, + error_reason: result.error_reason.clone(), + created_at: result.created_at, + updated_at: result.created_at, // For new records, created_at == updated_at + }) + } + + /// Store result in database + async fn store_in_database(&self, result: &SimulationResult) -> Result<()> { + let _db_result = self.result_to_db_format(result)?; + + info!( + simulation_id = %result.id, + bundle_id = %result.bundle_id, + success = result.success, + gas_used = ?result.gas_used, + "Storing simulation result in database" + ); + + // TODO: This would need to be implemented with proper sqlx queries + // For now, we'll use the datastore interface if it has simulation methods + // Otherwise, we need to add simulation-specific methods to the datastore + + // 
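`result_to_db_format` above converts the state diff to JSONB with `serde_json::to_value`. A standalone shape check, assuming alloy's serde support renders `Address` and `U256` as 0x-prefixed hex strings (so both work as JSON map keys):

```rust
// Prints roughly {"0x0000…0000":{"0x1":"0x2a"}}, modulo exact key formatting;
// assumes the serde feature is enabled on alloy-primitives.
use alloy_primitives::{Address, U256};
use std::collections::HashMap;

fn main() -> serde_json::Result<()> {
    let mut slots = HashMap::new();
    slots.insert(U256::from(1), U256::from(42));
    let mut state_diff: HashMap<Address, HashMap<U256, U256>> = HashMap::new();
    state_diff.insert(Address::ZERO, slots);

    let json = serde_json::to_value(&state_diff)?;
    println!("{json}");
    Ok(())
}
```
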
Placeholder implementation - in a real scenario, we'd add methods to PostgresDatastore + // like: datastore.store_simulation_result(result).await?; + + debug!( + simulation_id = %result.id, + "Database storage placeholder - would insert simulation result here" + ); + + Ok(()) + } + + /// Publish result to Kafka if configured + async fn publish_to_kafka(&self, result: &SimulationResult) -> Result<()> { + if let Some(ref _publisher) = self.kafka_publisher { + // Create a custom event type for simulation results + // For now, we'll create a mock event - in the future, we might want to extend + // the MempoolEvent enum to include simulation results + + debug!( + simulation_id = %result.id, + bundle_id = %result.bundle_id, + success = result.success, + "Publishing simulation result to Kafka" + ); + + // TODO: Implement proper simulation result event + // For now, this is commented out as we'd need to extend the MempoolEvent enum + + // let event = MempoolEvent::SimulationComplete { + // bundle_id: result.bundle_id, + // simulation_id: result.id, + // success: result.success, + // gas_used: result.gas_used, + // execution_time_us: result.execution_time_us, + // }; + + // publisher.publish(event).await?; + } + + Ok(()) + } +} + +#[async_trait] +impl SimulationResultPublisher for DatabaseResultPublisher { + async fn publish_result(&self, result: SimulationResult) -> Result<()> { + info!( + simulation_id = %result.id, + bundle_id = %result.bundle_id, + success = result.success, + "Publishing simulation result" + ); + + // Store in database + if let Err(e) = self.store_in_database(&result).await { + error!( + error = %e, + simulation_id = %result.id, + "Failed to store simulation result in database" + ); + return Err(e); + } + + // Publish to Kafka if configured + if let Err(e) = self.publish_to_kafka(&result).await { + warn!( + error = %e, + simulation_id = %result.id, + "Failed to publish simulation result to Kafka" + ); + // Don't fail the entire operation if Kafka publish fails + } + + debug!( + simulation_id = %result.id, + bundle_id = %result.bundle_id, + "Successfully published simulation result" + ); + + Ok(()) + } + + async fn get_results_for_bundle(&self, bundle_id: Uuid) -> Result> { + info!(bundle_id = %bundle_id, "Fetching simulation results for bundle"); + + // TODO: Implement actual database query + // For now, return empty vec as placeholder + + debug!(bundle_id = %bundle_id, "No simulation results found"); + Ok(vec![]) + } + + async fn get_result_by_id(&self, result_id: Uuid) -> Result> { + info!(simulation_id = %result_id, "Fetching simulation result by ID"); + + // TODO: Implement actual database query + // For now, return None as placeholder + + debug!(simulation_id = %result_id, "Simulation result not found"); + Ok(None) + } +} + +/// Database representation of a simulation result +/// This matches the expected database schema +#[derive(Debug, Clone)] +struct DatabaseSimulation { + id: Uuid, + bundle_id: Uuid, + block_number: i64, + block_hash: String, + success: bool, + gas_used: Option, + execution_time_us: i64, + state_diff: serde_json::Value, + error_reason: Option, + created_at: chrono::DateTime, + updated_at: chrono::DateTime, +} + +/// Create a result publisher with database storage +pub fn create_database_publisher( + datastore: Arc, +) -> impl SimulationResultPublisher { + DatabaseResultPublisher::new(datastore, None) +} + +/// Create a result publisher with database storage and Kafka publishing +pub fn create_database_kafka_publisher( + datastore: Arc, + producer: 
FutureProducer, + topic: String, +) -> impl SimulationResultPublisher { + DatabaseResultPublisher::with_kafka(datastore, producer, topic) +} + +// We'll need to add hex as a dependency for block hash formatting +// For now, using a simple placeholder diff --git a/crates/simulator/src/service.rs b/crates/simulator/src/service.rs new file mode 100644 index 0000000..d71ffab --- /dev/null +++ b/crates/simulator/src/service.rs @@ -0,0 +1,283 @@ +use crate::engine::{create_simulation_engine, SimulationEngine}; +use crate::listener::{create_mempool_listener, MempoolEventListener}; +use crate::publisher::{create_database_publisher, SimulationResultPublisher}; +use crate::state::create_rpc_state_provider; +use crate::types::{SimulationConfig, SimulationRequest}; +use anyhow::Result; +use std::sync::Arc; +use tokio::sync::mpsc; +use tokio::task::JoinHandle; +use tracing::{error, info}; +use uuid; + +/// Main service that orchestrates all simulation components +pub struct SimulatorService { + config: SimulationConfig, + listener: Box, + engine: Box, + publisher: Box, + simulation_queue: Option>, + listener_handle: Option>>, + simulation_handle: Option>>, +} + +impl SimulatorService { + /// Create a new simulator service with the given configuration + pub async fn new(config: SimulationConfig) -> Result { + info!("Initializing simulator service"); + + // Create state provider + let state_provider = Arc::new(create_rpc_state_provider(&config.reth_http_url)?); + info!(reth_url = %config.reth_http_url, "State provider initialized"); + + // Create simulation engine + let engine = Box::new(create_simulation_engine( + state_provider, + config.simulation_timeout_ms, + )); + info!( + timeout_ms = config.simulation_timeout_ms, + "Simulation engine initialized" + ); + + // Create mempool listener + let listener = Box::new(create_mempool_listener(&config)?); + info!( + topic = %config.kafka_topic, + brokers = ?config.kafka_brokers, + "Mempool listener initialized" + ); + + // Create database connection and publisher + let datastore = Arc::new( + tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await? 
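A hedged sketch of the publish path, combining the `SimulationResult::failure` constructor from `types.rs` with a publisher; `record_timeout` and its arguments are illustrative:

```rust
// Illustrative helper; SimulationResult and SimulationError come from
// crate::types, and the publisher from create_database_publisher.
use alloy_primitives::B256;
use uuid::Uuid;

async fn record_timeout(
    publisher: &dyn SimulationResultPublisher,
    bundle_id: Uuid,
    block_number: u64,
    block_hash: B256,
) -> anyhow::Result<()> {
    let result = SimulationResult::failure(
        Uuid::new_v4(), // simulation id
        bundle_id,
        block_number,
        block_hash,
        1_250, // execution_time_us
        SimulationError::Timeout,
    );
    publisher.publish_result(result).await
}
```
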
+ ); + + // Run database migrations + datastore.run_migrations().await?; + info!("Database migrations completed"); + + let publisher = Box::new(create_database_publisher(datastore)); + info!("Result publisher initialized"); + + Ok(Self { + config, + listener, + engine, + publisher, + simulation_queue: None, + listener_handle: None, + simulation_handle: None, + }) + } + + /// Start the simulator service + pub async fn start(&mut self) -> Result<()> { + info!("Starting simulator service"); + + // Create channel for simulation requests + let (sender, receiver) = mpsc::channel::(1000); + self.simulation_queue = Some(receiver); + + // Start mempool listener + let listener_handle = { + let mut listener = std::mem::replace( + &mut self.listener, + Box::new(NoOpListener), + ); + let sender_clone = sender.clone(); + + tokio::spawn(async move { + listener.start(sender_clone).await + }) + }; + + // Start simulation worker + let simulation_handle = { + let engine = std::mem::replace( + &mut self.engine, + Box::new(NoOpEngine), + ); + let publisher = std::mem::replace( + &mut self.publisher, + Box::new(NoOpPublisher), + ); + let mut queue = self.simulation_queue.take().unwrap(); + let max_concurrent = self.config.max_concurrent_simulations; + + tokio::spawn(async move { + Self::simulation_worker( + &mut queue, + engine.as_ref(), + publisher.as_ref(), + max_concurrent, + ).await + }) + }; + + self.listener_handle = Some(listener_handle); + self.simulation_handle = Some(simulation_handle); + + info!( + max_concurrent = self.config.max_concurrent_simulations, + "Simulator service started successfully" + ); + + Ok(()) + } + + /// Stop the simulator service + pub async fn stop(&mut self) -> Result<()> { + info!("Stopping simulator service"); + + // Stop listener + if let Some(handle) = self.listener_handle.take() { + handle.abort(); + } + + // Stop simulation worker + if let Some(handle) = self.simulation_handle.take() { + handle.abort(); + } + + info!("Simulator service stopped"); + Ok(()) + } + + /// Wait for the service to complete + pub async fn wait(&mut self) -> Result<()> { + if let Some(listener_handle) = &mut self.listener_handle { + if let Err(e) = listener_handle.await { + error!(error = %e, "Listener task failed"); + } + } + + if let Some(simulation_handle) = &mut self.simulation_handle { + if let Err(e) = simulation_handle.await { + error!(error = %e, "Simulation worker task failed"); + } + } + + Ok(()) + } + + /// Main simulation worker that processes simulation requests + async fn simulation_worker( + queue: &mut mpsc::Receiver, + engine: &dyn SimulationEngine, + publisher: &dyn SimulationResultPublisher, + max_concurrent: usize, + ) -> Result<()> { + info!(max_concurrent = max_concurrent, "Starting simulation worker"); + + // Use a semaphore to limit concurrent simulations + let semaphore = Arc::new(tokio::sync::Semaphore::new(max_concurrent)); + + while let Some(request) = queue.recv().await { + let semaphore_clone = semaphore.clone(); + let request_clone = request.clone(); + + // Spawn a task for this simulation + tokio::spawn(async move { + let _permit = match semaphore_clone.acquire().await { + Ok(permit) => permit, + Err(_) => { + error!("Failed to acquire semaphore permit"); + return; + } + }; + + info!( + bundle_id = %request_clone.bundle_id, + block_number = request_clone.block_number, + num_transactions = request_clone.bundle.txs.len(), + "Processing simulation request" + ); + + // Perform the simulation + match engine.simulate_bundle(request_clone.clone()).await { + Ok(result) => { 
+ info!( + bundle_id = %request_clone.bundle_id, + simulation_id = %result.id, + success = result.success, + gas_used = ?result.gas_used, + execution_time_us = result.execution_time_us, + "Simulation completed" + ); + + // Publish the result + if let Err(e) = publisher.publish_result(result).await { + error!( + error = %e, + bundle_id = %request_clone.bundle_id, + "Failed to publish simulation result" + ); + } + } + Err(e) => { + error!( + error = %e, + bundle_id = %request_clone.bundle_id, + "Simulation failed with error" + ); + } + } + }); + } + + info!("Simulation worker shutting down"); + Ok(()) + } +} + +// Placeholder implementations for move semantics +struct NoOpListener; + +#[async_trait::async_trait] +impl MempoolEventListener for NoOpListener { + async fn start(&mut self, _sender: mpsc::Sender) -> Result<()> { + Ok(()) + } + + async fn stop(&mut self) -> Result<()> { + Ok(()) + } +} + +struct NoOpEngine; + +#[async_trait::async_trait] +impl SimulationEngine for NoOpEngine { + async fn simulate_bundle(&self, _request: SimulationRequest) -> Result { + Err(anyhow::anyhow!("NoOpEngine should never be called")) + } +} + +struct NoOpPublisher; + +#[async_trait::async_trait] +impl SimulationResultPublisher for NoOpPublisher { + async fn publish_result(&self, _result: SimulationResult) -> Result<()> { + Err(anyhow::anyhow!("NoOpPublisher should never be called")) + } + + async fn get_results_for_bundle(&self, _bundle_id: uuid::Uuid) -> Result> { + Err(anyhow::anyhow!("NoOpPublisher should never be called")) + } + + async fn get_result_by_id(&self, _result_id: uuid::Uuid) -> Result> { + Err(anyhow::anyhow!("NoOpPublisher should never be called")) + } +} + +impl Clone for SimulationRequest { + fn clone(&self) -> Self { + Self { + bundle_id: self.bundle_id, + bundle: self.bundle.clone(), + block_number: self.block_number, + block_hash: self.block_hash, + } + } +} diff --git a/crates/simulator/src/state.rs b/crates/simulator/src/state.rs new file mode 100644 index 0000000..943979d --- /dev/null +++ b/crates/simulator/src/state.rs @@ -0,0 +1,270 @@ +use alloy_primitives::{Address, B256, U256}; +use anyhow::Result; +use async_trait::async_trait; +use reth_provider::{StateProvider as RethStateProvider, StateProviderFactory}; +use std::collections::HashMap; +use std::sync::Arc; +use tracing::{debug, error, warn}; + +/// Provides access to blockchain state for simulation +#[async_trait] +pub trait StateProvider: Send + Sync { + /// Get the current block number + async fn get_block_number(&self) -> Result; + + /// Get block hash for a given block number + async fn get_block_hash(&self, block_number: u64) -> Result; + + /// Get account balance at a specific block + async fn get_balance(&self, address: Address, block_number: u64) -> Result; + + /// Get account nonce at a specific block + async fn get_nonce(&self, address: Address, block_number: u64) -> Result; + + /// Get storage value at a specific slot and block + async fn get_storage(&self, address: Address, slot: U256, block_number: u64) -> Result; + + /// Get account code at a specific block + async fn get_code(&self, address: Address, block_number: u64) -> Result>; + + /// Get multiple storage slots efficiently + async fn get_storage_batch( + &self, + requests: Vec<(Address, Vec)>, + block_number: u64, + ) -> Result>>; +} + +/// Direct reth state provider that accesses state without RPC +pub struct DirectStateProvider { + state_provider_factory: Arc, + /// Current block number for state queries + current_block_number: u64, +} + +impl 
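The worker above spawns one task per request and only acquires the semaphore permit inside the task, so queued tasks can still pile up without bound; acquiring an owned permit before spawning (sketched below) caps the number of live tasks:

```rust
// Four permits bound the number of live tasks; the loop blocks on
// acquire_owned once they are all taken.
use std::sync::Arc;
use tokio::sync::Semaphore;

#[tokio::main]
async fn main() {
    let semaphore = Arc::new(Semaphore::new(4));
    for job in 0..16u32 {
        let permit = semaphore.clone().acquire_owned().await.expect("semaphore closed");
        tokio::spawn(async move {
            let _permit = permit; // released when the task completes
            // ... simulate one bundle here ...
            println!("job {job} finished");
        });
    }
}
```
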
DirectStateProvider +where + SF: StateProviderFactory, +{ + pub fn new(state_provider_factory: Arc, current_block_number: u64) -> Self { + Self { + state_provider_factory, + current_block_number, + } + } + + /// Update the current block number for state queries + pub fn set_current_block(&mut self, block_number: u64) { + self.current_block_number = block_number; + } + + /// Get a state provider for the current block + fn get_state_provider(&self) -> Result> { + self.state_provider_factory + .state_by_block_number(self.current_block_number) + .map_err(|e| anyhow::anyhow!("Failed to get state provider: {}", e)) + } +} + +#[async_trait] +impl StateProvider for DirectStateProvider +where + SF: StateProviderFactory + Send + Sync, +{ + async fn get_block_number(&self) -> Result { + Ok(self.current_block_number) + } + + async fn get_block_hash(&self, block_number: u64) -> Result { + let state_provider = self.get_state_provider()?; + + // Get block hash from state provider + // Note: This would need to be implemented based on reth's state provider API + // For now, we'll use a placeholder + debug!(block_number = block_number, "Getting block hash from direct state"); + + // TODO: Implement proper block hash retrieval from reth state provider + Ok(B256::ZERO) // Placeholder + } + + async fn get_balance(&self, address: Address, _block_number: u64) -> Result { + let state_provider = self.get_state_provider()?; + + match state_provider.account_balance(address) { + Ok(Some(balance)) => { + debug!( + address = %address, + block_number = self.current_block_number, + balance = %balance, + "Retrieved balance from direct state" + ); + Ok(balance) + } + Ok(None) => { + debug!( + address = %address, + block_number = self.current_block_number, + "Account not found, returning zero balance" + ); + Ok(U256::ZERO) + } + Err(e) => { + error!( + error = %e, + address = %address, + block_number = self.current_block_number, + "Failed to get balance from direct state" + ); + Err(anyhow::anyhow!("State provider error: {}", e)) + } + } + } + + async fn get_nonce(&self, address: Address, _block_number: u64) -> Result { + let state_provider = self.get_state_provider()?; + + match state_provider.account_nonce(address) { + Ok(Some(nonce)) => { + debug!( + address = %address, + block_number = self.current_block_number, + nonce = nonce, + "Retrieved nonce from direct state" + ); + Ok(nonce) + } + Ok(None) => { + debug!( + address = %address, + block_number = self.current_block_number, + "Account not found, returning zero nonce" + ); + Ok(0) + } + Err(e) => { + error!( + error = %e, + address = %address, + block_number = self.current_block_number, + "Failed to get nonce from direct state" + ); + Err(anyhow::anyhow!("State provider error: {}", e)) + } + } + } + + async fn get_storage(&self, address: Address, slot: U256, _block_number: u64) -> Result { + let state_provider = self.get_state_provider()?; + + match state_provider.storage(address, reth_primitives::StorageKey::from(slot)) { + Ok(Some(value)) => { + debug!( + address = %address, + slot = %slot, + block_number = self.current_block_number, + value = %value, + "Retrieved storage from direct state" + ); + Ok(U256::from(value)) + } + Ok(None) => { + debug!( + address = %address, + slot = %slot, + block_number = self.current_block_number, + "Storage slot not found, returning zero" + ); + Ok(U256::ZERO) + } + Err(e) => { + error!( + error = %e, + address = %address, + slot = %slot, + block_number = self.current_block_number, + "Failed to get storage from direct state" + ); + 
Err(anyhow::anyhow!("State provider error: {}", e)) + } + } + } + + async fn get_code(&self, address: Address, _block_number: u64) -> Result> { + let state_provider = self.get_state_provider()?; + + match state_provider.account_code(address) { + Ok(Some(code)) => { + debug!( + address = %address, + block_number = self.current_block_number, + code_len = code.len(), + "Retrieved code from direct state" + ); + Ok(code.original_bytes()) + } + Ok(None) => { + debug!( + address = %address, + block_number = self.current_block_number, + "Account has no code" + ); + Ok(vec![]) + } + Err(e) => { + error!( + error = %e, + address = %address, + block_number = self.current_block_number, + "Failed to get code from direct state" + ); + Err(anyhow::anyhow!("State provider error: {}", e)) + } + } + } + + async fn get_storage_batch( + &self, + requests: Vec<(Address, Vec)>, + block_number: u64, + ) -> Result>> { + let mut result = HashMap::new(); + + // Process each address + for (address, slots) in requests { + let mut address_storage = HashMap::new(); + + for slot in slots { + match self.get_storage(address, slot, block_number).await { + Ok(value) => { + address_storage.insert(slot, value); + } + Err(e) => { + warn!( + error = %e, + address = %address, + slot = %slot, + "Failed to get storage in batch request" + ); + } + } + } + + if !address_storage.is_empty() { + result.insert(address, address_storage); + } + } + + Ok(result) + } +} + +/// Create a direct state provider using reth's state provider factory +pub fn create_direct_state_provider( + state_provider_factory: Arc, + current_block_number: u64, +) -> impl StateProvider +where + SF: StateProviderFactory + Send + Sync + 'static, +{ + DirectStateProvider::new(state_provider_factory, current_block_number) +} diff --git a/crates/simulator/src/types.rs b/crates/simulator/src/types.rs new file mode 100644 index 0000000..4146e7f --- /dev/null +++ b/crates/simulator/src/types.rs @@ -0,0 +1,165 @@ +use alloy_primitives::{Address, B256, U256}; +use alloy_rpc_types_mev::EthSendBundle; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use uuid::Uuid; + +/// Result of simulating a complete bundle +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SimulationResult { + /// Unique identifier for this simulation + pub id: Uuid, + /// Bundle that was simulated + pub bundle_id: Uuid, + /// Block number at which simulation was performed + pub block_number: u64, + /// Block hash at which simulation was performed + pub block_hash: B256, + /// Whether the bundle simulation was successful + pub success: bool, + /// Total gas used by all transactions in the bundle + pub gas_used: Option, + /// Time taken to execute the simulation in microseconds + pub execution_time_us: u128, + /// State changes produced by the bundle simulation + /// Map of account address -> (storage slot -> new value) + pub state_diff: HashMap>, + /// Error message if simulation failed + pub error_reason: Option, + /// When this simulation was created + pub created_at: DateTime, +} + +/// Configuration for the simulation service (legacy) +#[derive(Debug, Clone)] +pub struct SimulationConfig { + /// Kafka brokers for consuming mempool events + pub kafka_brokers: Vec, + /// Kafka topic to consume mempool events from + pub kafka_topic: String, + /// Kafka consumer group ID + pub kafka_group_id: String, + /// URL for Reth HTTP RPC endpoint + pub reth_http_url: String, + /// URL for Reth WebSocket endpoint + pub reth_ws_url: String, + /// 
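A hypothetical call against the trait's batch API, assuming the stripped generics read `requests: Vec<(Address, Vec<U256>)>` and the result is `HashMap<Address, HashMap<U256, U256>>`:

```rust
// Placeholder address and slots; `StateProvider` here is the crate's own trait.
use alloy_primitives::{Address, U256};

async fn read_two_slots(provider: &dyn StateProvider, block: u64) -> anyhow::Result<()> {
    let requests = vec![(Address::ZERO, vec![U256::from(0), U256::from(1)])];
    for (address, slots) in provider.get_storage_batch(requests, block).await? {
        for (slot, value) in slots {
            println!("{address}: slot {slot} = {value}");
        }
    }
    Ok(())
}
```
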
PostgreSQL database connection URL + pub database_url: String, + /// Maximum number of concurrent simulations + pub max_concurrent_simulations: usize, + /// Timeout for individual simulations in milliseconds + pub simulation_timeout_ms: u64, + /// Whether to publish simulation results back to Kafka + pub publish_results: bool, + /// Topic to publish results to (if publishing enabled) + pub results_topic: Option, +} + +/// Configuration for ExEx-based simulation +#[derive(Debug, Clone)] +pub struct ExExSimulationConfig { + /// PostgreSQL database connection URL + pub database_url: String, + /// Maximum number of concurrent simulations + pub max_concurrent_simulations: usize, + /// Timeout for individual simulations in milliseconds + pub simulation_timeout_ms: u64, +} + +/// Errors that can occur during simulation +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum SimulationError { + /// Bundle execution reverted + Revert { reason: String }, + /// Bundle ran out of gas + OutOfGas, + /// Invalid nonce in one of the transactions + InvalidNonce { tx_index: usize, expected: u64, actual: u64 }, + /// Insufficient balance for gas payment + InsufficientBalance { tx_index: usize, required: U256, available: U256 }, + /// State access error (RPC failure, etc.) + StateAccessError { message: String }, + /// Simulation timeout + Timeout, + /// Unknown error + Unknown { message: String }, +} + +impl std::fmt::Display for SimulationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SimulationError::Revert { reason } => write!(f, "Bundle reverted: {}", reason), + SimulationError::OutOfGas => write!(f, "Bundle ran out of gas"), + SimulationError::InvalidNonce { tx_index, expected, actual } => { + write!(f, "Invalid nonce in tx {}: expected {}, got {}", tx_index, expected, actual) + } + SimulationError::InsufficientBalance { tx_index, required, available } => { + write!(f, "Insufficient balance in tx {}: required {}, available {}", tx_index, required, available) + } + SimulationError::StateAccessError { message } => write!(f, "State access error: {}", message), + SimulationError::Timeout => write!(f, "Simulation timed out"), + SimulationError::Unknown { message } => write!(f, "Unknown error: {}", message), + } + } +} + +impl std::error::Error for SimulationError {} + +/// A request to simulate a bundle +#[derive(Debug)] +pub struct SimulationRequest { + pub bundle_id: Uuid, + pub bundle: EthSendBundle, + pub block_number: u64, + pub block_hash: B256, +} + +impl SimulationResult { + /// Create a new successful simulation result + pub fn success( + id: Uuid, + bundle_id: Uuid, + block_number: u64, + block_hash: B256, + gas_used: u64, + execution_time_us: u128, + state_diff: HashMap>, + ) -> Self { + Self { + id, + bundle_id, + block_number, + block_hash, + success: true, + gas_used: Some(gas_used), + execution_time_us, + state_diff, + error_reason: None, + created_at: Utc::now(), + } + } + + /// Create a new failed simulation result + pub fn failure( + id: Uuid, + bundle_id: Uuid, + block_number: u64, + block_hash: B256, + execution_time_us: u128, + error: SimulationError, + ) -> Self { + Self { + id, + bundle_id, + block_number, + block_hash, + success: false, + gas_used: None, + execution_time_us, + state_diff: HashMap::new(), + error_reason: Some(error.to_string()), + created_at: Utc::now(), + } + } +} diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs new file mode 100644 index 0000000..497e0ba --- 
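A quick check of the `Display` impl above, runnable as a unit test:

```rust
// Exercises the error formatting defined in types.rs.
#[test]
fn simulation_error_display() {
    let err = SimulationError::InvalidNonce { tx_index: 0, expected: 7, actual: 5 };
    assert_eq!(err.to_string(), "Invalid nonce in tx 0: expected 7, got 5");
    assert_eq!(SimulationError::Timeout.to_string(), "Simulation timed out");
}
```
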
/dev/null +++ b/crates/simulator/tests/integration_test.rs @@ -0,0 +1,84 @@ +use tips_simulator::types::{SimulationConfig, SimulationRequest}; +use tips_simulator::service::SimulatorService; +use alloy_primitives::{Address, Bytes, B256}; +use alloy_rpc_types_mev::EthSendBundle; +use uuid::Uuid; + +// Basic smoke test to ensure the simulator compiles and can be instantiated +#[tokio::test] +async fn test_simulator_service_creation() { + let config = SimulationConfig { + kafka_brokers: vec!["localhost:9092".to_string()], + kafka_topic: "test-topic".to_string(), + kafka_group_id: "test-group".to_string(), + reth_http_url: "http://localhost:8545".to_string(), + reth_ws_url: "ws://localhost:8546".to_string(), + database_url: "postgresql://user:pass@localhost:5432/test".to_string(), + max_concurrent_simulations: 5, + simulation_timeout_ms: 1000, + publish_results: false, + results_topic: None, + }; + + // This test will fail to connect to real services, but it tests compilation + // and basic service construction + let result = SimulatorService::new(config).await; + + // We expect this to fail due to connection issues in test environment + assert!(result.is_err()); +} + +#[test] +fn test_simulation_request_creation() { + let bundle_id = Uuid::new_v4(); + let bundle = EthSendBundle { + txs: vec![ + Bytes::from_static(&[0x01, 0x02, 0x03]), // Mock transaction data + ], + block_number: 18_000_000, + min_timestamp: Some(1625097600), + max_timestamp: Some(1625097900), + reverting_tx_hashes: vec![], + replacement_uuid: None, + dropping_tx_hashes: vec![], + refund_percent: None, + refund_recipient: None, + refund_tx_hashes: vec![], + extra_fields: Default::default(), + }; + + let request = SimulationRequest { + bundle_id, + bundle: bundle.clone(), + block_number: 18_000_000, + block_hash: B256::ZERO, + }; + + assert_eq!(request.bundle_id, bundle_id); + assert_eq!(request.bundle.txs.len(), 1); + assert_eq!(request.block_number, 18_000_000); +} + +#[cfg(feature = "integration-tests")] +mod integration_tests { + use super::*; + use testcontainers::core::{ContainerPort, WaitFor}; + use testcontainers::{Container, GenericImage}; + use testcontainers_modules::{kafka::Kafka, postgres::Postgres}; + + // This would be a full integration test with real containers + // Disabled by default since it requires Docker + #[tokio::test] + async fn test_full_simulation_flow() { + // Start test containers + let postgres = Postgres::default(); + let kafka = Kafka::default(); + + // This would test the full flow: + // 1. Start simulator service + // 2. Send test bundle via Kafka + // 3. 
Verify simulation result in database + + todo!("Implement full integration test"); + } +} From 9b5c15dda4b71e176fac82c8105a89f867195d07 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 19 Sep 2025 22:46:40 -0500 Subject: [PATCH 04/39] Customize the default reth datadir --- crates/simulator/src/config.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs index 6f48cf0..d99de73 100644 --- a/crates/simulator/src/config.rs +++ b/crates/simulator/src/config.rs @@ -1,5 +1,6 @@ use crate::types::ExExSimulationConfig; use clap::Parser; +use std::path::PathBuf; /// Combined configuration for reth node with simulator ExEx #[derive(Parser, Debug, Clone)] @@ -9,6 +10,10 @@ pub struct SimulatorNodeConfig { #[command(flatten)] pub node: reth_cli::Cli, + /// Data directory for simulator + #[arg(long, env = "TIPS_SIMULATOR_DATADIR", default_value = "~/.tips-simulator-reth")] + pub datadir: std::path::PathBuf, + /// PostgreSQL database connection URL for simulator #[arg(long, env = "TIPS_SIMULATOR_DATABASE_URL")] pub database_url: String, From f8cf73883dc7e2cdfa65a438b1a5b0040e0770c3 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sat, 20 Sep 2025 16:23:08 -0500 Subject: [PATCH 05/39] Implement the simulator as a reth ExEx to provide the state for simulations and notification to trigger them --- Cargo.toml | 10 + crates/simulator/Cargo.toml | 23 +- crates/simulator/src/config.rs | 11 +- crates/simulator/src/core.rs | 77 +++ crates/simulator/src/engine.rs | 179 ++++--- crates/simulator/src/exex.rs | 560 +++++++++++++-------- crates/simulator/src/lib.rs | 131 +++-- crates/simulator/src/listener.rs | 150 ------ crates/simulator/src/main.rs | 56 ++- crates/simulator/src/mempool.rs | 154 ++++++ crates/simulator/src/publisher.rs | 4 +- crates/simulator/src/service.rs | 283 ----------- crates/simulator/src/state.rs | 270 ---------- crates/simulator/src/types.rs | 27 +- crates/simulator/tests/integration_test.rs | 71 +-- 15 files changed, 880 insertions(+), 1126 deletions(-) create mode 100644 crates/simulator/src/core.rs delete mode 100644 crates/simulator/src/listener.rs create mode 100644 crates/simulator/src/mempool.rs delete mode 100644 crates/simulator/src/service.rs delete mode 100644 crates/simulator/src/state.rs diff --git a/Cargo.toml b/Cargo.toml index fa2d10e..fc1ba0d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,14 @@ tips-simulator = { path = "crates/simulator" } # Reth reth = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-rpc-eth-types = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-exex = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-provider = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-execution-types = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-api = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-builder = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } # alloy alloy-primitives = { version = "1.3.1", default-features = false, features = [ @@ -26,6 +34,7 @@ alloy-rpc-client = { version = "1.0.30" } alloy-rpc-types-mev = "1.0.30" alloy-transport-http = "1.0.30" alloy-rlp = "0.3.12" +alloy-eips = { 
version = "1.0.32" } # op-alloy op-alloy-consensus = { version = "0.20.0", features = ["k256"] } @@ -53,6 +62,7 @@ dotenvy = "0.15.7" testcontainers = { version = "0.23.1", features = ["blocking"] } testcontainers-modules = { version = "0.11.2", features = ["postgres", "kafka", "minio"] } jsonrpsee = { version = "0.26.0", features = ["server", "macros"] } +futures-util = "0.3.31" # Kafka and S3 dependencies rdkafka = { version = "0.37.0", features = ["libz-static"] } diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index 7a466fc..9778e9d 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -23,31 +23,32 @@ async-trait.workspace = true dotenvy.workspace = true chrono.workspace = true eyre.workspace = true +futures-util.workspace = true # Alloy for Ethereum types alloy-primitives.workspace = true alloy-rpc-types.workspace = true alloy-consensus.workspace = true alloy-rpc-types-mev.workspace = true +alloy-eips.workspace = true op-alloy-consensus.workspace = true -op-alloy-network.workspace = true # Reth ExEx dependencies -reth = { git = "https://github.com/paradigmxyz/reth", features = ["exex"] } -reth-exex = { git = "https://github.com/paradigmxyz/reth" } -reth-provider = { git = "https://github.com/paradigmxyz/reth" } -reth-db = { git = "https://github.com/paradigmxyz/reth" } -reth-primitives = { git = "https://github.com/paradigmxyz/reth" } -reth-execution-types = { git = "https://github.com/paradigmxyz/reth" } -reth-node-api = { git = "https://github.com/paradigmxyz/reth" } -reth-node-builder = { git = "https://github.com/paradigmxyz/reth" } -reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth" } -reth-cli = { git = "https://github.com/paradigmxyz/reth" } +reth.workspace = true +reth-exex.workspace = true +reth-provider.workspace = true +reth-primitives.workspace = true +reth-execution-types.workspace = true +reth-node-api.workspace = true +reth-evm.workspace = true +reth-node-builder.workspace = true +reth-node-ethereum.workspace = true # Additional dependencies for simulation std-semaphore = "0.1" tokio-util = { version = "0.7", features = ["time"] } hex = "0.4" +rdkafka.workspace = true [dev-dependencies] tokio-test = "0.4.4" diff --git a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs index d99de73..1b536ce 100644 --- a/crates/simulator/src/config.rs +++ b/crates/simulator/src/config.rs @@ -1,14 +1,13 @@ use crate::types::ExExSimulationConfig; use clap::Parser; -use std::path::PathBuf; /// Combined configuration for reth node with simulator ExEx -#[derive(Parser, Debug, Clone)] +#[derive(Parser, Debug)] #[command(author, version, about = "Reth node with Tips Simulator ExEx")] pub struct SimulatorNodeConfig { /// Reth node arguments #[command(flatten)] - pub node: reth_cli::Cli, + pub node: reth::cli::Cli, /// Data directory for simulator #[arg(long, env = "TIPS_SIMULATOR_DATADIR", default_value = "~/.tips-simulator-reth")] @@ -40,10 +39,10 @@ pub struct SimulatorExExConfig { pub simulation_timeout_ms: u64, } -impl From for ExExSimulationConfig { - fn from(config: SimulatorNodeConfig) -> Self { +impl From<&SimulatorNodeConfig> for ExExSimulationConfig { + fn from(config: &SimulatorNodeConfig) -> Self { Self { - database_url: config.database_url, + database_url: config.database_url.clone(), max_concurrent_simulations: config.max_concurrent_simulations, simulation_timeout_ms: config.simulation_timeout_ms, } diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs new file mode 100644 index 
0000000..78eee65 --- /dev/null +++ b/crates/simulator/src/core.rs @@ -0,0 +1,77 @@ +use crate::engine::SimulationEngine; +use crate::publisher::SimulationResultPublisher; +use crate::types::SimulationRequest; +use eyre::Result; +use reth_provider::{StateProvider, StateProviderFactory}; +use tracing::{error, info}; + +/// Core bundle simulator that provides shared simulation logic +/// Used by both mempool event simulators and ExEx event simulators +pub struct BundleSimulator +where + E: SimulationEngine, + P: SimulationResultPublisher, +{ + engine: E, + publisher: P, +} + +impl BundleSimulator +where + E: SimulationEngine, + P: SimulationResultPublisher, +{ + pub fn new(engine: E, publisher: P) -> Self { + Self { + engine, + publisher, + } + } + + /// Process a simulation request by creating state provider from factory + /// Convenience method that handles state provider creation + pub async fn simulate( + &self, + request: SimulationRequest, + state_provider_factory: &F, + ) -> Result<()> + where + F: StateProviderFactory, + { + // Get state provider for the block + // FIXME: We probably want to get the state provider once per block rather than once per + // bundle for each block. + let state_provider = state_provider_factory + .state_by_block_hash(request.block_hash) + .map_err(|e| eyre::eyre!("Failed to get state provider: {}", e))?; + + // Run the simulation + match self.engine.simulate_bundle(request.clone(), &state_provider).await { + Ok(result) => { + info!( + bundle_id = %request.bundle_id, + simulation_id = %result.id, + success = result.success, + "Simulation completed" + ); + + if let Err(e) = self.publisher.publish_result(result).await { + error!( + error = %e, + bundle_id = %request.bundle_id, + "Failed to publish simulation result" + ); + } + } + Err(e) => { + error!( + error = %e, + bundle_id = %request.bundle_id, + "Simulation failed" + ); + } + } + + Ok(()) + } +} diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 1f8875b..cf395e6 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -1,26 +1,104 @@ -use crate::state::StateProvider; use crate::types::{SimulationError, SimulationRequest, SimulationResult}; use alloy_consensus::transaction::{SignerRecoverable, Transaction}; -use alloy_primitives::{Address, U256}; -use alloy_provider::network::eip2718::Decodable2718; -use anyhow::Result; +use alloy_primitives::{Address, B256, U256}; +use alloy_eips::eip2718::Decodable2718; +use alloy_rpc_types::BlockNumberOrTag; +use eyre::Result; use async_trait::async_trait; use op_alloy_consensus::OpTxEnvelope; +use reth_provider::{StateProvider, StateProviderFactory}; use std::collections::HashMap; use std::sync::Arc; use std::time::Instant; -use tokio::time::{timeout, Duration}; +use tokio::time::Duration; use tracing::{debug, error, info, warn}; use uuid::Uuid; +/// Create state provider from ExEx context +/// +/// This function prepares the necessary components for EVM simulation: +/// 1. Creates a StateProvider at a specific block using the Provider from ExEx context +/// 2. Validates that the block exists and retrieves its hash +/// 3. 
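A hedged sketch of driving the shared core from `core.rs`: pair an engine with a publisher and let `BundleSimulator::simulate` resolve state by block hash; `run_one` is illustrative:

```rust
// Illustrative wrapper around the shared core; the generics mirror the
// bounds on BundleSimulator and its simulate() method.
use reth_provider::StateProviderFactory;

async fn run_one<E, P, F>(
    engine: E,
    publisher: P,
    factory: &F,
    request: SimulationRequest,
) -> eyre::Result<()>
where
    E: SimulationEngine,
    P: SimulationResultPublisher,
    F: StateProviderFactory,
{
    let simulator = BundleSimulator::new(engine, publisher);
    // Resolves a state provider for request.block_hash, simulates, publishes.
    simulator.simulate(request, factory).await
}
```
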
Returns the state provider that can be used for EVM database initialization +/// +/// # Arguments +/// * `provider` - The state provider factory from the ExEx context (e.g., ctx.provider) +/// * `block_number` - The block number to create the state at +/// +/// # Returns +/// A tuple of (StateProvider, block_hash) ready for EVM initialization +/// +/// # Usage in ExEx +/// When implementing an ExEx that needs to simulate transactions, you can use this +/// function to get a state provider that implements the Client interface. This state +/// provider can then be used with reth's EvmConfig to create an EVM instance. +/// +/// The typical flow is: +/// 1. Get the provider from ExExContext: `ctx.provider` +/// 2. Call this function to get a state provider at a specific block +/// 3. Use the state provider with reth_revm::database::StateProviderDatabase +/// 4. Configure the EVM with the appropriate EvmConfig from your node +pub fn prepare_evm_state

<P>( + provider: Arc<P>

, + block_number: u64, +) -> Result<(Box, B256)> +where + P: StateProviderFactory, +{ + // Get the state provider at the specified block + let state_provider = provider + .state_by_block_number_or_tag(BlockNumberOrTag::Number(block_number)) + .map_err(|e| eyre::eyre!("Failed to get state provider at block {}: {}", block_number, e))?; + + // Get the block hash + let block_hash = state_provider + .block_hash(block_number) + .map_err(|e| eyre::eyre!("Failed to get block hash: {}", e))? + .ok_or_else(|| eyre::eyre!("Block {} not found", block_number))?; + + Ok((state_provider, block_hash)) +} + +/// Example usage within an ExEx: +/// ```ignore +/// // In your ExEx implementation +/// use reth_exex::ExExContext; +/// use reth_revm::database::StateProviderDatabase; +/// use revm::Evm; +/// +/// // Get provider from ExEx context +/// let provider = ctx.provider.clone(); +/// +/// // Prepare EVM state +/// let (state_provider, block_hash) = prepare_evm_state::( +/// provider.clone(), +/// block_number, +/// )?; +/// +/// // Create state database +/// let db = StateProviderDatabase::new(state_provider); +/// +/// // Build EVM with the database +/// // Note: You would configure the EVM with proper environment settings +/// // based on your chain's requirements (gas limits, fork settings, etc.) +/// let evm = Evm::builder() +/// .with_db(db) +/// .build(); +/// ``` + #[async_trait] pub trait SimulationEngine: Send + Sync { /// Simulate a bundle execution - async fn simulate_bundle(&self, request: SimulationRequest) -> Result; + async fn simulate_bundle( + &self, + request: SimulationRequest, + state_provider: &S, + ) -> Result + where + S: StateProvider + Send + Sync; } -pub struct BundleSimulationEngine { - state_provider: Arc, +pub struct RethSimulationEngine { timeout: Duration, } @@ -39,10 +117,9 @@ struct ExecutionContext { gas_used: u64, } -impl BundleSimulationEngine { - pub fn new(state_provider: Arc, timeout_ms: u64) -> Self { +impl RethSimulationEngine { + pub fn new(timeout_ms: u64) -> Self { Self { - state_provider, timeout: Duration::from_millis(timeout_ms), } } @@ -50,11 +127,11 @@ impl BundleSimulationEngine { /// Extract transaction details from raw transaction bytes fn decode_transaction(&self, tx_bytes: &[u8]) -> Result { OpTxEnvelope::decode_2718_exact(tx_bytes) - .map_err(|e| anyhow::anyhow!("Failed to decode transaction: {}", e)) + .map_err(|e| eyre::eyre!("Failed to decode transaction: {}", e)) } /// Validate that a transaction can be executed in the current context - async fn validate_transaction( + fn validate_transaction( &self, tx: &OpTxEnvelope, context: &ExecutionContext, @@ -96,7 +173,7 @@ impl BundleSimulationEngine { } /// Simulate a single transaction execution - async fn simulate_transaction( + fn simulate_transaction( &self, tx: &OpTxEnvelope, context: &mut ExecutionContext, @@ -117,7 +194,7 @@ impl BundleSimulationEngine { ); // Validate the transaction first - self.validate_transaction(tx, context).await?; + self.validate_transaction(tx, context)?; // Simulate gas usage (placeholder logic) let estimated_gas = std::cmp::min(tx.gas_limit(), 100_000); // Simple estimation @@ -155,10 +232,14 @@ impl BundleSimulationEngine { } /// Initialize execution context by fetching initial state - async fn initialize_context( + fn initialize_context( &self, request: &SimulationRequest, - ) -> Result { + state_provider: &S, + ) -> Result + where + S: StateProvider, + { let mut initial_balances = HashMap::new(); let mut initial_nonces = HashMap::new(); @@ -183,10 +264,13 @@ impl 
BundleSimulationEngine { // Fetch initial state for all addresses for address in addresses { - match self.state_provider.get_balance(address, request.block_number).await { - Ok(balance) => { + match state_provider.account_balance(&address) { + Ok(Some(balance)) => { initial_balances.insert(address, balance); } + Ok(None) => { + initial_balances.insert(address, U256::ZERO); + } Err(e) => { error!( error = %e, @@ -196,10 +280,13 @@ impl BundleSimulationEngine { } } - match self.state_provider.get_nonce(address, request.block_number).await { - Ok(nonce) => { + match state_provider.account_nonce(&address) { + Ok(Some(nonce)) => { initial_nonces.insert(address, nonce); } + Ok(None) => { + initial_nonces.insert(address, 0); + } Err(e) => { error!( error = %e, @@ -218,12 +305,18 @@ impl BundleSimulationEngine { gas_used: 0, }) } +} - /// Perform the actual bundle simulation - async fn execute_bundle_simulation( +#[async_trait] +impl SimulationEngine for RethSimulationEngine { + async fn simulate_bundle( &self, request: SimulationRequest, - ) -> Result { + state_provider: &S, + ) -> Result + where + S: StateProvider + Send + Sync, + { let start_time = Instant::now(); let simulation_id = Uuid::new_v4(); @@ -236,8 +329,8 @@ impl BundleSimulationEngine { ); // Initialize execution context - let mut context = self.initialize_context(&request).await - .map_err(|e| anyhow::anyhow!("Failed to initialize context: {}", e))?; + let mut context = self.initialize_context(&request, state_provider) + .map_err(|e| eyre::eyre!("Failed to initialize context: {}", e))?; // Simulate each transaction in the bundle for (tx_index, tx_bytes) in request.bundle.txs.iter().enumerate() { @@ -246,7 +339,7 @@ impl BundleSimulationEngine { message: format!("Failed to decode transaction {}: {}", tx_index, e) })?; - if let Err(sim_error) = self.simulate_transaction(&tx, &mut context, tx_index).await { + if let Err(sim_error) = self.simulate_transaction(&tx, &mut context, tx_index) { let execution_time = start_time.elapsed().as_micros(); error!( @@ -291,35 +384,7 @@ impl BundleSimulationEngine { } } -#[async_trait] -impl SimulationEngine for BundleSimulationEngine { - async fn simulate_bundle(&self, request: SimulationRequest) -> Result { - match timeout(self.timeout, self.execute_bundle_simulation(request.clone())).await { - Ok(result) => result, - Err(_) => { - warn!( - bundle_id = %request.bundle_id, - timeout_ms = self.timeout.as_millis(), - "Bundle simulation timed out" - ); - - Ok(SimulationResult::failure( - Uuid::new_v4(), - request.bundle_id, - request.block_number, - request.block_hash, - self.timeout.as_micros(), - SimulationError::Timeout, - )) - } - } - } -} - /// Create a bundle simulation engine -pub fn create_simulation_engine( - state_provider: Arc, - timeout_ms: u64, -) -> impl SimulationEngine { - BundleSimulationEngine::new(state_provider, timeout_ms) +pub fn create_simulation_engine(timeout_ms: u64) -> RethSimulationEngine { + RethSimulationEngine::new(timeout_ms) } diff --git a/crates/simulator/src/exex.rs b/crates/simulator/src/exex.rs index f036ccd..76d8a8d 100644 --- a/crates/simulator/src/exex.rs +++ b/crates/simulator/src/exex.rs @@ -1,108 +1,261 @@ -use crate::engine::SimulationEngine; -use crate::publisher::SimulationResultPublisher; -use crate::types::{SimulationError, SimulationRequest, SimulationResult}; +use crate::core::BundleSimulator; +use crate::types::SimulationRequest; -use alloy_primitives::{B256, U256}; +use alloy_consensus::BlockHeader; +use alloy_primitives::B256; +use 
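The rework above removes the `tokio::time::timeout` wrapper from `simulate_bundle` while keeping the engine's `timeout` field; a caller can reapply the deadline externally, e.g. (a sketch, with `SimulationRequest`/`SimulationResult` from `types.rs`):

```rust
// Sketch: reapplying the simulation deadline from outside the engine.
// Assumes the SimulationEngine trait is in scope for the method call.
use reth_provider::StateProvider;
use tokio::time::{timeout, Duration};

async fn simulate_with_deadline<S>(
    engine: &RethSimulationEngine,
    request: SimulationRequest,
    state: &S,
    timeout_ms: u64,
) -> eyre::Result<SimulationResult>
where
    S: StateProvider + Send + Sync,
{
    match timeout(Duration::from_millis(timeout_ms), engine.simulate_bundle(request, state)).await {
        Ok(result) => result, // the engine's own Ok/Err
        Err(_) => Err(eyre::eyre!("simulation timed out after {timeout_ms}ms")),
    }
}
```
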
alloy_rpc_types::BlockNumHash; use alloy_rpc_types_mev::EthSendBundle; -use anyhow::Result; +use eyre::Result; use reth_exex::{ExExContext, ExExEvent, ExExNotification}; -use reth_node_api::{FullNodeComponents, NodeAddOns}; -use reth_primitives::{BlockNumber, TransactionSignedEcRecovered}; -use reth_provider::{CanonicalInMemoryState, Chain, StateProviderFactory}; +use reth_node_api::FullNodeComponents; +use futures_util::StreamExt; +use std::collections::HashMap; use std::sync::Arc; -use tokio::sync::mpsc; +use tokio::sync::{mpsc, RwLock}; +use tokio::task::JoinSet; use tracing::{debug, error, info, warn}; use uuid::Uuid; -/// ExEx that simulates bundles when new blocks are committed -pub struct SimulatorExEx> { +/// Datastore-based mempool bundle provider +pub struct DatastoreBundleProvider +where + D: tips_datastore::BundleDatastore, +{ + datastore: Arc, +} + +impl DatastoreBundleProvider +where + D: tips_datastore::BundleDatastore, +{ + pub fn new(datastore: Arc) -> Self { + Self { datastore } + } + + /// Get all bundles valid for a specific block + pub async fn get_bundles_for_block(&self, block_number: u64) -> Result> { + use tips_datastore::postgres::BundleFilter; + + // Create filter for bundles valid at this block + let filter = BundleFilter::new() + .valid_for_block(block_number); + + // Fetch bundles from datastore + let bundles_with_metadata = self.datastore.select_bundles(filter).await + .map_err(|e| eyre::eyre!("Failed to select bundles: {}", e))?; + + // Convert to (Uuid, EthSendBundle) pairs + // TODO: The bundle ID should be returned from the datastore query + // For now, we generate new IDs for each bundle + let result = bundles_with_metadata + .into_iter() + .map(|bwm| (Uuid::new_v4(), bwm.bundle.clone())) + .collect(); + + Ok(result) + } +} + +/// Simulation task with cancellation token +struct SimulationTask { + request: SimulationRequest, + block_number: u64, + cancel_tx: mpsc::Sender<()>, +} + +/// ExEx event simulator that simulates bundles from committed blocks +/// Processes chain events (commits, reorgs, reverts) and simulates potential bundles +pub struct ExExEventSimulator +where + Node: FullNodeComponents, + E: crate::engine::SimulationEngine, + P: crate::publisher::SimulationResultPublisher, + D: tips_datastore::BundleDatastore, +{ /// The execution extension context - ctx: ExExContext, - /// Simulation engine for processing bundles - engine: Box, - /// Publisher for simulation results - publisher: Box, - /// Channel for receiving simulation requests - simulation_rx: mpsc::UnboundedReceiver, - /// Sender for simulation requests - simulation_tx: mpsc::UnboundedSender, + ctx: ExExContext, + /// Core bundle simulator for shared simulation logic + core_simulator: Arc>, + /// State provider factory for creating state providers + state_provider_factory: Arc, + /// Datastore for fetching bundles from mempool + datastore: Arc, + /// Channel for sending simulation requests to workers + simulation_tx: mpsc::Sender, + /// Channel for receiving simulation requests in workers + simulation_rx: Arc>>, + /// Map of block number to cancellation channels for pending simulations + pending_simulations: Arc>>>>, + /// Worker task handles + worker_handles: JoinSet<()>, /// Maximum number of concurrent simulations max_concurrent: usize, } -impl SimulatorExEx +impl ExExEventSimulator where Node: FullNodeComponents, - AddOns: NodeAddOns, + E: crate::engine::SimulationEngine + Clone + 'static, + P: crate::publisher::SimulationResultPublisher + Clone + 'static, + D: 
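A hypothetical use of the datastore-backed provider above to list the bundles that could be simulated on top of the next block, assuming the stripped generic reads `DatastoreBundleProvider<D>`:

```rust
// Hypothetical caller; head is the current chain tip number.
async fn bundles_for_next_block<D>(
    provider: &DatastoreBundleProvider<D>,
    head: u64,
) -> eyre::Result<()>
where
    D: tips_datastore::BundleDatastore,
{
    for (bundle_id, bundle) in provider.get_bundles_for_block(head + 1).await? {
        println!("bundle {bundle_id}: {} txs", bundle.txs.len());
    }
    Ok(())
}
```
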
tips_datastore::BundleDatastore + 'static, { - /// Create a new simulator ExEx + /// Create a new ExEx event simulator pub fn new( - ctx: ExExContext, - engine: Box, - publisher: Box, - max_concurrent: usize, + ctx: ExExContext, + core_simulator: BundleSimulator, + state_provider_factory: Arc, + datastore: Arc, + max_concurrent_simulations: usize, ) -> Self { - let (simulation_tx, simulation_rx) = mpsc::unbounded_channel(); + let (simulation_tx, simulation_rx) = mpsc::channel(1000); Self { ctx, - engine, - publisher, - simulation_rx, + core_simulator: Arc::new(core_simulator), + state_provider_factory, + datastore, simulation_tx, - max_concurrent, + simulation_rx: Arc::new(tokio::sync::Mutex::new(simulation_rx)), + pending_simulations: Arc::new(RwLock::new(HashMap::new())), + worker_handles: JoinSet::new(), + max_concurrent: max_concurrent_simulations, } } - /// Main execution loop for the ExEx + /// Main execution loop for the ExEx event simulator pub async fn run(mut self) -> Result<()> { - info!("Starting Tips Simulator ExEx"); + info!("Starting ExEx event simulator"); - // Spawn the simulation worker - let simulation_handle = { - let engine = std::mem::replace(&mut self.engine, Box::new(NoOpEngine)); - let publisher = std::mem::replace(&mut self.publisher, Box::new(NoOpPublisher)); - let mut rx = std::mem::replace(&mut self.simulation_rx, mpsc::unbounded_channel().1); - let max_concurrent = self.max_concurrent; - - tokio::spawn(async move { - Self::simulation_worker(&mut rx, engine.as_ref(), publisher.as_ref(), max_concurrent).await - }) - }; + // Initialize simulation workers + self.start_simulation_workers(); loop { - tokio::select! { - notification = self.ctx.notifications.recv() => { - match notification { - Some(notification) => { - if let Err(e) = self.handle_notification(notification).await { - error!(error = %e, "Failed to handle ExEx notification"); - } - } - None => { - info!("ExEx notification channel closed, shutting down"); - break; - } + match self.ctx.notifications.next().await { + Some(Ok(notification)) => { + if let Err(e) = self.handle_notification(notification).await { + error!(error = %e, "Failed to handle ExEx notification"); } } - result = &mut simulation_handle => { - match result { - Ok(_) => info!("Simulation worker completed"), - Err(e) => error!(error = %e, "Simulation worker failed"), - } + Some(Err(e)) => { + error!(error = %e, "Failed to receive ExEx notification"); + break; + } + None => { + info!("ExEx notification channel closed, shutting down"); break; } } } - // Clean shutdown - simulation_handle.abort(); - info!("Tips Simulator ExEx shutting down"); + info!("ExEx event simulator shutting down"); + + // Cancel all pending simulations + self.cancel_all_simulations().await; + + // Wait for workers to complete + while let Some(result) = self.worker_handles.join_next().await { + if let Err(e) = result { + error!(error = %e, "Worker task failed"); + } + } + Ok(()) } + + /// Start simulation worker tasks + fn start_simulation_workers(&mut self) { + info!(num_workers = self.max_concurrent, "Starting simulation workers"); + + for worker_id in 0..self.max_concurrent { + let core_simulator = self.core_simulator.clone(); + let state_provider_factory = self.state_provider_factory.clone(); + let simulation_rx = self.simulation_rx.clone(); + let pending_simulations = self.pending_simulations.clone(); + + self.worker_handles.spawn(async move { + Self::simulation_worker( + worker_id, + core_simulator, + state_provider_factory, + simulation_rx, + pending_simulations, 
+ ).await + }); + } + } + + /// Worker task that processes simulation requests + async fn simulation_worker( + worker_id: usize, + core_simulator: Arc>, + state_provider_factory: Arc, + simulation_rx: Arc>>, + pending_simulations: Arc>>>>, + ) { + debug!(worker_id, "Simulation worker started"); + + loop { + // Get the next simulation task + let task = { + let mut rx = simulation_rx.lock().await; + rx.recv().await + }; + + let Some(task) = task else { + debug!(worker_id, "Simulation channel closed, worker shutting down"); + break; + }; + + // Create a cancellation receiver + let (cancel_tx, mut cancel_rx) = mpsc::channel(1); + + // Check if simulation should be cancelled + tokio::select! { + _ = cancel_rx.recv() => { + debug!( + worker_id, + bundle_id = %task.request.bundle_id, + block_number = task.block_number, + "Simulation cancelled before starting" + ); + continue; + } + result = core_simulator.simulate(task.request.clone(), &state_provider_factory) => { + match result { + Ok(_) => { + debug!( + worker_id, + bundle_id = %task.request.bundle_id, + "Simulation completed successfully" + ); + } + Err(e) => { + error!( + worker_id, + bundle_id = %task.request.bundle_id, + error = %e, + "Simulation failed" + ); + } + } + } + } + + // Remove cancellation channel from pending simulations + let mut pending = pending_simulations.write().await; + if let Some(channels) = pending.get_mut(&task.block_number) { + channels.retain(|tx| !tx.same_channel(&cancel_tx)); + if channels.is_empty() { + pending.remove(&task.block_number); + } + } + } + + debug!(worker_id, "Simulation worker stopped"); + } /// Handle ExEx notifications - async fn handle_notification(&mut self, notification: ExExNotification) -> Result<()> { + async fn handle_notification(&mut self, notification: ExExNotification<<::Types as reth_node_api::NodeTypes>::Primitives>) -> Result<()> { match notification { ExExNotification::ChainCommitted { new } => { info!( @@ -112,14 +265,15 @@ where ); // Process each block in the committed chain - for block in new.blocks() { - self.process_block(block).await?; + for (_block_num, block) in new.blocks() { + let block_hash = block.hash(); + self.process_block((&block_hash, block)).await?; } // Notify that we've processed this notification self.ctx .events - .send(ExExEvent::FinishedHeight(new.tip().number))?; + .send(ExExEvent::FinishedHeight(BlockNumHash::new(new.tip().number(), new.tip().hash())))?; } ExExNotification::ChainReorged { old: _, new } => { warn!( @@ -128,13 +282,14 @@ where ); // Process the new canonical chain - for block in new.blocks() { - self.process_block(block).await?; + for (_block_num, block) in new.blocks() { + let block_hash = block.hash(); + self.process_block((&block_hash, block)).await?; } self.ctx .events - .send(ExExEvent::FinishedHeight(new.tip().number))?; + .send(ExExEvent::FinishedHeight(BlockNumHash::new(new.tip().number(), new.tip().hash())))?; } ExExNotification::ChainReverted { old } => { warn!( @@ -144,7 +299,7 @@ where self.ctx .events - .send(ExExEvent::FinishedHeight(old.tip().number))?; + .send(ExExEvent::FinishedHeight(BlockNumHash::new(old.tip().number(), old.tip().hash())))?; } } @@ -152,155 +307,150 @@ where } /// Process a single block for potential bundle simulations - async fn process_block(&mut self, execution_outcome: &reth_execution_types::ExecutionOutcome) -> Result<()> { + async fn process_block(&mut self, block: (&B256, &reth_primitives::RecoveredBlock)) -> Result<()> + where + B: reth_node_api::Block, + { + let (block_hash, sealed_block) = 
block; + let block_number = sealed_block.number(); + debug!( - block_number = execution_outcome.block_number(), + block_number = block_number, + block_hash = ?block_hash, "Processing block for bundle simulation" ); - // TODO: Extract potential bundles from the block's transactions - // For now, this is a placeholder that would need to implement logic to: - // 1. Group transactions that could be bundles - // 2. Identify MEV opportunities - // 3. Create simulation requests for those bundles + // Cancel simulations for older blocks + self.cancel_simulations_before_block(block_number).await; - // This would be where we analyze transactions in the block - // and create simulation requests for potential bundles - let _block_number = execution_outcome.block_number(); - let _block_hash = execution_outcome.block_hash(); - - // Placeholder: Create a mock bundle simulation request - // In a real implementation, this would extract actual bundles from transactions - self.create_mock_simulation_request().await?; - - Ok(()) - } - - /// Create a mock simulation request (placeholder) - async fn create_mock_simulation_request(&self) -> Result<()> { - // This is a placeholder for bundle extraction logic - let bundle_id = Uuid::new_v4(); - let mock_bundle = EthSendBundle { - txs: vec![], // Would contain actual transaction data - block_number: None, - min_timestamp: None, - max_timestamp: None, - reverting_tx_hashes: vec![], - replacement_uuid: None, + // Fetch all bundles valid for this block from datastore + use tips_datastore::postgres::BundleFilter; + let filter = BundleFilter::new() + .valid_for_block(block_number); + + let bundles_with_metadata = match self.datastore.select_bundles(filter).await { + Ok(bundles) => bundles, + Err(e) => { + error!( + error = %e, + block_number, + "Failed to fetch bundles from datastore" + ); + return Ok(()); + } }; + + info!( + block_number, + num_bundles = bundles_with_metadata.len(), + "Queuing bundle simulations for new block" + ); - let request = SimulationRequest { - bundle_id, - bundle: mock_bundle, - block_number: 0, // Would be actual block number - block_hash: B256::ZERO, // Would be actual block hash - }; + // Create a list to track cancellation channels for this block + let mut cancellation_channels = Vec::new(); - if let Err(e) = self.simulation_tx.send(request) { - warn!(error = %e, "Failed to queue simulation request"); + // Queue simulations for each bundle + for (index, bundle_metadata) in bundles_with_metadata.into_iter().enumerate() { + // TODO: The bundle ID should be returned from the datastore query + // For now, we generate new IDs for each bundle + let bundle_id = Uuid::new_v4(); + + // Create simulation request + let request = SimulationRequest { + bundle_id, + bundle: bundle_metadata.bundle, + block_number, + block_hash: *block_hash, + }; + + // Create cancellation channel + let (cancel_tx, _cancel_rx) = mpsc::channel(1); + cancellation_channels.push(cancel_tx.clone()); + + // Create simulation task + let task = SimulationTask { + request, + block_number, + cancel_tx, + }; + + // Send to worker queue + if let Err(e) = self.simulation_tx.send(task).await { + error!( + error = %e, + bundle_index = index, + "Failed to queue simulation task" + ); + break; + } + } + + // Store cancellation channels for this block + if !cancellation_channels.is_empty() { + let mut pending = self.pending_simulations.write().await; + pending.insert(block_number, cancellation_channels); } Ok(()) } - - /// Simulation worker that processes simulation requests - async fn 
simulation_worker( - queue: &mut mpsc::UnboundedReceiver, - engine: &dyn SimulationEngine, - publisher: &dyn SimulationResultPublisher, - max_concurrent: usize, - ) -> Result<()> { - info!(max_concurrent = max_concurrent, "Starting ExEx simulation worker"); - - let semaphore = Arc::new(tokio::sync::Semaphore::new(max_concurrent)); - - while let Some(request) = queue.recv().await { - let semaphore_clone = semaphore.clone(); - let request_clone = request.clone(); - - tokio::spawn(async move { - let _permit = match semaphore_clone.acquire().await { - Ok(permit) => permit, - Err(_) => { - error!("Failed to acquire semaphore permit"); - return; - } - }; - - info!( - bundle_id = %request_clone.bundle_id, - block_number = request_clone.block_number, - "Processing ExEx simulation request" + + /// Cancel all simulations for blocks before the given block number + async fn cancel_simulations_before_block(&self, block_number: u64) { + let mut pending = self.pending_simulations.write().await; + + // Find all blocks to cancel + let blocks_to_cancel: Vec = pending.keys() + .filter(|&&block| block < block_number) + .copied() + .collect(); + + if blocks_to_cancel.is_empty() { + return; + } + + info!( + current_block = block_number, + num_blocks = blocks_to_cancel.len(), + "Cancelling simulations for older blocks" + ); + + // Cancel simulations for each old block + for old_block in blocks_to_cancel { + if let Some(channels) = pending.remove(&old_block) { + debug!( + old_block, + num_simulations = channels.len(), + "Cancelling simulations for block" ); - - match engine.simulate_bundle(request_clone.clone()).await { - Ok(result) => { - info!( - bundle_id = %request_clone.bundle_id, - simulation_id = %result.id, - success = result.success, - "ExEx simulation completed" - ); - - if let Err(e) = publisher.publish_result(result).await { - error!( - error = %e, - bundle_id = %request_clone.bundle_id, - "Failed to publish ExEx simulation result" - ); - } - } - Err(e) => { - error!( - error = %e, - bundle_id = %request_clone.bundle_id, - "ExEx simulation failed" - ); - } + + // Send cancellation signal to all tasks for this block + for cancel_tx in channels { + let _ = cancel_tx.send(()).await; } - }); + } } - - info!("ExEx simulation worker shutting down"); - Ok(()) - } -} - -/// No-op engine for move semantics -struct NoOpEngine; - -#[async_trait::async_trait] -impl SimulationEngine for NoOpEngine { - async fn simulate_bundle(&self, _request: SimulationRequest) -> Result { - Err(anyhow::anyhow!("NoOpEngine should never be called")) - } -} - -/// No-op publisher for move semantics -struct NoOpPublisher; - -#[async_trait::async_trait] -impl SimulationResultPublisher for NoOpPublisher { - async fn publish_result(&self, _result: SimulationResult) -> Result<()> { - Err(anyhow::anyhow!("NoOpPublisher should never be called")) - } - - async fn get_results_for_bundle(&self, _bundle_id: Uuid) -> Result> { - Err(anyhow::anyhow!("NoOpPublisher should never be called")) } - async fn get_result_by_id(&self, _result_id: Uuid) -> Result> { - Err(anyhow::anyhow!("NoOpPublisher should never be called")) - } -} - -impl Clone for SimulationRequest { - fn clone(&self) -> Self { - Self { - bundle_id: self.bundle_id, - bundle: self.bundle.clone(), - block_number: self.block_number, - block_hash: self.block_hash, + /// Cancel all pending simulations + async fn cancel_all_simulations(&self) { + let mut pending = self.pending_simulations.write().await; + + info!( + num_blocks = pending.len(), + "Cancelling all pending simulations" + ); + 
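+        // Best-effort signalling: send errors below are ignored when a task has already finished.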
+ // Cancel all simulations + for (block_number, channels) in pending.drain() { + debug!( + block_number, + num_simulations = channels.len(), + "Cancelling simulations for block" + ); + + for cancel_tx in channels { + let _ = cancel_tx.send(()).await; + } } } } diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index a386171..511da88 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -1,84 +1,131 @@ pub mod config; +pub mod core; pub mod engine; pub mod exex; -pub mod listener; +pub mod mempool; pub mod publisher; -pub mod service; -pub mod state; pub mod types; -use anyhow::Result; +use eyre::Result; use reth_exex::ExExContext; -use reth_node_api::{FullNodeComponents, NodeAddOns}; +use reth_node_api::FullNodeComponents; use std::sync::Arc; use tracing::info; -pub use config::{SimulatorConfig, SimulatorExExConfig, SimulatorNodeConfig}; -pub use engine::{create_simulation_engine, SimulationEngine}; -pub use exex::SimulatorExEx; -pub use listener::{MempoolEventListener, KafkaMempoolListener}; -pub use publisher::{create_database_publisher, SimulationResultPublisher}; -pub use service::SimulatorService; -pub use state::{create_direct_state_provider, StateProvider}; +pub use config::{SimulatorExExConfig, SimulatorNodeConfig}; +pub use core::BundleSimulator; +pub use engine::{create_simulation_engine, SimulationEngine, RethSimulationEngine}; +pub use exex::ExExEventSimulator; +pub use mempool::{MempoolEventSimulator, MempoolSimulatorConfig, MempoolEventListener, KafkaMempoolListener}; +pub use publisher::{create_database_publisher, SimulationResultPublisher, DatabaseResultPublisher}; pub use types::{SimulationResult, SimulationError, ExExSimulationConfig}; -/// ExEx initialization function that should be called by reth -pub async fn init_simulator_exex( - ctx: ExExContext, +// Type aliases for concrete implementations +pub type TipsBundleSimulator = BundleSimulator; +pub type TipsExExEventSimulator = ExExEventSimulator; +pub type TipsMempoolEventSimulator = MempoolEventSimulator; + +// Initialization functions + +/// Initialize ExEx event simulator (ExEx) that processes committed blocks +pub async fn init_exex_event_simulator( + ctx: ExExContext, config: ExExSimulationConfig, -) -> Result> +) -> Result> where Node: FullNodeComponents, - AddOns: NodeAddOns, { - info!("Initializing Tips Simulator ExEx"); + info!("Initializing ExEx event simulator"); // Create database connection and publisher let datastore = Arc::new( - tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await? + tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await + .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))? 
); // Run database migrations - datastore.run_migrations().await?; + datastore.run_migrations().await + .map_err(|e| eyre::eyre!("Failed to run migrations: {}", e))?; info!("Database migrations completed"); - let publisher = Box::new(create_database_publisher(datastore)); + let publisher = create_database_publisher(datastore); info!("Database publisher initialized"); - // Create state provider using reth's provider factory - let state_provider_factory = ctx.components.provider().clone(); - let current_block_number = ctx.head.number; - let state_provider = Arc::new(create_direct_state_provider( - state_provider_factory, - current_block_number, - )); - info!( - current_block = current_block_number, - "Direct state provider initialized" - ); - // Create simulation engine - let engine = Box::new(create_simulation_engine( - state_provider, - config.simulation_timeout_ms, - )); + let engine = create_simulation_engine(config.simulation_timeout_ms); info!( timeout_ms = config.simulation_timeout_ms, "Simulation engine initialized" ); - // Create the ExEx - let exex = SimulatorExEx::new( - ctx, + // Create core bundle simulator with shared logic + let core_simulator = BundleSimulator::new( engine, publisher, + ); + info!("Core bundle simulator initialized"); + + // Get state provider factory for ExEx event simulation + let state_provider_factory = Arc::new(ctx.components.provider().clone()); + + // Create the ExEx event simulator + let consensus_simulator = ExExEventSimulator::new( + ctx, + core_simulator, + state_provider_factory, + datastore, config.max_concurrent_simulations, ); info!( max_concurrent = config.max_concurrent_simulations, - "Tips Simulator ExEx initialized successfully" + "ExEx event simulator initialized successfully" + ); + + Ok(consensus_simulator) +} + +/// Initialize mempool event simulator that processes mempool transactions +pub async fn init_mempool_event_simulator( + config: MempoolSimulatorConfig, +) -> Result { + info!("Initializing mempool event simulator"); + + // Create database connection and publisher + let datastore = Arc::new( + tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await + .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))? 
); + + // Run database migrations + datastore.run_migrations().await + .map_err(|e| eyre::eyre!("Failed to run migrations: {}", e))?; + info!("Database migrations completed"); + + let publisher = create_database_publisher(datastore); + info!("Database publisher initialized"); + + // Create simulation engine + let engine = create_simulation_engine(config.simulation_timeout_ms); + info!( + timeout_ms = config.simulation_timeout_ms, + "Simulation engine initialized" + ); + + // Create core bundle simulator with shared logic + let core_simulator = BundleSimulator::new( + engine, + publisher, + ); + info!("Core bundle simulator initialized"); + + // Create Kafka listener + let listener = KafkaMempoolListener::new(config.clone()); + + // Create the mempool event simulator + let mempool_simulator = MempoolEventSimulator::new(core_simulator, listener, config); + + info!("Mempool event simulator initialized successfully"); - Ok(exex) + Ok(mempool_simulator) } diff --git a/crates/simulator/src/listener.rs b/crates/simulator/src/listener.rs deleted file mode 100644 index 6ca2751..0000000 --- a/crates/simulator/src/listener.rs +++ /dev/null @@ -1,150 +0,0 @@ -use crate::types::{SimulationConfig, SimulationRequest}; -use alloy_primitives::B256; -use anyhow::Result; -use async_trait::async_trait; -use rdkafka::consumer::{Consumer, StreamConsumer}; -use rdkafka::Message; -use std::time::Duration; -use tips_audit::{create_kafka_consumer, types::MempoolEvent}; -use tokio::sync::mpsc; -use tracing::{debug, error, info, warn}; - -#[async_trait] -pub trait MempoolEventListener: Send + Sync { - /// Start listening for mempool events and send simulation requests - async fn start(&mut self, sender: mpsc::Sender) -> Result<()>; - /// Stop the listener - async fn stop(&mut self) -> Result<()>; -} - -pub struct KafkaMempoolListener { - consumer: StreamConsumer, - topic: String, - running: bool, -} - -impl KafkaMempoolListener { - pub fn new(config: &SimulationConfig) -> Result { - let consumer = create_kafka_consumer( - &config.kafka_brokers.join(","), - &config.kafka_group_id, - )?; - - Ok(Self { - consumer, - topic: config.kafka_topic.clone(), - running: false, - }) - } - - async fn process_event( - &self, - event: MempoolEvent, - sender: &mpsc::Sender, - current_block: u64, - current_block_hash: B256, - ) -> Result<()> { - match event { - MempoolEvent::Created { bundle_id, bundle } | MempoolEvent::Updated { bundle_id, bundle } => { - debug!( - bundle_id = %bundle_id, - num_transactions = bundle.txs.len(), - "Processing bundle for simulation" - ); - - let request = SimulationRequest { - bundle_id, - bundle, - block_number: current_block, - block_hash: current_block_hash, - }; - - if let Err(e) = sender.try_send(request) { - match e { - mpsc::error::TrySendError::Full(_) => { - warn!( - bundle_id = %bundle_id, - "Simulation queue is full, dropping request" - ); - } - mpsc::error::TrySendError::Closed(_) => { - error!("Simulation queue receiver has been dropped"); - return Err(anyhow::anyhow!("Simulation queue closed")); - } - } - } - } - // We only care about Created and Updated events for simulation - _ => { - debug!(event = ?event, "Ignoring non-creation event"); - } - } - - Ok(()) - } - - // TODO: This should be updated to get current block info from the state provider - // For now, we'll use dummy values - fn get_current_block_info(&self) -> (u64, B256) { - (0, B256::ZERO) - } -} - -#[async_trait] -impl MempoolEventListener for KafkaMempoolListener { - async fn start(&mut self, sender: mpsc::Sender) -> 
Result<()> { - info!(topic = %self.topic, "Starting mempool listener"); - - self.consumer.subscribe(&[&self.topic])?; - self.running = true; - - while self.running { - match self.consumer.recv().await { - Ok(message) => { - let payload = match message.payload() { - Some(payload) => payload, - None => { - warn!("Received message with empty payload"); - continue; - } - }; - - match serde_json::from_slice::(payload) { - Ok(event) => { - let (current_block, current_block_hash) = self.get_current_block_info(); - - if let Err(e) = self.process_event(event, &sender, current_block, current_block_hash).await { - error!(error = %e, "Failed to process mempool event"); - } - } - Err(e) => { - error!( - error = %e, - payload_size = payload.len(), - "Failed to deserialize mempool event" - ); - } - } - } - Err(e) => { - error!(error = %e, "Error receiving message from Kafka"); - tokio::time::sleep(Duration::from_secs(1)).await; - } - } - } - - info!("Mempool listener stopped"); - Ok(()) - } - - async fn stop(&mut self) -> Result<()> { - info!("Stopping mempool listener"); - self.running = false; - Ok(()) - } -} - -/// Create a mempool listener using the provided configuration -pub fn create_mempool_listener(config: &SimulationConfig) -> Result { - KafkaMempoolListener::new(config) -} diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 42146d4..dbb0231 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,45 +1,47 @@ -use anyhow::Result; use clap::Parser; -use reth_node_builder::{NodeBuilder, NodeConfig}; use reth_node_ethereum::EthereumNode; -use tips_simulator::{init_simulator_exex, SimulatorNodeConfig}; -use tracing::{error, info}; +use tips_simulator::{init_exex_event_simulator, SimulatorNodeConfig}; +use tracing::info; #[tokio::main] -async fn main() -> Result<()> { +async fn main() -> eyre::Result<()> { // Load environment variables dotenvy::dotenv().ok(); // Parse command line arguments let config = SimulatorNodeConfig::parse(); - // Extract simulator config - let simulator_config = config.clone().into(); + let simulator_config: tips_simulator::types::ExExSimulationConfig = (&config).into(); info!( database_url = %config.database_url, max_concurrent = config.max_concurrent_simulations, timeout_ms = config.simulation_timeout_ms, - "Starting reth node with Tips Simulator ExEx" + "Starting reth node with ExEx event simulator" ); - // Create node builder with ExEx - let handle = NodeBuilder::new(config.node.clone()) - .node(EthereumNode::default()) - .install_exex("tips-simulator", move |ctx| async move { - // Initialize the simulator ExEx - let exex = init_simulator_exex(ctx, simulator_config).await?; - - info!("Tips Simulator ExEx installed successfully"); - - // Run the ExEx - Ok(exex.run()) - }) - .launch() - .await?; - - info!("Reth node with Tips Simulator ExEx started successfully"); - - // Wait for the node to finish - handle.wait_for_node_exit().await + // Launch the node with ExEx using the CLI + config.node.run(|builder, _| async move { + let handle = builder + .node(EthereumNode::default()) + .install_exex("tips-simulator", move |ctx| async move { + // Initialize the ExEx event simulator + let consensus_simulator = init_exex_event_simulator(ctx, simulator_config).await + .map_err(|e| eyre::eyre!("Failed to initialize simulator: {}", e))?; + + info!("ExEx event simulator installed successfully"); + + // Run the ExEx event simulator + Ok(consensus_simulator.run()) + }) + .launch() + .await?; + + info!("Reth node with ExEx event simulator 
started successfully"); + + // Wait for the node to finish + handle.wait_for_node_exit().await + })?; + + Ok(()) } diff --git a/crates/simulator/src/mempool.rs b/crates/simulator/src/mempool.rs new file mode 100644 index 0000000..0da8d76 --- /dev/null +++ b/crates/simulator/src/mempool.rs @@ -0,0 +1,154 @@ +use crate::core::BundleSimulator; +use crate::types::SimulationRequest; +use eyre::Result; +use async_trait::async_trait; +use tokio::sync::mpsc; +use tracing::{error, info, warn}; + +/// Configuration for mempool event simulation +#[derive(Debug, Clone)] +pub struct MempoolSimulatorConfig { + /// Kafka brokers for consuming mempool events + pub kafka_brokers: Vec, + /// Kafka topic to consume mempool events from + pub kafka_topic: String, + /// Kafka consumer group ID + pub kafka_group_id: String, + /// URL for Reth HTTP RPC endpoint for state access + pub reth_http_url: String, + /// PostgreSQL database connection URL + pub database_url: String, + /// Maximum number of concurrent simulations + pub max_concurrent_simulations: usize, + /// Timeout for individual simulations in milliseconds + pub simulation_timeout_ms: u64, +} + +/// Trait for listening to mempool events +#[async_trait] +pub trait MempoolEventListener: Send + Sync { + /// Start listening to mempool events and send simulation requests + async fn start(&mut self, sender: mpsc::Sender) -> Result<()>; + /// Stop listening to mempool events + async fn stop(&mut self) -> Result<()>; +} + +/// Kafka-based mempool event listener +pub struct KafkaMempoolListener { + config: MempoolSimulatorConfig, +} + +impl KafkaMempoolListener { + pub fn new(config: MempoolSimulatorConfig) -> Self { + Self { config } + } +} + +#[async_trait] +impl MempoolEventListener for KafkaMempoolListener { + async fn start(&mut self, _sender: mpsc::Sender) -> Result<()> { + info!( + brokers = ?self.config.kafka_brokers, + topic = %self.config.kafka_topic, + group_id = %self.config.kafka_group_id, + "Starting Kafka mempool event listener" + ); + + // TODO: Implement actual Kafka consumer + // This is a placeholder that would: + // 1. Connect to Kafka brokers + // 2. Subscribe to the mempool topic + // 3. Parse incoming mempool events + // 4. Convert them to SimulationRequest + // 5. 
Send to the simulation queue + + warn!("Kafka mempool listener not yet fully implemented"); + Ok(()) + } + + async fn stop(&mut self) -> Result<()> { + info!("Stopping Kafka mempool event listener"); + Ok(()) + } +} + +/// Mempool event simulator that combines listening and simulation +pub struct MempoolEventSimulator +where + E: crate::engine::SimulationEngine, + P: crate::publisher::SimulationResultPublisher, + L: MempoolEventListener, +{ + core_simulator: BundleSimulator, + listener: L, + config: MempoolSimulatorConfig, +} + +impl MempoolEventSimulator +where + E: crate::engine::SimulationEngine, + P: crate::publisher::SimulationResultPublisher, + L: MempoolEventListener, +{ + /// Create a new mempool event simulator + pub fn new( + core_simulator: BundleSimulator, + listener: L, + config: MempoolSimulatorConfig, + ) -> Self { + info!("Initializing mempool event simulator"); + + Self { + core_simulator, + listener, + config, + } + } + + /// Start the mempool event simulator + pub async fn start(&mut self) -> Result<()> { + info!("Starting mempool event simulator"); + + // Create channel for simulation requests + let (sender, _receiver) = mpsc::channel::(1000); + + // Start mempool listener + let listener_handle = { + let _sender_clone = sender.clone(); + + tokio::spawn(async move { + // TODO: Start actual listener + // self.listener.start(sender_clone).await + Ok::<(), eyre::Error>(()) + }) + }; + + // TODO: Create state provider for RPC-based access + // This would create HTTP RPC client for state access + // For now, this is a placeholder + + info!( + max_concurrent = self.config.max_concurrent_simulations, + "Mempool event simulator started successfully" + ); + + // In a real implementation, this would: + // 1. Start the listener task + // 2. Start the simulation worker with RPC state provider + // 3. 
Handle shutdown gracefully + + // Wait for listener (placeholder) + if let Err(e) = listener_handle.await { + error!(error = %e, "Mempool listener task failed"); + } + + Ok(()) + } +} + +// No-op listener removed - using generics instead of dynamic dispatch + +/// Create a Kafka mempool listener +pub fn create_kafka_mempool_listener(config: &MempoolSimulatorConfig) -> impl MempoolEventListener { + KafkaMempoolListener::new(config.clone()) +} diff --git a/crates/simulator/src/publisher.rs b/crates/simulator/src/publisher.rs index adb05cb..5cf3342 100644 --- a/crates/simulator/src/publisher.rs +++ b/crates/simulator/src/publisher.rs @@ -1,5 +1,5 @@ use crate::types::SimulationResult; -use anyhow::Result; +use eyre::Result; use async_trait::async_trait; use rdkafka::producer::FutureProducer; use serde_json; @@ -202,7 +202,7 @@ struct DatabaseSimulation { /// Create a result publisher with database storage pub fn create_database_publisher( datastore: Arc, -) -> impl SimulationResultPublisher { +) -> DatabaseResultPublisher { DatabaseResultPublisher::new(datastore, None) } diff --git a/crates/simulator/src/service.rs b/crates/simulator/src/service.rs deleted file mode 100644 index d71ffab..0000000 --- a/crates/simulator/src/service.rs +++ /dev/null @@ -1,283 +0,0 @@ -use crate::engine::{create_simulation_engine, SimulationEngine}; -use crate::listener::{create_mempool_listener, MempoolEventListener}; -use crate::publisher::{create_database_publisher, SimulationResultPublisher}; -use crate::state::create_rpc_state_provider; -use crate::types::{SimulationConfig, SimulationRequest}; -use anyhow::Result; -use std::sync::Arc; -use tokio::sync::mpsc; -use tokio::task::JoinHandle; -use tracing::{error, info}; -use uuid; - -/// Main service that orchestrates all simulation components -pub struct SimulatorService { - config: SimulationConfig, - listener: Box, - engine: Box, - publisher: Box, - simulation_queue: Option>, - listener_handle: Option>>, - simulation_handle: Option>>, -} - -impl SimulatorService { - /// Create a new simulator service with the given configuration - pub async fn new(config: SimulationConfig) -> Result { - info!("Initializing simulator service"); - - // Create state provider - let state_provider = Arc::new(create_rpc_state_provider(&config.reth_http_url)?); - info!(reth_url = %config.reth_http_url, "State provider initialized"); - - // Create simulation engine - let engine = Box::new(create_simulation_engine( - state_provider, - config.simulation_timeout_ms, - )); - info!( - timeout_ms = config.simulation_timeout_ms, - "Simulation engine initialized" - ); - - // Create mempool listener - let listener = Box::new(create_mempool_listener(&config)?); - info!( - topic = %config.kafka_topic, - brokers = ?config.kafka_brokers, - "Mempool listener initialized" - ); - - // Create database connection and publisher - let datastore = Arc::new( - tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await? 
- ); - - // Run database migrations - datastore.run_migrations().await?; - info!("Database migrations completed"); - - let publisher = Box::new(create_database_publisher(datastore)); - info!("Result publisher initialized"); - - Ok(Self { - config, - listener, - engine, - publisher, - simulation_queue: None, - listener_handle: None, - simulation_handle: None, - }) - } - - /// Start the simulator service - pub async fn start(&mut self) -> Result<()> { - info!("Starting simulator service"); - - // Create channel for simulation requests - let (sender, receiver) = mpsc::channel::(1000); - self.simulation_queue = Some(receiver); - - // Start mempool listener - let listener_handle = { - let mut listener = std::mem::replace( - &mut self.listener, - Box::new(NoOpListener), - ); - let sender_clone = sender.clone(); - - tokio::spawn(async move { - listener.start(sender_clone).await - }) - }; - - // Start simulation worker - let simulation_handle = { - let engine = std::mem::replace( - &mut self.engine, - Box::new(NoOpEngine), - ); - let publisher = std::mem::replace( - &mut self.publisher, - Box::new(NoOpPublisher), - ); - let mut queue = self.simulation_queue.take().unwrap(); - let max_concurrent = self.config.max_concurrent_simulations; - - tokio::spawn(async move { - Self::simulation_worker( - &mut queue, - engine.as_ref(), - publisher.as_ref(), - max_concurrent, - ).await - }) - }; - - self.listener_handle = Some(listener_handle); - self.simulation_handle = Some(simulation_handle); - - info!( - max_concurrent = self.config.max_concurrent_simulations, - "Simulator service started successfully" - ); - - Ok(()) - } - - /// Stop the simulator service - pub async fn stop(&mut self) -> Result<()> { - info!("Stopping simulator service"); - - // Stop listener - if let Some(handle) = self.listener_handle.take() { - handle.abort(); - } - - // Stop simulation worker - if let Some(handle) = self.simulation_handle.take() { - handle.abort(); - } - - info!("Simulator service stopped"); - Ok(()) - } - - /// Wait for the service to complete - pub async fn wait(&mut self) -> Result<()> { - if let Some(listener_handle) = &mut self.listener_handle { - if let Err(e) = listener_handle.await { - error!(error = %e, "Listener task failed"); - } - } - - if let Some(simulation_handle) = &mut self.simulation_handle { - if let Err(e) = simulation_handle.await { - error!(error = %e, "Simulation worker task failed"); - } - } - - Ok(()) - } - - /// Main simulation worker that processes simulation requests - async fn simulation_worker( - queue: &mut mpsc::Receiver, - engine: &dyn SimulationEngine, - publisher: &dyn SimulationResultPublisher, - max_concurrent: usize, - ) -> Result<()> { - info!(max_concurrent = max_concurrent, "Starting simulation worker"); - - // Use a semaphore to limit concurrent simulations - let semaphore = Arc::new(tokio::sync::Semaphore::new(max_concurrent)); - - while let Some(request) = queue.recv().await { - let semaphore_clone = semaphore.clone(); - let request_clone = request.clone(); - - // Spawn a task for this simulation - tokio::spawn(async move { - let _permit = match semaphore_clone.acquire().await { - Ok(permit) => permit, - Err(_) => { - error!("Failed to acquire semaphore permit"); - return; - } - }; - - info!( - bundle_id = %request_clone.bundle_id, - block_number = request_clone.block_number, - num_transactions = request_clone.bundle.txs.len(), - "Processing simulation request" - ); - - // Perform the simulation - match engine.simulate_bundle(request_clone.clone()).await { - Ok(result) => { 
- info!( - bundle_id = %request_clone.bundle_id, - simulation_id = %result.id, - success = result.success, - gas_used = ?result.gas_used, - execution_time_us = result.execution_time_us, - "Simulation completed" - ); - - // Publish the result - if let Err(e) = publisher.publish_result(result).await { - error!( - error = %e, - bundle_id = %request_clone.bundle_id, - "Failed to publish simulation result" - ); - } - } - Err(e) => { - error!( - error = %e, - bundle_id = %request_clone.bundle_id, - "Simulation failed with error" - ); - } - } - }); - } - - info!("Simulation worker shutting down"); - Ok(()) - } -} - -// Placeholder implementations for move semantics -struct NoOpListener; - -#[async_trait::async_trait] -impl MempoolEventListener for NoOpListener { - async fn start(&mut self, _sender: mpsc::Sender) -> Result<()> { - Ok(()) - } - - async fn stop(&mut self) -> Result<()> { - Ok(()) - } -} - -struct NoOpEngine; - -#[async_trait::async_trait] -impl SimulationEngine for NoOpEngine { - async fn simulate_bundle(&self, _request: SimulationRequest) -> Result { - Err(anyhow::anyhow!("NoOpEngine should never be called")) - } -} - -struct NoOpPublisher; - -#[async_trait::async_trait] -impl SimulationResultPublisher for NoOpPublisher { - async fn publish_result(&self, _result: SimulationResult) -> Result<()> { - Err(anyhow::anyhow!("NoOpPublisher should never be called")) - } - - async fn get_results_for_bundle(&self, _bundle_id: uuid::Uuid) -> Result> { - Err(anyhow::anyhow!("NoOpPublisher should never be called")) - } - - async fn get_result_by_id(&self, _result_id: uuid::Uuid) -> Result> { - Err(anyhow::anyhow!("NoOpPublisher should never be called")) - } -} - -impl Clone for SimulationRequest { - fn clone(&self) -> Self { - Self { - bundle_id: self.bundle_id, - bundle: self.bundle.clone(), - block_number: self.block_number, - block_hash: self.block_hash, - } - } -} diff --git a/crates/simulator/src/state.rs b/crates/simulator/src/state.rs deleted file mode 100644 index 943979d..0000000 --- a/crates/simulator/src/state.rs +++ /dev/null @@ -1,270 +0,0 @@ -use alloy_primitives::{Address, B256, U256}; -use anyhow::Result; -use async_trait::async_trait; -use reth_provider::{StateProvider as RethStateProvider, StateProviderFactory}; -use std::collections::HashMap; -use std::sync::Arc; -use tracing::{debug, error, warn}; - -/// Provides access to blockchain state for simulation -#[async_trait] -pub trait StateProvider: Send + Sync { - /// Get the current block number - async fn get_block_number(&self) -> Result; - - /// Get block hash for a given block number - async fn get_block_hash(&self, block_number: u64) -> Result; - - /// Get account balance at a specific block - async fn get_balance(&self, address: Address, block_number: u64) -> Result; - - /// Get account nonce at a specific block - async fn get_nonce(&self, address: Address, block_number: u64) -> Result; - - /// Get storage value at a specific slot and block - async fn get_storage(&self, address: Address, slot: U256, block_number: u64) -> Result; - - /// Get account code at a specific block - async fn get_code(&self, address: Address, block_number: u64) -> Result>; - - /// Get multiple storage slots efficiently - async fn get_storage_batch( - &self, - requests: Vec<(Address, Vec)>, - block_number: u64, - ) -> Result>>; -} - -/// Direct reth state provider that accesses state without RPC -pub struct DirectStateProvider { - state_provider_factory: Arc, - /// Current block number for state queries - current_block_number: u64, -} - -impl 
DirectStateProvider -where - SF: StateProviderFactory, -{ - pub fn new(state_provider_factory: Arc, current_block_number: u64) -> Self { - Self { - state_provider_factory, - current_block_number, - } - } - - /// Update the current block number for state queries - pub fn set_current_block(&mut self, block_number: u64) { - self.current_block_number = block_number; - } - - /// Get a state provider for the current block - fn get_state_provider(&self) -> Result> { - self.state_provider_factory - .state_by_block_number(self.current_block_number) - .map_err(|e| anyhow::anyhow!("Failed to get state provider: {}", e)) - } -} - -#[async_trait] -impl StateProvider for DirectStateProvider -where - SF: StateProviderFactory + Send + Sync, -{ - async fn get_block_number(&self) -> Result { - Ok(self.current_block_number) - } - - async fn get_block_hash(&self, block_number: u64) -> Result { - let state_provider = self.get_state_provider()?; - - // Get block hash from state provider - // Note: This would need to be implemented based on reth's state provider API - // For now, we'll use a placeholder - debug!(block_number = block_number, "Getting block hash from direct state"); - - // TODO: Implement proper block hash retrieval from reth state provider - Ok(B256::ZERO) // Placeholder - } - - async fn get_balance(&self, address: Address, _block_number: u64) -> Result { - let state_provider = self.get_state_provider()?; - - match state_provider.account_balance(address) { - Ok(Some(balance)) => { - debug!( - address = %address, - block_number = self.current_block_number, - balance = %balance, - "Retrieved balance from direct state" - ); - Ok(balance) - } - Ok(None) => { - debug!( - address = %address, - block_number = self.current_block_number, - "Account not found, returning zero balance" - ); - Ok(U256::ZERO) - } - Err(e) => { - error!( - error = %e, - address = %address, - block_number = self.current_block_number, - "Failed to get balance from direct state" - ); - Err(anyhow::anyhow!("State provider error: {}", e)) - } - } - } - - async fn get_nonce(&self, address: Address, _block_number: u64) -> Result { - let state_provider = self.get_state_provider()?; - - match state_provider.account_nonce(address) { - Ok(Some(nonce)) => { - debug!( - address = %address, - block_number = self.current_block_number, - nonce = nonce, - "Retrieved nonce from direct state" - ); - Ok(nonce) - } - Ok(None) => { - debug!( - address = %address, - block_number = self.current_block_number, - "Account not found, returning zero nonce" - ); - Ok(0) - } - Err(e) => { - error!( - error = %e, - address = %address, - block_number = self.current_block_number, - "Failed to get nonce from direct state" - ); - Err(anyhow::anyhow!("State provider error: {}", e)) - } - } - } - - async fn get_storage(&self, address: Address, slot: U256, _block_number: u64) -> Result { - let state_provider = self.get_state_provider()?; - - match state_provider.storage(address, reth_primitives::StorageKey::from(slot)) { - Ok(Some(value)) => { - debug!( - address = %address, - slot = %slot, - block_number = self.current_block_number, - value = %value, - "Retrieved storage from direct state" - ); - Ok(U256::from(value)) - } - Ok(None) => { - debug!( - address = %address, - slot = %slot, - block_number = self.current_block_number, - "Storage slot not found, returning zero" - ); - Ok(U256::ZERO) - } - Err(e) => { - error!( - error = %e, - address = %address, - slot = %slot, - block_number = self.current_block_number, - "Failed to get storage from direct state" - ); - 
Err(anyhow::anyhow!("State provider error: {}", e)) - } - } - } - - async fn get_code(&self, address: Address, _block_number: u64) -> Result> { - let state_provider = self.get_state_provider()?; - - match state_provider.account_code(address) { - Ok(Some(code)) => { - debug!( - address = %address, - block_number = self.current_block_number, - code_len = code.len(), - "Retrieved code from direct state" - ); - Ok(code.original_bytes()) - } - Ok(None) => { - debug!( - address = %address, - block_number = self.current_block_number, - "Account has no code" - ); - Ok(vec![]) - } - Err(e) => { - error!( - error = %e, - address = %address, - block_number = self.current_block_number, - "Failed to get code from direct state" - ); - Err(anyhow::anyhow!("State provider error: {}", e)) - } - } - } - - async fn get_storage_batch( - &self, - requests: Vec<(Address, Vec)>, - block_number: u64, - ) -> Result>> { - let mut result = HashMap::new(); - - // Process each address - for (address, slots) in requests { - let mut address_storage = HashMap::new(); - - for slot in slots { - match self.get_storage(address, slot, block_number).await { - Ok(value) => { - address_storage.insert(slot, value); - } - Err(e) => { - warn!( - error = %e, - address = %address, - slot = %slot, - "Failed to get storage in batch request" - ); - } - } - } - - if !address_storage.is_empty() { - result.insert(address, address_storage); - } - } - - Ok(result) - } -} - -/// Create a direct state provider using reth's state provider factory -pub fn create_direct_state_provider( - state_provider_factory: Arc, - current_block_number: u64, -) -> impl StateProvider -where - SF: StateProviderFactory + Send + Sync + 'static, -{ - DirectStateProvider::new(state_provider_factory, current_block_number) -} diff --git a/crates/simulator/src/types.rs b/crates/simulator/src/types.rs index 4146e7f..fcf1231 100644 --- a/crates/simulator/src/types.rs +++ b/crates/simulator/src/types.rs @@ -31,31 +31,6 @@ pub struct SimulationResult { pub created_at: DateTime, } -/// Configuration for the simulation service (legacy) -#[derive(Debug, Clone)] -pub struct SimulationConfig { - /// Kafka brokers for consuming mempool events - pub kafka_brokers: Vec, - /// Kafka topic to consume mempool events from - pub kafka_topic: String, - /// Kafka consumer group ID - pub kafka_group_id: String, - /// URL for Reth HTTP RPC endpoint - pub reth_http_url: String, - /// URL for Reth WebSocket endpoint - pub reth_ws_url: String, - /// PostgreSQL database connection URL - pub database_url: String, - /// Maximum number of concurrent simulations - pub max_concurrent_simulations: usize, - /// Timeout for individual simulations in milliseconds - pub simulation_timeout_ms: u64, - /// Whether to publish simulation results back to Kafka - pub publish_results: bool, - /// Topic to publish results to (if publishing enabled) - pub results_topic: Option, -} - /// Configuration for ExEx-based simulation #[derive(Debug, Clone)] pub struct ExExSimulationConfig { @@ -107,7 +82,7 @@ impl std::fmt::Display for SimulationError { impl std::error::Error for SimulationError {} /// A request to simulate a bundle -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct SimulationRequest { pub bundle_id: Uuid, pub bundle: EthSendBundle, diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index 497e0ba..857ccb7 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -1,32 +1,11 @@ -use 
tips_simulator::types::{SimulationConfig, SimulationRequest}; -use tips_simulator::service::SimulatorService; +use tips_simulator::types::SimulationRequest; +use tips_simulator::MempoolSimulatorConfig; use alloy_primitives::{Address, Bytes, B256}; use alloy_rpc_types_mev::EthSendBundle; use uuid::Uuid; -// Basic smoke test to ensure the simulator compiles and can be instantiated -#[tokio::test] -async fn test_simulator_service_creation() { - let config = SimulationConfig { - kafka_brokers: vec!["localhost:9092".to_string()], - kafka_topic: "test-topic".to_string(), - kafka_group_id: "test-group".to_string(), - reth_http_url: "http://localhost:8545".to_string(), - reth_ws_url: "ws://localhost:8546".to_string(), - database_url: "postgresql://user:pass@localhost:5432/test".to_string(), - max_concurrent_simulations: 5, - simulation_timeout_ms: 1000, - publish_results: false, - results_topic: None, - }; - - // This test will fail to connect to real services, but it tests compilation - // and basic service construction - let result = SimulatorService::new(config).await; - - // We expect this to fail due to connection issues in test environment - assert!(result.is_err()); -} +// Basic smoke test to ensure the core simulation types work correctly +// Tests both mempool event simulation and ExEx event simulation architectures #[test] fn test_simulation_request_creation() { @@ -59,26 +38,24 @@ fn test_simulation_request_creation() { assert_eq!(request.block_number, 18_000_000); } -#[cfg(feature = "integration-tests")] -mod integration_tests { - use super::*; - use testcontainers::core::{ContainerPort, WaitFor}; - use testcontainers::{Container, GenericImage}; - use testcontainers_modules::{kafka::Kafka, postgres::Postgres}; - - // This would be a full integration test with real containers - // Disabled by default since it requires Docker - #[tokio::test] - async fn test_full_simulation_flow() { - // Start test containers - let postgres = Postgres::default(); - let kafka = Kafka::default(); - - // This would test the full flow: - // 1. Start simulator service - // 2. Send test bundle via Kafka - // 3. Verify simulation result in database - - todo!("Implement full integration test"); - } +// Test mempool simulator configuration creation +#[test] +fn test_mempool_simulator_config() { + let config = MempoolSimulatorConfig { + kafka_brokers: vec!["localhost:9092".to_string()], + kafka_topic: "mempool-events".to_string(), + kafka_group_id: "tips-simulator".to_string(), + reth_http_url: "http://localhost:8545".to_string(), + database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), + max_concurrent_simulations: 10, + simulation_timeout_ms: 5000, + }; + + assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); + assert_eq!(config.kafka_topic, "mempool-events"); + assert_eq!(config.max_concurrent_simulations, 10); } + +// Future integration tests would test both: +// 1. Mempool event simulation (Kafka-based) +// 2. 
ExEx event simulation From d1e4044b5bed928cb9b247bf0044493ff2cea307 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sat, 20 Sep 2025 17:04:33 -0500 Subject: [PATCH 06/39] Extract the worker pool so the mempool event listener can use it too --- crates/simulator/src/core.rs | 2 +- crates/simulator/src/engine.rs | 1 + crates/simulator/src/exex.rs | 231 ++-------------------------- crates/simulator/src/lib.rs | 5 +- crates/simulator/src/publisher.rs | 1 + crates/simulator/src/worker_pool.rs | 168 ++++++++++++++++++++ 6 files changed, 191 insertions(+), 217 deletions(-) create mode 100644 crates/simulator/src/worker_pool.rs diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs index 78eee65..2c18c91 100644 --- a/crates/simulator/src/core.rs +++ b/crates/simulator/src/core.rs @@ -2,7 +2,7 @@ use crate::engine::SimulationEngine; use crate::publisher::SimulationResultPublisher; use crate::types::SimulationRequest; use eyre::Result; -use reth_provider::{StateProvider, StateProviderFactory}; +use reth_provider::StateProviderFactory; use tracing::{error, info}; /// Core bundle simulator that provides shared simulation logic diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index cf395e6..f597fec 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -98,6 +98,7 @@ pub trait SimulationEngine: Send + Sync { S: StateProvider + Send + Sync; } +#[derive(Clone)] pub struct RethSimulationEngine { timeout: Duration, } diff --git a/crates/simulator/src/exex.rs b/crates/simulator/src/exex.rs index 76d8a8d..27d9e7f 100644 --- a/crates/simulator/src/exex.rs +++ b/crates/simulator/src/exex.rs @@ -1,5 +1,6 @@ use crate::core::BundleSimulator; use crate::types::SimulationRequest; +use crate::worker_pool::{SimulationWorkerPool, SimulationTask}; use alloy_consensus::BlockHeader; use alloy_primitives::B256; @@ -9,10 +10,7 @@ use eyre::Result; use reth_exex::{ExExContext, ExExEvent, ExExNotification}; use reth_node_api::FullNodeComponents; use futures_util::StreamExt; -use std::collections::HashMap; use std::sync::Arc; -use tokio::sync::{mpsc, RwLock}; -use tokio::task::JoinSet; use tracing::{debug, error, info, warn}; use uuid::Uuid; @@ -56,40 +54,21 @@ where } } -/// Simulation task with cancellation token -struct SimulationTask { - request: SimulationRequest, - block_number: u64, - cancel_tx: mpsc::Sender<()>, -} - /// ExEx event simulator that simulates bundles from committed blocks /// Processes chain events (commits, reorgs, reverts) and simulates potential bundles pub struct ExExEventSimulator where Node: FullNodeComponents, - E: crate::engine::SimulationEngine, - P: crate::publisher::SimulationResultPublisher, + E: crate::engine::SimulationEngine + Clone + 'static, + P: crate::publisher::SimulationResultPublisher + Clone + 'static, D: tips_datastore::BundleDatastore, { /// The execution extension context ctx: ExExContext, - /// Core bundle simulator for shared simulation logic - core_simulator: Arc>, - /// State provider factory for creating state providers - state_provider_factory: Arc, /// Datastore for fetching bundles from mempool datastore: Arc, - /// Channel for sending simulation requests to workers - simulation_tx: mpsc::Sender, - /// Channel for receiving simulation requests in workers - simulation_rx: Arc>>, - /// Map of block number to cancellation channels for pending simulations - pending_simulations: Arc>>>>, - /// Worker task handles - worker_handles: JoinSet<()>, - /// Maximum number of concurrent simulations - 
max_concurrent: usize, + /// Shared simulation worker pool + worker_pool: SimulationWorkerPool, } impl ExExEventSimulator @@ -107,18 +86,16 @@ where datastore: Arc, max_concurrent_simulations: usize, ) -> Self { - let (simulation_tx, simulation_rx) = mpsc::channel(1000); + let worker_pool = SimulationWorkerPool::new( + Arc::new(core_simulator), + state_provider_factory, + max_concurrent_simulations, + ); Self { ctx, - core_simulator: Arc::new(core_simulator), - state_provider_factory, datastore, - simulation_tx, - simulation_rx: Arc::new(tokio::sync::Mutex::new(simulation_rx)), - pending_simulations: Arc::new(RwLock::new(HashMap::new())), - worker_handles: JoinSet::new(), - max_concurrent: max_concurrent_simulations, + worker_pool, } } @@ -127,7 +104,7 @@ where info!("Starting ExEx event simulator"); // Initialize simulation workers - self.start_simulation_workers(); + self.worker_pool.start(); loop { match self.ctx.notifications.next().await { @@ -149,110 +126,11 @@ where info!("ExEx event simulator shutting down"); - // Cancel all pending simulations - self.cancel_all_simulations().await; - - // Wait for workers to complete - while let Some(result) = self.worker_handles.join_next().await { - if let Err(e) = result { - error!(error = %e, "Worker task failed"); - } - } + // Shutdown the worker pool + self.worker_pool.shutdown().await; Ok(()) } - - /// Start simulation worker tasks - fn start_simulation_workers(&mut self) { - info!(num_workers = self.max_concurrent, "Starting simulation workers"); - - for worker_id in 0..self.max_concurrent { - let core_simulator = self.core_simulator.clone(); - let state_provider_factory = self.state_provider_factory.clone(); - let simulation_rx = self.simulation_rx.clone(); - let pending_simulations = self.pending_simulations.clone(); - - self.worker_handles.spawn(async move { - Self::simulation_worker( - worker_id, - core_simulator, - state_provider_factory, - simulation_rx, - pending_simulations, - ).await - }); - } - } - - /// Worker task that processes simulation requests - async fn simulation_worker( - worker_id: usize, - core_simulator: Arc>, - state_provider_factory: Arc, - simulation_rx: Arc>>, - pending_simulations: Arc>>>>, - ) { - debug!(worker_id, "Simulation worker started"); - - loop { - // Get the next simulation task - let task = { - let mut rx = simulation_rx.lock().await; - rx.recv().await - }; - - let Some(task) = task else { - debug!(worker_id, "Simulation channel closed, worker shutting down"); - break; - }; - - // Create a cancellation receiver - let (cancel_tx, mut cancel_rx) = mpsc::channel(1); - - // Check if simulation should be cancelled - tokio::select! 
{ - _ = cancel_rx.recv() => { - debug!( - worker_id, - bundle_id = %task.request.bundle_id, - block_number = task.block_number, - "Simulation cancelled before starting" - ); - continue; - } - result = core_simulator.simulate(task.request.clone(), &state_provider_factory) => { - match result { - Ok(_) => { - debug!( - worker_id, - bundle_id = %task.request.bundle_id, - "Simulation completed successfully" - ); - } - Err(e) => { - error!( - worker_id, - bundle_id = %task.request.bundle_id, - error = %e, - "Simulation failed" - ); - } - } - } - } - - // Remove cancellation channel from pending simulations - let mut pending = pending_simulations.write().await; - if let Some(channels) = pending.get_mut(&task.block_number) { - channels.retain(|tx| !tx.same_channel(&cancel_tx)); - if channels.is_empty() { - pending.remove(&task.block_number); - } - } - } - - debug!(worker_id, "Simulation worker stopped"); - } /// Handle ExEx notifications async fn handle_notification(&mut self, notification: ExExNotification<<::Types as reth_node_api::NodeTypes>::Primitives>) -> Result<()> { @@ -320,8 +198,8 @@ where "Processing block for bundle simulation" ); - // Cancel simulations for older blocks - self.cancel_simulations_before_block(block_number).await; + // Update latest block for cancellation + self.worker_pool.update_latest_block(block_number); // Fetch all bundles valid for this block from datastore use tips_datastore::postgres::BundleFilter; @@ -346,9 +224,6 @@ where "Queuing bundle simulations for new block" ); - // Create a list to track cancellation channels for this block - let mut cancellation_channels = Vec::new(); - // Queue simulations for each bundle for (index, bundle_metadata) in bundles_with_metadata.into_iter().enumerate() { // TODO: The bundle ID should be returned from the datastore query @@ -363,19 +238,13 @@ where block_hash: *block_hash, }; - // Create cancellation channel - let (cancel_tx, _cancel_rx) = mpsc::channel(1); - cancellation_channels.push(cancel_tx.clone()); - // Create simulation task let task = SimulationTask { request, - block_number, - cancel_tx, }; // Send to worker queue - if let Err(e) = self.simulation_tx.send(task).await { + if let Err(e) = self.worker_pool.queue_simulation(task).await { error!( error = %e, bundle_index = index, @@ -384,73 +253,7 @@ where break; } } - - // Store cancellation channels for this block - if !cancellation_channels.is_empty() { - let mut pending = self.pending_simulations.write().await; - pending.insert(block_number, cancellation_channels); - } Ok(()) } - - /// Cancel all simulations for blocks before the given block number - async fn cancel_simulations_before_block(&self, block_number: u64) { - let mut pending = self.pending_simulations.write().await; - - // Find all blocks to cancel - let blocks_to_cancel: Vec = pending.keys() - .filter(|&&block| block < block_number) - .copied() - .collect(); - - if blocks_to_cancel.is_empty() { - return; - } - - info!( - current_block = block_number, - num_blocks = blocks_to_cancel.len(), - "Cancelling simulations for older blocks" - ); - - // Cancel simulations for each old block - for old_block in blocks_to_cancel { - if let Some(channels) = pending.remove(&old_block) { - debug!( - old_block, - num_simulations = channels.len(), - "Cancelling simulations for block" - ); - - // Send cancellation signal to all tasks for this block - for cancel_tx in channels { - let _ = cancel_tx.send(()).await; - } - } - } - } - - /// Cancel all pending simulations - async fn cancel_all_simulations(&self) { - let mut 
pending = self.pending_simulations.write().await; - - info!( - num_blocks = pending.len(), - "Cancelling all pending simulations" - ); - - // Cancel all simulations - for (block_number, channels) in pending.drain() { - debug!( - block_number, - num_simulations = channels.len(), - "Cancelling simulations for block" - ); - - for cancel_tx in channels { - let _ = cancel_tx.send(()).await; - } - } - } } diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 511da88..ce5f130 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -5,6 +5,7 @@ pub mod exex; pub mod mempool; pub mod publisher; pub mod types; +pub mod worker_pool; use eyre::Result; use reth_exex::ExExContext; @@ -48,7 +49,7 @@ where .map_err(|e| eyre::eyre!("Failed to run migrations: {}", e))?; info!("Database migrations completed"); - let publisher = create_database_publisher(datastore); + let publisher = create_database_publisher(datastore.clone()); info!("Database publisher initialized"); // Create simulation engine @@ -102,7 +103,7 @@ pub async fn init_mempool_event_simulator( .map_err(|e| eyre::eyre!("Failed to run migrations: {}", e))?; info!("Database migrations completed"); - let publisher = create_database_publisher(datastore); + let publisher = create_database_publisher(datastore.clone()); info!("Database publisher initialized"); // Create simulation engine diff --git a/crates/simulator/src/publisher.rs b/crates/simulator/src/publisher.rs index 5cf3342..b4643ea 100644 --- a/crates/simulator/src/publisher.rs +++ b/crates/simulator/src/publisher.rs @@ -21,6 +21,7 @@ pub trait SimulationResultPublisher: Send + Sync { async fn get_result_by_id(&self, result_id: Uuid) -> Result>; } +#[derive(Clone)] pub struct DatabaseResultPublisher { datastore: Arc, kafka_publisher: Option>, diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs new file mode 100644 index 0000000..3cdfd37 --- /dev/null +++ b/crates/simulator/src/worker_pool.rs @@ -0,0 +1,168 @@ +use crate::core::BundleSimulator; +use crate::types::SimulationRequest; +use std::sync::Arc; +use std::sync::atomic::{AtomicU64, Ordering}; +use tokio::sync::mpsc; +use tokio::task::JoinSet; +use tracing::{debug, info, warn}; + +/// Simulation task +pub struct SimulationTask { + pub request: SimulationRequest, +} + +/// Generic simulation worker pool that can be shared across different simulators +pub struct SimulationWorkerPool +where + E: crate::engine::SimulationEngine, + P: crate::publisher::SimulationResultPublisher, +{ + /// Core bundle simulator + core_simulator: Arc>, + /// State provider factory + state_provider_factory: Arc, + /// Channel for sending simulation requests to workers + simulation_tx: mpsc::Sender, + /// Channel for receiving simulation requests in workers + simulation_rx: Arc>>, + /// Latest block number being processed (for cancellation) + latest_block: Arc, + /// Worker task handles + worker_handles: JoinSet<()>, + /// Maximum number of concurrent simulations + max_concurrent: usize, +} + +impl SimulationWorkerPool +where + E: crate::engine::SimulationEngine + Clone + 'static, + P: crate::publisher::SimulationResultPublisher + Clone + 'static, + S: reth_provider::StateProviderFactory + Send + Sync + 'static, +{ + /// Create a new simulation worker pool + pub fn new( + core_simulator: Arc>, + state_provider_factory: Arc, + max_concurrent_simulations: usize, + ) -> Self { + let (simulation_tx, simulation_rx) = mpsc::channel(1000); + + Self { + core_simulator, + state_provider_factory, 
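+            // The sender is cloned per producer; the single receiver is shared by all workers behind an async Mutex.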
+            simulation_tx,
+            simulation_rx: Arc::new(tokio::sync::Mutex::new(simulation_rx)),
+            latest_block: Arc::new(AtomicU64::new(0)),
+            worker_handles: JoinSet::new(),
+            max_concurrent: max_concurrent_simulations,
+        }
+    }
+
+    /// Start simulation worker tasks
+    pub fn start(&mut self) {
+        info!(num_workers = self.max_concurrent, "Starting simulation workers");
+
+        for worker_id in 0..self.max_concurrent {
+            let core_simulator = self.core_simulator.clone();
+            let state_provider_factory = self.state_provider_factory.clone();
+            let simulation_rx = self.simulation_rx.clone();
+            let latest_block = self.latest_block.clone();
+
+            self.worker_handles.spawn(async move {
+                Self::simulation_worker(
+                    worker_id,
+                    core_simulator,
+                    state_provider_factory,
+                    simulation_rx,
+                    latest_block,
+                ).await
+            });
+        }
+    }
+
+    /// Queue a simulation task
+    pub async fn queue_simulation(&self, task: SimulationTask) -> Result<(), mpsc::error::SendError<SimulationTask>> {
+        self.simulation_tx.send(task).await
+    }
+
+    /// Update the latest block number being processed
+    pub fn update_latest_block(&self, block_number: u64) {
+        self.latest_block.store(block_number, Ordering::Release);
+        debug!(block_number, "Updated latest block for cancellation");
+    }
+
+    /// Shut down the pool: close the task queue and wait for all workers to complete
+    pub async fn shutdown(mut self) {
+        // Close the channel to signal workers to stop
+        drop(self.simulation_tx);
+
+        // Wait for workers to complete
+        while let Some(result) = self.worker_handles.join_next().await {
+            if let Err(e) = result {
+                tracing::error!(error = %e, "Worker task failed");
+            }
+        }
+    }
+
+    /// Worker task that processes simulation requests
+    async fn simulation_worker(
+        worker_id: usize,
+        core_simulator: Arc<BundleSimulator<E, P>>,
+        state_provider_factory: Arc<S>,
+        simulation_rx: Arc<tokio::sync::Mutex<mpsc::Receiver<SimulationTask>>>,
+        latest_block: Arc<AtomicU64>,
+    )
+    where
+        S: reth_provider::StateProviderFactory,
+    {
+        debug!(worker_id, "Simulation worker started");
+
+        loop {
+            // Get the next simulation task
+            let task = {
+                let mut rx = simulation_rx.lock().await;
+                rx.recv().await
+            };
+
+            let Some(task) = task else {
+                debug!(worker_id, "Simulation channel closed, worker shutting down");
+                break;
+            };
+
+            // Check if this simulation is for an old block
+            let current_latest = latest_block.load(Ordering::Acquire);
+            if task.request.block_number < current_latest {
+                warn!(
+                    worker_id,
+                    bundle_id = %task.request.bundle_id,
+                    block_number = task.request.block_number,
+                    latest_block = current_latest,
+                    "Skipping simulation for outdated block"
+                );
+                continue;
+            }
+
+            // Execute the simulation
+            match core_simulator.simulate(task.request.clone(), state_provider_factory.as_ref()).await {
+                Ok(_) => {
+                    debug!(
+                        worker_id,
+                        bundle_id = %task.request.bundle_id,
+                        "Simulation completed successfully"
+                    );
+                }
+                Err(e) => {
+                    tracing::error!(
+                        worker_id,
+                        bundle_id = %task.request.bundle_id,
+                        error = %e,
+                        "Simulation failed"
+                    );
+                }
+            }
+        }
+
+        debug!(worker_id, "Simulation worker stopped");
+    }
+}

From d43b9448c4a1a8e7f7a459d716fdfb09ce9e9cfd Mon Sep 17 00:00:00 2001
From: Niran Babalola
Date: Sun, 21 Sep 2025 00:16:17 -0500
Subject: [PATCH 07/39] Implement listening for mempool events with a shared worker pool

---
 crates/simulator/src/config.rs             |  24 ++
 crates/simulator/src/exex.rs               |  20 +-
 crates/simulator/src/lib.rs                | 212 ++++++++++-----
 crates/simulator/src/main.rs               |  38 +--
 crates/simulator/src/mempool.rs            | 295 +++++++++++++--------
 crates/simulator/src/worker_pool.rs        |  16 +-
 crates/simulator/tests/integration_test.rs |   8 +-
 7 files changed, 388 insertions(+), 225 deletions(-)

diff --git 
a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs index 1b536ce..0a48eca 100644 --- a/crates/simulator/src/config.rs +++ b/crates/simulator/src/config.rs @@ -1,4 +1,5 @@ use crate::types::ExExSimulationConfig; +use crate::mempool::MempoolSimulatorConfig; use clap::Parser; /// Combined configuration for reth node with simulator ExEx @@ -24,6 +25,18 @@ pub struct SimulatorNodeConfig { /// Timeout for individual simulations in milliseconds #[arg(long, env = "TIPS_SIMULATOR_TIMEOUT_MS", default_value = "5000")] pub simulation_timeout_ms: u64, + + /// Kafka brokers for mempool events (comma-separated) + #[arg(long, env = "TIPS_SIMULATOR_KAFKA_BROKERS", default_value = "localhost:9092")] + pub kafka_brokers: String, + + /// Kafka topic for mempool events + #[arg(long, env = "TIPS_SIMULATOR_KAFKA_TOPIC", default_value = "mempool-events")] + pub kafka_topic: String, + + /// Kafka consumer group ID + #[arg(long, env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", default_value = "tips-simulator")] + pub kafka_group_id: String, } /// Legacy standalone ExEx config (for library use) @@ -58,3 +71,14 @@ impl From for ExExSimulationConfig { } } } + +impl From<&SimulatorNodeConfig> for MempoolSimulatorConfig { + fn from(config: &SimulatorNodeConfig) -> Self { + Self { + kafka_brokers: config.kafka_brokers.split(',').map(|s| s.trim().to_string()).collect(), + kafka_topic: config.kafka_topic.clone(), + kafka_group_id: config.kafka_group_id.clone(), + database_url: config.database_url.clone(), + } + } +} diff --git a/crates/simulator/src/exex.rs b/crates/simulator/src/exex.rs index 27d9e7f..4575901 100644 --- a/crates/simulator/src/exex.rs +++ b/crates/simulator/src/exex.rs @@ -1,4 +1,3 @@ -use crate::core::BundleSimulator; use crate::types::SimulationRequest; use crate::worker_pool::{SimulationWorkerPool, SimulationTask}; @@ -68,7 +67,7 @@ where /// Datastore for fetching bundles from mempool datastore: Arc, /// Shared simulation worker pool - worker_pool: SimulationWorkerPool, + worker_pool: Arc>, } impl ExExEventSimulator @@ -81,17 +80,9 @@ where /// Create a new ExEx event simulator pub fn new( ctx: ExExContext, - core_simulator: BundleSimulator, - state_provider_factory: Arc, datastore: Arc, - max_concurrent_simulations: usize, + worker_pool: Arc>, ) -> Self { - let worker_pool = SimulationWorkerPool::new( - Arc::new(core_simulator), - state_provider_factory, - max_concurrent_simulations, - ); - Self { ctx, datastore, @@ -103,9 +94,6 @@ where pub async fn run(mut self) -> Result<()> { info!("Starting ExEx event simulator"); - // Initialize simulation workers - self.worker_pool.start(); - loop { match self.ctx.notifications.next().await { Some(Ok(notification)) => { @@ -125,10 +113,6 @@ where } info!("ExEx event simulator shutting down"); - - // Shutdown the worker pool - self.worker_pool.shutdown().await; - Ok(()) } diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index ce5f130..733f6f6 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -11,73 +11,83 @@ use eyre::Result; use reth_exex::ExExContext; use reth_node_api::FullNodeComponents; use std::sync::Arc; -use tracing::info; +use tracing::{info, error}; +use crate::worker_pool::SimulationWorkerPool; pub use config::{SimulatorExExConfig, SimulatorNodeConfig}; pub use core::BundleSimulator; pub use engine::{create_simulation_engine, SimulationEngine, RethSimulationEngine}; pub use exex::ExExEventSimulator; -pub use mempool::{MempoolEventSimulator, MempoolSimulatorConfig, MempoolEventListener, 
KafkaMempoolListener}; +pub use mempool::{MempoolEventSimulator, MempoolSimulatorConfig}; pub use publisher::{create_database_publisher, SimulationResultPublisher, DatabaseResultPublisher}; pub use types::{SimulationResult, SimulationError, ExExSimulationConfig}; // Type aliases for concrete implementations pub type TipsBundleSimulator = BundleSimulator; pub type TipsExExEventSimulator = ExExEventSimulator; -pub type TipsMempoolEventSimulator = MempoolEventSimulator; +pub type TipsMempoolEventSimulator = MempoolEventSimulator; // Initialization functions -/// Initialize ExEx event simulator (ExEx) that processes committed blocks -pub async fn init_exex_event_simulator( - ctx: ExExContext, - config: ExExSimulationConfig, -) -> Result> -where - Node: FullNodeComponents, -{ - info!("Initializing ExEx event simulator"); +/// Common initialization components shared across simulators +struct CommonSimulatorComponents { + datastore: Arc, + simulator: BundleSimulator, +} - // Create database connection and publisher +/// Initialize common simulator components (database, publisher, engine, core simulator) +async fn init_common_components(database_url: String, simulation_timeout_ms: u64) -> Result { let datastore = Arc::new( - tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await + tips_datastore::PostgresDatastore::connect(database_url).await .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))? ); - - // Run database migrations - datastore.run_migrations().await - .map_err(|e| eyre::eyre!("Failed to run migrations: {}", e))?; - info!("Database migrations completed"); let publisher = create_database_publisher(datastore.clone()); info!("Database publisher initialized"); - // Create simulation engine - let engine = create_simulation_engine(config.simulation_timeout_ms); + let engine = create_simulation_engine(simulation_timeout_ms); info!( - timeout_ms = config.simulation_timeout_ms, + timeout_ms = simulation_timeout_ms, "Simulation engine initialized" ); - // Create core bundle simulator with shared logic - let core_simulator = BundleSimulator::new( - engine, - publisher, - ); + let simulator = BundleSimulator::new(engine, publisher); info!("Core bundle simulator initialized"); - // Get state provider factory for ExEx event simulation + Ok(CommonSimulatorComponents { + datastore, + simulator, + }) +} + +/// Initialize ExEx event simulator (ExEx) that processes committed blocks +/// +/// Note: The worker pool is created but NOT started. 
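+/// Callers are responsible for starting the pool before queued simulations can
+/// run; `run_simulators_with_shared_workers` below shows the intended lifecycle.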
+pub async fn init_exex_event_simulator( + ctx: ExExContext, + config: ExExSimulationConfig, +) -> Result> +where + Node: FullNodeComponents, +{ + info!("Initializing ExEx event simulator"); + + let common_components = init_common_components(config.database_url.clone(), config.simulation_timeout_ms).await?; + let state_provider_factory = Arc::new(ctx.components.provider().clone()); - // Create the ExEx event simulator - let consensus_simulator = ExExEventSimulator::new( - ctx, - core_simulator, + let worker_pool = crate::worker_pool::SimulationWorkerPool::new( + Arc::new(common_components.simulator), state_provider_factory, - datastore, config.max_concurrent_simulations, ); + let consensus_simulator = ExExEventSimulator::new( + ctx, + common_components.datastore, + Arc::new(worker_pool), + ); + info!( max_concurrent = config.max_concurrent_simulations, "ExEx event simulator initialized successfully" @@ -87,46 +97,122 @@ where } /// Initialize mempool event simulator that processes mempool transactions -pub async fn init_mempool_event_simulator( +/// +/// Note: The worker pool is created but NOT started. +pub async fn init_mempool_event_simulator( + provider: Arc, config: MempoolSimulatorConfig, -) -> Result { + max_concurrent_simulations: usize, + simulation_timeout_ms: u64, +) -> Result> +where + Node: FullNodeComponents, +{ info!("Initializing mempool event simulator"); - // Create database connection and publisher - let datastore = Arc::new( - tips_datastore::PostgresDatastore::connect(config.database_url.clone()).await - .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))? - ); - - // Run database migrations - datastore.run_migrations().await - .map_err(|e| eyre::eyre!("Failed to run migrations: {}", e))?; - info!("Database migrations completed"); + let common_components = init_common_components(config.database_url.clone(), simulation_timeout_ms).await?; - let publisher = create_database_publisher(datastore.clone()); - info!("Database publisher initialized"); + let worker_pool = crate::worker_pool::SimulationWorkerPool::new( + Arc::new(common_components.simulator), + provider.clone(), + max_concurrent_simulations, + ); - // Create simulation engine - let engine = create_simulation_engine(config.simulation_timeout_ms); + let mempool_simulator = MempoolEventSimulator::new( + provider, + config, + Arc::new(worker_pool), + )?; + info!( - timeout_ms = config.simulation_timeout_ms, - "Simulation engine initialized" + max_concurrent = max_concurrent_simulations, + "Mempool event simulator initialized successfully" ); - // Create core bundle simulator with shared logic - let core_simulator = BundleSimulator::new( - engine, - publisher, + Ok(mempool_simulator) +} + + +/// Initialize both event simulators with a shared worker pool +/// +/// Returns the shared worker pool and both simulators. The worker pool is created +/// but NOT started. 
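+///
+/// Intended wiring, mirroring `main.rs` (a sketch; the variable names are
+/// illustrative, not part of the API):
+///
+/// ```ignore
+/// let (pool, exex, mempool) = init_shared_event_simulators(
+///     ctx, exex_config, mempool_config, max_concurrent, timeout_ms,
+/// ).await?;
+/// run_simulators_with_shared_workers(pool, exex, mempool).await?;
+/// ```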
+pub async fn init_shared_event_simulators( + exex_ctx: ExExContext, + exex_config: ExExSimulationConfig, + mempool_config: MempoolSimulatorConfig, + max_concurrent_simulations: usize, + simulation_timeout_ms: u64, +) -> Result<(Arc>, TipsExExEventSimulator, TipsMempoolEventSimulator)> +where + Node: FullNodeComponents, +{ + info!("Initializing shared event simulators"); + + let common_components = init_common_components(exex_config.database_url.clone(), simulation_timeout_ms).await?; + + let state_provider_factory = Arc::new(exex_ctx.components.provider().clone()); + + let shared_worker_pool = Arc::new(SimulationWorkerPool::new( + Arc::new(common_components.simulator), + state_provider_factory.clone(), + max_concurrent_simulations, + )); + + let exex_simulator = ExExEventSimulator::new( + exex_ctx, + common_components.datastore, + shared_worker_pool.clone(), ); - info!("Core bundle simulator initialized"); - // Create Kafka listener - let listener = KafkaMempoolListener::new(config.clone()); - - // Create the mempool event simulator - let mempool_simulator = MempoolEventSimulator::new(core_simulator, listener, config); + let mempool_simulator = MempoolEventSimulator::new( + state_provider_factory, + mempool_config, + shared_worker_pool.clone(), + )?; - info!("Mempool event simulator initialized successfully"); + Ok((shared_worker_pool, exex_simulator, mempool_simulator)) +} - Ok(mempool_simulator) +/// Run both simulators with lifecycle management for the shared worker pool +/// Starts the worker pool, runs both simulators concurrently, and ensures proper shutdown +pub async fn run_simulators_with_shared_workers( + mut worker_pool: Arc>, + exex_simulator: TipsExExEventSimulator, + mempool_simulator: TipsMempoolEventSimulator, +) -> Result<()> +where + Node: FullNodeComponents, +{ + info!("Starting shared worker pool"); + + Arc::get_mut(&mut worker_pool) + .ok_or_else(|| eyre::eyre!("Cannot get mutable reference to worker pool"))? + .start(); + + info!("Running simulators concurrently"); + + let result = tokio::select! 
{ + res = exex_simulator.run() => { + info!("ExEx simulator completed"); + res + }, + res = mempool_simulator.run() => { + info!("Mempool simulator completed"); + res + }, + }; + + info!("Shutting down worker pool"); + match Arc::try_unwrap(worker_pool) { + Ok(pool) => { + pool.shutdown().await; + info!("Worker pool shutdown complete"); + } + Err(_) => { + error!("Failed to get ownership of worker pool for shutdown"); + } + } + + result } diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index dbb0231..fba339d 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,45 +1,53 @@ use clap::Parser; use reth_node_ethereum::EthereumNode; -use tips_simulator::{init_exex_event_simulator, SimulatorNodeConfig}; +use tips_simulator::{ + init_shared_event_simulators, + run_simulators_with_shared_workers, + SimulatorNodeConfig, + MempoolSimulatorConfig +}; use tracing::info; #[tokio::main] async fn main() -> eyre::Result<()> { - // Load environment variables dotenvy::dotenv().ok(); - // Parse command line arguments let config = SimulatorNodeConfig::parse(); - // Extract simulator config - let simulator_config: tips_simulator::types::ExExSimulationConfig = (&config).into(); + let exex_config: tips_simulator::types::ExExSimulationConfig = (&config).into(); + let mempool_config: MempoolSimulatorConfig = (&config).into(); info!( database_url = %config.database_url, max_concurrent = config.max_concurrent_simulations, timeout_ms = config.simulation_timeout_ms, - "Starting reth node with ExEx event simulator" + kafka_brokers = %config.kafka_brokers, + kafka_topic = %config.kafka_topic, + "Starting reth node with both ExEx and mempool event simulators" ); - // Launch the node with ExEx using the CLI config.node.run(|builder, _| async move { let handle = builder .node(EthereumNode::default()) .install_exex("tips-simulator", move |ctx| async move { - // Initialize the ExEx event simulator - let consensus_simulator = init_exex_event_simulator(ctx, simulator_config).await - .map_err(|e| eyre::eyre!("Failed to initialize simulator: {}", e))?; + let (worker_pool, exex_simulator, mempool_simulator) = + init_shared_event_simulators( + ctx, + exex_config, + mempool_config, + config.max_concurrent_simulations, + config.simulation_timeout_ms + ).await + .map_err(|e| eyre::eyre!("Failed to initialize simulators: {}", e))?; - info!("ExEx event simulator installed successfully"); + info!("Both ExEx and mempool event simulators initialized successfully"); - // Run the ExEx event simulator - Ok(consensus_simulator.run()) + Ok(run_simulators_with_shared_workers(worker_pool, exex_simulator, mempool_simulator)) }) .launch() .await?; - info!("Reth node with ExEx event simulator started successfully"); + info!("Reth node with both simulators started successfully"); - // Wait for the node to finish handle.wait_for_node_exit().await })?; diff --git a/crates/simulator/src/mempool.rs b/crates/simulator/src/mempool.rs index 0da8d76..6b4e133 100644 --- a/crates/simulator/src/mempool.rs +++ b/crates/simulator/src/mempool.rs @@ -1,9 +1,21 @@ -use crate::core::BundleSimulator; +use crate::engine::SimulationEngine; +use crate::publisher::SimulationResultPublisher; use crate::types::SimulationRequest; +use crate::worker_pool::{SimulationTask, SimulationWorkerPool}; use eyre::Result; -use async_trait::async_trait; +use rdkafka::{ + config::ClientConfig, + consumer::{Consumer, StreamConsumer}, + message::Message, +}; use tokio::sync::mpsc; -use tracing::{error, info, warn}; +use 
tracing::{debug, error, info}; +use std::time::Duration; +use std::sync::Arc; +use alloy_primitives::B256; +use reth_provider::{BlockNumReader, HeaderProvider}; +use reth_node_api::FullNodeComponents; +use tips_audit::types::MempoolEvent; /// Configuration for mempool event simulation #[derive(Debug, Clone)] @@ -14,141 +26,190 @@ pub struct MempoolSimulatorConfig { pub kafka_topic: String, /// Kafka consumer group ID pub kafka_group_id: String, - /// URL for Reth HTTP RPC endpoint for state access - pub reth_http_url: String, /// PostgreSQL database connection URL pub database_url: String, - /// Maximum number of concurrent simulations - pub max_concurrent_simulations: usize, - /// Timeout for individual simulations in milliseconds - pub simulation_timeout_ms: u64, } -/// Trait for listening to mempool events -#[async_trait] -pub trait MempoolEventListener: Send + Sync { - /// Start listening to mempool events and send simulation requests - async fn start(&mut self, sender: mpsc::Sender) -> Result<()>; - /// Stop listening to mempool events - async fn stop(&mut self) -> Result<()>; -} - -/// Kafka-based mempool event listener -pub struct KafkaMempoolListener { - config: MempoolSimulatorConfig, -} - -impl KafkaMempoolListener { - pub fn new(config: MempoolSimulatorConfig) -> Self { - Self { config } - } -} - -#[async_trait] -impl MempoolEventListener for KafkaMempoolListener { - async fn start(&mut self, _sender: mpsc::Sender) -> Result<()> { - info!( - brokers = ?self.config.kafka_brokers, - topic = %self.config.kafka_topic, - group_id = %self.config.kafka_group_id, - "Starting Kafka mempool event listener" - ); - - // TODO: Implement actual Kafka consumer - // This is a placeholder that would: - // 1. Connect to Kafka brokers - // 2. Subscribe to the mempool topic - // 3. Parse incoming mempool events - // 4. Convert them to SimulationRequest - // 5. 
Send to the simulation queue - - warn!("Kafka mempool listener not yet fully implemented"); - Ok(()) - } - - async fn stop(&mut self) -> Result<()> { - info!("Stopping Kafka mempool event listener"); - Ok(()) - } -} /// Mempool event simulator that combines listening and simulation -pub struct MempoolEventSimulator +pub struct MempoolEventSimulator where - E: crate::engine::SimulationEngine, - P: crate::publisher::SimulationResultPublisher, - L: MempoolEventListener, + Node: FullNodeComponents, + E: SimulationEngine, + P: SimulationResultPublisher, { - core_simulator: BundleSimulator, - listener: L, - config: MempoolSimulatorConfig, + /// State provider factory for getting current block info + provider: Arc, + /// Kafka consumer for mempool events + consumer: StreamConsumer, + /// Kafka topic name + topic: String, + /// Shared simulation worker pool + worker_pool: Arc>, } -impl MempoolEventSimulator +impl MempoolEventSimulator where - E: crate::engine::SimulationEngine, - P: crate::publisher::SimulationResultPublisher, - L: MempoolEventListener, + Node: FullNodeComponents, + E: SimulationEngine + Clone + 'static, + P: SimulationResultPublisher + Clone + 'static, { /// Create a new mempool event simulator pub fn new( - core_simulator: BundleSimulator, - listener: L, + provider: Arc, config: MempoolSimulatorConfig, - ) -> Self { - info!("Initializing mempool event simulator"); - - Self { - core_simulator, - listener, - config, - } + worker_pool: Arc>, + ) -> Result { + let consumer: StreamConsumer = ClientConfig::new() + .set("group.id", &config.kafka_group_id) + .set("bootstrap.servers", config.kafka_brokers.join(",")) + .set("enable.partition.eof", "false") + .set("session.timeout.ms", "6000") + .set("enable.auto.commit", "false") + .set("auto.offset.reset", "earliest") + .set("fetch.wait.max.ms", "100") + .set("fetch.min.bytes", "1") + .create() + .map_err(|e| eyre::eyre!("Failed to create Kafka consumer: {}", e))?; + + consumer.subscribe(&[&config.kafka_topic]) + .map_err(|e| eyre::eyre!("Failed to subscribe to topic {}: {}", config.kafka_topic, e))?; + + Ok(Self { + provider, + consumer, + topic: config.kafka_topic, + worker_pool, + }) } - /// Start the mempool event simulator - pub async fn start(&mut self) -> Result<()> { - info!("Starting mempool event simulator"); - - // Create channel for simulation requests - let (sender, _receiver) = mpsc::channel::(1000); - - // Start mempool listener - let listener_handle = { - let _sender_clone = sender.clone(); - - tokio::spawn(async move { - // TODO: Start actual listener - // self.listener.start(sender_clone).await - Ok::<(), eyre::Error>(()) - }) - }; - - // TODO: Create state provider for RPC-based access - // This would create HTTP RPC client for state access - // For now, this is a placeholder - + /// Run the mempool event simulator + pub async fn run(self) -> Result<()> + where + E: 'static, + P: 'static, + { info!( - max_concurrent = self.config.max_concurrent_simulations, - "Mempool event simulator started successfully" + topic = %self.topic, + "Starting mempool event simulator" ); - - // In a real implementation, this would: - // 1. Start the listener task - // 2. Start the simulation worker with RPC state provider - // 3. 
Handle shutdown gracefully - - // Wait for listener (placeholder) - if let Err(e) = listener_handle.await { + + // Create channel for simulation requests + let (sender, mut receiver) = mpsc::channel::(1000); + + // Start Kafka listener in a separate task + let consumer = self.consumer; + let provider = self.provider; + let topic = self.topic.clone(); + let listener_handle: tokio::task::JoinHandle> = tokio::spawn(async move { + info!(topic = %topic, "Starting Kafka mempool event listener"); + + loop { + match consumer.recv().await { + Ok(message) => { + let payload = message + .payload() + .ok_or_else(|| eyre::eyre!("Message has no payload"))?; + + // Parse the mempool event + let event: MempoolEvent = serde_json::from_slice(payload) + .map_err(|e| eyre::eyre!("Failed to parse mempool event: {}", e))?; + + debug!( + bundle_id = %event.bundle_id(), + offset = message.offset(), + partition = message.partition(), + "Received mempool event" + ); + + // Convert mempool events that contain bundles into simulation requests + match event { + MempoolEvent::Created { bundle_id, bundle } | + MempoolEvent::Updated { bundle_id, bundle } => { + let (block_number, block_hash) = match provider.best_block_number() { + Ok(num) => { + let hash = provider.sealed_header(num) + .unwrap_or_default() + .map(|h| h.hash()) + .unwrap_or_default(); + (num, hash) + } + Err(_) => (0, B256::ZERO), + }; + + let simulation_request = SimulationRequest { + bundle_id, + bundle, + block_number, + block_hash, + }; + + if let Err(e) = sender.send(simulation_request).await { + error!( + error = %e, + bundle_id = %bundle_id, + "Failed to send simulation request" + ); + } + } + _ => { + // Other events (cancelled, included, dropped) don't need simulation + debug!( + bundle_id = %event.bundle_id(), + "Skipping non-simulatable event" + ); + } + } + + // Commit the message + if let Err(e) = consumer.commit_message(&message, rdkafka::consumer::CommitMode::Async) { + error!(error = %e, "Failed to commit Kafka message"); + } + } + Err(e) => { + error!(error = %e, "Error receiving message from Kafka"); + tokio::time::sleep(Duration::from_secs(1)).await; + } + } + } + }); + + // Process simulation requests using the shared worker pool + let worker_pool = self.worker_pool.clone(); + let processing_handle = tokio::spawn(async move { + while let Some(request) = receiver.recv().await { + debug!( + bundle_id = %request.bundle_id, + block_number = request.block_number, + "Queuing bundle simulation for mempool event" + ); + + // Create simulation task + let task = SimulationTask { + request: request.clone(), + }; + + // Queue simulation using shared worker pool + if let Err(e) = worker_pool.queue_simulation(task).await { + error!( + error = %e, + bundle_id = %request.bundle_id, + "Failed to queue simulation task" + ); + } + } + }); + + // Wait for both tasks to complete + let (listener_result, _processing_result) = tokio::try_join!(listener_handle, processing_handle) + .map_err(|e| eyre::eyre!("Task join error: {}", e))?; + + if let Err(e) = listener_result { error!(error = %e, "Mempool listener task failed"); + return Err(e); } - + + info!("Mempool event simulator completed"); Ok(()) } } - -// No-op listener removed - using generics instead of dynamic dispatch - -/// Create a Kafka mempool listener -pub fn create_kafka_mempool_listener(config: &MempoolSimulatorConfig) -> impl MempoolEventListener { - KafkaMempoolListener::new(config.clone()) -} diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs index 
3cdfd37..4fdd024 100644 --- a/crates/simulator/src/worker_pool.rs +++ b/crates/simulator/src/worker_pool.rs @@ -2,6 +2,7 @@ use crate::core::BundleSimulator; use crate::types::SimulationRequest; use std::sync::Arc; use std::sync::atomic::{AtomicU64, Ordering}; +use reth_provider::StateProviderFactory; use tokio::sync::mpsc; use tokio::task::JoinSet; use tracing::{debug, info, warn}; @@ -16,9 +17,10 @@ pub struct SimulationWorkerPool where E: crate::engine::SimulationEngine, P: crate::publisher::SimulationResultPublisher, + S: StateProviderFactory, { /// Core bundle simulator - core_simulator: Arc>, + simulator: Arc>, /// State provider factory state_provider_factory: Arc, /// Channel for sending simulation requests to workers @@ -41,14 +43,14 @@ where { /// Create a new simulation worker pool pub fn new( - core_simulator: Arc>, + simulator: Arc>, state_provider_factory: Arc, max_concurrent_simulations: usize, ) -> Self { let (simulation_tx, simulation_rx) = mpsc::channel(1000); Self { - core_simulator, + simulator, state_provider_factory, simulation_tx, simulation_rx: Arc::new(tokio::sync::Mutex::new(simulation_rx)), @@ -63,7 +65,7 @@ where info!(num_workers = self.max_concurrent, "Starting simulation workers"); for worker_id in 0..self.max_concurrent { - let core_simulator = self.core_simulator.clone(); + let simulator = self.simulator.clone(); let state_provider_factory = self.state_provider_factory.clone(); let simulation_rx = self.simulation_rx.clone(); let latest_block = self.latest_block.clone(); @@ -71,7 +73,7 @@ where self.worker_handles.spawn(async move { Self::simulation_worker( worker_id, - core_simulator, + simulator, state_provider_factory, simulation_rx, latest_block, @@ -108,7 +110,7 @@ where /// Worker task that processes simulation requests async fn simulation_worker( worker_id: usize, - core_simulator: Arc>, + simulator: Arc>, state_provider_factory: Arc, simulation_rx: Arc>>, latest_block: Arc, @@ -144,7 +146,7 @@ where } // Execute the simulation - match core_simulator.simulate(task.request.clone(), state_provider_factory.as_ref()).await { + match simulator.simulate(task.request.clone(), state_provider_factory.as_ref()).await { Ok(_) => { debug!( worker_id, diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index 857ccb7..84993e2 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -1,6 +1,6 @@ use tips_simulator::types::SimulationRequest; use tips_simulator::MempoolSimulatorConfig; -use alloy_primitives::{Address, Bytes, B256}; +use alloy_primitives::{Bytes, B256}; use alloy_rpc_types_mev::EthSendBundle; use uuid::Uuid; @@ -45,15 +45,13 @@ fn test_mempool_simulator_config() { kafka_brokers: vec!["localhost:9092".to_string()], kafka_topic: "mempool-events".to_string(), kafka_group_id: "tips-simulator".to_string(), - reth_http_url: "http://localhost:8545".to_string(), database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), - max_concurrent_simulations: 10, - simulation_timeout_ms: 5000, }; assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); assert_eq!(config.kafka_topic, "mempool-events"); - assert_eq!(config.max_concurrent_simulations, 10); + assert_eq!(config.kafka_group_id, "tips-simulator"); + assert_eq!(config.database_url, "postgresql://user:pass@localhost:5432/tips"); } // Future integration tests would test both: From 214f47762a16e13a479937c62bc3603f011864bf Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sun, 21 Sep 2025 00:37:07 
-0500 Subject: [PATCH 08/39] Rename the components that listen for mempool and exex events to listeners --- crates/simulator/src/config.rs | 4 +- crates/simulator/src/lib.rs | 88 +++++++++---------- crates/simulator/src/{ => listeners}/exex.rs | 16 ++-- .../simulator/src/{ => listeners}/mempool.rs | 20 ++--- crates/simulator/src/listeners/mod.rs | 10 +++ crates/simulator/src/main.rs | 22 ++--- crates/simulator/tests/integration_test.rs | 4 +- 7 files changed, 86 insertions(+), 78 deletions(-) rename crates/simulator/src/{ => listeners}/exex.rs (94%) rename crates/simulator/src/{ => listeners}/mempool.rs (94%) create mode 100644 crates/simulator/src/listeners/mod.rs diff --git a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs index 0a48eca..ed329c8 100644 --- a/crates/simulator/src/config.rs +++ b/crates/simulator/src/config.rs @@ -1,5 +1,5 @@ use crate::types::ExExSimulationConfig; -use crate::mempool::MempoolSimulatorConfig; +use crate::listeners::MempoolListenerConfig; use clap::Parser; /// Combined configuration for reth node with simulator ExEx @@ -72,7 +72,7 @@ impl From for ExExSimulationConfig { } } -impl From<&SimulatorNodeConfig> for MempoolSimulatorConfig { +impl From<&SimulatorNodeConfig> for MempoolListenerConfig { fn from(config: &SimulatorNodeConfig) -> Self { Self { kafka_brokers: config.kafka_brokers.split(',').map(|s| s.trim().to_string()).collect(), diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 733f6f6..471fb4b 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -1,8 +1,7 @@ pub mod config; pub mod core; pub mod engine; -pub mod exex; -pub mod mempool; +pub mod listeners; pub mod publisher; pub mod types; pub mod worker_pool; @@ -17,26 +16,25 @@ use crate::worker_pool::SimulationWorkerPool; pub use config::{SimulatorExExConfig, SimulatorNodeConfig}; pub use core::BundleSimulator; pub use engine::{create_simulation_engine, SimulationEngine, RethSimulationEngine}; -pub use exex::ExExEventSimulator; -pub use mempool::{MempoolEventSimulator, MempoolSimulatorConfig}; +pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig}; pub use publisher::{create_database_publisher, SimulationResultPublisher, DatabaseResultPublisher}; pub use types::{SimulationResult, SimulationError, ExExSimulationConfig}; // Type aliases for concrete implementations pub type TipsBundleSimulator = BundleSimulator; -pub type TipsExExEventSimulator = ExExEventSimulator; -pub type TipsMempoolEventSimulator = MempoolEventSimulator; +pub type TipsExExEventListener = ExExEventListener; +pub type TipsMempoolEventListener = MempoolEventListener; // Initialization functions -/// Common initialization components shared across simulators -struct CommonSimulatorComponents { +/// Common initialization components shared across listeners +struct CommonListenerComponents { datastore: Arc, simulator: BundleSimulator, } -/// Initialize common simulator components (database, publisher, engine, core simulator) -async fn init_common_components(database_url: String, simulation_timeout_ms: u64) -> Result { +/// Initialize common listener components (database, publisher, engine, core simulator) +async fn init_common_components(database_url: String, simulation_timeout_ms: u64) -> Result { let datastore = Arc::new( tips_datastore::PostgresDatastore::connect(database_url).await .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))? 
@@ -54,23 +52,23 @@ async fn init_common_components(database_url: String, simulation_timeout_ms: u64 let simulator = BundleSimulator::new(engine, publisher); info!("Core bundle simulator initialized"); - Ok(CommonSimulatorComponents { + Ok(CommonListenerComponents { datastore, simulator, }) } -/// Initialize ExEx event simulator (ExEx) that processes committed blocks +/// Initialize ExEx event listener (ExEx) that processes committed blocks /// /// Note: The worker pool is created but NOT started. -pub async fn init_exex_event_simulator( +pub async fn init_exex_event_listener( ctx: ExExContext, config: ExExSimulationConfig, -) -> Result> +) -> Result> where Node: FullNodeComponents, { - info!("Initializing ExEx event simulator"); + info!("Initializing ExEx event listener"); let common_components = init_common_components(config.database_url.clone(), config.simulation_timeout_ms).await?; @@ -82,7 +80,7 @@ where config.max_concurrent_simulations, ); - let consensus_simulator = ExExEventSimulator::new( + let consensus_listener = ExExEventListener::new( ctx, common_components.datastore, Arc::new(worker_pool), @@ -90,25 +88,25 @@ where info!( max_concurrent = config.max_concurrent_simulations, - "ExEx event simulator initialized successfully" + "ExEx event listener initialized successfully" ); - Ok(consensus_simulator) + Ok(consensus_listener) } -/// Initialize mempool event simulator that processes mempool transactions +/// Initialize mempool event listener that processes mempool transactions /// /// Note: The worker pool is created but NOT started. -pub async fn init_mempool_event_simulator( +pub async fn init_mempool_event_listener( provider: Arc, - config: MempoolSimulatorConfig, + config: MempoolListenerConfig, max_concurrent_simulations: usize, simulation_timeout_ms: u64, -) -> Result> +) -> Result> where Node: FullNodeComponents, { - info!("Initializing mempool event simulator"); + info!("Initializing mempool event listener"); let common_components = init_common_components(config.database_url.clone(), simulation_timeout_ms).await?; @@ -118,7 +116,7 @@ where max_concurrent_simulations, ); - let mempool_simulator = MempoolEventSimulator::new( + let mempool_listener = MempoolEventListener::new( provider, config, Arc::new(worker_pool), @@ -126,28 +124,28 @@ where info!( max_concurrent = max_concurrent_simulations, - "Mempool event simulator initialized successfully" + "Mempool event listener initialized successfully" ); - Ok(mempool_simulator) + Ok(mempool_listener) } -/// Initialize both event simulators with a shared worker pool +/// Initialize both event listeners with a shared worker pool /// -/// Returns the shared worker pool and both simulators. The worker pool is created +/// Returns the shared worker pool and both listeners. The worker pool is created /// but NOT started. 
-pub async fn init_shared_event_simulators( +pub async fn init_shared_event_listeners( exex_ctx: ExExContext, exex_config: ExExSimulationConfig, - mempool_config: MempoolSimulatorConfig, + mempool_config: MempoolListenerConfig, max_concurrent_simulations: usize, simulation_timeout_ms: u64, -) -> Result<(Arc>, TipsExExEventSimulator, TipsMempoolEventSimulator)> +) -> Result<(Arc>, TipsExExEventListener, TipsMempoolEventListener)> where Node: FullNodeComponents, { - info!("Initializing shared event simulators"); + info!("Initializing shared event listeners"); let common_components = init_common_components(exex_config.database_url.clone(), simulation_timeout_ms).await?; @@ -159,27 +157,27 @@ where max_concurrent_simulations, )); - let exex_simulator = ExExEventSimulator::new( + let exex_listener = ExExEventListener::new( exex_ctx, common_components.datastore, shared_worker_pool.clone(), ); - let mempool_simulator = MempoolEventSimulator::new( + let mempool_listener = MempoolEventListener::new( state_provider_factory, mempool_config, shared_worker_pool.clone(), )?; - Ok((shared_worker_pool, exex_simulator, mempool_simulator)) + Ok((shared_worker_pool, exex_listener, mempool_listener)) } -/// Run both simulators with lifecycle management for the shared worker pool -/// Starts the worker pool, runs both simulators concurrently, and ensures proper shutdown -pub async fn run_simulators_with_shared_workers( +/// Run both listeners with lifecycle management for the shared worker pool +/// Starts the worker pool, runs both listeners concurrently, and ensures proper shutdown +pub async fn run_listeners_with_shared_workers( mut worker_pool: Arc>, - exex_simulator: TipsExExEventSimulator, - mempool_simulator: TipsMempoolEventSimulator, + exex_listener: TipsExExEventListener, + mempool_listener: TipsMempoolEventListener, ) -> Result<()> where Node: FullNodeComponents, @@ -190,15 +188,15 @@ where .ok_or_else(|| eyre::eyre!("Cannot get mutable reference to worker pool"))? .start(); - info!("Running simulators concurrently"); + info!("Running listeners concurrently"); let result = tokio::select! 
{ - res = exex_simulator.run() => { - info!("ExEx simulator completed"); + res = exex_listener.run() => { + info!("ExEx listener completed"); res }, - res = mempool_simulator.run() => { - info!("Mempool simulator completed"); + res = mempool_listener.run() => { + info!("Mempool listener completed"); res }, }; diff --git a/crates/simulator/src/exex.rs b/crates/simulator/src/listeners/exex.rs similarity index 94% rename from crates/simulator/src/exex.rs rename to crates/simulator/src/listeners/exex.rs index 4575901..eb877ac 100644 --- a/crates/simulator/src/exex.rs +++ b/crates/simulator/src/listeners/exex.rs @@ -53,9 +53,9 @@ where } } -/// ExEx event simulator that simulates bundles from committed blocks -/// Processes chain events (commits, reorgs, reverts) and simulates potential bundles -pub struct ExExEventSimulator +/// ExEx event listener that processes chain events and queues bundle simulations +/// Processes chain events (commits, reorgs, reverts) and queues simulation tasks +pub struct ExExEventListener where Node: FullNodeComponents, E: crate::engine::SimulationEngine + Clone + 'static, @@ -70,14 +70,14 @@ where worker_pool: Arc>, } -impl ExExEventSimulator +impl ExExEventListener where Node: FullNodeComponents, E: crate::engine::SimulationEngine + Clone + 'static, P: crate::publisher::SimulationResultPublisher + Clone + 'static, D: tips_datastore::BundleDatastore + 'static, { - /// Create a new ExEx event simulator + /// Create a new ExEx event listener pub fn new( ctx: ExExContext, datastore: Arc, @@ -90,9 +90,9 @@ where } } - /// Main execution loop for the ExEx event simulator + /// Main execution loop for the ExEx event listener pub async fn run(mut self) -> Result<()> { - info!("Starting ExEx event simulator"); + info!("Starting ExEx event listener"); loop { match self.ctx.notifications.next().await { @@ -112,7 +112,7 @@ where } } - info!("ExEx event simulator shutting down"); + info!("ExEx event listener shutting down"); Ok(()) } diff --git a/crates/simulator/src/mempool.rs b/crates/simulator/src/listeners/mempool.rs similarity index 94% rename from crates/simulator/src/mempool.rs rename to crates/simulator/src/listeners/mempool.rs index 6b4e133..748cf67 100644 --- a/crates/simulator/src/mempool.rs +++ b/crates/simulator/src/listeners/mempool.rs @@ -17,9 +17,9 @@ use reth_provider::{BlockNumReader, HeaderProvider}; use reth_node_api::FullNodeComponents; use tips_audit::types::MempoolEvent; -/// Configuration for mempool event simulation +/// Configuration for mempool event listening #[derive(Debug, Clone)] -pub struct MempoolSimulatorConfig { +pub struct MempoolListenerConfig { /// Kafka brokers for consuming mempool events pub kafka_brokers: Vec, /// Kafka topic to consume mempool events from @@ -31,8 +31,8 @@ pub struct MempoolSimulatorConfig { } -/// Mempool event simulator that combines listening and simulation -pub struct MempoolEventSimulator +/// Mempool event listener that processes events and queues simulations +pub struct MempoolEventListener where Node: FullNodeComponents, E: SimulationEngine, @@ -48,16 +48,16 @@ where worker_pool: Arc>, } -impl MempoolEventSimulator +impl MempoolEventListener where Node: FullNodeComponents, E: SimulationEngine + Clone + 'static, P: SimulationResultPublisher + Clone + 'static, { - /// Create a new mempool event simulator + /// Create a new mempool event listener pub fn new( provider: Arc, - config: MempoolSimulatorConfig, + config: MempoolListenerConfig, worker_pool: Arc>, ) -> Result { let consumer: StreamConsumer = 
ClientConfig::new() @@ -83,7 +83,7 @@ where }) } - /// Run the mempool event simulator + /// Run the mempool event listener pub async fn run(self) -> Result<()> where E: 'static, @@ -91,7 +91,7 @@ where { info!( topic = %self.topic, - "Starting mempool event simulator" + "Starting mempool event listener" ); // Create channel for simulation requests @@ -209,7 +209,7 @@ where return Err(e); } - info!("Mempool event simulator completed"); + info!("Mempool event listener completed"); Ok(()) } } diff --git a/crates/simulator/src/listeners/mod.rs b/crates/simulator/src/listeners/mod.rs new file mode 100644 index 0000000..bdb2dff --- /dev/null +++ b/crates/simulator/src/listeners/mod.rs @@ -0,0 +1,10 @@ +//! Event listeners for simulation triggering +//! +//! This module contains listeners that process different types of events +//! and queue simulation tasks using the shared worker pool. + +pub mod exex; +pub mod mempool; + +pub use exex::{DatastoreBundleProvider, ExExEventListener}; +pub use mempool::{MempoolEventListener, MempoolListenerConfig}; diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index fba339d..6444f48 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,10 +1,10 @@ use clap::Parser; use reth_node_ethereum::EthereumNode; use tips_simulator::{ - init_shared_event_simulators, - run_simulators_with_shared_workers, + init_shared_event_listeners, + run_listeners_with_shared_workers, SimulatorNodeConfig, - MempoolSimulatorConfig + MempoolListenerConfig }; use tracing::info; @@ -14,7 +14,7 @@ async fn main() -> eyre::Result<()> { let config = SimulatorNodeConfig::parse(); let exex_config: tips_simulator::types::ExExSimulationConfig = (&config).into(); - let mempool_config: MempoolSimulatorConfig = (&config).into(); + let mempool_config: MempoolListenerConfig = (&config).into(); info!( database_url = %config.database_url, @@ -22,31 +22,31 @@ async fn main() -> eyre::Result<()> { timeout_ms = config.simulation_timeout_ms, kafka_brokers = %config.kafka_brokers, kafka_topic = %config.kafka_topic, - "Starting reth node with both ExEx and mempool event simulators" + "Starting reth node with both ExEx and mempool event listeners" ); config.node.run(|builder, _| async move { let handle = builder .node(EthereumNode::default()) .install_exex("tips-simulator", move |ctx| async move { - let (worker_pool, exex_simulator, mempool_simulator) = - init_shared_event_simulators( + let (worker_pool, exex_listener, mempool_listener) = + init_shared_event_listeners( ctx, exex_config, mempool_config, config.max_concurrent_simulations, config.simulation_timeout_ms ).await - .map_err(|e| eyre::eyre!("Failed to initialize simulators: {}", e))?; + .map_err(|e| eyre::eyre!("Failed to initialize listeners: {}", e))?; - info!("Both ExEx and mempool event simulators initialized successfully"); + info!("Both ExEx and mempool event listeners initialized successfully"); - Ok(run_simulators_with_shared_workers(worker_pool, exex_simulator, mempool_simulator)) + Ok(run_listeners_with_shared_workers(worker_pool, exex_listener, mempool_listener)) }) .launch() .await?; - info!("Reth node with both simulators started successfully"); + info!("Reth node with both listeners started successfully"); handle.wait_for_node_exit().await })?; diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index 84993e2..b7e1db2 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -1,5 +1,5 @@ 
use tips_simulator::types::SimulationRequest; -use tips_simulator::MempoolSimulatorConfig; +use tips_simulator::MempoolListenerConfig; use alloy_primitives::{Bytes, B256}; use alloy_rpc_types_mev::EthSendBundle; use uuid::Uuid; @@ -41,7 +41,7 @@ fn test_simulation_request_creation() { // Test mempool simulator configuration creation #[test] fn test_mempool_simulator_config() { - let config = MempoolSimulatorConfig { + let config = MempoolListenerConfig { kafka_brokers: vec!["localhost:9092".to_string()], kafka_topic: "mempool-events".to_string(), kafka_group_id: "tips-simulator".to_string(), From b65a3870e41018b537a1f62f1879f7423dae8771 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sun, 21 Sep 2025 00:50:53 -0500 Subject: [PATCH 09/39] Clean up symbols --- crates/simulator/src/lib.rs | 6 +++--- crates/simulator/src/listeners/exex.rs | 10 ++++++---- crates/simulator/src/main.rs | 4 ++-- crates/simulator/src/worker_pool.rs | 10 ++++++---- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 471fb4b..8ca1702 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -74,7 +74,7 @@ where let state_provider_factory = Arc::new(ctx.components.provider().clone()); - let worker_pool = crate::worker_pool::SimulationWorkerPool::new( + let worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), state_provider_factory, config.max_concurrent_simulations, @@ -110,7 +110,7 @@ where let common_components = init_common_components(config.database_url.clone(), simulation_timeout_ms).await?; - let worker_pool = crate::worker_pool::SimulationWorkerPool::new( + let worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), provider.clone(), max_concurrent_simulations, @@ -135,7 +135,7 @@ where /// /// Returns the shared worker pool and both listeners. The worker pool is created /// but NOT started. 
-pub async fn init_shared_event_listeners( +pub async fn init_listeners_with_shared_workers( exex_ctx: ExExContext, exex_config: ExExSimulationConfig, mempool_config: MempoolListenerConfig, diff --git a/crates/simulator/src/listeners/exex.rs b/crates/simulator/src/listeners/exex.rs index eb877ac..c3b7ff5 100644 --- a/crates/simulator/src/listeners/exex.rs +++ b/crates/simulator/src/listeners/exex.rs @@ -1,5 +1,7 @@ use crate::types::SimulationRequest; use crate::worker_pool::{SimulationWorkerPool, SimulationTask}; +use crate::engine::SimulationEngine; +use crate::publisher::SimulationResultPublisher; use alloy_consensus::BlockHeader; use alloy_primitives::B256; @@ -58,8 +60,8 @@ where pub struct ExExEventListener where Node: FullNodeComponents, - E: crate::engine::SimulationEngine + Clone + 'static, - P: crate::publisher::SimulationResultPublisher + Clone + 'static, + E: SimulationEngine + Clone + 'static, + P: SimulationResultPublisher + Clone + 'static, D: tips_datastore::BundleDatastore, { /// The execution extension context @@ -73,8 +75,8 @@ where impl ExExEventListener where Node: FullNodeComponents, - E: crate::engine::SimulationEngine + Clone + 'static, - P: crate::publisher::SimulationResultPublisher + Clone + 'static, + E: SimulationEngine + Clone + 'static, + P: SimulationResultPublisher + Clone + 'static, D: tips_datastore::BundleDatastore + 'static, { /// Create a new ExEx event listener diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 6444f48..3026f98 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,7 +1,7 @@ use clap::Parser; use reth_node_ethereum::EthereumNode; use tips_simulator::{ - init_shared_event_listeners, + init_listeners_with_shared_workers, run_listeners_with_shared_workers, SimulatorNodeConfig, MempoolListenerConfig @@ -30,7 +30,7 @@ async fn main() -> eyre::Result<()> { .node(EthereumNode::default()) .install_exex("tips-simulator", move |ctx| async move { let (worker_pool, exex_listener, mempool_listener) = - init_shared_event_listeners( + init_listeners_with_shared_workers( ctx, exex_config, mempool_config, diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs index 4fdd024..2fc283e 100644 --- a/crates/simulator/src/worker_pool.rs +++ b/crates/simulator/src/worker_pool.rs @@ -1,4 +1,6 @@ use crate::core::BundleSimulator; +use crate::engine::SimulationEngine; +use crate::publisher::SimulationResultPublisher; use crate::types::SimulationRequest; use std::sync::Arc; use std::sync::atomic::{AtomicU64, Ordering}; @@ -15,8 +17,8 @@ pub struct SimulationTask { /// Generic simulation worker pool that can be shared across different simulators pub struct SimulationWorkerPool where - E: crate::engine::SimulationEngine, - P: crate::publisher::SimulationResultPublisher, + E: SimulationEngine, + P: SimulationResultPublisher, S: StateProviderFactory, { /// Core bundle simulator @@ -37,8 +39,8 @@ where impl SimulationWorkerPool where - E: crate::engine::SimulationEngine + Clone + 'static, - P: crate::publisher::SimulationResultPublisher + Clone + 'static, + E: SimulationEngine + Clone + 'static, + P: SimulationResultPublisher + Clone + 'static, S: reth_provider::StateProviderFactory + Send + Sync + 'static, { /// Create a new simulation worker pool From 18b9d4cf00e7a29123d7345ad5dca5650477d828 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sun, 21 Sep 2025 00:57:10 -0500 Subject: [PATCH 10/39] Encapsulate the listeners with their worker pool --- crates/simulator/src/lib.rs | 
164 +++++++++++++++++++---------------- crates/simulator/src/main.rs | 22 +++-- 2 files changed, 98 insertions(+), 88 deletions(-) diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 8ca1702..98b9663 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -131,86 +131,98 @@ where } -/// Initialize both event listeners with a shared worker pool +/// Encapsulates both event listeners with their shared worker pool /// -/// Returns the shared worker pool and both listeners. The worker pool is created -/// but NOT started. -pub async fn init_listeners_with_shared_workers( - exex_ctx: ExExContext, - exex_config: ExExSimulationConfig, - mempool_config: MempoolListenerConfig, - max_concurrent_simulations: usize, - simulation_timeout_ms: u64, -) -> Result<(Arc>, TipsExExEventListener, TipsMempoolEventListener)> -where - Node: FullNodeComponents, -{ - info!("Initializing shared event listeners"); - - let common_components = init_common_components(exex_config.database_url.clone(), simulation_timeout_ms).await?; - - let state_provider_factory = Arc::new(exex_ctx.components.provider().clone()); - - let shared_worker_pool = Arc::new(SimulationWorkerPool::new( - Arc::new(common_components.simulator), - state_provider_factory.clone(), - max_concurrent_simulations, - )); - - let exex_listener = ExExEventListener::new( - exex_ctx, - common_components.datastore, - shared_worker_pool.clone(), - ); - - let mempool_listener = MempoolEventListener::new( - state_provider_factory, - mempool_config, - shared_worker_pool.clone(), - )?; - - Ok((shared_worker_pool, exex_listener, mempool_listener)) -} - -/// Run both listeners with lifecycle management for the shared worker pool -/// Starts the worker pool, runs both listeners concurrently, and ensures proper shutdown -pub async fn run_listeners_with_shared_workers( - mut worker_pool: Arc>, +/// This struct ensures that the ExEx and mempool listeners always use the same +/// worker pool instance, preventing potential misconfigurations. +pub struct ListenersWithWorkers { + worker_pool: Arc>, exex_listener: TipsExExEventListener, mempool_listener: TipsMempoolEventListener, -) -> Result<()> -where - Node: FullNodeComponents, -{ - info!("Starting shared worker pool"); - - Arc::get_mut(&mut worker_pool) - .ok_or_else(|| eyre::eyre!("Cannot get mutable reference to worker pool"))? - .start(); - - info!("Running listeners concurrently"); - - let result = tokio::select! { - res = exex_listener.run() => { - info!("ExEx listener completed"); - res - }, - res = mempool_listener.run() => { - info!("Mempool listener completed"); - res - }, - }; +} + +impl ListenersWithWorkers { + /// Initialize both event listeners with a shared worker pool + /// + /// The worker pool is created but NOT started. Call `run()` to start + /// the worker pool and begin processing events. 
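+    ///
+    /// Intended usage, mirroring `main.rs` (a sketch; the variable names are
+    /// illustrative):
+    ///
+    /// ```ignore
+    /// let listeners = ListenersWithWorkers::new(
+    ///     ctx, exex_config, mempool_config, max_concurrent, timeout_ms,
+    /// ).await?;
+    /// listeners.run().await?;
+    /// ```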
+ pub async fn new( + exex_ctx: ExExContext, + exex_config: ExExSimulationConfig, + mempool_config: MempoolListenerConfig, + max_concurrent_simulations: usize, + simulation_timeout_ms: u64, + ) -> Result { + info!("Initializing shared event listeners"); + + let common_components = init_common_components(exex_config.database_url.clone(), simulation_timeout_ms).await?; + + let state_provider_factory = Arc::new(exex_ctx.components.provider().clone()); + + let shared_worker_pool = Arc::new(SimulationWorkerPool::new( + Arc::new(common_components.simulator), + state_provider_factory.clone(), + max_concurrent_simulations, + )); + + let exex_listener = ExExEventListener::new( + exex_ctx, + common_components.datastore, + shared_worker_pool.clone(), + ); + + let mempool_listener = MempoolEventListener::new( + state_provider_factory, + mempool_config, + shared_worker_pool.clone(), + )?; + + info!( + max_concurrent = max_concurrent_simulations, + "Both ExEx and mempool event listeners initialized successfully" + ); + + Ok(Self { + worker_pool: shared_worker_pool, + exex_listener, + mempool_listener, + }) + } - info!("Shutting down worker pool"); - match Arc::try_unwrap(worker_pool) { - Ok(pool) => { - pool.shutdown().await; - info!("Worker pool shutdown complete"); - } - Err(_) => { - error!("Failed to get ownership of worker pool for shutdown"); + /// Run both listeners with lifecycle management for the shared worker pool + /// + /// Starts the worker pool, runs both listeners concurrently, and ensures proper shutdown + pub async fn run(mut self) -> Result<()> { + info!("Starting shared worker pool"); + + Arc::get_mut(&mut self.worker_pool) + .ok_or_else(|| eyre::eyre!("Cannot get mutable reference to worker pool"))? + .start(); + + info!("Running listeners concurrently"); + + let result = tokio::select! 
{ + res = self.exex_listener.run() => { + info!("ExEx listener completed"); + res + }, + res = self.mempool_listener.run() => { + info!("Mempool listener completed"); + res + }, + }; + + info!("Shutting down worker pool"); + match Arc::try_unwrap(self.worker_pool) { + Ok(pool) => { + pool.shutdown().await; + info!("Worker pool shutdown complete"); + } + Err(_) => { + error!("Failed to get ownership of worker pool for shutdown"); + } } + + result } - - result } diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 3026f98..478587a 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,8 +1,7 @@ use clap::Parser; use reth_node_ethereum::EthereumNode; use tips_simulator::{ - init_listeners_with_shared_workers, - run_listeners_with_shared_workers, + ListenersWithWorkers, SimulatorNodeConfig, MempoolListenerConfig }; @@ -29,19 +28,18 @@ async fn main() -> eyre::Result<()> { let handle = builder .node(EthereumNode::default()) .install_exex("tips-simulator", move |ctx| async move { - let (worker_pool, exex_listener, mempool_listener) = - init_listeners_with_shared_workers( - ctx, - exex_config, - mempool_config, - config.max_concurrent_simulations, - config.simulation_timeout_ms - ).await - .map_err(|e| eyre::eyre!("Failed to initialize listeners: {}", e))?; + let listeners = ListenersWithWorkers::new( + ctx, + exex_config, + mempool_config, + config.max_concurrent_simulations, + config.simulation_timeout_ms + ).await + .map_err(|e| eyre::eyre!("Failed to initialize listeners: {}", e))?; info!("Both ExEx and mempool event listeners initialized successfully"); - Ok(run_listeners_with_shared_workers(worker_pool, exex_listener, mempool_listener)) + Ok(listeners.run()) }) .launch() .await?; From 0ebef94eb3a42ccd28a14fcfdcc2bf0dc69dc850 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sun, 21 Sep 2025 00:59:25 -0500 Subject: [PATCH 11/39] Remove old config --- crates/simulator/src/config.rs | 23 ----------------------- crates/simulator/src/lib.rs | 2 +- 2 files changed, 1 insertion(+), 24 deletions(-) diff --git a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs index ed329c8..9ac5f08 100644 --- a/crates/simulator/src/config.rs +++ b/crates/simulator/src/config.rs @@ -39,19 +39,6 @@ pub struct SimulatorNodeConfig { pub kafka_group_id: String, } -/// Legacy standalone ExEx config (for library use) -#[derive(Debug, Clone)] -pub struct SimulatorExExConfig { - /// PostgreSQL database connection URL - pub database_url: String, - - /// Maximum number of concurrent simulations - pub max_concurrent_simulations: usize, - - /// Timeout for individual simulations in milliseconds - pub simulation_timeout_ms: u64, -} - impl From<&SimulatorNodeConfig> for ExExSimulationConfig { fn from(config: &SimulatorNodeConfig) -> Self { Self { @@ -62,16 +49,6 @@ impl From<&SimulatorNodeConfig> for ExExSimulationConfig { } } -impl From for ExExSimulationConfig { - fn from(config: SimulatorExExConfig) -> Self { - Self { - database_url: config.database_url, - max_concurrent_simulations: config.max_concurrent_simulations, - simulation_timeout_ms: config.simulation_timeout_ms, - } - } -} - impl From<&SimulatorNodeConfig> for MempoolListenerConfig { fn from(config: &SimulatorNodeConfig) -> Self { Self { diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 98b9663..ecdf135 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -13,7 +13,7 @@ use std::sync::Arc; use tracing::{info, error}; use 
crate::worker_pool::SimulationWorkerPool;

-pub use config::{SimulatorExExConfig, SimulatorNodeConfig};
+pub use config::SimulatorNodeConfig;
 pub use core::BundleSimulator;
 pub use engine::{create_simulation_engine, SimulationEngine, RethSimulationEngine};
 pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig};

From 6058c489b63f91eb14cc154a9a4ed491020c8726 Mon Sep 17 00:00:00 2001
From: Niran Babalola
Date: Sun, 21 Sep 2025 01:30:54 -0500
Subject: [PATCH 12/39] Implement TipsSimulationPublisher

---
 crates/audit/src/storage.rs               |  29 ++++
 crates/audit/src/types.rs                 |  11 ++
 crates/simulator/src/core.rs              |   6 +-
 crates/simulator/src/lib.rs               |  56 +++++--
 crates/simulator/src/listeners/exex.rs    |   6 +-
 crates/simulator/src/listeners/mempool.rs |   6 +-
 crates/simulator/src/publisher.rs         | 184 ++++++++--------------
 crates/simulator/src/worker_pool.rs       |   6 +-
 8 files changed, 158 insertions(+), 146 deletions(-)

diff --git a/crates/audit/src/storage.rs b/crates/audit/src/storage.rs
index c2ac569..b9fdc9b 100644
--- a/crates/audit/src/storage.rs
+++ b/crates/audit/src/storage.rs
@@ -74,6 +74,16 @@ pub enum BundleHistoryEvent {
         timestamp: i64,
         reason: DropReason,
     },
+    Simulated {
+        key: String,
+        timestamp: i64,
+        simulation_id: uuid::Uuid,
+        block_number: u64,
+        success: bool,
+        gas_used: Option<u64>,
+        execution_time_us: u128,
+        error_reason: Option<String>,
+    },
 }

 impl BundleHistoryEvent {
@@ -86,6 +96,7 @@ impl BundleHistoryEvent {
             BundleHistoryEvent::FlashblockIncluded { key, .. } => key,
             BundleHistoryEvent::BlockIncluded { key, .. } => key,
             BundleHistoryEvent::Dropped { key, .. } => key,
+            BundleHistoryEvent::Simulated { key, .. } => key,
         }
     }
 }
@@ -164,6 +175,24 @@ fn update_bundle_history_transform(
             timestamp: event.timestamp,
             reason: reason.clone(),
         },
+        MempoolEvent::Simulated {
+            simulation_id,
+            block_number,
+            success,
+            gas_used,
+            execution_time_us,
+            error_reason,
+            ..
+        } => BundleHistoryEvent::Simulated {
+            key: event.key.clone(),
+            timestamp: event.timestamp,
+            simulation_id: *simulation_id,
+            block_number: *block_number,
+            success: *success,
+            gas_used: *gas_used,
+            execution_time_us: *execution_time_us,
+            error_reason: error_reason.clone(),
+        }
     };

     history.push(history_event);
diff --git a/crates/audit/src/types.rs b/crates/audit/src/types.rs
index add8114..dbab76b 100644
--- a/crates/audit/src/types.rs
+++ b/crates/audit/src/types.rs
@@ -68,6 +68,15 @@ pub enum MempoolEvent {
         bundle_id: BundleId,
         reason: DropReason,
     },
+    Simulated {
+        bundle_id: BundleId,
+        simulation_id: uuid::Uuid,
+        block_number: u64,
+        success: bool,
+        gas_used: Option<u64>,
+        execution_time_us: u128,
+        error_reason: Option<String>,
+    },
 }

 impl MempoolEvent {
@@ -80,6 +89,7 @@ impl MempoolEvent {
             MempoolEvent::FlashblockIncluded { bundle_id, .. } => *bundle_id,
             MempoolEvent::BlockIncluded { bundle_id, .. } => *bundle_id,
             MempoolEvent::Dropped { bundle_id, .. } => *bundle_id,
+            MempoolEvent::Simulated { bundle_id, .. } => *bundle_id,
         }
     }

@@ -111,6 +121,7 @@ impl MempoolEvent {
             MempoolEvent::FlashblockIncluded { .. } => vec![],
             MempoolEvent::BlockIncluded { .. } => vec![],
             MempoolEvent::Dropped { .. } => vec![],
+            MempoolEvent::Simulated { .. 
         }
     }
 }
diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs
index 2c18c91..10c8b05 100644
--- a/crates/simulator/src/core.rs
+++ b/crates/simulator/src/core.rs
@@ -1,5 +1,5 @@
 use crate::engine::SimulationEngine;
-use crate::publisher::SimulationResultPublisher;
+use crate::publisher::SimulationPublisher;
 use crate::types::SimulationRequest;
 use eyre::Result;
 use reth_provider::StateProviderFactory;
@@ -10,7 +10,7 @@ use tracing::{error, info};
 pub struct BundleSimulator<E, P>
 where
     E: SimulationEngine,
-    P: SimulationResultPublisher,
+    P: SimulationPublisher,
 {
     engine: E,
     publisher: P,
@@ -19,7 +19,7 @@ impl<E, P> BundleSimulator<E, P>
 where
     E: SimulationEngine,
-    P: SimulationResultPublisher,
+    P: SimulationPublisher,
 {
     pub fn new(engine: E, publisher: P) -> Self {
         Self {
diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs
index ecdf135..2a09d39 100644
--- a/crates/simulator/src/lib.rs
+++ b/crates/simulator/src/lib.rs
@@ -17,31 +17,46 @@ pub use config::SimulatorNodeConfig;
 pub use core::BundleSimulator;
 pub use engine::{create_simulation_engine, SimulationEngine, RethSimulationEngine};
 pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig};
-pub use publisher::{create_database_publisher, SimulationResultPublisher, DatabaseResultPublisher};
+pub use publisher::{SimulationPublisher, TipsSimulationPublisher};
 pub use types::{SimulationResult, SimulationError, ExExSimulationConfig};

 // Type aliases for concrete implementations
-pub type TipsBundleSimulator = BundleSimulator<RethSimulationEngine, DatabaseResultPublisher>;
-pub type TipsExExEventListener<Node> = ExExEventListener<Node, RethSimulationEngine, DatabaseResultPublisher, tips_datastore::PostgresDatastore>;
-pub type TipsMempoolEventListener<Node> = MempoolEventListener<Node, RethSimulationEngine, DatabaseResultPublisher>;
+pub type TipsBundleSimulator = BundleSimulator<RethSimulationEngine, TipsSimulationPublisher>;
+pub type TipsExExEventListener<Node> = ExExEventListener<Node, RethSimulationEngine, TipsSimulationPublisher, tips_datastore::PostgresDatastore>;
+pub type TipsMempoolEventListener<Node> = MempoolEventListener<Node, RethSimulationEngine, TipsSimulationPublisher>;

 // Initialization functions

 /// Common initialization components shared across listeners
 struct CommonListenerComponents {
     datastore: Arc<tips_datastore::PostgresDatastore>,
-    simulator: BundleSimulator<RethSimulationEngine, DatabaseResultPublisher>,
+    simulator: BundleSimulator<RethSimulationEngine, TipsSimulationPublisher>,
 }

 /// Initialize common listener components (database, publisher, engine, core simulator)
-async fn init_common_components(database_url: String, simulation_timeout_ms: u64) -> Result<CommonListenerComponents> {
+async fn init_common_components(
+    database_url: String,
+    simulation_timeout_ms: u64,
+    kafka_brokers: String,
+    kafka_topic: String,
+) -> Result<CommonListenerComponents> {
     let datastore = Arc::new(
         tips_datastore::PostgresDatastore::connect(database_url).await
             .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))?
     );

-    let publisher = create_database_publisher(datastore.clone());
-    info!("Database publisher initialized");
+    // Create Kafka producer
+    let kafka_producer = rdkafka::config::ClientConfig::new()
+        .set("bootstrap.servers", &kafka_brokers)
+        .set("message.timeout.ms", "5000")
+        .create::<rdkafka::producer::FutureProducer>()
+        .map_err(|e| eyre::eyre!("Failed to create Kafka producer: {}", e))?;
+
+    let publisher = TipsSimulationPublisher::new(datastore.clone(), kafka_producer, kafka_topic);
+    info!(
+        kafka_brokers = %kafka_brokers,
+        "Database publisher with Kafka initialized"
+    );

     let engine = create_simulation_engine(simulation_timeout_ms);
     info!(
@@ -64,13 +79,20 @@ pub async fn init_exex_event_listener(
     ctx: ExExContext<Node>,
     config: ExExSimulationConfig,
+    kafka_brokers: String,
+    kafka_topic: String,
 ) -> Result<TipsExExEventListener<Node>>
 where
     Node: FullNodeComponents,
 {
     info!("Initializing ExEx event listener");

-    let common_components = init_common_components(config.database_url.clone(), config.simulation_timeout_ms).await?;
+    let common_components = init_common_components(
+        config.database_url.clone(),
+        config.simulation_timeout_ms,
+        kafka_brokers,
+        kafka_topic,
+    ).await?;

     let state_provider_factory = Arc::new(ctx.components.provider().clone());

@@ -108,7 +130,12 @@ where
 {
     info!("Initializing mempool event listener");

-    let common_components = init_common_components(config.database_url.clone(), simulation_timeout_ms).await?;
+    let common_components = init_common_components(
+        config.database_url.clone(),
+        simulation_timeout_ms,
+        config.kafka_brokers.join(","),
+        config.kafka_topic.clone(),
+    ).await?;

     let worker_pool = SimulationWorkerPool::new(
         Arc::new(common_components.simulator),
@@ -136,7 +163,7 @@
 /// This struct ensures that the ExEx and mempool listeners always use the same
 /// worker pool instance, preventing potential misconfigurations.
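 ///
 /// A minimal usage sketch from inside `install_exex` (the two numeric
 /// arguments are example values; the rest mirrors the wiring in `main.rs`):
 ///
 /// ```ignore
 /// let listeners = ListenersWithWorkers::new(
 ///     ctx,             // the ExExContext passed to install_exex
 ///     exex_config,     // ExExSimulationConfig
 ///     mempool_config,  // MempoolListenerConfig
 ///     16,              // max concurrent simulations (example value)
 ///     2_000,           // simulation timeout in ms (example value)
 /// ).await?;
 /// Ok(listeners.run()) // future that drives both listeners and shuts the
 ///                     // shared worker pool down when they exit
 /// ```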
pub struct ListenersWithWorkers { - worker_pool: Arc>, + worker_pool: Arc>, exex_listener: TipsExExEventListener, mempool_listener: TipsMempoolEventListener, } @@ -155,7 +182,12 @@ impl ListenersWithWorkers { ) -> Result { info!("Initializing shared event listeners"); - let common_components = init_common_components(exex_config.database_url.clone(), simulation_timeout_ms).await?; + let common_components = init_common_components( + exex_config.database_url.clone(), + simulation_timeout_ms, + mempool_config.kafka_brokers.join(","), + mempool_config.kafka_topic.clone(), + ).await?; let state_provider_factory = Arc::new(exex_ctx.components.provider().clone()); diff --git a/crates/simulator/src/listeners/exex.rs b/crates/simulator/src/listeners/exex.rs index c3b7ff5..9d9b025 100644 --- a/crates/simulator/src/listeners/exex.rs +++ b/crates/simulator/src/listeners/exex.rs @@ -1,7 +1,7 @@ use crate::types::SimulationRequest; use crate::worker_pool::{SimulationWorkerPool, SimulationTask}; use crate::engine::SimulationEngine; -use crate::publisher::SimulationResultPublisher; +use crate::publisher::SimulationPublisher; use alloy_consensus::BlockHeader; use alloy_primitives::B256; @@ -61,7 +61,7 @@ pub struct ExExEventListener where Node: FullNodeComponents, E: SimulationEngine + Clone + 'static, - P: SimulationResultPublisher + Clone + 'static, + P: SimulationPublisher + Clone + 'static, D: tips_datastore::BundleDatastore, { /// The execution extension context @@ -76,7 +76,7 @@ impl ExExEventListener where Node: FullNodeComponents, E: SimulationEngine + Clone + 'static, - P: SimulationResultPublisher + Clone + 'static, + P: SimulationPublisher + Clone + 'static, D: tips_datastore::BundleDatastore + 'static, { /// Create a new ExEx event listener diff --git a/crates/simulator/src/listeners/mempool.rs b/crates/simulator/src/listeners/mempool.rs index 748cf67..8a1cf14 100644 --- a/crates/simulator/src/listeners/mempool.rs +++ b/crates/simulator/src/listeners/mempool.rs @@ -1,5 +1,5 @@ use crate::engine::SimulationEngine; -use crate::publisher::SimulationResultPublisher; +use crate::publisher::SimulationPublisher; use crate::types::SimulationRequest; use crate::worker_pool::{SimulationTask, SimulationWorkerPool}; use eyre::Result; @@ -36,7 +36,7 @@ pub struct MempoolEventListener where Node: FullNodeComponents, E: SimulationEngine, - P: SimulationResultPublisher, + P: SimulationPublisher, { /// State provider factory for getting current block info provider: Arc, @@ -52,7 +52,7 @@ impl MempoolEventListener where Node: FullNodeComponents, E: SimulationEngine + Clone + 'static, - P: SimulationResultPublisher + Clone + 'static, + P: SimulationPublisher + Clone + 'static, { /// Create a new mempool event listener pub fn new( diff --git a/crates/simulator/src/publisher.rs b/crates/simulator/src/publisher.rs index b4643ea..e8c4889 100644 --- a/crates/simulator/src/publisher.rs +++ b/crates/simulator/src/publisher.rs @@ -2,72 +2,39 @@ use crate::types::SimulationResult; use eyre::Result; use async_trait::async_trait; use rdkafka::producer::FutureProducer; -use serde_json; +use std::collections::HashMap; use std::sync::Arc; use tips_audit::{MempoolEventPublisher, KafkaMempoolEventPublisher}; -use tips_datastore::PostgresDatastore; +use tips_datastore::{PostgresDatastore, BundleDatastore, postgres::StateDiff}; use tracing::{debug, error, info, warn}; -use uuid::Uuid; #[async_trait] -pub trait SimulationResultPublisher: Send + Sync { +pub trait SimulationPublisher: Send + Sync { /// Store a simulation result async 
fn publish_result(&self, result: SimulationResult) -> Result<()>; - - /// Get simulation results for a bundle - async fn get_results_for_bundle(&self, bundle_id: Uuid) -> Result>; - - /// Get a specific simulation result by ID - async fn get_result_by_id(&self, result_id: Uuid) -> Result>; } #[derive(Clone)] -pub struct DatabaseResultPublisher { +pub struct TipsSimulationPublisher { datastore: Arc, kafka_publisher: Option>, } -impl DatabaseResultPublisher { +impl TipsSimulationPublisher { pub fn new( datastore: Arc, - kafka_publisher: Option>, + producer: FutureProducer, + topic: String, ) -> Self { + let kafka_publisher = Arc::new(KafkaMempoolEventPublisher::new(producer, topic)); Self { datastore, - kafka_publisher, + kafka_publisher: Some(kafka_publisher), } } - pub fn with_kafka( - datastore: Arc, - producer: FutureProducer, - topic: String, - ) -> Self { - let publisher = Arc::new(KafkaMempoolEventPublisher::new(producer, topic)); - Self::new(datastore, Some(publisher)) - } - - /// Convert SimulationResult to database format - fn result_to_db_format(&self, result: &SimulationResult) -> Result { - Ok(DatabaseSimulation { - id: result.id, - bundle_id: result.bundle_id, - block_number: result.block_number as i64, - block_hash: format!("0x{}", hex::encode(result.block_hash.as_slice())), - success: result.success, - gas_used: result.gas_used.map(|g| g as i64), - execution_time_us: result.execution_time_us as i64, - state_diff: serde_json::to_value(&result.state_diff)?, - error_reason: result.error_reason.clone(), - created_at: result.created_at, - updated_at: result.created_at, // For new records, created_at == updated_at - }) - } - /// Store result in database async fn store_in_database(&self, result: &SimulationResult) -> Result<()> { - let _db_result = self.result_to_db_format(result)?; - info!( simulation_id = %result.id, bundle_id = %result.bundle_id, @@ -76,28 +43,51 @@ impl DatabaseResultPublisher { "Storing simulation result in database" ); - // TODO: This would need to be implemented with proper sqlx queries - // For now, we'll use the datastore interface if it has simulation methods - // Otherwise, we need to add simulation-specific methods to the datastore + // Convert state diff from alloy format to datastore format + let state_diff = self.convert_state_diff(&result.state_diff)?; - // Placeholder implementation - in a real scenario, we'd add methods to PostgresDatastore - // like: datastore.store_simulation_result(result).await?; + // Store the simulation using the datastore interface + let simulation_id = self.datastore.insert_simulation( + result.bundle_id, + result.block_number, + format!("0x{}", hex::encode(result.block_hash.as_slice())), + result.execution_time_us as u64, + result.gas_used.unwrap_or(0), + state_diff, + ).await.map_err(|e| eyre::eyre!("Failed to insert simulation: {}", e))?; debug!( - simulation_id = %result.id, - "Database storage placeholder - would insert simulation result here" + simulation_id = %simulation_id, + bundle_id = %result.bundle_id, + "Successfully stored simulation result in database" ); Ok(()) } + /// Convert state diff from simulator format to datastore format + fn convert_state_diff(&self, state_diff: &HashMap>) -> Result { + // StateDiff expects HashMap> + // where StorageKey is B256 and StorageValue is U256 + let mut converted = HashMap::new(); + + for (address, storage) in state_diff { + let mut storage_map = HashMap::new(); + for (key, value) in storage { + // Convert U256 key to B256 for storage key + let key_bytes = 
key.to_be_bytes::<32>(); + let storage_key = alloy_primitives::B256::from(key_bytes); + storage_map.insert(storage_key, *value); + } + converted.insert(*address, storage_map); + } + + Ok(converted) + } + /// Publish result to Kafka if configured async fn publish_to_kafka(&self, result: &SimulationResult) -> Result<()> { - if let Some(ref _publisher) = self.kafka_publisher { - // Create a custom event type for simulation results - // For now, we'll create a mock event - in the future, we might want to extend - // the MempoolEvent enum to include simulation results - + if let Some(ref publisher) = self.kafka_publisher { debug!( simulation_id = %result.id, bundle_id = %result.bundle_id, @@ -105,18 +95,24 @@ impl DatabaseResultPublisher { "Publishing simulation result to Kafka" ); - // TODO: Implement proper simulation result event - // For now, this is commented out as we'd need to extend the MempoolEvent enum + let event = tips_audit::types::MempoolEvent::Simulated { + bundle_id: result.bundle_id, + simulation_id: result.id, + block_number: result.block_number, + success: result.success, + gas_used: result.gas_used, + execution_time_us: result.execution_time_us, + error_reason: result.error_reason.clone(), + }; - // let event = MempoolEvent::SimulationComplete { - // bundle_id: result.bundle_id, - // simulation_id: result.id, - // success: result.success, - // gas_used: result.gas_used, - // execution_time_us: result.execution_time_us, - // }; - - // publisher.publish(event).await?; + publisher.publish(event).await + .map_err(|e| eyre::eyre!("Failed to publish simulation event: {}", e))?; + + debug!( + simulation_id = %result.id, + bundle_id = %result.bundle_id, + "Successfully published simulation result to Kafka" + ); } Ok(()) @@ -124,7 +120,7 @@ impl DatabaseResultPublisher { } #[async_trait] -impl SimulationResultPublisher for DatabaseResultPublisher { +impl SimulationPublisher for TipsSimulationPublisher { async fn publish_result(&self, result: SimulationResult) -> Result<()> { info!( simulation_id = %result.id, @@ -161,60 +157,4 @@ impl SimulationResultPublisher for DatabaseResultPublisher { Ok(()) } - - async fn get_results_for_bundle(&self, bundle_id: Uuid) -> Result> { - info!(bundle_id = %bundle_id, "Fetching simulation results for bundle"); - - // TODO: Implement actual database query - // For now, return empty vec as placeholder - - debug!(bundle_id = %bundle_id, "No simulation results found"); - Ok(vec![]) - } - - async fn get_result_by_id(&self, result_id: Uuid) -> Result> { - info!(simulation_id = %result_id, "Fetching simulation result by ID"); - - // TODO: Implement actual database query - // For now, return None as placeholder - - debug!(simulation_id = %result_id, "Simulation result not found"); - Ok(None) - } -} - -/// Database representation of a simulation result -/// This matches the expected database schema -#[derive(Debug, Clone)] -struct DatabaseSimulation { - id: Uuid, - bundle_id: Uuid, - block_number: i64, - block_hash: String, - success: bool, - gas_used: Option, - execution_time_us: i64, - state_diff: serde_json::Value, - error_reason: Option, - created_at: chrono::DateTime, - updated_at: chrono::DateTime, -} - -/// Create a result publisher with database storage -pub fn create_database_publisher( - datastore: Arc, -) -> DatabaseResultPublisher { - DatabaseResultPublisher::new(datastore, None) } - -/// Create a result publisher with database storage and Kafka publishing -pub fn create_database_kafka_publisher( - datastore: Arc, - producer: FutureProducer, - 
topic: String, -) -> impl SimulationResultPublisher { - DatabaseResultPublisher::with_kafka(datastore, producer, topic) -} - -// We'll need to add hex as a dependency for block hash formatting -// For now, using a simple placeholder diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs index 2fc283e..45a3a52 100644 --- a/crates/simulator/src/worker_pool.rs +++ b/crates/simulator/src/worker_pool.rs @@ -1,6 +1,6 @@ use crate::core::BundleSimulator; use crate::engine::SimulationEngine; -use crate::publisher::SimulationResultPublisher; +use crate::publisher::SimulationPublisher; use crate::types::SimulationRequest; use std::sync::Arc; use std::sync::atomic::{AtomicU64, Ordering}; @@ -18,7 +18,7 @@ pub struct SimulationTask { pub struct SimulationWorkerPool where E: SimulationEngine, - P: SimulationResultPublisher, + P: SimulationPublisher, S: StateProviderFactory, { /// Core bundle simulator @@ -40,7 +40,7 @@ where impl SimulationWorkerPool where E: SimulationEngine + Clone + 'static, - P: SimulationResultPublisher + Clone + 'static, + P: SimulationPublisher + Clone + 'static, S: reth_provider::StateProviderFactory + Send + Sync + 'static, { /// Create a new simulation worker pool From 97a35ec9b801ae3f95138a577271aaed03970499 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Sun, 21 Sep 2025 22:56:42 -0500 Subject: [PATCH 13/39] Draft implementation of bundle simulation using the reth block builder --- Cargo.toml | 3 + crates/simulator/Cargo.toml | 5 + crates/simulator/src/engine.rs | 368 +++++++++++---------------------- crates/simulator/src/lib.rs | 85 +++++--- crates/simulator/src/main.rs | 3 +- 5 files changed, 189 insertions(+), 275 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index fc1ba0d..26260c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,6 +21,9 @@ reth-node-api = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-node-builder = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-optimism = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-revm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-chainspec = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } # alloy alloy-primitives = { version = "1.3.1", default-features = false, features = [ diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index 9778e9d..c23190e 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -43,12 +43,17 @@ reth-node-api.workspace = true reth-evm.workspace = true reth-node-builder.workspace = true reth-node-ethereum.workspace = true +reth-revm.workspace = true +reth-chainspec.workspace = true +reth-optimism-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-optimism-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0", features = ["serde", "serde-bincode-compat"] } # Additional dependencies for simulation std-semaphore = "0.1" tokio-util = { version = "0.7", features = ["time"] } hex = "0.4" rdkafka.workspace = true +revm-primitives = { version = "3.1.1", default-features = false } [dev-dependencies] tokio-test = "0.4.4" diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index f597fec..38eec2b 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -1,19 +1,25 @@ use 
crate::types::{SimulationError, SimulationRequest, SimulationResult}; -use alloy_consensus::transaction::{SignerRecoverable, Transaction}; +use alloy_consensus::{transaction::SignerRecoverable, BlockHeader}; use alloy_primitives::{Address, B256, U256}; use alloy_eips::eip2718::Decodable2718; use alloy_rpc_types::BlockNumberOrTag; use eyre::Result; use async_trait::async_trait; -use op_alloy_consensus::OpTxEnvelope; -use reth_provider::{StateProvider, StateProviderFactory}; +use reth_node_api::FullNodeComponents; +use reth_provider::{StateProvider, StateProviderFactory, HeaderProvider}; +use reth_revm::{database::StateProviderDatabase, db::State}; +use reth_evm::ConfigureEvm; +use reth_evm::NextBlockEnvAttributes; +use reth_evm::execute::BlockBuilder; use std::collections::HashMap; use std::sync::Arc; use std::time::Instant; -use tokio::time::Duration; -use tracing::{debug, error, info, warn}; +use tracing::{error, info}; use uuid::Uuid; +// FIXME: The block time should be retrieved from the reth node. +const BLOCK_TIME: u64 = 2; + /// Create state provider from ExEx context /// /// This function prepares the necessary components for EVM simulation: @@ -99,8 +105,12 @@ pub trait SimulationEngine: Send + Sync { } #[derive(Clone)] -pub struct RethSimulationEngine { - timeout: Duration, +pub struct RethSimulationEngine +where + Node: FullNodeComponents, +{ + provider: Arc, + evm_config: Node::Evm, } /// Represents the execution context for a bundle simulation @@ -118,202 +128,29 @@ struct ExecutionContext { gas_used: u64, } -impl RethSimulationEngine { - pub fn new(timeout_ms: u64) -> Self { +impl RethSimulationEngine +where + Node: FullNodeComponents, +{ + pub fn new(provider: Arc, evm_config: Node::Evm) -> Self { Self { - timeout: Duration::from_millis(timeout_ms), - } - } - - /// Extract transaction details from raw transaction bytes - fn decode_transaction(&self, tx_bytes: &[u8]) -> Result { - OpTxEnvelope::decode_2718_exact(tx_bytes) - .map_err(|e| eyre::eyre!("Failed to decode transaction: {}", e)) - } - - /// Validate that a transaction can be executed in the current context - fn validate_transaction( - &self, - tx: &OpTxEnvelope, - context: &ExecutionContext, - ) -> Result<(), SimulationError> { - let sender = tx.recover_signer() - .map_err(|_| SimulationError::Unknown { - message: "Failed to recover transaction sender".to_string() - })?; - - // Check nonce - let expected_nonce = context.initial_nonces.get(&sender) - .copied() - .unwrap_or(0); - let tx_nonce = tx.nonce(); - - if tx_nonce != expected_nonce { - return Err(SimulationError::InvalidNonce { - tx_index: 0, // TODO: Pass actual tx index - expected: expected_nonce, - actual: tx_nonce, - }); - } - - // Check balance for gas payment - let gas_fee = U256::from(tx.gas_limit()) * U256::from(tx.max_fee_per_gas()); - let available_balance = context.initial_balances.get(&sender) - .copied() - .unwrap_or(U256::ZERO); - - if available_balance < gas_fee { - return Err(SimulationError::InsufficientBalance { - tx_index: 0, // TODO: Pass actual tx index - required: gas_fee, - available: available_balance, - }); - } - - Ok(()) - } - - /// Simulate a single transaction execution - fn simulate_transaction( - &self, - tx: &OpTxEnvelope, - context: &mut ExecutionContext, - tx_index: usize, - ) -> Result<(), SimulationError> { - // For now, this is a placeholder implementation - // In a full implementation, this would: - // 1. Create an EVM instance with the current state - // 2. Execute the transaction - // 3. 
Track gas usage and state changes - // 4. Handle reverts appropriately - - debug!( - tx_index = tx_index, - tx_hash = ?tx.hash(), - gas_limit = tx.gas_limit(), - "Simulating transaction" - ); - - // Validate the transaction first - self.validate_transaction(tx, context)?; - - // Simulate gas usage (placeholder logic) - let estimated_gas = std::cmp::min(tx.gas_limit(), 100_000); // Simple estimation - context.gas_used += estimated_gas; - - // Simulate some state changes (placeholder) - if let Some(to) = tx.to() { - let storage_slot = U256::from(tx_index); - let new_value = U256::from(context.gas_used); - - context.storage_changes - .entry(Address::from(*to)) - .or_insert_with(HashMap::new) - .insert(storage_slot, new_value); - } - - // Update nonce for sender - let sender = tx.recover_signer() - .map_err(|_| SimulationError::Unknown { - message: "Failed to recover sender".to_string() - })?; - - if let Some(nonce) = context.initial_nonces.get_mut(&sender) { - *nonce += 1; + provider, + evm_config, } - - debug!( - tx_index = tx_index, - gas_used = estimated_gas, - total_gas = context.gas_used, - "Transaction simulation completed" - ); - - Ok(()) } - /// Initialize execution context by fetching initial state - fn initialize_context( - &self, - request: &SimulationRequest, - state_provider: &S, - ) -> Result - where - S: StateProvider, - { - let mut initial_balances = HashMap::new(); - let mut initial_nonces = HashMap::new(); - - // Extract all addresses involved in the bundle - let mut addresses = std::collections::HashSet::new(); - - for tx_bytes in &request.bundle.txs { - match self.decode_transaction(tx_bytes) { - Ok(tx) => { - if let Ok(sender) = tx.recover_signer() { - addresses.insert(sender); - } - if let Some(to) = tx.to() { - addresses.insert(Address::from(*to)); - } - } - Err(e) => { - warn!(error = %e, "Failed to decode transaction in bundle"); - } - } - } - - // Fetch initial state for all addresses - for address in addresses { - match state_provider.account_balance(&address) { - Ok(Some(balance)) => { - initial_balances.insert(address, balance); - } - Ok(None) => { - initial_balances.insert(address, U256::ZERO); - } - Err(e) => { - error!( - error = %e, - address = %address, - "Failed to fetch balance for address" - ); - } - } - - match state_provider.account_nonce(&address) { - Ok(Some(nonce)) => { - initial_nonces.insert(address, nonce); - } - Ok(None) => { - initial_nonces.insert(address, 0); - } - Err(e) => { - error!( - error = %e, - address = %address, - "Failed to fetch nonce for address" - ); - } - } - } - - Ok(ExecutionContext { - block_number: request.block_number, - initial_balances, - initial_nonces, - storage_changes: HashMap::new(), - gas_used: 0, - }) - } } #[async_trait] -impl SimulationEngine for RethSimulationEngine { +impl SimulationEngine for RethSimulationEngine +where + Node: FullNodeComponents, + ::Evm: ConfigureEvm, +{ async fn simulate_bundle( &self, request: SimulationRequest, - state_provider: &S, + _state_provider: &S, ) -> Result where S: StateProvider + Send + Sync, @@ -329,63 +166,108 @@ impl SimulationEngine for RethSimulationEngine { "Starting bundle simulation" ); - // Initialize execution context - let mut context = self.initialize_context(&request, state_provider) - .map_err(|e| eyre::eyre!("Failed to initialize context: {}", e))?; + // Get the parent header for building the next block + let header = self + .provider + .sealed_header_by_hash(request.block_hash) + .map_err(|e| eyre::eyre!("Failed to get parent header: {}", e))? 
+ .ok_or_else(|| eyre::eyre!("Parent block {} not found", request.block_hash))?; + + // Create the state database and builder for next block + let state_provider = self.provider.state_by_block_hash(request.block_hash)?; + let state_db = StateProviderDatabase::new(state_provider); + let mut db = State::builder().with_database(state_db).with_bundle_update().build(); + let attributes = NextBlockEnvAttributes { + timestamp: header.timestamp() + BLOCK_TIME, // Optimism 2-second block time + suggested_fee_recipient: header.beneficiary(), + prev_randao: B256::random(), + gas_limit: header.gas_limit(), + parent_beacon_block_root: header.parent_beacon_block_root(), + withdrawals: None, + }; + + // NOTE: We use the reth block builder here, which diverges from op-rbuilder. It's + // not yet clear which builder we want to simulate with, so we're using reth because + // it's easy. + let mut builder = self + .evm_config + .builder_for_next_block(&mut db, &header, attributes) + .map_err(|e| eyre::eyre!("Failed to init block builder: {}", e))?; + + // Variables to track bundle execution + let mut total_gas_used = 0u64; + let all_storage_changes = HashMap::new(); + let mut failed = false; + let mut failure_reason = None; + + // Apply pre-execution changes + builder.apply_pre_execution_changes().map_err(|e| eyre::eyre!("Failed pre-exec: {}", e))?; // Simulate each transaction in the bundle for (tx_index, tx_bytes) in request.bundle.txs.iter().enumerate() { - let tx = self.decode_transaction(tx_bytes) - .map_err(|e| SimulationError::Unknown { - message: format!("Failed to decode transaction {}: {}", tx_index, e) - })?; - - if let Err(sim_error) = self.simulate_transaction(&tx, &mut context, tx_index) { - let execution_time = start_time.elapsed().as_micros(); - - error!( - bundle_id = %request.bundle_id, - simulation_id = %simulation_id, - tx_index = tx_index, - error = %sim_error, - "Bundle simulation failed" - ); - - return Ok(SimulationResult::failure( - simulation_id, - request.bundle_id, - request.block_number, - request.block_hash, - execution_time, - sim_error, - )); + // Decode bytes into the node's SignedTx type and recover the signer for execution + type NodeSignedTxTy = + <<::Types as reth_node_api::NodeTypes>::Primitives as reth_node_api::NodePrimitives>::SignedTx; + let mut reader = tx_bytes.iter().as_slice(); + let signed: NodeSignedTxTy = Decodable2718::decode_2718(&mut reader) + .map_err(|e| eyre::eyre!("Failed to decode tx {tx_index}: {e}"))?; + let recovered = signed + .try_into_recovered() + .map_err(|e| eyre::eyre!("Failed to recover tx {tx_index}: {e}"))?; + + match builder.execute_transaction(recovered) { + Ok(gas_used) => { + total_gas_used = total_gas_used.saturating_add(gas_used); + } + Err(e) => { + failed = true; + failure_reason = Some(SimulationError::Unknown { message: format!("Execution failed: {}", e) }); + break; + } } } let execution_time = start_time.elapsed().as_micros(); - info!( - bundle_id = %request.bundle_id, - simulation_id = %simulation_id, - gas_used = context.gas_used, - execution_time_us = execution_time, - storage_changes = context.storage_changes.len(), - "Bundle simulation completed successfully" - ); - - Ok(SimulationResult::success( - simulation_id, - request.bundle_id, - request.block_number, - request.block_hash, - context.gas_used, - execution_time, - context.storage_changes, - )) + if failed { + error!( + bundle_id = %request.bundle_id, + simulation_id = %simulation_id, + error = ?failure_reason, + "Bundle simulation failed" + ); + + 
+            Ok(SimulationResult::failure(
+                simulation_id,
+                request.bundle_id,
+                request.block_number,
+                request.block_hash,
+                execution_time,
+                failure_reason.unwrap_or(SimulationError::Unknown {
+                    message: "Unknown failure".to_string(),
+                }),
+            ))
+        } else {
+            info!(
+                bundle_id = %request.bundle_id,
+                simulation_id = %simulation_id,
+                gas_used = total_gas_used,
+                execution_time_us = execution_time,
+                storage_changes = all_storage_changes.len(),
+                "Bundle simulation completed successfully"
+            );
+
+            // TODO: Collect the state diff.
+
+            Ok(SimulationResult::success(
+                simulation_id,
+                request.bundle_id,
+                request.block_number,
+                request.block_hash,
+                total_gas_used,
+                execution_time,
+                all_storage_changes,
+            ))
+        }
     }
 }
-
-/// Create a bundle simulation engine
-pub fn create_simulation_engine(timeout_ms: u64) -> RethSimulationEngine {
-    RethSimulationEngine::new(timeout_ms)
-}
diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs
index 2a09d39..24ceeb6 100644
--- a/crates/simulator/src/lib.rs
+++ b/crates/simulator/src/lib.rs
@@ -9,37 +9,47 @@ pub mod worker_pool;
 use eyre::Result;
 use reth_exex::ExExContext;
 use reth_node_api::FullNodeComponents;
+use reth_evm::{ConfigureEvm, NextBlockEnvAttributes};
 use std::sync::Arc;
 use tracing::{info, error};
 use crate::worker_pool::SimulationWorkerPool;

 pub use config::SimulatorNodeConfig;
 pub use core::BundleSimulator;
-pub use engine::{create_simulation_engine, SimulationEngine, RethSimulationEngine};
+pub use engine::{SimulationEngine, RethSimulationEngine};
 pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig};
 pub use publisher::{SimulationPublisher, TipsSimulationPublisher};
 pub use types::{SimulationResult, SimulationError, ExExSimulationConfig};

 // Type aliases for concrete implementations
-pub type TipsBundleSimulator = BundleSimulator<RethSimulationEngine, TipsSimulationPublisher>;
-pub type TipsExExEventListener<Node> = ExExEventListener<Node, RethSimulationEngine, TipsSimulationPublisher, tips_datastore::PostgresDatastore>;
-pub type TipsMempoolEventListener<Node> = MempoolEventListener<Node, RethSimulationEngine, TipsSimulationPublisher>;
+pub type TipsBundleSimulator<Node> = BundleSimulator<RethSimulationEngine<Node>, TipsSimulationPublisher>;
+pub type TipsExExEventListener<Node> = ExExEventListener<Node, RethSimulationEngine<Node>, TipsSimulationPublisher, tips_datastore::PostgresDatastore>;
+pub type TipsMempoolEventListener<Node> = MempoolEventListener<Node, RethSimulationEngine<Node>, TipsSimulationPublisher>;

 // Initialization functions

 /// Common initialization components shared across listeners
-struct CommonListenerComponents {
+struct CommonListenerComponents<Node>
+where
+    Node: FullNodeComponents,
+    <Node as FullNodeComponents>::Evm: ConfigureEvm<NextBlockEnvCtx = NextBlockEnvAttributes>,
+{
     datastore: Arc<tips_datastore::PostgresDatastore>,
-    simulator: BundleSimulator<RethSimulationEngine, TipsSimulationPublisher>,
+    simulator: BundleSimulator<RethSimulationEngine<Node>, TipsSimulationPublisher>,
 }

 /// Initialize common listener components (database, publisher, engine, core simulator)
-async fn init_common_components(
+async fn init_common_components<Node>(
+    provider: Arc<Node::Provider>,
+    evm_config: Node::Evm,
     database_url: String,
-    simulation_timeout_ms: u64,
     kafka_brokers: String,
     kafka_topic: String,
-) -> Result<CommonListenerComponents> {
+) -> Result<CommonListenerComponents<Node>>
+where
+    Node: FullNodeComponents,
+    <Node as FullNodeComponents>::Evm: ConfigureEvm<NextBlockEnvCtx = NextBlockEnvAttributes>,
+{
     let datastore = Arc::new(
         tips_datastore::PostgresDatastore::connect(database_url).await
             .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))?
     );
@@ -58,11 +68,8 @@ async fn init_common_components(
         "Database publisher with Kafka initialized"
     );

-    let engine = create_simulation_engine(simulation_timeout_ms);
-    info!(
-        timeout_ms = simulation_timeout_ms,
-        "Simulation engine initialized"
-    );
+    let engine = RethSimulationEngine::new(provider, evm_config);
+    info!("Simulation engine initialized");

     let simulator = BundleSimulator::new(engine, publisher);
     info!("Core bundle simulator initialized");
@@ -84,21 +91,25 @@ pub async fn init_exex_event_listener(
 ) -> Result<TipsExExEventListener<Node>>
 where
     Node: FullNodeComponents,
+    <Node as FullNodeComponents>::Evm: ConfigureEvm<NextBlockEnvCtx = NextBlockEnvAttributes>,
 {
     info!("Initializing ExEx event listener");
+
+    let state_provider_factory = Arc::new(ctx.components.provider().clone());
+    let provider = Arc::new(ctx.components.provider().clone());
+    let evm_config = ctx.components.evm_config().clone();

     let common_components = init_common_components(
-        config.database_url.clone(),
-        config.simulation_timeout_ms,
+        provider,
+        evm_config,
+        config.database_url.clone(),
         kafka_brokers,
         kafka_topic,
     ).await?;

-    let state_provider_factory = Arc::new(ctx.components.provider().clone());
-
     let worker_pool = SimulationWorkerPool::new(
         Arc::new(common_components.simulator),
-        state_provider_factory,
+        state_provider_factory.clone(),
         config.max_concurrent_simulations,
     );

@@ -120,19 +131,22 @@ where
 ///
 /// Note: The worker pool is created but NOT started.
 pub async fn init_mempool_event_listener(
+    ctx: Arc<ExExContext<Node>>,
     provider: Arc<Node::Provider>,
     config: MempoolListenerConfig,
     max_concurrent_simulations: usize,
-    simulation_timeout_ms: u64,
 ) -> Result<TipsMempoolEventListener<Node>>
 where
     Node: FullNodeComponents,
+    <Node as FullNodeComponents>::Evm: ConfigureEvm<NextBlockEnvCtx = NextBlockEnvAttributes>,
 {
     info!("Initializing mempool event listener");

+    let evm_config = ctx.components.evm_config().clone();
     let common_components = init_common_components(
-        config.database_url.clone(),
-        simulation_timeout_ms,
+        provider.clone(),
+        evm_config,
+        config.database_url.clone(),
         config.kafka_brokers.join(","),
         config.kafka_topic.clone(),
     ).await?;
@@ -162,13 +176,21 @@ where
 ///
 /// This struct ensures that the ExEx and mempool listeners always use the same
 /// worker pool instance, preventing potential misconfigurations.
-pub struct ListenersWithWorkers<Node: FullNodeComponents> {
-    worker_pool: Arc<SimulationWorkerPool<RethSimulationEngine, TipsSimulationPublisher, Node::Provider>>,
+pub struct ListenersWithWorkers<Node>
+where
+    Node: FullNodeComponents,
+    <Node as FullNodeComponents>::Evm: ConfigureEvm<NextBlockEnvCtx = NextBlockEnvAttributes>,
+{
+    worker_pool: Arc<SimulationWorkerPool<RethSimulationEngine<Node>, TipsSimulationPublisher, Node::Provider>>,
     exex_listener: TipsExExEventListener<Node>,
     mempool_listener: TipsMempoolEventListener<Node>,
 }

-impl<Node: FullNodeComponents> ListenersWithWorkers<Node> {
+impl<Node> ListenersWithWorkers<Node>
+where
+    Node: FullNodeComponents,
+    <Node as FullNodeComponents>::Evm: ConfigureEvm<NextBlockEnvCtx = NextBlockEnvAttributes>,
+{
     /// Initialize both event listeners with a shared worker pool
     ///
     /// The worker pool is created but NOT started. Call `run()` to start
@@ -178,19 +200,22 @@ impl<Node: FullNodeComponents> ListenersWithWorkers<Node> {
         exex_config: ExExSimulationConfig,
         mempool_config: MempoolListenerConfig,
         max_concurrent_simulations: usize,
-        simulation_timeout_ms: u64,
+        _simulation_timeout_ms: u64,
     ) -> Result<Self> {
         info!("Initializing shared event listeners");

+        let state_provider_factory = Arc::new(exex_ctx.components.provider().clone());
+        let provider = Arc::new(exex_ctx.components.provider().clone());
+        let evm_config = exex_ctx.components.evm_config().clone();
+
         let common_components = init_common_components(
-            exex_config.database_url.clone(),
-            simulation_timeout_ms,
+            provider,
+            evm_config,
+            exex_config.database_url.clone(),
             mempool_config.kafka_brokers.join(","),
             mempool_config.kafka_topic.clone(),
         ).await?;

-        let state_provider_factory = Arc::new(exex_ctx.components.provider().clone());
-
         let shared_worker_pool = Arc::new(SimulationWorkerPool::new(
             Arc::new(common_components.simulator),
             state_provider_factory.clone(),
diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs
index 478587a..6b11814 100644
--- a/crates/simulator/src/main.rs
+++ b/crates/simulator/src/main.rs
@@ -1,5 +1,4 @@
 use clap::Parser;
-use reth_node_ethereum::EthereumNode;
 use tips_simulator::{
     ListenersWithWorkers,
     SimulatorNodeConfig,
@@ -26,7 +25,7 @@ async fn main() -> eyre::Result<()> {
     config.node.run(|builder, _| async move {
         let handle = builder
-            .node(EthereumNode::default())
+            .node(reth_node_ethereum::EthereumNode::default())
             .install_exex("tips-simulator", move |ctx| async move {
                 let listeners = ListenersWithWorkers::new(
                     ctx,

From 5d2cce4fec8e7bb0129a0322e911f87d24f5c2e1 Mon Sep 17 00:00:00 2001
From: Niran Babalola
Date: Mon, 22 Sep 2025 00:04:04 -0500
Subject: [PATCH 14/39] Collect state diffs

---
 crates/simulator/src/engine.rs | 82 +++++++++++++++++++++------------
 1 file changed, 50 insertions(+), 32 deletions(-)

diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs
index 38eec2b..3027b92 100644
--- a/crates/simulator/src/engine.rs
+++ b/crates/simulator/src/engine.rs
@@ -186,43 +186,45 @@ where
             withdrawals: None,
         };

-        // NOTE: We use the reth block builder here, which diverges from op-rbuilder. It's
-        // not yet clear which builder we want to simulate with, so we're using reth because
-        // it's easy.
-        let mut builder = self
-            .evm_config
-            .builder_for_next_block(&mut db, &header, attributes)
-            .map_err(|e| eyre::eyre!("Failed to init block builder: {}", e))?;
-
         // Variables to track bundle execution
         let mut total_gas_used = 0u64;
-        let all_storage_changes = HashMap::new();
+        let all_storage_changes: HashMap<Address, HashMap<U256, U256>> = HashMap::new();
         let mut failed = false;
         let mut failure_reason = None;

-        // Apply pre-execution changes
-        builder.apply_pre_execution_changes().map_err(|e| eyre::eyre!("Failed pre-exec: {}", e))?;
+        // Apply pre-execution changes and simulate transactions in a scope
+        // to ensure builder is dropped before we call take_bundle()
+        {
+            // NOTE: We use the reth block builder here, which diverges from op-rbuilder. It's
+            // not yet clear which builder we want to simulate with, so we're using reth because
+            // it's easy.
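+            // `builder_for_next_block` derives the next block's EVM environment from
+            // the parent `header` plus the `attributes` above, and it keeps the
+            // `&mut db` borrow until the end of this scope.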
+            let mut builder = self
+                .evm_config
+                .builder_for_next_block(&mut db, &header, attributes)
+                .map_err(|e| eyre::eyre!("Failed to init block builder: {}", e))?;
+            builder.apply_pre_execution_changes().map_err(|e| eyre::eyre!("Failed pre-exec: {}", e))?;

-        // Simulate each transaction in the bundle
-        for (tx_index, tx_bytes) in request.bundle.txs.iter().enumerate() {
-            // Decode bytes into the node's SignedTx type and recover the signer for execution
-            type NodeSignedTxTy =
-                <<<Node as FullNodeComponents>::Types as reth_node_api::NodeTypes>::Primitives as reth_node_api::NodePrimitives>::SignedTx;
-            let mut reader = tx_bytes.iter().as_slice();
-            let signed: NodeSignedTxTy = Decodable2718::decode_2718(&mut reader)
-                .map_err(|e| eyre::eyre!("Failed to decode tx {tx_index}: {e}"))?;
-            let recovered = signed
-                .try_into_recovered()
-                .map_err(|e| eyre::eyre!("Failed to recover tx {tx_index}: {e}"))?;
+            // Simulate each transaction in the bundle
+            for (tx_index, tx_bytes) in request.bundle.txs.iter().enumerate() {
+                // Decode bytes into the node's SignedTx type and recover the signer for execution
+                type NodeSignedTxTy =
+                    <<<Node as FullNodeComponents>::Types as reth_node_api::NodeTypes>::Primitives as reth_node_api::NodePrimitives>::SignedTx;
+                let mut reader = tx_bytes.iter().as_slice();
+                let signed: NodeSignedTxTy = Decodable2718::decode_2718(&mut reader)
+                    .map_err(|e| eyre::eyre!("Failed to decode tx {tx_index}: {e}"))?;
+                let recovered = signed
+                    .try_into_recovered()
+                    .map_err(|e| eyre::eyre!("Failed to recover tx {tx_index}: {e}"))?;

-            match builder.execute_transaction(recovered) {
-                Ok(gas_used) => {
-                    total_gas_used = total_gas_used.saturating_add(gas_used);
-                }
-                Err(e) => {
-                    failed = true;
-                    failure_reason = Some(SimulationError::Unknown { message: format!("Execution failed: {}", e) });
-                    break;
+                match builder.execute_transaction(recovered) {
+                    Ok(gas_used) => {
+                        total_gas_used = total_gas_used.saturating_add(gas_used);
+                    }
+                    Err(e) => {
+                        failed = true;
+                        failure_reason = Some(SimulationError::Unknown { message: format!("Execution failed: {}", e) });
+                        break;
+                    }
                 }
             }
         }

@@ -257,7 +259,23 @@
             gas_used = total_gas_used,
             execution_time_us = execution_time,
             storage_changes = all_storage_changes.len(),
             "Bundle simulation completed successfully"
         );

-            // TODO: Collect the state diff.
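+            // The collection below keeps only slots whose `present_value` differs
+            // from `original_value()`, so unchanged slots and untouched accounts
+            // never enter the stored state diff.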
+ // Collect the state diff + let bundle = db.take_bundle(); + + // Extract storage changes from the bundle + let mut modified_storage_slots = HashMap::new(); + for (address, account) in bundle.state() { + let mut storage_changes = HashMap::new(); + for (slot, slot_value) in account.storage.iter() { + // Only include modified slots (non-zero values or explicitly set to zero) + if slot_value.present_value != slot_value.original_value() { + storage_changes.insert(*slot, slot_value.present_value); + } + } + if !storage_changes.is_empty() { + modified_storage_slots.insert(*address, storage_changes); + } + } Ok(SimulationResult::success( simulation_id, @@ -266,7 +284,7 @@ where request.block_hash, total_gas_used, execution_time, - all_storage_changes, + modified_storage_slots, )) } } From aab1ee674a15aab40301edf935e51804ccf33784 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Mon, 22 Sep 2025 00:06:25 -0500 Subject: [PATCH 15/39] Remove unused struct --- crates/simulator/src/engine.rs | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 3027b92..e383ba3 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -113,20 +113,6 @@ where evm_config: Node::Evm, } -/// Represents the execution context for a bundle simulation -#[derive(Debug)] -struct ExecutionContext { - /// Block number for simulation - block_number: u64, - /// Initial balances of involved accounts - initial_balances: HashMap, - /// Initial nonces of involved accounts - initial_nonces: HashMap, - /// Storage changes during simulation - storage_changes: HashMap>, - /// Gas used so far - gas_used: u64, -} impl RethSimulationEngine where From b8ff80c0f1dd49fbee85b3f1a587c7c54f9b56bd Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Mon, 22 Sep 2025 00:16:36 -0500 Subject: [PATCH 16/39] Fix dependencies after rebase --- Cargo.lock | 6284 ++++++++++++++++++++++++++++++++++++++++++++++++---- Cargo.toml | 1 + 2 files changed, 5860 insertions(+), 425 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 604a6ff..d10a4f2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,41 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + [[package]] name = "ahash" version = "0.8.12" @@ -39,6 +74,21 @@ dependencies = [ "memchr", ] +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + [[package]] name = "allocator-api2" version = "0.2.21" @@ -51,11 +101,11 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e4e355f312b270bca5144af5f003e7d238037e47a818766f9107f966cbecf52" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "num_enum", "serde", - "strum", + "strum 0.27.2", ] [[package]] @@ -65,14 +115,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b190875b4e4d8838a49e9c1489a27c07583232a269a1a625a8260049134bd6be" dependencies = [ "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-serde", "alloy-trie", "alloy-tx-macros", "auto_impl", "c-kzg", - "derive_more", + "derive_more 2.0.1", "either", "k256", "once_cell", @@ -80,7 +130,7 @@ dependencies = [ "secp256k1 0.30.0", "serde", "serde_with", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -91,23 +141,40 @@ checksum = "545370c7dc047fa2c632a965b76bb429cc24674d2fcddacdcb0d998b09731b9e" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-serde", "serde", ] +[[package]] +name = "alloy-dyn-abi" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3f56873f3cac7a2c63d8e98a4314b8311aa96adb1a0f82ae923eb2119809d2c" +dependencies = [ + "alloy-json-abi", + "alloy-primitives 1.3.1", + "alloy-sol-type-parser", + "alloy-sol-types", + "derive_more 2.0.1", + "itoa", + "serde", + "serde_json", + "winnow", +] + [[package]] name = "alloy-eip2124" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "crc", "serde", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -116,7 +183,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b82752a889170df67bbb36d42ca63c531eb16274f0d7299ae2a680facba17bd" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "serde", ] @@ -127,11 +194,12 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d4769c6ffddca380b0070d71c8b7f30bed375543fe76bb2f74ec0acf4b7cd16" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "k256", "serde", - "thiserror", + "serde_with", + "thiserror 2.0.16", ] [[package]] @@ -143,17 +211,19 @@ dependencies = [ "alloy-eip2124", "alloy-eip2930", "alloy-eip7702", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-serde", "auto_impl", "c-kzg", - "derive_more", + "derive_more 2.0.1", "either", + "ethereum_ssz", + "ethereum_ssz_derive", "serde", "serde_with", "sha2 0.10.9", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -165,15 +235,15 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-hardforks", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-eth", "alloy-sol-types", "auto_impl", - "derive_more", + "derive_more 2.0.1", "op-alloy-consensus 0.19.1", "op-revm", "revm", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -183,7 +253,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3865dd77a0fcbe61a35f08171af54d54617372df0544d7626f9ee5a42103c825" dependencies = [ "alloy-eips", - "alloy-primitives", + 
"alloy-primitives 1.3.1", "alloy-serde", "alloy-trie", "serde", @@ -198,9 +268,10 @@ checksum = "8d66cfdf265bf52c0c4a952960c854c3683c71ff2fc02c9b8c317c691fd3bc28" dependencies = [ "alloy-chains", "alloy-eip2124", - "alloy-primitives", + "alloy-primitives 1.3.1", "auto_impl", "dyn-clone", + "serde", ] [[package]] @@ -209,7 +280,7 @@ version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "125a1c373261b252e53e04d6e92c37d881833afc1315fceab53fd46045695640" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-sol-type-parser", "serde", "serde_json", @@ -221,12 +292,12 @@ version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d24aba9adc7e22cec5ae396980cac73792f5cb5407dc1efc07292e7f96fb65d5" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-sol-types", "http 1.3.1", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", "tracing", ] @@ -241,7 +312,7 @@ dependencies = [ "alloy-eips", "alloy-json-rpc", "alloy-network-primitives", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-any", "alloy-rpc-types-eth", "alloy-serde", @@ -249,11 +320,11 @@ dependencies = [ "alloy-sol-types", "async-trait", "auto_impl", - "derive_more", + "derive_more 2.0.1", "futures-utils-wasm", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -264,11 +335,56 @@ checksum = "f37bf78f46f2717973639c4f11e6330691fea62c4d116d720e0dcfd49080c126" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-serde", "serde", ] +[[package]] +name = "alloy-op-evm" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9b726869a13d5d958f2f78fbef7ce522689c4d40d613c16239f5e286fbeb1a" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-op-hardforks", + "alloy-primitives 1.3.1", + "auto_impl", + "op-alloy-consensus 0.19.1", + "op-revm", + "revm", +] + +[[package]] +name = "alloy-op-hardforks" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a2823360cd87c008df4b8b78794924948c3508e745dfed7d2b685774cb473e" +dependencies = [ + "alloy-chains", + "alloy-hardforks", + "auto_impl", +] + +[[package]] +name = "alloy-primitives" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccb3ead547f4532bc8af961649942f0b9c16ee9226e26caa3f38420651cc0bf4" +dependencies = [ + "alloy-rlp", + "bytes", + "cfg-if", + "const-hex", + "derive_more 0.99.20", + "hex-literal", + "itoa", + "ruint", + "tiny-keccak", +] + [[package]] name = "alloy-primitives" version = "1.3.1" @@ -279,7 +395,7 @@ dependencies = [ "bytes", "cfg-if", "const-hex", - "derive_more", + "derive_more 2.0.1", "foldhash", "getrandom 0.3.3", "hashbrown 0.15.5", @@ -309,17 +425,20 @@ dependencies = [ "alloy-json-rpc", "alloy-network", "alloy-network-primitives", - "alloy-primitives", + "alloy-primitives 1.3.1", + "alloy-pubsub", "alloy-rpc-client", "alloy-rpc-types-eth", "alloy-signer", "alloy-sol-types", "alloy-transport", "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", "async-stream", "async-trait", "auto_impl", - "dashmap", + "dashmap 6.1.0", "either", "futures", "futures-utils-wasm", @@ -329,13 +448,35 @@ dependencies = [ "reqwest", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", "tokio", "tracing", "url", "wasmtimer", ] +[[package]] +name = "alloy-pubsub" +version = "1.0.33" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69a1849b705f8659614e2f2c3aea4a71514a33b085ce66078cd0ceb30db4c7f3" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives 1.3.1", + "alloy-transport", + "auto_impl", + "bimap", + "futures", + "parking_lot", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower", + "tracing", + "wasmtimer", +] + [[package]] name = "alloy-rlp" version = "0.3.12" @@ -365,9 +506,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bca26070f1fc94d69e8d41fcde991b0556dbf8fac737dc09102d461d957a1bb9" dependencies = [ "alloy-json-rpc", - "alloy-primitives", + "alloy-primitives 1.3.1", + "alloy-pubsub", "alloy-transport", "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", "futures", "pin-project", "reqwest", @@ -387,7 +531,9 @@ version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c079797bbda28d6a5a2e89bcbf788bf85b4ae2a4f0e57eed9e2d66637fe78c58" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", "alloy-serde", "serde", ] @@ -399,11 +545,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d56c8ce360ec766720d8a655fe448b94428ad1aea44aad488a3461ee4dc1f40" dependencies = [ "alloy-genesis", - "alloy-primitives", + "alloy-primitives 1.3.1", "serde", "serde_json", ] +[[package]] +name = "alloy-rpc-types-anvil" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "254361c29ceea0c673be16838066465ffad41bdebf62acae97a2acb6ec085556" +dependencies = [ + "alloy-primitives 1.3.1", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", +] + [[package]] name = "alloy-rpc-types-any" version = "1.0.33" @@ -415,6 +573,37 @@ dependencies = [ "alloy-serde", ] +[[package]] +name = "alloy-rpc-types-beacon" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03d445c60ce294b942c4df010c0406ee17d5b8e498d38a9d2523287fc3e438a2" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "ethereum_ssz", + "ethereum_ssz_derive", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.16", + "tree_hash", + "tree_hash_derive", +] + +[[package]] +name = "alloy-rpc-types-debug" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01289dae0aa187f76bb964f3fa2dcd86e70de033f3f048caddf677066e8f47e7" +dependencies = [ + "alloy-primitives 1.3.1", + "derive_more 2.0.1", + "serde", + "serde_with", +] + [[package]] name = "alloy-rpc-types-engine" version = "1.0.33" @@ -423,10 +612,16 @@ checksum = "997de3fb8ad67674af70c123d2c6344e8fb0cbbd7fb96fde106ee9e45a2912d2" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", - "derive_more", - "strum", + "alloy-serde", + "derive_more 2.0.1", + "ethereum_ssz", + "ethereum_ssz_derive", + "jsonwebtoken", + "rand 0.8.5", + "serde", + "strum 0.27.2", ] [[package]] @@ -439,7 +634,7 @@ dependencies = [ "alloy-consensus-any", "alloy-eips", "alloy-network-primitives", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-serde", "alloy-sol-types", @@ -447,7 +642,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -458,7 +653,7 @@ checksum = "5a04717f44c5404b1ef497f221869f243d5c9ea5bdfd5da8c25d6736a9d2b2e1" dependencies = [ "alloy-consensus", "alloy-eips", 
- "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-eth", "alloy-serde", "serde", @@ -471,12 +666,24 @@ version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3741bce3ede19ed040d8f357a88a4aae8f714e4d07da7f2a11b77a698386d7e1" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-eth", "alloy-serde", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", +] + +[[package]] +name = "alloy-rpc-types-txpool" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d36139df081dd51d204aa7e96ecd84ec2c4446501bca0ee75f7abdc5b0e8fc" +dependencies = [ + "alloy-primitives 1.3.1", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", ] [[package]] @@ -485,7 +692,7 @@ version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0ee5af728e144e0e5bde52114c7052249a9833d9fba79aeacfbdee1aad69e8" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "serde", "serde_json", ] @@ -496,13 +703,29 @@ version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0efbce76baf1b012e379a5e486822c71b0de0a957ddedd5410427789516a47b9" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "async-trait", "auto_impl", "either", "elliptic-curve 0.13.8", "k256", - "thiserror", + "thiserror 2.0.16", +] + +[[package]] +name = "alloy-signer-local" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52345adc3b784889659ff2930c02047974916b6aacbf0ae013ee6578d2df266" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives 1.3.1", + "alloy-signer", + "async-trait", + "k256", + "rand 0.8.5", + "thiserror 2.0.16", ] [[package]] @@ -570,7 +793,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5383d34ea00079e6dd89c652bcbdb764db160cef84e6250926961a0b2295d04" dependencies = [ "alloy-json-abi", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-sol-macro", "serde", ] @@ -582,16 +805,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7200a72ccda236bc841df56964b1f816f451e317b172538ba3977357e789b8bd" dependencies = [ "alloy-json-rpc", - "alloy-primitives", + "alloy-primitives 1.3.1", "auto_impl", "base64 0.22.1", - "derive_more", + "derive_more 2.0.1", "futures", "futures-utils-wasm", "parking_lot", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", "tokio", "tower", "tracing", @@ -614,16 +837,54 @@ dependencies = [ "url", ] +[[package]] +name = "alloy-transport-ipc" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dba7bd49c83873ea3e9447531fd26a205638b0c6d46c244c9cbfeb839855a420" +dependencies = [ + "alloy-json-rpc", + "alloy-pubsub", + "alloy-transport", + "bytes", + "futures", + "interprocess", + "pin-project", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "alloy-transport-ws" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a252f2451d53d6e0d85dfa96f193e698a777d1def51e8611805c0fd7e40d8e2" +dependencies = [ + "alloy-pubsub", + "alloy-transport", + "futures", + "http 1.3.1", + "rustls 0.23.31", + "serde_json", + "tokio", + "tokio-tungstenite", + "tracing", + "ws_stream_wasm", +] + [[package]] name = "alloy-trie" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e3412d52bb97c6c6cc27ccc28d4e6e8cf605469101193b50b0bd5813b1f990b5" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "arrayvec", - "derive_more", + "derive_more 2.0.1", "nybbles", "serde", "smallvec", @@ -636,7 +897,7 @@ version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb91a93165a8646618ae6366f301ec1edd52f452665c371e12201516593925a0" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "darling 0.21.3", "proc-macro2", "quote", @@ -1027,6 +1288,25 @@ dependencies = [ "serde", ] +[[package]] +name = "asn1_der" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "155a5a185e42c6b77ac7b88a15143d930a9e9727a5b7b77eed417404ab15c247" + +[[package]] +name = "async-compression" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "977eb15ea9efd848bb8a4a1a2500347ed7f0bf794edf0dc3ddcf439f43d36b23" +dependencies = [ + "compression-codecs", + "compression-core", + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "async-stream" version = "0.3.6" @@ -1060,6 +1340,17 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "async_io_stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +dependencies = [ + "futures", + "pharos", + "rustc_version 0.4.1", +] + [[package]] name = "atoi" version = "2.0.0" @@ -1160,7 +1451,7 @@ version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbfd150b5dbdb988bcc8fb1fe787eb6b7ee6180ca24da683b61ea5405f3d43ff" dependencies = [ - "bindgen", + "bindgen 0.69.5", "cc", "cmake", "dunce", @@ -1545,7 +1836,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "592277618714fbcecda9a02ba7a8781f319d26532a88553bbacc77ba5d2b3a8d" dependencies = [ "fastrand", - "gloo-timers", + "gloo-timers 0.3.0", "tokio", ] @@ -1564,6 +1855,12 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "base-x" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" + [[package]] name = "base16ct" version = "0.1.1" @@ -1604,6 +1901,21 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" +[[package]] +name = "bimap" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bindgen" version = "0.69.5" @@ -1627,6 +1939,24 @@ dependencies = [ "which", ] +[[package]] +name = "bindgen" +version = "0.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" +dependencies = [ + "bitflags 2.9.4", + "cexpr", + "clang-sys", + "itertools 0.13.0", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.106", +] + [[package]] name = "bit-set" version = "0.8.0" @@ -1704,6 +2034,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = 
"block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + [[package]] name = "blst" version = "0.3.15" @@ -1717,23 +2056,162 @@ dependencies = [ ] [[package]] -name = "bollard" -version = "0.18.1" +name = "boa_ast" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ccca1260af6a459d75994ad5acc1651bcabcbdbc41467cc9786519ab854c30" +checksum = "2c340fe0f0b267787095cbe35240c6786ff19da63ec7b69367ba338eace8169b" dependencies = [ - "base64 0.22.1", - "bollard-stubs", - "bytes", - "futures-core", - "futures-util", - "hex", - "home", - "http 1.3.1", - "http-body-util", - "hyper 1.7.0", - "hyper-named-pipe", - "hyper-rustls 0.27.7", + "bitflags 2.9.4", + "boa_interner", + "boa_macros", + "boa_string", + "indexmap 2.11.4", + "num-bigint", + "rustc-hash 2.1.1", +] + +[[package]] +name = "boa_engine" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f620c3f06f51e65c0504ddf04978be1b814ac6586f0b45f6019801ab5efd37f9" +dependencies = [ + "arrayvec", + "bitflags 2.9.4", + "boa_ast", + "boa_gc", + "boa_interner", + "boa_macros", + "boa_parser", + "boa_profiler", + "boa_string", + "bytemuck", + "cfg-if", + "dashmap 6.1.0", + "fast-float2", + "hashbrown 0.15.5", + "icu_normalizer 1.5.0", + "indexmap 2.11.4", + "intrusive-collections", + "itertools 0.13.0", + "num-bigint", + "num-integer", + "num-traits", + "num_enum", + "once_cell", + "pollster", + "portable-atomic", + "rand 0.8.5", + "regress", + "rustc-hash 2.1.1", + "ryu-js", + "serde", + "serde_json", + "sptr", + "static_assertions", + "tap", + "thin-vec", + "thiserror 2.0.16", + "time", +] + +[[package]] +name = "boa_gc" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2425c0b7720d42d73eaa6a883fbb77a5c920da8694964a3d79a67597ac55cce2" +dependencies = [ + "boa_macros", + "boa_profiler", + "boa_string", + "hashbrown 0.15.5", + "thin-vec", +] + +[[package]] +name = "boa_interner" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42407a3b724cfaecde8f7d4af566df4b56af32a2f11f0956f5570bb974e7f749" +dependencies = [ + "boa_gc", + "boa_macros", + "hashbrown 0.15.5", + "indexmap 2.11.4", + "once_cell", + "phf", + "rustc-hash 2.1.1", + "static_assertions", +] + +[[package]] +name = "boa_macros" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fd3f870829131332587f607a7ff909f1af5fc523fd1b192db55fbbdf52e8d3c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "synstructure", +] + +[[package]] +name = "boa_parser" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cc142dac798cdc6e2dbccfddeb50f36d2523bb977a976e19bdb3ae19b740804" +dependencies = [ + "bitflags 2.9.4", + "boa_ast", + "boa_interner", + "boa_macros", + "boa_profiler", + "fast-float2", + "icu_properties 1.5.1", + "num-bigint", + "num-traits", + "regress", + "rustc-hash 2.1.1", +] + +[[package]] +name = "boa_profiler" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4064908e7cdf9b6317179e9b04dcb27f1510c1c144aeab4d0394014f37a0f922" + +[[package]] +name = "boa_string" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7debc13fbf7997bf38bf8e9b20f1ad5e2a7d27a900e1f6039fe244ce30f589b5" +dependencies = [ + "fast-float2", + "paste", + "rustc-hash 2.1.1", + "sptr", + "static_assertions", +] + +[[package]] +name = "bollard" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97ccca1260af6a459d75994ad5acc1651bcabcbdbc41467cc9786519ab854c30" +dependencies = [ + "base64 0.22.1", + "bollard-stubs", + "bytes", + "futures-core", + "futures-util", + "hex", + "home", + "http 1.3.1", + "http-body-util", + "hyper 1.7.0", + "hyper-named-pipe", + "hyper-rustls 0.27.7", "hyper-util", "hyperlocal", "log", @@ -1747,7 +2225,7 @@ dependencies = [ "serde_json", "serde_repr", "serde_urlencoded", - "thiserror", + "thiserror 2.0.16", "tokio", "tokio-util", "tower-service", @@ -1766,6 +2244,45 @@ dependencies = [ "serde_with", ] +[[package]] +name = "boyer-moore-magiclen" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95e6233f2d926b5b123caf9d58e3885885255567fbe7776a7fdcae2a4d7241c4" +dependencies = [ + "debug-helper", +] + +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bs58" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" +dependencies = [ + "tinyvec", +] + [[package]] name = "bumpalo" version = "3.19.0" @@ -1778,6 +2295,32 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" +[[package]] +name = "bytecount" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" + +[[package]] +name = "bytemuck" +version = "1.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f154e572231cb6ba2bd1176980827e3d5dc04cc183a75dea38109fbdd672d29" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "byteorder" version = "1.5.0" @@ -1818,6 +2361,66 @@ dependencies = [ "serde", ] +[[package]] +name = "camino" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1de8bc0aa9e9385ceb3bf0c152e3a9b9544f6c4a912c8ae504e80c1f0368603" +dependencies = [ + "serde_core", +] + +[[package]] +name = "cargo-platform" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" +dependencies = [ + "camino", + "cargo-platform", + "semver 1.0.27", + "serde", + "serde_json", +] + +[[package]] +name = "cargo_metadata" +version = "0.19.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" +dependencies = [ + "camino", + "cargo-platform", + "semver 1.0.27", + "serde", + "serde_json", + "thiserror 2.0.16", +] + +[[package]] +name = "cassowary" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" + +[[package]] +name = "castaway" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" +dependencies = [ + "rustversion", +] + [[package]] name = "cc" version = "1.2.15" @@ -1829,6 +2432,12 @@ dependencies = [ "shlex", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cexpr" version = "0.6.0" @@ -1857,11 +2466,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ "iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", "windows-link 0.2.0", ] +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + [[package]] name = "clang-sys" version = "1.8.1" @@ -1928,6 +2549,70 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "comfy-table" +version = "7.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03b7db8e0b4b2fdad6c551e634134e99ec000e5c8c3b6856c65e8bbaded7a3b" +dependencies = [ + "crossterm 0.29.0", + "unicode-segmentation", + "unicode-width 0.2.0", +] + +[[package]] +name = "compact_str" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" +dependencies = [ + "castaway", + "cfg-if", + "itoa", + "rustversion", + "ryu", + "static_assertions", +] + +[[package]] +name = "compression-codecs" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "485abf41ac0c8047c07c87c72c8fb3eb5197f6e9d7ded615dfd1a00ae00a0f64" +dependencies = [ + "brotli", + "compression-core", + "flate2", + "memchr", + "zstd", + "zstd-safe", +] + +[[package]] +name = "compression-core" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e47641d3deaf41fb1538ac1f54735925e275eaf3bf4d55c81b137fba797e5cbb" + +[[package]] +name = "concat-kdf" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2d72c1252426a83be2092dd5884a5f6e3b8e7180f6891b6263d2c21b92ec8816" +dependencies = [ + "digest 0.10.7", +] + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -1975,6 +2660,12 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "convert_case" version = "0.7.1" @@ -2010,6 +2701,15 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + [[package]] name = "cpufeatures" version = "0.2.17" @@ -2062,6 +2762,15 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -2097,11 +2806,50 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] -name = "crunchy" -version = "0.2.4" +name = "crossterm" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - +checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" +dependencies = [ + "bitflags 2.9.4", + "crossterm_winapi", + "mio", + "parking_lot", + "rustix 0.38.44", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" +dependencies = [ + "bitflags 2.9.4", + "crossterm_winapi", + "document-features", + "parking_lot", + "rustix 1.1.2", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -2133,9 +2881,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", + "rand_core 0.6.4", "typenum", ] +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "rustc_version 0.4.1", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "darling" version = "0.20.11" @@ -2207,6 +2992,19 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "dashmap" version = "6.1.0" @@ -2221,6 +3019,49 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + +[[package]] +name = "data-encoding-macro" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47ce6c96ea0102f01122a185683611bd5ac8d99e62bc59dd12e6bda344ee673d" +dependencies = [ + "data-encoding", + "data-encoding-macro-internal", +] + +[[package]] +name = "data-encoding-macro-internal" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" +dependencies = [ + "data-encoding", + "syn 2.0.106", +] + +[[package]] +name = "debug-helper" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f578e8e2c440e7297e008bb5486a3a8a194775224bbc23729b0dbdfaeebf162e" + +[[package]] +name = "delay_map" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88e365f083a5cb5972d50ce8b1b2c9f125dc5ec0f50c0248cfb568ae59efcf0b" +dependencies = [ + "futures", + "tokio", + "tokio-util", +] + [[package]] name = "der" version = "0.6.1" @@ -2274,6 +3115,50 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn 2.0.106", +] + +[[package]] +name = "derive_more" +version = "0.99.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version 0.4.1", + "syn 2.0.106", +] + [[package]] name = "derive_more" version = "2.0.1" @@ 
-2289,13 +3174,19 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ - "convert_case", + "convert_case 0.7.1", "proc-macro2", "quote", "syn 2.0.106", "unicode-xid", ] +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + [[package]] name = "digest" version = "0.9.0" @@ -2317,6 +3208,81 @@ dependencies = [ "subtle", ] +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.5.2", + "windows-sys 0.61.0", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users 0.4.6", + "winapi", +] + +[[package]] +name = "discv5" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4b4e7798d2ff74e29cee344dc490af947ae657d6ab5273dde35d58ce06a4d71" +dependencies = [ + "aes", + "aes-gcm", + "alloy-rlp", + "arrayvec", + "ctr", + "delay_map", + "enr", + "fnv", + "futures", + "hashlink 0.9.1", + "hex", + "hkdf", + "lazy_static", + "libp2p-identity", + "lru 0.12.5", + "more-asserts", + "multiaddr", + "parking_lot", + "rand 0.8.5", + "smallvec", + "socket2 0.5.10", + "tokio", + "tracing", + "uint 0.10.0", + "zeroize", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -2339,6 +3305,21 @@ dependencies = [ "serde_json", ] +[[package]] +name = "doctest-file" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" + +[[package]] +name = "document-features" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" +dependencies = [ + "litrs", +] + [[package]] name = "dotenvy" version = "0.15.7" @@ -2384,6 +3365,31 @@ dependencies = [ "spki 0.7.3", ] +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8 0.10.2", + "signature 2.2.0", +] + +[[package]] +name = "ed25519-dalek" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" +dependencies = [ + "curve25519-dalek", + "ed25519", + "rand_core 0.6.4", + "serde", + "sha2 0.10.9", + "subtle", + "zeroize", +] + [[package]] name = "educe" version = "0.6.0" @@ -2454,14 +3460,29 @@ dependencies = [ "alloy-rlp", "base64 
0.22.1", "bytes", + "ed25519-dalek", "hex", + "k256", "log", "rand 0.8.5", "secp256k1 0.30.0", + "serde", "sha3", "zeroize", ] +[[package]] +name = "enum-as-inner" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "enum-ordinalize" version = "4.3.0" @@ -2482,6 +3503,17 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "enumn" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "equivalent" version = "1.0.2" @@ -2498,6 +3530,15 @@ dependencies = [ "windows-sys 0.61.0", ] +[[package]] +name = "error-chain" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" +dependencies = [ + "version_check", +] + [[package]] name = "etcetera" version = "0.8.0" @@ -2510,32 +3551,89 @@ dependencies = [ ] [[package]] -name = "event-listener" -version = "5.4.1" +name = "ethereum_hashing" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", + "cpufeatures", + "ring", + "sha2 0.10.9", ] [[package]] -name = "eyre" -version = "0.6.12" +name = "ethereum_serde_utils" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" dependencies = [ - "indenter", - "once_cell", + "alloy-primitives 1.3.1", + "hex", + "serde", + "serde_derive", + "serde_json", ] [[package]] -name = "fastrand" -version = "2.3.0" +name = "ethereum_ssz" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - +checksum = "9ca8ba45b63c389c6e115b095ca16381534fdcc03cf58176a3f8554db2dbe19b" +dependencies = [ + "alloy-primitives 1.3.1", + "ethereum_serde_utils", + "itertools 0.13.0", + "serde", + "serde_derive", + "smallvec", + "typenum", +] + +[[package]] +name = "ethereum_ssz_derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd55d08012b4e0dfcc92b8d6081234df65f2986ad34cc76eeed69c5e2ce7506" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fast-float2" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "fastrlp" version = "0.3.1" @@ -2558,6 +3656,16 @@ dependencies = [ "bytes", ] +[[package]] +name = "fdlimit" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182f7dbc2ef73d9ef67351c5fbbea084729c48362d3ce9dd44c28e32e277fe5" +dependencies = [ + "libc", + "thiserror 1.0.69", +] + [[package]] name = "ff" version = "0.12.1" @@ -2578,6 +3686,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + [[package]] name = "filetime" version = "0.2.26" @@ -2602,6 +3716,16 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "flume" version = "0.11.1" @@ -2655,6 +3779,15 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + [[package]] name = "funty" version = "2.0.0" @@ -2743,6 +3876,16 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" +dependencies = [ + "gloo-timers 0.2.6", + "send_wrapper 0.4.0", +] + [[package]] name = "futures-util" version = "0.3.31" @@ -2767,12 +3910,27 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" +[[package]] +name = "generator" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "605183a538e3e2a9c1038635cc5c2d194e2ee8fd0d1b66b8349fad7dbacce5a2" +dependencies = [ + "cc", + "cfg-if", + "libc", + "log", + "rustversion", + "windows 0.61.3", +] + [[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ + "serde", "typenum", "version_check", "zeroize", @@ -2805,18 +3963,74 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + [[package]] name = "gimli" version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +[[package]] +name = 
"git2" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" +dependencies = [ + "bitflags 2.9.4", + "libc", + "libgit2-sys", + "log", + "url", +] + [[package]] name = "glob" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" +[[package]] +name = "gloo-net" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "http 1.3.1", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror 1.0.69", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "gloo-timers" version = "0.3.0" @@ -2829,6 +4043,19 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "gloo-utils" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "gmp-mpfr-sys" version = "1.6.8" @@ -2916,6 +4143,10 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] [[package]] name = "hashbrown" @@ -2935,6 +4166,15 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "hashlink" version = "0.10.0" @@ -2944,6 +4184,16 @@ dependencies = [ "hashbrown 0.15.5", ] +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "byteorder", + "num-traits", +] + [[package]] name = "heck" version = "0.5.0" @@ -2971,6 +4221,60 @@ dependencies = [ "arrayvec", ] +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + +[[package]] +name = "hickory-proto" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna", + "ipnet", + "once_cell", + "rand 0.9.2", + "ring", + "serde", + "thiserror 2.0.16", + "tinyvec", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "hickory-resolver" +version = 
"0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc62a9a99b0bfb44d2ab95a7208ac952d31060efc16241c87eaf36406fecf87a" +dependencies = [ + "cfg-if", + "futures-util", + "hickory-proto", + "ipconfig", + "moka", + "once_cell", + "parking_lot", + "rand 0.9.2", + "resolv-conf", + "serde", + "smallvec", + "thiserror 2.0.16", + "tokio", + "tracing", +] + [[package]] name = "hkdf" version = "0.12.4" @@ -3054,6 +4358,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + [[package]] name = "httparse" version = "1.10.1" @@ -3066,6 +4376,28 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "human_bytes" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e" + +[[package]] +name = "humantime" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" + +[[package]] +name = "humantime-serde" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" +dependencies = [ + "humantime", + "serde", +] + [[package]] name = "hyper" version = "0.14.32" @@ -3153,12 +4485,14 @@ dependencies = [ "http 1.3.1", "hyper 1.7.0", "hyper-util", + "log", "rustls 0.23.31", "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", "tokio-rustls 0.26.3", "tower-service", + "webpki-roots 1.0.2", ] [[package]] @@ -3228,7 +4562,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core", + "windows-core 0.62.0", ] [[package]] @@ -3240,6 +4574,18 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke 0.7.5", + "zerofrom", + "zerovec 0.10.4", +] + [[package]] name = "icu_collections" version = "2.0.0" @@ -3248,9 +4594,9 @@ checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", "potential_utf", - "yoke", + "yoke 0.8.0", "zerofrom", - "zerovec", + "zerovec 0.11.4", ] [[package]] @@ -3260,70 +4606,176 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", + "litemap 0.8.0", + "tinystr 0.8.1", + "writeable 0.6.1", + "zerovec 0.11.4", ] [[package]] -name = "icu_normalizer" -version = "2.0.0" +name = "icu_locid" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", + "litemap 0.7.5", + "tinystr 0.7.6", + "writeable 0.5.5", + "zerovec 0.10.4", ] [[package]] -name = "icu_normalizer_data" 
-version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" - -[[package]] -name = "icu_properties" -version = "2.0.1" +name = "icu_locid_transform" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ "displaydoc", - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "potential_utf", - "zerotrie", - "zerovec", + "icu_locid", + "icu_locid_transform_data", + "icu_provider 1.5.0", + "tinystr 0.7.6", + "zerovec 0.10.4", ] [[package]] -name = "icu_properties_data" -version = "2.0.1" +name = "icu_locid_transform_data" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" +checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d" [[package]] -name = "icu_provider" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections 1.5.0", + "icu_normalizer_data 1.5.1", + "icu_properties 1.5.1", + "icu_provider 1.5.0", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec 0.10.4", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections 2.0.0", + "icu_normalizer_data 2.0.0", + "icu_properties 2.0.1", + "icu_provider 2.0.0", + "smallvec", + "zerovec 0.11.4", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7" + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections 1.5.0", + "icu_locid_transform", + "icu_properties_data 1.5.1", + "icu_provider 1.5.0", + "tinystr 0.7.6", + "zerovec 0.10.4", +] + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections 2.0.0", + "icu_locale_core", + "icu_properties_data 2.0.1", + "icu_provider 2.0.0", + "potential_utf", + "zerotrie", + "zerovec 0.11.4", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2" + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr 0.7.6", + "writeable 0.5.5", + "yoke 0.7.5", + "zerofrom", + "zerovec 0.10.4", +] + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", "icu_locale_core", "stable_deref_trait", - "tinystr", - "writeable", - "yoke", + "tinystr 0.8.1", + "writeable 0.6.1", + "yoke 0.8.0", "zerofrom", "zerotrie", - "zerovec", + "zerovec 0.11.4", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] @@ -3349,8 +4801,18 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ - "icu_normalizer", - "icu_properties", + "icu_normalizer 2.0.0", + "icu_properties 2.0.1", +] + +[[package]] +name = "if-addrs" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69b2eeee38fef3aa9b4cc5f1beea8a2444fc00e7377cafae396de3f5c2065e24" +dependencies = [ + "libc", + "windows-sys 0.59.0", ] [[package]] @@ -3421,6 +4883,79 @@ dependencies = [ "serde_core", ] +[[package]] +name = "indoc" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" + +[[package]] +name = "inotify" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +dependencies = [ + "bitflags 2.9.4", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "instability" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a" +dependencies = [ + "darling 0.20.11", + "indoc", + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "interprocess" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d941b405bd2322993887859a8ee6ac9134945a24ec5ec763a8a962fc64dfec2d" +dependencies = [ + "doctest-file", + "futures-core", + "libc", + "recvmsg", + "tokio", + "widestring", + "windows-sys 0.52.0", +] + +[[package]] +name = "intrusive-collections" +version = "0.9.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86" +dependencies = [ + "memoffset", +] + [[package]] name = "io-uring" version = "0.7.10" @@ -3432,6 +4967,18 @@ dependencies = [ "libc", ] +[[package]] +name = "ipconfig" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" +dependencies = [ + "socket2 0.5.10", + "widestring", + "windows-sys 0.48.0", + "winreg", +] + [[package]] name = "ipnet" version = "2.11.0" @@ -3496,6 +5043,28 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.34" @@ -3522,14 +5091,43 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" dependencies = [ + "jsonrpsee-client-transport", "jsonrpsee-core", + "jsonrpsee-http-client", "jsonrpsee-proc-macros", "jsonrpsee-server", "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", "tokio", "tracing", ] +[[package]] +name = "jsonrpsee-client-transport" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" +dependencies = [ + "base64 0.22.1", + "futures-channel", + "futures-util", + "gloo-net", + "http 1.3.1", + "jsonrpsee-core", + "pin-project", + "rustls 0.23.31", + "rustls-pki-types", + "rustls-platform-verifier", + "soketto", + "thiserror 2.0.16", + "tokio", + "tokio-rustls 0.26.3", + "tokio-util", + "tracing", + "url", +] + [[package]] name = "jsonrpsee-core" version = "0.26.0" @@ -3538,6 +5136,7 @@ checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" dependencies = [ "async-trait", "bytes", + "futures-timer", "futures-util", "http 1.3.1", "http-body 1.0.1", @@ -3549,10 +5148,35 @@ dependencies = [ "rustc-hash 2.1.1", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", "tokio", + "tokio-stream", "tower", "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" +dependencies = [ + "base64 0.22.1", + "http-body 1.0.1", + "hyper 1.7.0", + "hyper-rustls 0.27.7", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "rustls 0.23.31", + "rustls-platform-verifier", + "serde", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tower", + "url", ] [[package]] @@ -3587,7 +5211,7 @@ dependencies = [ "serde", "serde_json", "soketto", - "thiserror", + "thiserror 2.0.16", "tokio", "tokio-stream", "tokio-util", @@ -3604,7 +5228,48 @@ dependencies = [ "http 1.3.1", "serde", "serde_json", - "thiserror", + 
"thiserror 2.0.16", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "tower", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" +dependencies = [ + "http 1.3.1", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "tower", + "url", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64 0.22.1", + "js-sys", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", ] [[package]] @@ -3619,6 +5284,7 @@ dependencies = [ "once_cell", "serdect", "sha2 0.10.9", + "signature 2.2.0", ] [[package]] @@ -3640,6 +5306,26 @@ dependencies = [ "sha3-asm", ] +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -3661,6 +5347,18 @@ version = "0.2.175" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" +[[package]] +name = "libgit2-sys" +version = "0.18.2+1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + [[package]] name = "libloading" version = "0.8.8" @@ -3677,6 +5375,36 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +[[package]] +name = "libp2p-identity" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3104e13b51e4711ff5738caa1fb54467c8604c2e94d607e27745bcf709068774" +dependencies = [ + "asn1_der", + "bs58", + "ed25519-dalek", + "hkdf", + "k256", + "multihash", + "quick-protobuf", + "sha2 0.10.9", + "thiserror 2.0.16", + "tracing", + "zeroize", +] + +[[package]] +name = "libproc" +version = "0.14.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78a09b56be5adbcad5aa1197371688dc6bb249a26da3bca2011ee2fb987ebfb" +dependencies = [ + "bindgen 0.70.1", + "errno", + "libc", +] + [[package]] name = "libredox" version = "0.1.10" @@ -3756,6 +5484,22 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linked_hash_set" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bae85b5be22d9843c80e5fc80e9b64c8a3b1f98f867c709956eca3efff4e92e2" +dependencies = [ + "linked-hash-map", + "serde", +] + [[package]] name = "linux-raw-sys" version = "0.4.15" @@ -3768,12 +5512,24 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +[[package]] +name = "litemap" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" + [[package]] name = "litemap" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +[[package]] +name = "litrs" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" + [[package]] name = "lock_api" version = "0.4.13" @@ -3782,6 +5538,7 @@ checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", + "serde", ] [[package]] @@ -3790,6 +5547,19 @@ version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +[[package]] +name = "loom" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber 0.3.20", +] + [[package]] name = "lru" version = "0.12.5" @@ -3814,6 +5584,40 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" +[[package]] +name = "lz4" +version = "1.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a20b523e860d03443e98350ceaac5e71c6ba89aea7d960769ec3ce37f4de5af4" +dependencies = [ + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.11.1+lz4-1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "lz4_flex" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08ab2867e3eeeca90e844d1940eab391c9dc5228783db2ed999acbc0a9ed375a" + +[[package]] +name = "mach2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44" +dependencies = [ + "libc", +] + [[package]] name = "macro-string" version = "0.1.4" @@ -3850,6 +5654,24 @@ version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +[[package]] +name = "memmap2" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843a98750cd611cc2965a8213b53b43e715f13c37a9e096c6408e69990961db7" +dependencies = [ + "libc", +] + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + [[package]] name = "metrics" version = "0.24.2" @@ -3872,6 +5694,83 @@ 
dependencies = [ "syn 2.0.106", ] +[[package]] +name = "metrics-exporter-prometheus" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd7399781913e5393588a8d8c6a2867bf85fb38eaf2502fdce465aad2dc6f034" +dependencies = [ + "base64 0.22.1", + "indexmap 2.11.4", + "metrics", + "metrics-util", + "quanta", + "thiserror 1.0.69", +] + +[[package]] +name = "metrics-process" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a82c8add4382f29a122fa64fff1891453ed0f6b2867d971e7d60cb8dfa322ff" +dependencies = [ + "libc", + "libproc", + "mach2", + "metrics", + "once_cell", + "procfs", + "rlimit", + "windows 0.58.0", +] + +[[package]] +name = "metrics-util" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8496cc523d1f94c1385dd8f0f0c2c480b2b8aeccb5b7e4485ad6365523ae376" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", + "hashbrown 0.15.5", + "metrics", + "quanta", + "rand 0.9.2", + "rand_xoshiro", + "sketches-ddsketch", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "mini-moka" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c325dfab65f261f386debee8b0969da215b3fa0037e74c8a1234db7ba986d803" +dependencies = [ + "crossbeam-channel", + "crossbeam-utils", + "dashmap 5.5.3", + "skeptic", + "smallvec", + "tagptr", + "triomphe", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -3894,10 +5793,97 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", + "log", "wasi 0.11.1+wasi-snapshot-preview1", "windows-sys 0.59.0", ] +[[package]] +name = "modular-bitfield" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a53d79ba8304ac1c4f9eb3b9d281f21f7be9d4626f72ce7df4ad8fbde4f38a74" +dependencies = [ + "modular-bitfield-impl", + "static_assertions", +] + +[[package]] +name = "modular-bitfield-impl" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a7d5f7076603ebc68de2dc6a650ec331a062a13abaa346975be747bbfa4b789" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "moka" +version = "0.12.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926" +dependencies = [ + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "loom", + "parking_lot", + "portable-atomic", + "rustc_version 0.4.1", + "smallvec", + "tagptr", + "thiserror 1.0.69", + "uuid", +] + +[[package]] +name = "more-asserts" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fafa6961cabd9c63bcd77a45d7e3b7f3b552b70417831fb0f56db717e72407e" + +[[package]] +name = "multiaddr" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961" 
+dependencies = [ + "arrayref", + "byteorder", + "data-encoding", + "libp2p-identity", + "multibase", + "multihash", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint", + "url", +] + +[[package]] +name = "multibase" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b3539ec3c1f04ac9748a260728e855f261b4977f5c3406612c884564f329404" +dependencies = [ + "base-x", + "data-encoding", + "data-encoding-macro", +] + +[[package]] +name = "multihash" +version = "0.19.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b430e7953c29dd6a09afc29ff0bb69c6e306329ee6794700aee27b76a1aea8d" +dependencies = [ + "core2", + "unsigned-varint", +] + [[package]] name = "native-tls" version = "0.2.14" @@ -3925,6 +5911,39 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "notify" +version = "8.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" +dependencies = [ + "bitflags 2.9.4", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "notify-types", + "walkdir", + "windows-sys 0.60.2", +] + +[[package]] +name = "notify-types" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" + +[[package]] +name = "ntapi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" +dependencies = [ + "winapi", +] + [[package]] name = "nu-ansi-term" version = "0.50.1" @@ -3956,6 +5975,7 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", + "serde", ] [[package]] @@ -4063,6 +6083,15 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + [[package]] name = "nybbles" version = "0.4.5" @@ -4110,10 +6139,13 @@ checksum = "d9ade20c592484ba1ea538006e0454284174447a3adf9bb59fa99ed512f95493" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", - "derive_more", - "thiserror", + "alloy-serde", + "derive_more 2.0.1", + "serde", + "serde_with", + "thiserror 2.0.16", ] [[package]] @@ -4125,13 +6157,13 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-network", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-rpc-types-eth", "alloy-serde", - "derive_more", + "derive_more 2.0.1", "serde", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -4142,12 +6174,31 @@ checksum = "f80108e3b36901200a4c5df1db1ee9ef6ce685b59ea79d7be1713c845e3765da" dependencies = [ "alloy-consensus", "alloy-network", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-provider", "alloy-rpc-types-eth", "alloy-signer", "op-alloy-consensus 0.20.0", - "op-alloy-rpc-types", + "op-alloy-rpc-types 0.20.0", +] + +[[package]] +name = "op-alloy-rpc-types" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9076d4fcb8e260cec8ad01cd155200c0dbb562e62adb553af245914f30854e29" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-network-primitives", + "alloy-primitives 1.3.1", + "alloy-rpc-types-eth", + 
"alloy-serde", + "derive_more 2.0.1", + "op-alloy-consensus 0.19.1", + "serde", + "serde_json", + "thiserror 2.0.16", ] [[package]] @@ -4159,14 +6210,33 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-network-primitives", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-eth", "alloy-serde", - "derive_more", + "derive_more 2.0.1", "op-alloy-consensus 0.20.0", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", +] + +[[package]] +name = "op-alloy-rpc-types-engine" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4256b1eda5766a9fa7de5874e54515994500bef632afda41e940aed015f9455" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-engine", + "derive_more 2.0.1", + "ethereum_ssz", + "ethereum_ssz_derive", + "op-alloy-consensus 0.19.1", + "snap", + "thiserror 2.0.16", ] [[package]] @@ -4230,6 +6300,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + [[package]] name = "outref" version = "0.5.2" @@ -4259,6 +6335,16 @@ dependencies = [ "sha2 0.10.9", ] +[[package]] +name = "page_size" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "parity-scale-codec" version = "3.7.5" @@ -4268,6 +6354,7 @@ dependencies = [ "arrayvec", "bitvec", "byte-slice-cast", + "bytes", "const_format", "impl-trait-for-tuples", "parity-scale-codec-derive", @@ -4348,13 +6435,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] -name = "pem-rfc7468" -version = "0.7.0" +name = "pem" +version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" dependencies = [ - "base64ct", -] + "base64 0.22.1", + "serde", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] [[package]] name = "percent-encoding" @@ -4369,10 +6466,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21e0a3a33733faeaf8651dfee72dd0f388f0c8e5ad496a3478fa5a922f49cfa8" dependencies = [ "memchr", - "thiserror", + "thiserror 2.0.16", "ucd-trie", ] +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures", + "rustc_version 0.4.1", +] + [[package]] name = "phf" version = "0.11.3" @@ -4485,6 +6592,24 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "pollster" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3" + +[[package]] +name = "polyval" +version = "0.6.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + [[package]] name = "portable-atomic" version = "1.11.1" @@ -4497,7 +6622,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" dependencies = [ - "zerovec", + "zerovec 0.11.4", ] [[package]] @@ -4515,6 +6640,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + [[package]] name = "prettyplease" version = "0.2.37" @@ -4542,7 +6677,7 @@ checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", - "uint", + "uint 0.9.5", ] [[package]] @@ -4551,7 +6686,7 @@ version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ - "toml_edit", + "toml_edit 0.23.6", ] [[package]] @@ -4585,6 +6720,31 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "procfs" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" +dependencies = [ + "bitflags 2.9.4", + "chrono", + "flate2", + "hex", + "procfs-core", + "rustix 0.38.44", +] + +[[package]] +name = "procfs-core" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" +dependencies = [ + "bitflags 2.9.4", + "chrono", + "hex", +] + [[package]] name = "proptest" version = "1.7.0" @@ -4605,12 +6765,47 @@ dependencies = [ "unarray", ] +[[package]] +name = "pulldown-cmark" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" +dependencies = [ + "bitflags 2.9.4", + "memchr", + "unicase", +] + +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi 0.11.1+wasi-snapshot-preview1", + "web-sys", + "winapi", +] + [[package]] name = "quick-error" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" +[[package]] +name = "quick-protobuf" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" +dependencies = [ + "byteorder", +] + [[package]] name = "quinn" version = "0.11.9" @@ -4625,7 +6820,7 @@ dependencies = [ "rustc-hash 2.1.1", "rustls 0.23.31", "socket2 0.6.0", - "thiserror", + "thiserror 2.0.16", "tokio", "tracing", "web-time", @@ -4646,7 +6841,7 @@ dependencies = [ "rustls 0.23.31", "rustls-pki-types", "slab", - "thiserror", + "thiserror 2.0.16", "tinyvec", "tracing", "web-time", @@ -4758,6 +6953,45 @@ dependencies = [ "rand_core 0.9.3", ] +[[package]] 
+name = "rand_xoshiro" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f703f4665700daf5512dcca5f43afa6af89f09db47fb56be587f80636bda2d41" +dependencies = [ + "rand_core 0.9.3", +] + +[[package]] +name = "ratatui" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b" +dependencies = [ + "bitflags 2.9.4", + "cassowary", + "compact_str", + "crossterm 0.28.1", + "indoc", + "instability", + "itertools 0.13.0", + "lru 0.12.5", + "paste", + "strum 0.26.3", + "unicode-segmentation", + "unicode-truncate", + "unicode-width 0.2.0", +] + +[[package]] +name = "raw-cpuid" +version = "11.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" +dependencies = [ + "bitflags 2.9.4", +] + [[package]] name = "rayon" version = "1.11.0" @@ -4808,6 +7042,12 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "recvmsg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" + [[package]] name = "redox_syscall" version = "0.3.5" @@ -4826,6 +7066,28 @@ dependencies = [ "bitflags 2.9.4", ] +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 1.0.69", +] + +[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 2.0.16", +] + [[package]] name = "ref-cast" version = "1.0.24" @@ -4881,6 +7143,16 @@ version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" +[[package]] +name = "regress" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "145bb27393fe455dd64d6cbc8d059adfa392590a45eadf079c01b11857e7b010" +dependencies = [ + "hashbrown 0.15.5", + "memchr", +] + [[package]] name = "reqwest" version = "0.12.23" @@ -4889,7 +7161,9 @@ checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" dependencies = [ "base64 0.22.1", "bytes", + "futures-channel", "futures-core", + "futures-util", "http 1.3.1", "http-body 1.0.1", "http-body-util", @@ -4913,13 +7187,92 @@ dependencies = [ "tokio", "tokio-native-tls", "tokio-rustls 0.26.3", + "tokio-util", "tower", "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", + "webpki-roots 1.0.2", +] + +[[package]] +name = "resolv-conf" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3789b30bd25ba102de4beabd95d21ac45b69b1be7d14522bab988c526d6799" + +[[package]] +name = "reth" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-rpc-types", + "aquamarine", + "clap", + "eyre", + "reth-chainspec", + "reth-cli-runner", + "reth-cli-util", + "reth-consensus", + "reth-consensus-common", + "reth-db", + "reth-ethereum-cli", + "reth-ethereum-payload-builder", + 
"reth-ethereum-primitives", + "reth-evm", + "reth-network", + "reth-network-api", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-ethereum", + "reth-node-metrics", + "reth-payload-builder", + "reth-payload-primitives", + "reth-primitives", + "reth-provider", + "reth-ress-protocol", + "reth-ress-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-convert", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-tasks", + "reth-tokio-util", + "reth-transaction-pool", + "tokio", + "tracing", +] + +[[package]] +name = "reth-basic-payload-builder" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "futures-core", + "futures-util", + "metrics", + "reth-chain-state", + "reth-metrics", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "reth-tasks", + "tokio", + "tracing", ] [[package]] @@ -4929,11 +7282,12 @@ source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229 dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", - "derive_more", + "alloy-primitives 1.3.1", + "derive_more 2.0.1", "metrics", "parking_lot", "pin-project", + "rand 0.9.2", "reth-chainspec", "reth-errors", "reth-ethereum-primitives", @@ -4943,6 +7297,8 @@ dependencies = [ "reth-storage-api", "reth-trie", "revm-database", + "revm-state", + "serde", "tokio", "tokio-stream", "tracing", @@ -4958,10 +7314,10 @@ dependencies = [ "alloy-eips", "alloy-evm", "alloy-genesis", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-trie", "auto_impl", - "derive_more", + "derive_more 2.0.1", "reth-ethereum-forks", "reth-network-peers", "reth-primitives-traits", @@ -4969,315 +7325,2070 @@ dependencies = [ ] [[package]] -name = "reth-codecs-derive" +name = "reth-cli" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "convert_case", - "proc-macro2", - "quote", - "syn 2.0.106", + "alloy-genesis", + "clap", + "eyre", + "reth-cli-runner", + "reth-db", + "serde_json", + "shellexpand", ] [[package]] -name = "reth-consensus" +name = "reth-cli-commands" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ + "alloy-chains", "alloy-consensus", - "alloy-primitives", - "auto_impl", - "reth-execution-types", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "backon", + "clap", + "comfy-table", + "crossterm 0.28.1", + "eyre", + "fdlimit", + "futures", + "human_bytes", + "humantime", + "itertools 0.14.0", + "lz4", + "ratatui", + "reqwest", + "reth-chainspec", + "reth-cli", + "reth-cli-runner", + "reth-cli-util", + "reth-codecs", + "reth-config", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-db-common", + "reth-discv4", + "reth-discv5", + "reth-downloaders", + "reth-ecies", + "reth-era", + "reth-era-downloader", + "reth-era-utils", + "reth-eth-wire", + "reth-etl", + "reth-evm", + "reth-exex", + "reth-fs-util", + "reth-net-nat", + "reth-network", + "reth-network-p2p", + "reth-network-peers", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-events", + "reth-node-metrics", "reth-primitives-traits", - "thiserror", + "reth-provider", + "reth-prune", + "reth-revm", + 
"reth-stages", + "reth-static-file", + "reth-static-file-types", + "reth-trie", + "reth-trie-common", + "reth-trie-db", + "secp256k1 0.30.0", + "serde", + "serde_json", + "tar", + "tokio", + "tokio-stream", + "toml", + "tracing", + "zstd", ] [[package]] -name = "reth-db-models" +name = "reth-cli-runner" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-eips", - "alloy-primitives", - "reth-primitives-traits", + "reth-tasks", + "tokio", + "tracing", ] [[package]] -name = "reth-errors" +name = "reth-cli-util" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "reth-consensus", - "reth-execution-errors", - "reth-storage-errors", - "thiserror", + "alloy-eips", + "alloy-primitives 1.3.1", + "cfg-if", + "eyre", + "libc", + "rand 0.8.5", + "reth-fs-util", + "secp256k1 0.30.0", + "serde", + "thiserror 2.0.16", + "tikv-jemallocator", ] [[package]] -name = "reth-eth-wire-types" +name = "reth-codecs" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-chains", "alloy-consensus", "alloy-eips", - "alloy-hardforks", - "alloy-primitives", - "alloy-rlp", + "alloy-genesis", + "alloy-primitives 1.3.1", + "alloy-trie", "bytes", - "derive_more", - "reth-chainspec", + "modular-bitfield", + "op-alloy-consensus 0.19.1", "reth-codecs-derive", - "reth-ethereum-primitives", - "reth-primitives-traits", + "reth-zstd-compressors", "serde", - "thiserror", ] [[package]] -name = "reth-ethereum-forks" +name = "reth-codecs-derive" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-eip2124", - "alloy-hardforks", - "alloy-primitives", - "auto_impl", - "once_cell", - "rustc-hash 2.1.1", + "convert_case 0.7.1", + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] -name = "reth-ethereum-primitives" +name = "reth-config" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "eyre", + "humantime-serde", + "reth-network-types", + "reth-prune-types", + "reth-stages-types", + "serde", + "toml", + "url", +] + +[[package]] +name = "reth-consensus" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "auto_impl", + "reth-execution-types", + "reth-primitives-traits", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-consensus-common" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", - "alloy-rlp", + "reth-chainspec", + "reth-consensus", "reth-primitives-traits", - "reth-zstd-compressors", - "serde", - "serde_with", ] [[package]] -name = "reth-evm" +name = "reth-consensus-debug-client" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-evm", - "alloy-primitives", + "alloy-json-rpc", + "alloy-primitives 1.3.1", + "alloy-provider", + "alloy-rpc-types-engine", "auto_impl", - "derive_more", - "futures-util", - "reth-execution-errors", - "reth-execution-types", + 
"derive_more 2.0.1", + "eyre", + "futures", + "reqwest", + "reth-node-api", "reth-primitives-traits", - "reth-storage-api", - "reth-storage-errors", - "reth-trie-common", - "revm", + "reth-tracing", + "ringbuffer", + "serde", + "serde_json", + "tokio", ] [[package]] -name = "reth-execution-errors" +name = "reth-db" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-evm", - "alloy-primitives", - "alloy-rlp", - "nybbles", + "alloy-primitives 1.3.1", + "derive_more 2.0.1", + "eyre", + "metrics", + "page_size", + "reth-db-api", + "reth-fs-util", + "reth-libmdbx", + "reth-metrics", + "reth-nippy-jar", + "reth-static-file-types", "reth-storage-errors", - "thiserror", + "reth-tracing", + "rustc-hash 2.1.1", + "strum 0.27.2", + "sysinfo", + "thiserror 2.0.16", ] [[package]] -name = "reth-execution-types" +name = "reth-db-api" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-primitives", - "derive_more", + "alloy-genesis", + "alloy-primitives 1.3.1", + "bytes", + "derive_more 2.0.1", + "metrics", + "modular-bitfield", + "parity-scale-codec", + "reth-codecs", + "reth-db-models", "reth-ethereum-primitives", "reth-primitives-traits", + "reth-prune-types", + "reth-stages-types", + "reth-storage-errors", "reth-trie-common", - "revm", + "roaring", "serde", - "serde_with", ] [[package]] -name = "reth-fs-util" +name = "reth-db-common" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ + "alloy-consensus", + "alloy-genesis", + "alloy-primitives 1.3.1", + "boyer-moore-magiclen", + "eyre", + "reth-chainspec", + "reth-codecs", + "reth-config", + "reth-db-api", + "reth-etl", + "reth-execution-errors", + "reth-fs-util", + "reth-node-types", + "reth-primitives-traits", + "reth-provider", + "reth-stages-types", + "reth-static-file-types", + "reth-trie", + "reth-trie-db", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", + "tracing", ] [[package]] -name = "reth-metrics" +name = "reth-db-models" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "metrics", - "metrics-derive", + "alloy-eips", + "alloy-primitives 1.3.1", + "bytes", + "modular-bitfield", + "reth-codecs", + "reth-primitives-traits", + "serde", ] [[package]] -name = "reth-net-banlist" +name = "reth-discv4" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", + "alloy-rlp", + "discv5", + "enr", + "generic-array", + "itertools 0.14.0", + "parking_lot", + "rand 0.8.5", + "reth-ethereum-forks", + "reth-net-banlist", + "reth-net-nat", + "reth-network-peers", + "schnellru", + "secp256k1 0.30.0", + "serde", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tracing", ] [[package]] -name = "reth-network-api" +name = "reth-discv5" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rpc-types-admin", - "alloy-rpc-types-eth", - "auto_impl", - "derive_more", + "alloy-primitives 1.3.1", + "alloy-rlp", + "derive_more 2.0.1", + "discv5", "enr", "futures", - 
"reth-eth-wire-types", + "itertools 0.14.0", + "metrics", + "rand 0.9.2", + "reth-chainspec", + "reth-ethereum-forks", + "reth-metrics", + "reth-network-peers", + "secp256k1 0.30.0", + "thiserror 2.0.16", + "tokio", + "tracing", +] + +[[package]] +name = "reth-dns-discovery" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "data-encoding", + "enr", + "hickory-resolver", + "linked_hash_set", + "parking_lot", "reth-ethereum-forks", - "reth-network-p2p", "reth-network-peers", - "reth-network-types", "reth-tokio-util", - "thiserror", + "schnellru", + "secp256k1 0.30.0", + "serde", + "serde_with", + "thiserror 2.0.16", "tokio", "tokio-stream", + "tracing", ] [[package]] -name = "reth-network-p2p" +name = "reth-downloaders" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", - "auto_impl", - "derive_more", + "alloy-primitives 1.3.1", + "alloy-rlp", "futures", + "futures-util", + "itertools 0.14.0", + "metrics", + "pin-project", + "rayon", + "reth-config", "reth-consensus", - "reth-eth-wire-types", - "reth-ethereum-primitives", + "reth-metrics", + "reth-network-p2p", "reth-network-peers", - "reth-network-types", "reth-primitives-traits", - "reth-storage-errors", + "reth-storage-api", + "reth-tasks", + "thiserror 2.0.16", "tokio", + "tokio-stream", + "tokio-util", "tracing", ] [[package]] -name = "reth-network-peers" +name = "reth-ecies" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", + "aes", + "alloy-primitives 1.3.1", "alloy-rlp", + "block-padding", + "byteorder", + "cipher", + "concat-kdf", + "ctr", + "digest 0.10.7", + "futures", + "generic-array", + "hmac", + "pin-project", + "rand 0.8.5", + "reth-network-peers", "secp256k1 0.30.0", - "serde_with", - "thiserror", - "url", + "sha2 0.10.9", + "sha3", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", + "typenum", ] [[package]] -name = "reth-network-types" +name = "reth-engine-local" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-eip2124", - "reth-net-banlist", - "reth-network-peers", - "serde_json", + "alloy-consensus", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "eyre", + "futures-util", + "reth-chainspec", + "reth-engine-primitives", + "reth-ethereum-engine-primitives", + "reth-payload-builder", + "reth-payload-primitives", + "reth-provider", + "reth-transaction-pool", + "tokio", + "tokio-stream", "tracing", ] [[package]] -name = "reth-primitives-traits" +name = "reth-engine-primitives" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-genesis", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-trie", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", "auto_impl", - "bytes", - "derive_more", - "once_cell", - "op-alloy-consensus 0.19.1", - "revm-bytecode", - "revm-primitives", - "revm-state", - "secp256k1 0.30.0", + "futures", + "reth-chain-state", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-payload-builder-primitives", + 
"reth-payload-primitives", + "reth-primitives-traits", + "reth-trie-common", "serde", - "serde_with", - "thiserror", + "thiserror 2.0.16", + "tokio", ] [[package]] -name = "reth-prune-types" +name = "reth-engine-service" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", - "derive_more", - "serde", - "thiserror", + "futures", + "pin-project", + "reth-chainspec", + "reth-consensus", + "reth-engine-primitives", + "reth-engine-tree", + "reth-ethereum-primitives", + "reth-evm", + "reth-network-p2p", + "reth-node-types", + "reth-payload-builder", + "reth-provider", + "reth-prune", + "reth-stages-api", + "reth-tasks", + "thiserror 2.0.16", ] [[package]] -name = "reth-revm" +name = "reth-engine-tree" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-engine", + "derive_more 2.0.1", + "futures", + "metrics", + "mini-moka", + "parking_lot", + "rayon", + "reth-chain-state", + "reth-consensus", + "reth-db", + "reth-engine-primitives", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-metrics", + "reth-network-p2p", + "reth-payload-builder", + "reth-payload-primitives", "reth-primitives-traits", - "reth-storage-api", - "reth-storage-errors", + "reth-provider", + "reth-prune", + "reth-revm", + "reth-stages-api", + "reth-tasks", + "reth-trie", + "reth-trie-db", + "reth-trie-parallel", + "reth-trie-sparse", + "reth-trie-sparse-parallel", "revm", + "revm-primitives 20.2.1", + "schnellru", + "smallvec", + "thiserror 2.0.16", + "tokio", + "tracing", ] [[package]] -name = "reth-rpc-convert" +name = "reth-engine-util" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", - "alloy-json-rpc", - "alloy-network", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-signer", + "alloy-rpc-types-engine", + "eyre", + "futures", + "itertools 0.14.0", + "pin-project", + "reth-chainspec", + "reth-engine-primitives", + "reth-engine-tree", + "reth-errors", + "reth-evm", + "reth-fs-util", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-era" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "ethereum_ssz", + "ethereum_ssz_derive", + "reth-ethereum-primitives", + "snap", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-era-downloader" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "bytes", + "eyre", + "futures-util", + "reqwest", + "reth-fs-util", + "sha2 0.10.9", + "tokio", +] + +[[package]] +name = "reth-era-utils" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "eyre", + "futures-util", + "reth-db-api", + "reth-era", + "reth-era-downloader", + "reth-etl", + 
"reth-fs-util", + "reth-primitives-traits", + "reth-provider", + "reth-stages-types", + "reth-storage-api", + "tokio", + "tracing", +] + +[[package]] +name = "reth-errors" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "reth-consensus", + "reth-execution-errors", + "reth-storage-errors", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-eth-wire" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-chains", + "alloy-primitives 1.3.1", + "alloy-rlp", + "bytes", + "derive_more 2.0.1", + "futures", + "pin-project", + "reth-codecs", + "reth-ecies", + "reth-eth-wire-types", + "reth-ethereum-forks", + "reth-metrics", + "reth-network-peers", + "reth-primitives-traits", + "serde", + "snap", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-eth-wire-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-hardforks", + "alloy-primitives 1.3.1", + "alloy-rlp", + "bytes", + "derive_more 2.0.1", + "reth-chainspec", + "reth-codecs-derive", + "reth-ethereum-primitives", + "reth-primitives-traits", + "serde", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-ethereum-cli" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "clap", + "eyre", + "reth-chainspec", + "reth-cli", + "reth-cli-commands", + "reth-cli-runner", + "reth-db", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-ethereum", + "reth-node-metrics", + "reth-tracing", + "tracing", +] + +[[package]] +name = "reth-ethereum-consensus" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-execution-types", + "reth-primitives-traits", + "tracing", +] + +[[package]] +name = "reth-ethereum-engine-primitives" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-engine", + "reth-engine-primitives", + "reth-ethereum-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "serde", + "sha2 0.10.9", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-ethereum-forks" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eip2124", + "alloy-hardforks", + "alloy-primitives 1.3.1", + "auto_impl", + "once_cell", + "rustc-hash 2.1.1", +] + +[[package]] +name = "reth-ethereum-payload-builder" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-engine", + "reth-basic-payload-builder", + "reth-chainspec", + "reth-consensus-common", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-payload-builder", + 
"reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-payload-validator", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "reth-transaction-pool", + "revm", + "tracing", +] + +[[package]] +name = "reth-ethereum-primitives" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "modular-bitfield", + "reth-codecs", + "reth-primitives-traits", + "reth-zstd-compressors", + "serde", + "serde_with", +] + +[[package]] +name = "reth-etl" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "rayon", + "reth-db-api", + "tempfile", +] + +[[package]] +name = "reth-evm" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives 1.3.1", + "auto_impl", + "derive_more 2.0.1", + "futures-util", + "metrics", + "reth-execution-errors", + "reth-execution-types", + "reth-metrics", + "reth-primitives-traits", + "reth-storage-api", + "reth-storage-errors", + "reth-trie-common", + "revm", +] + +[[package]] +name = "reth-evm-ethereum" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "reth-chainspec", + "reth-ethereum-forks", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-primitives-traits", + "reth-storage-errors", + "revm", +] + +[[package]] +name = "reth-execution-errors" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-evm", + "alloy-primitives 1.3.1", + "alloy-rlp", + "nybbles", + "reth-storage-errors", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-execution-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives 1.3.1", + "derive_more 2.0.1", + "reth-ethereum-primitives", + "reth-primitives-traits", + "reth-trie-common", + "revm", + "serde", + "serde_with", +] + +[[package]] +name = "reth-exex" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "eyre", + "futures", + "itertools 0.14.0", + "metrics", + "parking_lot", + "reth-chain-state", + "reth-chainspec", + "reth-config", + "reth-ethereum-primitives", + "reth-evm", + "reth-exex-types", + "reth-fs-util", + "reth-metrics", + "reth-node-api", + "reth-node-core", + "reth-payload-builder", + "reth-primitives-traits", + "reth-provider", + "reth-prune-types", + "reth-revm", + "reth-stages-api", + "reth-tasks", + "reth-tracing", + "rmp-serde", + "thiserror 2.0.16", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-exex-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "reth-chain-state", + 
"reth-execution-types", + "reth-primitives-traits", + "serde", + "serde_with", +] + +[[package]] +name = "reth-fs-util" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "serde", + "serde_json", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-invalid-block-hooks" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-debug", + "eyre", + "futures", + "jsonrpsee", + "pretty_assertions", + "reth-engine-primitives", + "reth-evm", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc-api", + "reth-tracing", + "reth-trie", + "revm-bytecode", + "revm-database", + "serde", + "serde_json", +] + +[[package]] +name = "reth-ipc" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "bytes", + "futures", + "futures-util", + "interprocess", + "jsonrpsee", + "pin-project", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tracing", +] + +[[package]] +name = "reth-libmdbx" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "bitflags 2.9.4", + "byteorder", + "dashmap 6.1.0", + "derive_more 2.0.1", + "indexmap 2.11.4", + "parking_lot", + "reth-mdbx-sys", + "smallvec", + "thiserror 2.0.16", + "tracing", +] + +[[package]] +name = "reth-mdbx-sys" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "bindgen 0.70.1", + "cc", +] + +[[package]] +name = "reth-metrics" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "futures", + "metrics", + "metrics-derive", + "tokio", + "tokio-util", +] + +[[package]] +name = "reth-net-banlist" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", +] + +[[package]] +name = "reth-net-nat" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "futures-util", + "if-addrs", + "reqwest", + "serde_with", + "thiserror 2.0.16", + "tokio", + "tracing", +] + +[[package]] +name = "reth-network" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "aquamarine", + "auto_impl", + "derive_more 2.0.1", + "discv5", + "enr", + "futures", + "itertools 0.14.0", + "metrics", + "parking_lot", + "pin-project", + "rand 0.8.5", + "rand 0.9.2", + "reth-chainspec", + "reth-consensus", + "reth-discv4", + "reth-discv5", + "reth-dns-discovery", + "reth-ecies", + "reth-eth-wire", + "reth-eth-wire-types", + "reth-ethereum-forks", + "reth-ethereum-primitives", + "reth-fs-util", + "reth-metrics", + "reth-net-banlist", + "reth-network-api", + "reth-network-p2p", + "reth-network-peers", + "reth-network-types", + "reth-primitives-traits", + "reth-storage-api", + "reth-tasks", + "reth-tokio-util", + "reth-transaction-pool", + "rustc-hash 2.1.1", + "schnellru", + 
"secp256k1 0.30.0", + "serde", + "smallvec", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-network-api" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "alloy-rpc-types-admin", + "alloy-rpc-types-eth", + "auto_impl", + "derive_more 2.0.1", + "enr", + "futures", + "reth-eth-wire-types", + "reth-ethereum-forks", + "reth-network-p2p", + "reth-network-peers", + "reth-network-types", + "reth-tokio-util", + "serde", + "thiserror 2.0.16", + "tokio", + "tokio-stream", +] + +[[package]] +name = "reth-network-p2p" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "auto_impl", + "derive_more 2.0.1", + "futures", + "reth-consensus", + "reth-eth-wire-types", + "reth-ethereum-primitives", + "reth-network-peers", + "reth-network-types", + "reth-primitives-traits", + "reth-storage-errors", + "tokio", + "tracing", +] + +[[package]] +name = "reth-network-peers" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "alloy-rlp", + "enr", + "secp256k1 0.30.0", + "serde_with", + "thiserror 2.0.16", + "tokio", + "url", +] + +[[package]] +name = "reth-network-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eip2124", + "humantime-serde", + "reth-net-banlist", + "reth-network-peers", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "reth-nippy-jar" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "anyhow", + "bincode", + "derive_more 2.0.1", + "lz4_flex", + "memmap2", + "reth-fs-util", + "serde", + "thiserror 2.0.16", + "tracing", + "zstd", +] + +[[package]] +name = "reth-node-api" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-rpc-types-engine", + "eyre", + "reth-basic-payload-builder", + "reth-consensus", + "reth-db-api", + "reth-engine-primitives", + "reth-evm", + "reth-network-api", + "reth-node-core", + "reth-node-types", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-provider", + "reth-tasks", + "reth-tokio-util", + "reth-transaction-pool", +] + +[[package]] +name = "reth-node-builder" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-provider", + "alloy-rpc-types", + "alloy-rpc-types-engine", + "aquamarine", + "eyre", + "fdlimit", + "futures", + "jsonrpsee", + "rayon", + "reth-basic-payload-builder", + "reth-chain-state", + "reth-chainspec", + "reth-cli-util", + "reth-config", + "reth-consensus", + "reth-consensus-debug-client", + "reth-db-api", + "reth-db-common", + "reth-downloaders", + "reth-engine-local", + "reth-engine-primitives", + "reth-engine-service", + "reth-engine-tree", + "reth-engine-util", + "reth-evm", + "reth-exex", + "reth-fs-util", + "reth-invalid-block-hooks", + "reth-network", + 
"reth-network-api", + "reth-network-p2p", + "reth-node-api", + "reth-node-core", + "reth-node-ethstats", + "reth-node-events", + "reth-node-metrics", + "reth-payload-builder", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-engine-api", + "reth-rpc-eth-types", + "reth-rpc-layer", + "reth-stages", + "reth-static-file", + "reth-tasks", + "reth-tokio-util", + "reth-tracing", + "reth-transaction-pool", + "secp256k1 0.30.0", + "serde_json", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-node-core" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "clap", + "derive_more 2.0.1", + "dirs-next", + "eyre", + "futures", + "humantime", + "rand 0.9.2", + "reth-chainspec", + "reth-cli-util", + "reth-config", + "reth-consensus", + "reth-db", + "reth-discv4", + "reth-discv5", + "reth-engine-local", + "reth-engine-primitives", + "reth-ethereum-forks", + "reth-net-nat", + "reth-network", + "reth-network-p2p", + "reth-network-peers", + "reth-primitives-traits", + "reth-prune-types", + "reth-rpc-convert", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-stages-types", + "reth-storage-api", + "reth-storage-errors", + "reth-tracing", + "reth-transaction-pool", + "secp256k1 0.30.0", + "serde", + "shellexpand", + "strum 0.27.2", + "thiserror 2.0.16", + "toml", + "tracing", + "url", + "vergen", + "vergen-git2", +] + +[[package]] +name = "reth-node-ethereum" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-network", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "eyre", + "reth-chainspec", + "reth-engine-local", + "reth-engine-primitives", + "reth-ethereum-consensus", + "reth-ethereum-engine-primitives", + "reth-ethereum-payload-builder", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-network", + "reth-node-api", + "reth-node-builder", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-tracing", + "reth-transaction-pool", + "revm", + "tokio", +] + +[[package]] +name = "reth-node-ethstats" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "chrono", + "futures-util", + "reth-chain-state", + "reth-network-api", + "reth-primitives-traits", + "reth-storage-api", + "reth-transaction-pool", + "serde", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tokio-tungstenite", + "tracing", + "url", +] + +[[package]] +name = "reth-node-events" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "derive_more 2.0.1", + "futures", + "humantime", + "pin-project", + "reth-engine-primitives", + "reth-network-api", + "reth-primitives-traits", + "reth-prune-types", + "reth-stages", + "reth-static-file-types", + "reth-storage-api", + "tokio", + "tracing", +] + +[[package]] +name 
= "reth-node-metrics" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "eyre", + "http 1.3.1", + "jsonrpsee-server", + "metrics", + "metrics-exporter-prometheus", + "metrics-process", + "metrics-util", + "procfs", + "reth-metrics", + "reth-tasks", + "tikv-jemalloc-ctl", + "tokio", + "tower", + "tracing", +] + +[[package]] +name = "reth-node-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "reth-chainspec", + "reth-db-api", + "reth-engine-primitives", + "reth-payload-primitives", + "reth-primitives-traits", +] + +[[package]] +name = "reth-optimism-chainspec" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-hardforks", + "alloy-primitives 1.3.1", + "derive_more 2.0.1", + "miniz_oxide", + "op-alloy-consensus 0.19.1", + "op-alloy-rpc-types 0.19.1", + "reth-chainspec", + "reth-ethereum-forks", + "reth-network-peers", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "serde", + "serde_json", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-optimism-consensus" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-trie", + "reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-execution-types", + "reth-optimism-chainspec", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-storage-api", + "reth-storage-errors", + "reth-trie-common", + "revm", + "thiserror 2.0.16", + "tracing", +] + +[[package]] +name = "reth-optimism-evm" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-op-evm", + "alloy-primitives 1.3.1", + "op-alloy-consensus 0.19.1", + "op-alloy-rpc-types-engine", + "op-revm", + "reth-chainspec", + "reth-evm", + "reth-execution-errors", + "reth-execution-types", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-storage-errors", + "revm", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-optimism-forks" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-op-hardforks", + "alloy-primitives 1.3.1", + "once_cell", + "reth-ethereum-forks", +] + +[[package]] +name = "reth-optimism-primitives" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "bytes", + "op-alloy-consensus 0.19.1", + "reth-codecs", + "reth-primitives-traits", + "reth-zstd-compressors", + "serde", + "serde_with", +] + +[[package]] +name = "reth-payload-builder" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-rpc-types", + "futures-util", + "metrics", + "reth-chain-state", + 
"reth-ethereum-engine-primitives", + "reth-metrics", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-payload-builder-primitives" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "pin-project", + "reth-payload-primitives", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-payload-primitives" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "auto_impl", + "op-alloy-rpc-types-engine", + "reth-chain-state", + "reth-chainspec", + "reth-errors", + "reth-primitives-traits", + "serde", + "thiserror 2.0.16", + "tokio", +] + +[[package]] +name = "reth-payload-validator" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-rpc-types-engine", + "reth-primitives-traits", +] + +[[package]] +name = "reth-primitives" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "c-kzg", + "once_cell", + "reth-ethereum-forks", + "reth-ethereum-primitives", + "reth-primitives-traits", + "reth-static-file-types", +] + +[[package]] +name = "reth-primitives-traits" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-trie", + "auto_impl", + "byteorder", + "bytes", + "derive_more 2.0.1", + "modular-bitfield", + "once_cell", + "op-alloy-consensus 0.19.1", + "rayon", + "reth-codecs", + "revm-bytecode", + "revm-primitives 20.2.1", + "revm-state", + "secp256k1 0.30.0", + "serde", + "serde_with", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-provider" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "dashmap 6.1.0", + "eyre", + "itertools 0.14.0", + "metrics", + "notify", + "parking_lot", + "rayon", + "reth-chain-state", + "reth-chainspec", + "reth-codecs", + "reth-db", + "reth-db-api", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-fs-util", + "reth-metrics", + "reth-nippy-jar", + "reth-node-types", + "reth-primitives-traits", + "reth-prune-types", + "reth-stages-types", + "reth-static-file-types", + "reth-storage-api", + "reth-storage-errors", + "reth-trie", + "reth-trie-db", + "revm-database", + "strum 0.27.2", + "tracing", +] + +[[package]] +name = "reth-prune" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "itertools 0.14.0", + "metrics", + "rayon", + "reth-chainspec", + "reth-config", + "reth-db-api", + "reth-errors", + "reth-exex-types", + "reth-metrics", + "reth-primitives-traits", + "reth-provider", + "reth-prune-types", + "reth-static-file-types", + "reth-tokio-util", + "rustc-hash 
2.1.1", + "thiserror 2.0.16", + "tokio", + "tracing", +] + +[[package]] +name = "reth-prune-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "derive_more 2.0.1", + "modular-bitfield", + "reth-codecs", + "serde", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-ress-protocol" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "alloy-rlp", + "futures", + "reth-eth-wire", + "reth-ethereum-primitives", + "reth-network", + "reth-network-api", + "reth-storage-errors", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-ress-provider" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "eyre", + "futures", + "parking_lot", + "reth-chain-state", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-node-api", + "reth-primitives-traits", + "reth-ress-protocol", + "reth-revm", + "reth-storage-api", + "reth-tasks", + "reth-tokio-util", + "reth-trie", + "schnellru", + "tokio", + "tracing", +] + +[[package]] +name = "reth-revm" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "reth-primitives-traits", + "reth-storage-api", + "reth-storage-errors", + "reth-trie", + "revm", +] + +[[package]] +name = "reth-rpc" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-dyn-abi", + "alloy-eips", + "alloy-evm", + "alloy-genesis", + "alloy-network", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-rpc-types-admin", + "alloy-rpc-types-beacon", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-rpc-types-mev", + "alloy-rpc-types-trace", + "alloy-rpc-types-txpool", + "alloy-serde", + "alloy-signer", + "alloy-signer-local", + "async-trait", + "derive_more 2.0.1", + "futures", + "http 1.3.1", + "http-body 1.0.1", + "hyper 1.7.0", + "itertools 0.14.0", + "jsonrpsee", + "jsonrpsee-types", + "jsonwebtoken", + "parking_lot", + "pin-project", + "reth-chain-state", + "reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-engine-primitives", + "reth-errors", + "reth-evm", + "reth-evm-ethereum", + "reth-execution-types", + "reth-metrics", + "reth-network-api", + "reth-network-peers", + "reth-network-types", + "reth-node-api", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-api", + "reth-rpc-convert", + "reth-rpc-engine-api", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie-common", + "revm", + "revm-inspectors", + "revm-primitives 20.2.1", + "serde", + "serde_json", + "sha2 0.10.9", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tower", + "tracing", + "tracing-futures", +] + +[[package]] +name = "reth-rpc-api" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-genesis", + "alloy-json-rpc", + "alloy-primitives 1.3.1", + 
"alloy-rpc-types", + "alloy-rpc-types-admin", + "alloy-rpc-types-anvil", + "alloy-rpc-types-beacon", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-rpc-types-mev", + "alloy-rpc-types-trace", + "alloy-rpc-types-txpool", + "alloy-serde", + "jsonrpsee", + "reth-chain-state", + "reth-engine-primitives", + "reth-network-peers", + "reth-rpc-eth-api", + "reth-trie-common", +] + +[[package]] +name = "reth-rpc-builder" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-network", + "alloy-provider", + "http 1.3.1", + "jsonrpsee", + "metrics", + "pin-project", + "reth-chain-state", + "reth-chainspec", + "reth-consensus", + "reth-evm", + "reth-ipc", + "reth-metrics", + "reth-network-api", + "reth-node-core", + "reth-primitives-traits", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-layer", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "serde", + "thiserror 2.0.16", + "tokio", + "tokio-util", + "tower", + "tower-http", + "tracing", +] + +[[package]] +name = "reth-rpc-convert" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives 1.3.1", + "alloy-rpc-types-eth", + "alloy-signer", "jsonrpsee-types", "reth-ethereum-primitives", "reth-evm", "reth-primitives-traits", "revm-context", - "thiserror", + "thiserror 2.0.16", +] + +[[package]] +name = "reth-rpc-engine-api" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "async-trait", + "jsonrpsee-core", + "jsonrpsee-types", + "metrics", + "parking_lot", + "reth-chainspec", + "reth-engine-primitives", + "reth-metrics", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-rpc-api", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "serde", + "thiserror 2.0.16", + "tokio", + "tracing", +] + +[[package]] +name = "reth-rpc-eth-api" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-dyn-abi", + "alloy-eips", + "alloy-evm", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-rpc-types-mev", + "alloy-serde", + "async-trait", + "auto_impl", + "dyn-clone", + "futures", + "jsonrpsee", + "jsonrpsee-types", + "parking_lot", + "reth-chain-state", + "reth-chainspec", + "reth-errors", + "reth-evm", + "reth-network-api", + "reth-node-api", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-convert", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie-common", + "revm", + "revm-inspectors", + "tokio", + "tracing", ] [[package]] @@ -5289,12 +9400,12 @@ dependencies = [ "alloy-eips", "alloy-evm", "alloy-network", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-client", "alloy-rpc-types-eth", "alloy-sol-types", "alloy-transport", - "derive_more", + "derive_more 2.0.1", "futures", "itertools 0.14.0", "jsonrpsee-core", @@ -5317,30 +9428,115 @@ dependencies = [ 
"reth-tasks", "reth-transaction-pool", "reth-trie", - "revm", - "revm-inspectors", - "schnellru", - "serde", - "thiserror", + "revm", + "revm-inspectors", + "schnellru", + "serde", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-rpc-layer" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-rpc-types-engine", + "http 1.3.1", + "jsonrpsee-http-client", + "pin-project", + "tower", + "tower-http", + "tracing", +] + +[[package]] +name = "reth-rpc-server-types" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "jsonrpsee-core", + "jsonrpsee-types", + "reth-errors", + "reth-network-api", + "serde", + "strum 0.27.2", +] + +[[package]] +name = "reth-stages" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "bincode", + "eyre", + "futures-util", + "itertools 0.14.0", + "num-traits", + "rayon", + "reqwest", + "reth-codecs", + "reth-config", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-era", + "reth-era-downloader", + "reth-era-utils", + "reth-etl", + "reth-evm", + "reth-execution-types", + "reth-exex", + "reth-fs-util", + "reth-network-p2p", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-prune-types", + "reth-revm", + "reth-stages-api", + "reth-static-file-types", + "reth-storage-errors", + "reth-trie", + "reth-trie-db", + "thiserror 2.0.16", "tokio", - "tokio-stream", "tracing", ] [[package]] -name = "reth-rpc-server-types" +name = "reth-stages-api" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "jsonrpsee-core", - "jsonrpsee-types", + "alloy-primitives 1.3.1", + "aquamarine", + "auto_impl", + "futures-util", + "metrics", + "reth-consensus", "reth-errors", - "reth-network-api", - "serde", - "strum", + "reth-metrics", + "reth-network-p2p", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-stages-types", + "reth-static-file", + "reth-static-file-types", + "reth-tokio-util", + "thiserror 2.0.16", + "tokio", + "tracing", ] [[package]] @@ -5348,8 +9544,32 @@ name = "reth-stages-types" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", + "bytes", + "modular-bitfield", + "reth-codecs", "reth-trie-common", + "serde", +] + +[[package]] +name = "reth-static-file" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "parking_lot", + "rayon", + "reth-codecs", + "reth-db-api", + "reth-primitives-traits", + "reth-provider", + "reth-prune-types", + "reth-stages-types", + "reth-static-file-types", + "reth-storage-errors", + "reth-tokio-util", + "tracing", ] [[package]] @@ -5357,10 +9577,11 @@ name = "reth-static-file-types" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", - 
"derive_more", + "alloy-primitives 1.3.1", + "clap", + "derive_more 2.0.1", "serde", - "strum", + "strum 0.27.2", ] [[package]] @@ -5370,10 +9591,11 @@ source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229 dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-engine", "auto_impl", "reth-chainspec", + "reth-db-api", "reth-db-models", "reth-ethereum-primitives", "reth-execution-types", @@ -5391,14 +9613,14 @@ version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", - "derive_more", + "derive_more 2.0.1", "reth-primitives-traits", "reth-prune-types", "reth-static-file-types", "revm-database-interface", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -5410,8 +9632,10 @@ dependencies = [ "dyn-clone", "futures-util", "metrics", + "pin-project", + "rayon", "reth-metrics", - "thiserror", + "thiserror 2.0.16", "tokio", "tracing", "tracing-futures", @@ -5427,6 +9651,21 @@ dependencies = [ "tracing", ] +[[package]] +name = "reth-tracing" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "clap", + "eyre", + "rolling-file", + "tracing", + "tracing-appender", + "tracing-journald", + "tracing-logfmt", + "tracing-subscriber 0.3.20", +] + [[package]] name = "reth-transaction-pool" version = "1.7.0" @@ -5434,7 +9673,7 @@ source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229 dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "aquamarine", "auto_impl", @@ -5443,6 +9682,7 @@ dependencies = [ "metrics", "parking_lot", "pin-project", + "rand 0.9.2", "reth-chain-state", "reth-chainspec", "reth-eth-wire-types", @@ -5454,13 +9694,13 @@ dependencies = [ "reth-storage-api", "reth-tasks", "revm-interpreter", - "revm-primitives", + "revm-primitives 20.2.1", "rustc-hash 2.1.1", "schnellru", "serde", "serde_json", "smallvec", - "thiserror", + "thiserror 2.0.16", "tokio", "tokio-stream", "tracing", @@ -5473,12 +9713,14 @@ source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229 dependencies = [ "alloy-consensus", "alloy-eips", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-trie", "auto_impl", "itertools 0.14.0", + "metrics", "reth-execution-errors", + "reth-metrics", "reth-primitives-traits", "reth-stages-types", "reth-storage-errors", @@ -5494,38 +9736,98 @@ version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ "alloy-consensus", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-rpc-types-eth", "alloy-serde", "alloy-trie", "bytes", - "derive_more", + "derive_more 2.0.1", "itertools 0.14.0", "nybbles", "rayon", + "reth-codecs", "reth-primitives-traits", "revm-database", "serde", "serde_with", ] +[[package]] +name = "reth-trie-db" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "reth-db-api", + "reth-execution-errors", + "reth-primitives-traits", + "reth-trie", + "tracing", +] + +[[package]] +name = "reth-trie-parallel" +version = "1.7.0" +source = 
"git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "alloy-rlp", + "derive_more 2.0.1", + "itertools 0.14.0", + "metrics", + "rayon", + "reth-db-api", + "reth-execution-errors", + "reth-metrics", + "reth-provider", + "reth-storage-errors", + "reth-trie", + "reth-trie-common", + "reth-trie-db", + "reth-trie-sparse", + "thiserror 2.0.16", + "tokio", + "tracing", +] + [[package]] name = "reth-trie-sparse" version = "1.7.0" source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-trie", "auto_impl", + "metrics", + "rayon", "reth-execution-errors", + "reth-metrics", "reth-primitives-traits", "reth-trie-common", "smallvec", "tracing", ] +[[package]] +name = "reth-trie-sparse-parallel" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-trie", + "metrics", + "rayon", + "reth-execution-errors", + "reth-metrics", + "reth-trie-common", + "reth-trie-sparse", + "smallvec", + "tracing", +] + [[package]] name = "reth-zstd-compressors" version = "1.7.0" @@ -5549,7 +9851,7 @@ dependencies = [ "revm-inspector", "revm-interpreter", "revm-precompile", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", ] @@ -5561,7 +9863,7 @@ checksum = "66c52031b73cae95d84cd1b07725808b5fd1500da3e5e24574a3b2dc13d9f16d" dependencies = [ "bitvec", "phf", - "revm-primitives", + "revm-primitives 20.2.1", "serde", ] @@ -5577,7 +9879,7 @@ dependencies = [ "revm-bytecode", "revm-context-interface", "revm-database-interface", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", "serde", ] @@ -5593,7 +9895,7 @@ dependencies = [ "auto_impl", "either", "revm-database-interface", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", "serde", ] @@ -5607,7 +9909,7 @@ dependencies = [ "alloy-eips", "revm-bytecode", "revm-database-interface", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", "serde", ] @@ -5620,7 +9922,7 @@ checksum = "8c523c77e74eeedbac5d6f7c092e3851dbe9c7fec6f418b85992bd79229db361" dependencies = [ "auto_impl", "either", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", "serde", ] @@ -5639,7 +9941,7 @@ dependencies = [ "revm-database-interface", "revm-interpreter", "revm-precompile", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", "serde", ] @@ -5656,7 +9958,7 @@ dependencies = [ "revm-database-interface", "revm-handler", "revm-interpreter", - "revm-primitives", + "revm-primitives 20.2.1", "revm-state", "serde", "serde_json", @@ -5668,16 +9970,18 @@ version = "0.29.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fdb678b03faa678a7007a7c761a78efa9ca9adcd9434ef3d1ad894aec6e43d1" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-eth", "alloy-rpc-types-trace", "alloy-sol-types", "anstyle", + "boa_engine", + "boa_gc", "colorchoice", "revm", "serde", "serde_json", - "thiserror", + "thiserror 2.0.16", ] [[package]] @@ -5688,7 +9992,7 @@ checksum = "53d6406b711fac73b4f13120f359ed8e65964380dd6182bd12c4c09ad0d4641f" dependencies = [ "revm-bytecode", "revm-context-interface", - "revm-primitives", + "revm-primitives 20.2.1", "serde", ] @@ -5705,25 +10009,43 @@ dependencies = [ "ark-serialize 0.5.0", "arrayref", "aurora-engine-modexp", + 
"blst", "c-kzg", "cfg-if", "k256", "libsecp256k1", "p256 0.13.2", - "revm-primitives", + "revm-primitives 20.2.1", "ripemd", "rug", "secp256k1 0.31.1", "sha2 0.10.9", ] +[[package]] +name = "revm-primitives" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbbc9640790cebcb731289afb7a7d96d16ad94afeb64b5d0b66443bd151e79d6" +dependencies = [ + "alloy-primitives 0.7.7", + "auto_impl", + "bitflags 2.9.4", + "bitvec", + "cfg-if", + "dyn-clone", + "enumn", + "hashbrown 0.14.5", + "hex", +] + [[package]] name = "revm-primitives" version = "20.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5aa29d9da06fe03b249b6419b33968ecdf92ad6428e2f012dc57bcd619b5d94e" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "num_enum", "once_cell", "serde", @@ -5737,7 +10059,7 @@ checksum = "1f64fbacb86008394aaebd3454f9643b7d5a782bd251135e17c5b33da592d84d" dependencies = [ "bitflags 2.9.4", "revm-bytecode", - "revm-primitives", + "revm-primitives 20.2.1", "serde", ] @@ -5776,6 +10098,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "ringbuffer" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3df6368f71f205ff9c33c076d170dd56ebf68e8161c733c0caa07a7a5509ed53" + [[package]] name = "ripemd" version = "0.1.3" @@ -5785,6 +10113,15 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "rlimit" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7043b63bd0cd1aaa628e476b80e6d4023a3b50eb32789f2728908107bd0c793a" +dependencies = [ + "libc", +] + [[package]] name = "rlp" version = "0.5.2" @@ -5795,6 +10132,47 @@ dependencies = [ "rustc-hex", ] +[[package]] +name = "rmp" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "roaring" +version = "0.10.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19e8d2cfa184d94d0726d650a9f4a1be7f9b76ac9fdb954219878dc00c1c1e7b" +dependencies = [ + "bytemuck", + "byteorder", +] + +[[package]] +name = "rolling-file" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8395b4f860856b740f20a296ea2cd4d823e81a2658cf05ef61be22916026a906" +dependencies = [ + "chrono", +] + [[package]] name = "route-recognizer" version = "0.3.1" @@ -5956,6 +10334,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" dependencies = [ "aws-lc-rs", + "log", "once_cell", "ring", "rustls-pki-types", @@ -6016,6 +10395,33 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-platform-verifier" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls 0.23.31", + "rustls-native-certs 0.8.1", + "rustls-platform-verifier-android", + "rustls-webpki 0.103.4", + 
"security-framework 3.4.0", + "security-framework-sys", + "webpki-root-certs 0.26.11", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -6062,6 +10468,21 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +[[package]] +name = "ryu-js" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd29631678d6fb0903b69223673e122c32e9ae559d0960a38d574695ebc0ea15" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "schannel" version = "0.1.28" @@ -6106,6 +10527,12 @@ dependencies = [ "hashbrown 0.13.2", ] +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + [[package]] name = "scopeguard" version = "1.2.0" @@ -6242,6 +10669,10 @@ name = "semver" version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] [[package]] name = "semver-parser" @@ -6252,6 +10683,18 @@ dependencies = [ "pest", ] +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + [[package]] name = "serde" version = "1.0.225" @@ -6307,6 +10750,15 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -6425,12 +10877,42 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "shellexpand" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b1fdf65dd6331831494dd616b30351c38e96e45921a27745cf98490458b90bb" +dependencies = [ + "dirs", +] + [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" +dependencies = [ + "libc", + "mio", + "signal-hook", +] + [[package]] name = "signal-hook-registry" 
version = "1.4.6" @@ -6451,20 +10933,53 @@ dependencies = [ ] [[package]] -name = "signature" -version = "2.2.0" +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.16", + "time", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "skeptic" +version = "0.13.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" dependencies = [ - "digest 0.10.7", - "rand_core 0.6.4", + "bytecount", + "cargo_metadata 0.14.2", + "error-chain", + "glob", + "pulldown-cmark", + "tempfile", + "walkdir", ] [[package]] -name = "siphasher" -version = "1.0.1" +name = "sketches-ddsketch" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" +checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" [[package]] name = "slab" @@ -6481,6 +10996,12 @@ dependencies = [ "serde", ] +[[package]] +name = "snap" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" + [[package]] name = "socket2" version = "0.5.10" @@ -6546,6 +11067,12 @@ dependencies = [ "der 0.7.10", ] +[[package]] +name = "sptr" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" + [[package]] name = "sqlx" version = "0.8.6" @@ -6577,7 +11104,7 @@ dependencies = [ "futures-io", "futures-util", "hashbrown 0.15.5", - "hashlink", + "hashlink 0.10.0", "indexmap 2.11.4", "log", "memchr", @@ -6588,7 +11115,7 @@ dependencies = [ "serde_json", "sha2 0.10.9", "smallvec", - "thiserror", + "thiserror 2.0.16", "tokio", "tokio-stream", "tracing", @@ -6672,7 +11199,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 2.0.16", "tracing", "uuid", "whoami", @@ -6711,7 +11238,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 2.0.16", "tracing", "uuid", "whoami", @@ -6737,7 +11264,7 @@ dependencies = [ "serde", "serde_urlencoded", "sqlx-core", - "thiserror", + "thiserror 2.0.16", "tracing", "url", "uuid", @@ -6755,6 +11282,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "std-semaphore" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ae9eec00137a8eed469fb4148acd9fc6ac8c3f9b110f52cd34698c8b5bfa0e" + [[package]] name = "stringprep" version = "0.1.5" @@ -6795,13 +11328,35 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "strum" +version = "0.26.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros 0.26.4", +] + [[package]] name = "strum" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ - "strum_macros", + "strum_macros 0.27.2", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.106", ] [[package]] @@ -6876,12 +11431,42 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "sysinfo" +version = "0.33.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" +dependencies = [ + "core-foundation-sys", + "libc", + "memchr", + "ntapi", + "windows 0.57.0", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tap" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "tempfile" version = "3.22.0" @@ -6916,7 +11501,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror", + "thiserror 2.0.16", "tokio", "tokio-stream", "tokio-tar", @@ -6933,13 +11518,39 @@ dependencies = [ "testcontainers", ] +[[package]] +name = "thin-vec" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + [[package]] name = "thiserror" version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.16", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] @@ -6971,6 +11582,37 @@ dependencies = [ "num_cpus", ] +[[package]] +name = "tikv-jemalloc-ctl" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f21f216790c8df74ce3ab25b534e0718da5a1916719771d3fec23315c99e468b" +dependencies = [ + "libc", + "paste", + "tikv-jemalloc-sys", +] + +[[package]] +name = "tikv-jemalloc-sys" +version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "tikv-jemallocator" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" +dependencies = [ + "libc", + "tikv-jemalloc-sys", +] + [[package]] name = "time" version = "0.3.44" @@ -6979,7 +11621,10 @@ checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", + "js-sys", + "libc", "num-conv", + "num_threads", "powerfmt", "serde", "time-core", @@ -7011,6 +11656,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec 0.10.4", +] + [[package]] name = "tinystr" version = "0.8.1" @@ -7018,7 +11673,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", - "zerovec", + "zerovec 0.11.4", ] [[package]] @@ -7041,7 +11696,7 @@ name = "tips-audit" version = "0.1.0" dependencies = [ "alloy-consensus", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-provider", "alloy-rpc-types-mev", "anyhow", @@ -7069,7 +11724,7 @@ name = "tips-datastore" version = "0.1.0" dependencies = [ "alloy-consensus", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-rpc-types-mev", "anyhow", "async-trait", @@ -7089,7 +11744,7 @@ name = "tips-ingress-rpc" version = "0.1.0" dependencies = [ "alloy-consensus", - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-provider", "alloy-rpc-types-mev", "anyhow", @@ -7137,7 +11792,7 @@ dependencies = [ name = "tips-maintenance" version = "0.1.0" dependencies = [ - "alloy-primitives", + "alloy-primitives 1.3.1", "alloy-provider", "alloy-rpc-types", "anyhow", @@ -7154,6 +11809,54 @@ dependencies = [ "url", ] +[[package]] +name = "tips-simulator" +version = "0.1.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types", + "alloy-rpc-types-mev", + "anyhow", + "async-trait", + "chrono", + "clap", + "dotenvy", + "eyre", + "futures-util", + "hex", + "op-alloy-consensus 0.20.0", + "rdkafka", + "reth", + "reth-chainspec", + "reth-evm", + "reth-execution-types", + "reth-exex", + "reth-node-api", + "reth-node-builder", + "reth-node-ethereum", + "reth-optimism-evm", + "reth-optimism-primitives", + "reth-primitives", + "reth-provider", + "reth-revm", + "revm-primitives 3.1.1", + "serde", + "serde_json", + "std-semaphore", + "testcontainers", + "testcontainers-modules", + "tips-audit", + "tips-datastore", + "tokio", + "tokio-test", + "tokio-util", + "tracing", + "tracing-subscriber 0.3.20", + "uuid", +] + [[package]] name = "tokio" version = "1.47.1" @@ -7242,6 +11945,36 @@ dependencies = [ "xattr", ] +[[package]] +name = "tokio-test" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7" +dependencies = [ + "async-stream", + "bytes", + "futures-core", + "tokio", + "tokio-stream", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +dependencies = [ 
+ "futures-util", + "log", + "rustls 0.23.31", + "rustls-native-certs 0.8.1", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.3", + "tungstenite", + "webpki-roots 0.26.11", +] + [[package]] name = "tokio-util" version = "0.7.16" @@ -7253,9 +11986,31 @@ dependencies = [ "futures-io", "futures-sink", "pin-project-lite", + "slab", "tokio", ] +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + [[package]] name = "toml_datetime" version = "0.7.2" @@ -7265,6 +12020,20 @@ dependencies = [ "serde_core", ] +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.11.4", + "serde", + "serde_spanned", + "toml_datetime 0.6.11", + "toml_write", + "winnow", +] + [[package]] name = "toml_edit" version = "0.23.6" @@ -7272,7 +12041,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3effe7c0e86fdff4f69cdd2ccc1b96f933e24811c5441d44904e8683e27184b" dependencies = [ "indexmap 2.11.4", - "toml_datetime", + "toml_datetime 0.7.2", "toml_parser", "winnow", ] @@ -7286,6 +12055,12 @@ dependencies = [ "winnow", ] +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + [[package]] name = "tower" version = "0.5.2" @@ -7294,11 +12069,16 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", + "hdrhistogram", + "indexmap 2.11.4", "pin-project-lite", + "slab", "sync_wrapper", "tokio", + "tokio-util", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -7307,16 +12087,29 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ + "async-compression", + "base64 0.22.1", "bitflags 2.9.4", "bytes", + "futures-core", "futures-util", "http 1.3.1", "http-body 1.0.1", + "http-body-util", + "http-range-header", + "httpdate", "iri-string", + "mime", + "mime_guess", + "percent-encoding", "pin-project-lite", + "tokio", + "tokio-util", "tower", "tower-layer", "tower-service", + "tracing", + "uuid", ] [[package]] @@ -7343,6 +12136,18 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror 1.0.69", + "time", + "tracing-subscriber 0.3.20", +] + [[package]] name = "tracing-attributes" version = "0.1.30" @@ -7374,6 +12179,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "tracing-journald" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0b4143302cf1022dac868d521e36e8b27691f72c84b3311750d5188ebba657" +dependencies = [ + 
"libc", + "tracing-core", + "tracing-subscriber 0.3.20", +] + [[package]] name = "tracing-log" version = "0.2.0" @@ -7385,6 +12201,28 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-logfmt" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b1f47d22deb79c3f59fcf2a1f00f60cbdc05462bf17d1cd356c1fefa3f444bd" +dependencies = [ + "time", + "tracing", + "tracing-core", + "tracing-subscriber 0.3.20", +] + +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + [[package]] name = "tracing-subscriber" version = "0.2.25" @@ -7404,20 +12242,73 @@ dependencies = [ "nu-ansi-term", "once_cell", "regex-automata", + "serde", + "serde_json", "sharded-slab", "smallvec", "thread_local", "tracing", "tracing-core", "tracing-log", + "tracing-serde", +] + +[[package]] +name = "tree_hash" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee44f4cef85f88b4dea21c0b1f58320bdf35715cf56d840969487cff00613321" +dependencies = [ + "alloy-primitives 1.3.1", + "ethereum_hashing", + "ethereum_ssz", + "smallvec", + "typenum", +] + +[[package]] +name = "tree_hash_derive" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bee2ea1551f90040ab0e34b6fb7f2fa3bad8acc925837ac654f2c78a13e3089" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.106", ] +[[package]] +name = "triomphe" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" + [[package]] name = "try-lock" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +dependencies = [ + "bytes", + "data-encoding", + "http 1.3.1", + "httparse", + "log", + "rand 0.9.2", + "rustls 0.23.31", + "rustls-pki-types", + "sha1", + "thiserror 2.0.16", + "utf-8", +] + [[package]] name = "typenum" version = "1.18.0" @@ -7442,12 +12333,30 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unarray" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + [[package]] name = "unicode-bidi" version = "0.3.18" @@ -7481,12 +12390,51 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "unicode-truncate" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" +dependencies = [ + "itertools 0.13.0", + "unicode-segmentation", + "unicode-width 0.1.14", +] + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unicode-xid" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" + [[package]] name = "untrusted" version = "0.9.0" @@ -7511,6 +12459,18 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + [[package]] name = "utf8_iter" version = "1.0.4" @@ -7547,6 +12507,47 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "vergen" +version = "9.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b2bf58be11fc9414104c6d3a2e464163db5ef74b12296bda593cac37b6e4777" +dependencies = [ + "anyhow", + "cargo_metadata 0.19.2", + "derive_builder", + "regex", + "rustversion", + "time", + "vergen-lib", +] + +[[package]] +name = "vergen-git2" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f6ee511ec45098eabade8a0750e76eec671e7fb2d9360c563911336bea9cac1" +dependencies = [ + "anyhow", + "derive_builder", + "git2", + "rustversion", + "time", + "vergen", + "vergen-lib", +] + +[[package]] +name = "vergen-lib" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b07e6010c0f3e59fcb164e0163834597da68d1f864e2b8ca49f74de01e9c166" +dependencies = [ + "anyhow", + "derive_builder", + "rustversion", +] + [[package]] name = "version_check" version = "0.9.5" @@ -7568,6 +12569,16 @@ dependencies = [ "libc", ] +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "want" version = "0.3.1" @@ -7671,110 +12682,309 @@ dependencies = [ ] [[package]] -name = "wasm-bindgen-shared" -version = "0.2.103" +name = "wasm-bindgen-shared" +version 
= "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmtimer" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" +dependencies = [ + "futures", + "js-sys", + "parking_lot", + "pin-utils", + "slab", + "wasm-bindgen", +] + +[[package]] +name = "web-sys" +version = "0.3.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-root-certs" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" +dependencies = [ + "webpki-root-certs 1.0.2", +] + +[[package]] +name = "webpki-root-certs" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4ffd8df1c57e87c325000a3d6ef93db75279dc3a231125aac571650f22b12a" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.2", +] + +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix 0.38.44", +] + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + +[[package]] +name = "widestring" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = 
"winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +dependencies = [ + "windows-core 0.57.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +dependencies = [ + "windows-core 0.58.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link 0.1.3", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" dependencies = [ - "unicode-ident", + "windows-core 0.61.2", ] [[package]] -name = "wasmtimer" -version = "0.4.3" +name = "windows-core" +version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" +checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" dependencies = [ - "futures", - "js-sys", - "parking_lot", - "pin-utils", - "slab", - "wasm-bindgen", + "windows-implement 0.57.0", + "windows-interface 0.57.0", + "windows-result 0.1.2", + "windows-targets 0.52.6", ] [[package]] -name = "web-sys" -version = "0.3.80" +name = "windows-core" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" dependencies = [ - "js-sys", - "wasm-bindgen", + "windows-implement 0.58.0", + "windows-interface 0.58.0", + "windows-result 0.2.0", + "windows-strings 0.1.0", + "windows-targets 0.52.6", ] [[package]] -name = "web-time" -version = "1.1.0" +name = "windows-core" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ - "js-sys", - "wasm-bindgen", + "windows-implement 0.60.0", + "windows-interface 0.59.1", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", ] [[package]] -name = "which" -version = "4.4.2" +name = "windows-core" +version = "0.62.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +checksum = "57fe7168f7de578d2d8a05b07fd61870d2e73b4020e9f49aa00da8471723497c" dependencies = [ - 
"either", - "home", - "once_cell", - "rustix 0.38.44", + "windows-implement 0.60.0", + "windows-interface 0.59.1", + "windows-link 0.2.0", + "windows-result 0.4.0", + "windows-strings 0.5.0", ] [[package]] -name = "whoami" -version = "1.6.1" +name = "windows-future" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ - "libredox", - "wasite", + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", ] [[package]] -name = "winapi" -version = "0.3.9" +name = "windows-implement" +version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" +name = "windows-implement" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] [[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" +name = "windows-implement" +version = "0.60.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] [[package]] -name = "windows-core" -version = "0.62.0" +name = "windows-interface" +version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57fe7168f7de578d2d8a05b07fd61870d2e73b4020e9f49aa00da8471723497c" +checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ - "windows-implement", - "windows-interface", - "windows-link 0.2.0", - "windows-result", - "windows-strings", + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] -name = "windows-implement" -version = "0.60.0" +name = "windows-interface" +version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", @@ -7804,6 +13014,43 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows-result" version = "0.4.0" @@ -7813,6 +13060,25 @@ dependencies = [ "windows-link 0.2.0", ] +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result 0.2.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows-strings" version = "0.5.0" @@ -7822,6 +13088,15 @@ dependencies = [ "windows-link 0.2.0", ] +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -7867,6 +13142,21 @@ dependencies = [ "windows-link 0.2.0", ] +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -7915,6 +13205,21 @@ dependencies = [ "windows_x86_64_msvc 0.53.0", ] +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -7933,6 +13238,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -7951,6 +13262,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = 
"windows_i686_gnu" version = "0.48.5" @@ -7981,6 +13298,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -7999,6 +13322,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -8017,6 +13346,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -8035,6 +13370,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -8062,18 +13403,59 @@ dependencies = [ "memchr", ] +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + [[package]] name = "wit-bindgen" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "writeable" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" +[[package]] +name = "ws_stream_wasm" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" +dependencies = [ + "async_io_stream", + "futures", + "js-sys", + "log", + "pharos", + "rustc_version 0.4.1", + "send_wrapper 0.6.0", + "thiserror 2.0.16", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wyz" version = "0.5.1" @@ -8099,6 +13481,24 @@ version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive 0.7.5", + "zerofrom", +] + [[package]] name = "yoke" version = "0.8.0" @@ -8107,10 +13507,22 @@ checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" dependencies = [ "serde", "stable_deref_trait", - "yoke-derive", + "yoke-derive 0.8.0", "zerofrom", ] +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "synstructure", +] + [[package]] name = "yoke-derive" version = "0.8.0" @@ -8191,8 +13603,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" dependencies = [ "displaydoc", - "yoke", + "yoke 0.8.0", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke 0.7.5", "zerofrom", + "zerovec-derive 0.10.3", ] [[package]] @@ -8201,9 +13624,20 @@ version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" dependencies = [ - "yoke", + "yoke 0.8.0", "zerofrom", - "zerovec-derive", + "zerovec-derive 0.11.1", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 26260c7..c46a855 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,6 +59,7 @@ sqlx = { version = "0.8.6", features = [ uuid = { version = "1.18.1", features = ["v4", "serde"] } serde = { version = "1.0.219", features = ["derive"] } eyre = "0.6.12" +chrono = { version = "0.4", features = ["serde"] } async-trait = "0.1.89" serde_json = "1.0.143" dotenvy = "0.15.7" From 2d11117ea9197163c8e3d771ed957cddaea13709 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Mon, 22 Sep 2025 23:06:23 -0500 Subject: [PATCH 17/39] Tweak reference handling and clones --- crates/simulator/src/core.rs | 4 +- crates/simulator/src/engine.rs | 25 +++++----- crates/simulator/src/lib.rs | 42 +++++++--------- crates/simulator/src/listeners/exex.rs | 2 +- crates/simulator/src/listeners/mempool.rs | 11 +++-- crates/simulator/src/worker_pool.rs | 60 +++++++++++------------ 6 files changed, 69 insertions(+), 75 deletions(-) diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs index 10c8b05..837c098 100644 --- a/crates/simulator/src/core.rs +++ b/crates/simulator/src/core.rs @@ -32,7 +32,7 @@ where /// Convenience method that handles state provider creation pub async fn simulate( &self, - request: SimulationRequest, + request: &SimulationRequest, state_provider_factory: 
        &F,
    ) -> Result<()>
    where
@@ -46,7 +46,7 @@
            .map_err(|e| eyre::eyre!("Failed to get state provider: {}", e))?;

        // Run the simulation
-        match self.engine.simulate_bundle(request.clone(), &state_provider).await {
+        match self.engine.simulate_bundle(request, &state_provider).await {
            Ok(result) => {
                info!(
                    bundle_id = %request.bundle_id,
diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs
index e383ba3..7c7a024 100644
--- a/crates/simulator/src/engine.rs
+++ b/crates/simulator/src/engine.rs
@@ -1,6 +1,6 @@
 use crate::types::{SimulationError, SimulationRequest, SimulationResult};
 use alloy_consensus::{transaction::SignerRecoverable, BlockHeader};
-use alloy_primitives::{Address, B256, U256};
+use alloy_primitives::B256;
 use alloy_eips::eip2718::Decodable2718;
 use alloy_rpc_types::BlockNumberOrTag;
 use eyre::Result;
@@ -97,7 +97,7 @@ pub trait SimulationEngine: Send + Sync {
     /// Simulate a bundle execution
     async fn simulate_bundle(
         &self,
-        request: SimulationRequest,
+        request: &SimulationRequest,
         state_provider: &S,
     ) -> Result<SimulationResult>
     where
@@ -135,7 +135,7 @@ where
 {
     async fn simulate_bundle(
         &self,
-        request: SimulationRequest,
+        request: &SimulationRequest,
         _state_provider: &S,
     ) -> Result<SimulationResult>
     where
@@ -174,7 +174,6 @@
         // Variables to track bundle execution
         let mut total_gas_used = 0u64;
-        let all_storage_changes: HashMap<Address, HashMap<U256, U256>> = HashMap::new();
         let mut failed = false;
         let mut failure_reason = None;
@@ -236,15 +235,6 @@
                 }),
             ))
         } else {
-            info!(
-                bundle_id = %request.bundle_id,
-                simulation_id = %simulation_id,
-                gas_used = total_gas_used,
-                execution_time_us = execution_time,
-                storage_changes = all_storage_changes.len(),
-                "Bundle simulation completed successfully"
-            );
-
             // Collect the state diff
             let bundle = db.take_bundle();
@@ -263,6 +253,15 @@
             }
         }

+        info!(
+            bundle_id = %request.bundle_id,
+            simulation_id = %simulation_id,
+            gas_used = total_gas_used,
+            execution_time_us = execution_time,
+            storage_changes = modified_storage_slots.len(),
+            "Bundle simulation completed successfully"
+        );
+
        Ok(SimulationResult::success(
            simulation_id,
            request.bundle_id,
diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs
index 24ceeb6..30facad 100644
--- a/crates/simulator/src/lib.rs
+++ b/crates/simulator/src/lib.rs
@@ -12,7 +12,6 @@
 use reth_node_api::FullNodeComponents;
 use reth_evm::{ConfigureEvm, NextBlockEnvAttributes};
 use std::sync::Arc;
 use tracing::{info, error};
-use crate::worker_pool::SimulationWorkerPool;

 pub use config::SimulatorNodeConfig;
 pub use core::BundleSimulator;
@@ -20,6 +19,7 @@
 pub use engine::{SimulationEngine, RethSimulationEngine};
 pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig};
 pub use publisher::{SimulationPublisher, TipsSimulationPublisher};
 pub use types::{SimulationResult, SimulationError, ExExSimulationConfig};
+pub use worker_pool::SimulationWorkerPool;

 // Type aliases for concrete implementations
 pub type TipsBundleSimulator = BundleSimulator, TipsSimulationPublisher>;
@@ -62,13 +62,13 @@
        .create::()
        .map_err(|e| eyre::eyre!("Failed to create Kafka producer: {}", e))?;

-    let publisher = TipsSimulationPublisher::new(datastore.clone(), kafka_producer, kafka_topic);
+    let publisher = TipsSimulationPublisher::new(Arc::clone(&datastore), kafka_producer, kafka_topic);
     info!(
         kafka_brokers = %kafka_brokers,
         "Database publisher with Kafka initialized"
     );

-    let engine = RethSimulationEngine::new(provider, evm_config);
+    let engine = RethSimulationEngine::new(Arc::clone(&provider), evm_config);
    info!("Simulation engine initialized");

    let simulator = BundleSimulator::new(engine, publisher);
@@ -95,12 +95,11 @@
 {
     info!("Initializing ExEx event listener");

-    let state_provider_factory = Arc::new(ctx.components.provider().clone());
     let provider = Arc::new(ctx.components.provider().clone());
     let evm_config = ctx.components.evm_config().clone();

     let common_components = init_common_components(
-        provider,
+        Arc::clone(&provider),
         evm_config,
         config.database_url.clone(),
         kafka_brokers,
@@ -109,14 +108,14 @@

     let worker_pool = SimulationWorkerPool::new(
         Arc::new(common_components.simulator),
-        state_provider_factory.clone(),
+        Arc::clone(&provider),
         config.max_concurrent_simulations,
     );

     let consensus_listener = ExExEventListener::new(
         ctx,
         common_components.datastore,
-        Arc::new(worker_pool),
+        worker_pool,
     );

     info!(
@@ -144,7 +143,7 @@
     let evm_config = ctx.components.evm_config().clone();

     let common_components = init_common_components(
-        provider.clone(),
+        Arc::clone(&provider),
         evm_config,
         config.database_url.clone(),
         config.kafka_brokers.join(","),
@@ -153,14 +152,14 @@

     let worker_pool = SimulationWorkerPool::new(
         Arc::new(common_components.simulator),
-        provider.clone(),
+        Arc::clone(&provider),
         max_concurrent_simulations,
     );

     let mempool_listener = MempoolEventListener::new(
-        provider,
+        Arc::clone(&provider),
         config,
-        Arc::new(worker_pool),
+        worker_pool,
     )?;

     info!(
@@ -204,34 +203,33 @@
     ) -> Result<Self> {
         info!("Initializing shared event listeners");

-        let state_provider_factory = Arc::new(exex_ctx.components.provider().clone());
         let provider = Arc::new(exex_ctx.components.provider().clone());
         let evm_config = exex_ctx.components.evm_config().clone();

         let common_components = init_common_components(
-            provider,
+            Arc::clone(&provider),
             evm_config,
             exex_config.database_url.clone(),
             mempool_config.kafka_brokers.join(","),
             mempool_config.kafka_topic.clone(),
         ).await?;

-        let shared_worker_pool = Arc::new(SimulationWorkerPool::new(
+        let shared_worker_pool = SimulationWorkerPool::new(
             Arc::new(common_components.simulator),
-            state_provider_factory.clone(),
+            Arc::clone(&provider),
             max_concurrent_simulations,
-        ));
+        );

         let exex_listener = ExExEventListener::new(
             exex_ctx,
             common_components.datastore,
-            shared_worker_pool.clone(),
+            Arc::clone(&shared_worker_pool),
         );

         let mempool_listener = MempoolEventListener::new(
-            state_provider_factory,
+            Arc::clone(&provider),
             mempool_config,
-            shared_worker_pool.clone(),
+            Arc::clone(&shared_worker_pool),
         )?;

         info!(
@@ -249,12 +247,10 @@
     /// Run both listeners with lifecycle management for the shared worker pool
     ///
     /// Starts the worker pool, runs both listeners concurrently, and ensures proper shutdown
-    pub async fn run(mut self) -> Result<()> {
+    pub async fn run(self) -> Result<()> {
         info!("Starting shared worker pool");
-        Arc::get_mut(&mut self.worker_pool)
-            .ok_or_else(|| eyre::eyre!("Cannot get mutable reference to worker pool"))?
-            .start();
+        self.worker_pool.start().await;

         info!("Running listeners concurrently");
diff --git a/crates/simulator/src/listeners/exex.rs b/crates/simulator/src/listeners/exex.rs
index 9d9b025..90e343f 100644
--- a/crates/simulator/src/listeners/exex.rs
+++ b/crates/simulator/src/listeners/exex.rs
@@ -48,7 +48,7 @@ where
         // For now, we generate new IDs for each bundle
         let result = bundles_with_metadata
             .into_iter()
-            .map(|bwm| (Uuid::new_v4(), bwm.bundle.clone()))
+            .map(|bwm| (Uuid::new_v4(), bwm.bundle))
             .collect();

         Ok(result)
diff --git a/crates/simulator/src/listeners/mempool.rs b/crates/simulator/src/listeners/mempool.rs
index 8a1cf14..2db289b 100644
--- a/crates/simulator/src/listeners/mempool.rs
+++ b/crates/simulator/src/listeners/mempool.rs
@@ -99,7 +99,7 @@ where
         // Start Kafka listener in a separate task
         let consumer = self.consumer;
-        let provider = self.provider;
+        let provider = Arc::clone(&self.provider);
         let topic = self.topic.clone();
         let listener_handle: tokio::task::JoinHandle<Result<()>> = tokio::spawn(async move {
             info!(topic = %topic, "Starting Kafka mempool event listener");
@@ -175,25 +175,26 @@ where
         });

         // Process simulation requests using the shared worker pool
-        let worker_pool = self.worker_pool.clone();
+        let worker_pool = Arc::clone(&self.worker_pool);
         let processing_handle = tokio::spawn(async move {
             while let Some(request) = receiver.recv().await {
+                let bundle_id = request.bundle_id;
                 debug!(
-                    bundle_id = %request.bundle_id,
+                    bundle_id = %bundle_id,
                     block_number = request.block_number,
                     "Queuing bundle simulation for mempool event"
                 );

                 // Create simulation task
                 let task = SimulationTask {
-                    request: request.clone(),
+                    request,
                 };

                 // Queue simulation using shared worker pool
                 if let Err(e) = worker_pool.queue_simulation(task).await {
                     error!(
                         error = %e,
-                        bundle_id = %request.bundle_id,
+                        bundle_id = %bundle_id,
                         "Failed to queue simulation task"
                     );
                 }
diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs
index 45a3a52..8472d01 100644
--- a/crates/simulator/src/worker_pool.rs
+++ b/crates/simulator/src/worker_pool.rs
@@ -30,9 +30,9 @@ where
     /// Channel for receiving simulation requests in workers
     simulation_rx: Arc<tokio::sync::Mutex<mpsc::Receiver<SimulationTask>>>,
     /// Latest block number being processed (for cancellation)
-    latest_block: Arc<AtomicU64>,
-    /// Worker task handles
-    worker_handles: JoinSet<()>,
+    latest_block: AtomicU64,
+    /// Worker task handles (wrapped in Mutex for interior mutability)
+    worker_handles: tokio::sync::Mutex<JoinSet<()>>,
     /// Maximum number of concurrent simulations
     max_concurrent: usize,
 }
@@ -48,40 +48,42 @@ where
         simulator: Arc>,
         state_provider_factory: Arc,
         max_concurrent_simulations: usize,
-    ) -> Self {
+    ) -> Arc<Self> {
         let (simulation_tx, simulation_rx) = mpsc::channel(1000);

-        Self {
+        Arc::new(Self {
             simulator,
             state_provider_factory,
             simulation_tx,
             simulation_rx: Arc::new(tokio::sync::Mutex::new(simulation_rx)),
-            latest_block: Arc::new(AtomicU64::new(0)),
-            worker_handles: JoinSet::new(),
+            latest_block: AtomicU64::new(0),
+            worker_handles: tokio::sync::Mutex::new(JoinSet::new()),
             max_concurrent: max_concurrent_simulations,
-        }
+        })
     }

     /// Start simulation worker tasks
-    pub fn start(&mut self) {
+    /// Returns true if workers were started, false if already running
+    pub async fn start(self: &Arc<Self>) -> bool {
+        let mut handles = self.worker_handles.lock().await;
+
+        if !handles.is_empty() {
+            debug!("Simulation workers already started");
+            return false;
+        }
         info!(num_workers = self.max_concurrent, "Starting simulation workers");

         for worker_id in 0..self.max_concurrent {
-            let simulator = self.simulator.clone();
-            let state_provider_factory = self.state_provider_factory.clone();
-            let simulation_rx = self.simulation_rx.clone();
-            let latest_block = self.latest_block.clone();
+            let pool = Arc::clone(self);

-            self.worker_handles.spawn(async move {
+            handles.spawn(async move {
                 Self::simulation_worker(
                     worker_id,
-                    simulator,
-                    state_provider_factory,
-                    simulation_rx,
-                    latest_block,
+                    pool,
                 ).await
             });
         }
+        true
     }

     /// Queue a simulation task
@@ -97,12 +99,13 @@

     /// Wait for all workers to complete
-    pub async fn shutdown(mut self) {
+    pub async fn shutdown(self) {
         // Close the channel to signal workers to stop
         drop(self.simulation_tx);

         // Wait for workers to complete
-        while let Some(result) = self.worker_handles.join_next().await {
+        let mut handles = self.worker_handles.lock().await;
+        while let Some(result) = handles.join_next().await {
             if let Err(e) = result {
                 tracing::error!(error = %e, "Worker task failed");
             }
@@ -112,20 +115,15 @@
     /// Worker task that processes simulation requests
     async fn simulation_worker(
         worker_id: usize,
-        simulator: Arc>,
-        state_provider_factory: Arc,
-        simulation_rx: Arc>>,
-        latest_block: Arc,
+        pool: Arc<Self>,
     )
-    where
-        S: reth_provider::StateProviderFactory,
-    {
+    {
         debug!(worker_id, "Simulation worker started");

         loop {
             // Get the next simulation task
             let task = {
-                let mut rx = simulation_rx.lock().await;
+                // FIXME: This lock looks like it prevents multiple workers from running in parallel.
+                let mut rx = pool.simulation_rx.lock().await;
                 rx.recv().await
             };

             // Check if this simulation is for an old block
-            let current_latest = latest_block.load(Ordering::Acquire);
+            let current_latest = pool.latest_block.load(Ordering::Acquire);
             if task.request.block_number < current_latest {
                 warn!(
                     worker_id,
                 continue;
             }

             // Execute the simulation
-            match simulator.simulate(task.request.clone(), state_provider_factory.as_ref()).await {
+            match pool.simulator.simulate(&task.request, pool.state_provider_factory.as_ref()).await {
                 Ok(_) => {
                     debug!(
                         worker_id,
From 7e26250de34f75253672e184e4fb6305e0d4fffe Mon Sep 17 00:00:00 2001
From: Niran Babalola
Date: Tue, 23 Sep 2025 01:35:45 -0500
Subject: [PATCH 18/39] Add simulator support for builder playground

---
 .env.example                              |   3 +
 Cargo.lock                                |  27 ++
 crates/simulator/Cargo.toml               |   8 +
 crates/simulator/src/config.rs            |  61 ----
 crates/simulator/src/config/mod.rs        | 129 ++++++++
 crates/simulator/src/config/playground.rs | 346 ++++++++++++++++++++++
 crates/simulator/src/main.rs              |  11 +-
 crates/simulator/src/playground.rs        | 323 ++++++++++++++++++++
 justfile                                  |   8 +-
 9 files changed, 848 insertions(+), 68 deletions(-)
 delete mode 100644 crates/simulator/src/config.rs
 create mode 100644 crates/simulator/src/config/mod.rs
 create mode 100644 crates/simulator/src/config/playground.rs
 create mode 100644 crates/simulator/src/playground.rs

diff --git a/.env.example b/.env.example
index 62434d8..68862e4 100644
--- a/.env.example
+++ b/.env.example
@@ -28,6 +28,9 @@ TIPS_MAINTENANCE_KAFKA_TOPIC=tips-audit
 TIPS_MAINTENANCE_POLL_INTERVAL_MS=250
 TIPS_MAINTENANCE_LOG_LEVEL=debug

+# Simulator
+TIPS_SIMULATOR_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres
+
 # TIPS UI
 TIPS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres
 TIPS_UI_AWS_REGION=us-east-1
diff --git a/Cargo.lock b/Cargo.lock
index d10a4f2..22c1c86 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10803,6 +10803,19 @@ dependencies = [
  "syn 2.0.106",
 ]

+[[package]]
+name =
"serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.11.4", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + [[package]] name = "serdect" version = "0.2.0" @@ -11827,12 +11840,16 @@ dependencies = [ "futures-util", "hex", "op-alloy-consensus 0.20.0", + "rand 0.8.5", "rdkafka", "reth", "reth-chainspec", + "reth-cli", "reth-evm", "reth-execution-types", "reth-exex", + "reth-network", + "reth-network-peers", "reth-node-api", "reth-node-builder", "reth-node-ethereum", @@ -11842,8 +11859,11 @@ dependencies = [ "reth-provider", "reth-revm", "revm-primitives 3.1.1", + "secp256k1 0.30.0", "serde", "serde_json", + "serde_yaml", + "shellexpand", "std-semaphore", "testcontainers", "testcontainers-modules", @@ -11854,6 +11874,7 @@ dependencies = [ "tokio-util", "tracing", "tracing-subscriber 0.3.20", + "url", "uuid", ] @@ -12429,6 +12450,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + [[package]] name = "unsigned-varint" version = "0.8.0" diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index c23190e..3b2c5c0 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -54,6 +54,14 @@ tokio-util = { version = "0.7", features = ["time"] } hex = "0.4" rdkafka.workspace = true revm-primitives = { version = "3.1.1", default-features = false } +shellexpand = "3.1" +rand = "0.8" +secp256k1 = "0.30" +serde_yaml = "0.9" +reth-network = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-network-peers = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-cli = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +url = "2.5" [dev-dependencies] tokio-test = "0.4.4" diff --git a/crates/simulator/src/config.rs b/crates/simulator/src/config.rs deleted file mode 100644 index 9ac5f08..0000000 --- a/crates/simulator/src/config.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::types::ExExSimulationConfig; -use crate::listeners::MempoolListenerConfig; -use clap::Parser; - -/// Combined configuration for reth node with simulator ExEx -#[derive(Parser, Debug)] -#[command(author, version, about = "Reth node with Tips Simulator ExEx")] -pub struct SimulatorNodeConfig { - /// Reth node arguments - #[command(flatten)] - pub node: reth::cli::Cli, - - /// Data directory for simulator - #[arg(long, env = "TIPS_SIMULATOR_DATADIR", default_value = "~/.tips-simulator-reth")] - pub datadir: std::path::PathBuf, - - /// PostgreSQL database connection URL for simulator - #[arg(long, env = "TIPS_SIMULATOR_DATABASE_URL")] - pub database_url: String, - - /// Maximum number of concurrent simulations - #[arg(long, env = "TIPS_SIMULATOR_MAX_CONCURRENT", default_value = "10")] - pub max_concurrent_simulations: usize, - - /// Timeout for individual simulations in milliseconds - #[arg(long, env = "TIPS_SIMULATOR_TIMEOUT_MS", default_value = "5000")] - pub simulation_timeout_ms: u64, - - /// Kafka brokers for mempool events (comma-separated) - #[arg(long, env = "TIPS_SIMULATOR_KAFKA_BROKERS", default_value = "localhost:9092")] - pub kafka_brokers: String, - - /// Kafka topic for mempool events - #[arg(long, env = "TIPS_SIMULATOR_KAFKA_TOPIC", default_value = "mempool-events")] - pub kafka_topic: String, - 
- /// Kafka consumer group ID
-#[arg(long, env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", default_value = "tips-simulator")]
-pub kafka_group_id: String,
-}
-
-impl From<&SimulatorNodeConfig> for ExExSimulationConfig {
-    fn from(config: &SimulatorNodeConfig) -> Self {
-        Self {
-            database_url: config.database_url.clone(),
-            max_concurrent_simulations: config.max_concurrent_simulations,
-            simulation_timeout_ms: config.simulation_timeout_ms,
-        }
-    }
-}
-
-impl From<&SimulatorNodeConfig> for MempoolListenerConfig {
-    fn from(config: &SimulatorNodeConfig) -> Self {
-        Self {
-            kafka_brokers: config.kafka_brokers.split(',').map(|s| s.trim().to_string()).collect(),
-            kafka_topic: config.kafka_topic.clone(),
-            kafka_group_id: config.kafka_group_id.clone(),
-            database_url: config.database_url.clone(),
-        }
-    }
-}
diff --git a/crates/simulator/src/config/mod.rs b/crates/simulator/src/config/mod.rs
new file mode 100644
index 0000000..8e3885d
--- /dev/null
+++ b/crates/simulator/src/config/mod.rs
@@ -0,0 +1,129 @@
+pub mod playground;
+
+pub use playground::PlaygroundOptions;
+
+use crate::types::ExExSimulationConfig;
+use crate::listeners::MempoolListenerConfig;
+use anyhow::{Result, anyhow};
+use clap::Parser;
+use eyre;
+use tracing::info;
+
+/// Combined configuration for reth node with simulator ExEx
+#[derive(Parser, Debug)]
+#[command(author, version, about = "Reth node with Tips Simulator ExEx")]
+pub struct SimulatorNodeConfig {
+    /// Reth node arguments
+    #[command(flatten)]
+    pub node: reth::cli::Cli,
+
+    /// Data directory for simulator
+    #[arg(long, env = "TIPS_SIMULATOR_DATADIR", default_value = "~/.tips-simulator-reth")]
+    pub datadir: std::path::PathBuf,
+
+    /// PostgreSQL database connection URL for simulator
+    #[arg(long, env = "TIPS_SIMULATOR_DATABASE_URL")]
+    pub database_url: String,
+
+    /// Maximum number of concurrent simulations
+    #[arg(long, env = "TIPS_SIMULATOR_MAX_CONCURRENT", default_value = "10")]
+    pub max_concurrent_simulations: usize,
+
+    /// Timeout for individual simulations in milliseconds
+    #[arg(long, env = "TIPS_SIMULATOR_TIMEOUT_MS", default_value = "5000")]
+    pub simulation_timeout_ms: u64,
+
+    /// Kafka brokers for mempool events (comma-separated)
+    #[arg(long, env = "TIPS_SIMULATOR_KAFKA_BROKERS", default_value = "localhost:9092")]
+    pub kafka_brokers: String,
+
+    /// Kafka topic for mempool events
+    #[arg(long, env = "TIPS_SIMULATOR_KAFKA_TOPIC", default_value = "mempool-events")]
+    pub kafka_topic: String,
+
+    /// Kafka consumer group ID
+    #[arg(long, env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", default_value = "tips-simulator")]
+    pub kafka_group_id: String,
+
+    /// Path to builder playground to automatically start up the node connected to it
+    #[arg(
+        long = "builder.playground",
+        num_args = 0..=1,
+        default_missing_value = "$HOME/.playground/devnet/",
+        value_parser = expand_path,
+        env = "TIPS_SIMULATOR_PLAYGROUND_DIR",
+    )]
+    pub playground: Option<std::path::PathBuf>,
+}
+
+impl From<&SimulatorNodeConfig> for ExExSimulationConfig {
+    fn from(config: &SimulatorNodeConfig) -> Self {
+        Self {
+            database_url: config.database_url.clone(),
+            max_concurrent_simulations: config.max_concurrent_simulations,
+            simulation_timeout_ms: config.simulation_timeout_ms,
+        }
+    }
+}
+
+impl From<&SimulatorNodeConfig> for MempoolListenerConfig {
+    fn from(config: &SimulatorNodeConfig) -> Self {
+        Self {
+            kafka_brokers: config.kafka_brokers.split(',').map(|s| s.trim().to_string()).collect(),
+            kafka_topic: config.kafka_topic.clone(),
+            kafka_group_id: config.kafka_group_id.clone(),
+            database_url: config.database_url.clone(),
+        }
+    }
+}
+
+fn expand_path(s: &str) -> Result<std::path::PathBuf> {
+    shellexpand::full(s)
+        .map_err(|e| anyhow!("expansion error for `{s}`: {e}"))?
+        .into_owned()
+        .parse()
+        .map_err(|e| anyhow!("invalid path after expansion: {e}"))
+}
+
+/// Parse CLI args with playground configuration if specified
+pub fn parse_config_with_playground() -> eyre::Result<SimulatorNodeConfig> {
+    // Debug: print raw args
+    eprintln!("Raw args: {:?}", std::env::args().collect::<Vec<_>>());
+
+    // First, parse just to check if playground is specified
+    let initial_config = SimulatorNodeConfig::parse();
+
+    eprintln!("Parsed initial config, playground: {:?}", initial_config.playground);
+
+    if let Some(ref playground_dir) = initial_config.playground {
+        eprintln!("Detected playground configuration, loading from: {}", playground_dir.display());
+
+        // Load playground options
+        let options = PlaygroundOptions::new(playground_dir)
+            .map_err(|e| eyre::eyre!("Failed to load playground options: {}", e))?;
+
+        // Get original args
+        let mut args: Vec<String> = std::env::args().collect();
+
+        // Get playground args
+        let playground_args = options.to_cli_args();
+        eprintln!("Playground args to insert: {:?}", playground_args);
+
+        // Find where to insert playground args (after "node" subcommand)
+        if let Some(node_pos) = args.iter().position(|arg| arg == "node") {
+            // Insert playground args right after "node"
+            // Insert in reverse order to maintain correct positions
+            for arg in playground_args.into_iter().rev() {
+                args.insert(node_pos + 1, arg);
+            }
+        }
+
+        eprintln!("Final args with playground config: {:?}", args);
+        info!("Re-parsing with playground configuration arguments");
+
+        // Re-parse with playground args included
+        Ok(SimulatorNodeConfig::parse_from(args))
+    } else {
+        Ok(initial_config)
+    }
+}
diff --git a/crates/simulator/src/config/playground.rs b/crates/simulator/src/config/playground.rs
new file mode 100644
index 0000000..3db94cf
--- /dev/null
+++ b/crates/simulator/src/config/playground.rs
@@ -0,0 +1,346 @@
+//! Automatic builder playground configuration for tips-simulator.
+//!
+//! This module is used to configure tips-simulator to run against a running op-builder playground.
+//!
+//! To set up the playground, check out this repository:
+//!
+//! https://github.com/flashbots/builder-playground
+//!
+//! Then run the following command:
+//!
+//! go run main.go cook opstack --external-builder http://host.docker.internal:4444
+//!
+//! Wait until the playground is up and running, then run the following command to start
+//! tips-simulator against the playground:
+//!
+//! target/debug/tips-simulator --builder.playground node
+//!
+//! This will automatically try to detect the playground configuration and apply
+//! it to the tips-simulator startup settings.
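+//!
+//! As a sketch of equivalent invocations (the paths shown are the defaults, adjust as needed),
+//! the playground directory can also be passed explicitly, or supplied through the environment
+//! variable read by the --builder.playground flag:
+//!
+//! target/debug/tips-simulator --builder.playground $HOME/.playground/devnet/ node
+//!
+//! TIPS_SIMULATOR_PLAYGROUND_DIR=$HOME/.playground/devnet/ target/debug/tips-simulator node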
+
+use alloy_primitives::hex;
+use anyhow::{Result, anyhow};
+use reth_chainspec::ChainSpec;
+use reth_network::config::SecretKey;
+use reth_network_peers::TrustedPeer;
+use serde_json::Value;
+use std::{
+    fs::read_to_string,
+    net::{IpAddr, Ipv4Addr, SocketAddr},
+    path::{Path, PathBuf},
+    sync::Arc,
+    time::Duration,
+};
+use url::{Host, Url};
+
+#[derive(Clone, Debug)]
+pub struct PlaygroundOptions {
+    /// Chain spec loaded from playground
+    pub chain: Arc<ChainSpec>,
+
+    /// HTTP RPC port
+    pub http_port: u16,
+
+    /// Auth RPC address
+    pub authrpc_addr: IpAddr,
+
+    /// Auth RPC port
+    pub authrpc_port: u16,
+
+    /// JWT secret path
+    pub authrpc_jwtsecret: PathBuf,
+
+    /// P2P network port
+    pub port: u16,
+
+    /// Trusted peer for the playground network
+    pub trusted_peer: TrustedPeer,
+
+    /// Chain block time
+    pub chain_block_time: Duration,
+}
+
+impl PlaygroundOptions {
+    /// Creates a new `PlaygroundOptions` instance with the specified genesis path.
+    pub fn new(path: &Path) -> Result<Self> {
+        if !path.exists() {
+            return Err(anyhow!(
+                "Playground data directory {} does not exist",
+                path.display()
+            ));
+        }
+
+        let genesis_path = existing_path(path, "l2-genesis.json")?;
+        let chain = load_chain_spec(&genesis_path)?;
+
+        let authrpc_addr = Ipv4Addr::UNSPECIFIED.into();
+        let http_port = pick_preferred_port(2222, 3000..9999);
+        eprintln!("Selected HTTP port: {}", http_port);
+        let authrpc_jwtsecret = existing_path(path, "jwtsecret")?.into();
+        let port = pick_preferred_port(30333, 30000..65535);
+        eprintln!("Selected P2P port: {}", port);
+        let chain_block_time = extract_chain_block_time(path)?;
+        let default_authrpc_port = extract_authrpc_port(path)?;
+        let authrpc_port = pick_preferred_port(default_authrpc_port, 4000..9000);
+        eprintln!("Selected Auth RPC port: {}", authrpc_port);
+        let trusted_peer_port = extract_trusted_peer_port(path)?;
+        let trusted_peer_key = extract_deterministic_p2p_key(path)?;
+
+        // Create a trusted peer from the extracted information
+        let trusted_peer = TrustedPeer::from_secret_key(
+            Host::Ipv4(Ipv4Addr::LOCALHOST),
+            trusted_peer_port,
+            &trusted_peer_key,
+        );
+
+        Ok(Self {
+            chain,
+            http_port,
+            authrpc_addr,
+            authrpc_port,
+            authrpc_jwtsecret,
+            port,
+            trusted_peer,
+            chain_block_time,
+        })
+    }
+
+    /// Get command line arguments that should be applied to reth node
+    pub fn to_cli_args(&self) -> Vec<String> {
+        let mut args = vec![];
+
+        // HTTP RPC settings
+        args.push("--http".to_string());
+        args.push("--http.port".to_string());
+        args.push(self.http_port.to_string());
+        args.push("--http.addr".to_string());
+        args.push("127.0.0.1".to_string()); // Explicitly bind to localhost
+
+        // Network settings
+        args.push("--port".to_string());
+        args.push(self.port.to_string());
+        args.push("--disable-discovery".to_string());
+
+        // Add trusted peer
+        args.push("--trusted-peers".to_string());
+        args.push(self.trusted_peer.to_string());
+
+        // Auth RPC settings
+        args.push("--authrpc.addr".to_string());
+        args.push(self.authrpc_addr.to_string());
+        args.push("--authrpc.port".to_string());
+        args.push(self.authrpc_port.to_string());
+        args.push("--authrpc.jwtsecret".to_string());
+        args.push(self.authrpc_jwtsecret.to_string_lossy().to_string());
+
+        args
+    }
+
+    /// Get the chain spec for use in the node builder
+    pub fn chain(&self) -> Arc<ChainSpec> {
+        Arc::clone(&self.chain)
+    }
+
+    /// Get the chain block time
+    pub fn chain_block_time(&self) -> Duration {
+        self.chain_block_time
+    }
+}
+
+fn load_chain_spec(genesis_path: &str) -> Result<Arc<ChainSpec>> {
+    // Read the genesis file
+    let genesis_content = read_to_string(genesis_path)
+        .map_err(|e| anyhow!("Failed to read genesis file: {}", e))?;
+
+    // Parse as JSON to extract chain ID
+    let genesis_json: Value = serde_json::from_str(&genesis_content)
+        .map_err(|e| anyhow!("Failed to parse genesis JSON: {}", e))?;
+
+    let _chain_id = genesis_json["config"]["chainId"]
+        .as_u64()
+        .ok_or_else(|| anyhow!("Missing chainId in genesis config"))?;
+
+    // For now, we'll create a basic chain spec with the chain ID
+    // This is a simplified approach - in production you'd want to fully parse the genesis
+    use reth_chainspec::MAINNET;
+
+    // Use mainnet spec as a base
+    // Note: In a real implementation, you'd want to create a custom ChainSpec from the genesis file
+    let spec = MAINNET.clone();
+
+    Ok(spec)
+}
+
+fn existing_path(base: &Path, relative: &str) -> Result<String> {
+    let path = base.join(relative);
+    if path.exists() {
+        Ok(path.to_string_lossy().to_string())
+    } else {
+        Err(anyhow!(
+            "Expected file {relative} is not present in playground directory {}",
+            base.display()
+        ))
+    }
+}
+
+fn pick_random_port(range: std::ops::Range<u16>) -> u16 {
+    use rand::Rng;
+    let mut rng = rand::thread_rng();
+
+    loop {
+        // Generate a random port number in the range
+        let port = rng.gen_range(range.clone());
+
+        // Check if the port is already in use
+        let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port);
+        if std::net::TcpListener::bind(socket).is_ok() {
+            return port;
+        }
+    }
+}
+
+fn pick_preferred_port(preferred: u16, fallback_range: std::ops::Range<u16>) -> u16 {
+    if !is_port_free(preferred) {
+        eprintln!("Port {} is not free, picking random port from range {:?}", preferred, fallback_range);
+        return pick_random_port(fallback_range);
+    }
+
+    preferred
+}
+
+fn is_port_free(port: u16) -> bool {
+    // Check if we can bind to the port on both localhost and all interfaces
+    // Different services bind to different addresses
+
+    // Check all interfaces (0.0.0.0) - used by P2P and Auth RPC
+    let socket_all = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), port);
+    let all_free = std::net::TcpListener::bind(socket_all).is_ok();
+
+    // Check localhost (127.0.0.1) - used by HTTP RPC
+    let socket_local = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port);
+    let local_free = std::net::TcpListener::bind(socket_local).is_ok();
+
+    all_free && local_free
+}
+
+fn extract_chain_block_time(basepath: &Path) -> Result<Duration> {
+    Ok(Duration::from_secs(
+        serde_json::from_str::<Value>(&read_to_string(existing_path(basepath, "rollup.json")?)?)?
+            .get("block_time")
+            .and_then(|v| v.as_u64())
+            .ok_or_else(|| anyhow!("Missing chain_block_time in rollup.json"))?,
+    ))
+}
+
+fn extract_deterministic_p2p_key(basepath: &Path) -> Result<SecretKey> {
+    let key = read_to_string(existing_path(basepath, "enode-key-1.txt")?)?;
+    let key_bytes = hex::decode(key.trim()).map_err(|e| anyhow!("Invalid hex key: {e}"))?;
+
+    // Create secp256k1 secret key first
+    let secp_key = secp256k1::SecretKey::from_slice(&key_bytes)
+        .map_err(|e| anyhow!("Invalid secret key: {e}"))?;
+
+    // Convert to reth's SecretKey type
+    Ok(SecretKey::from(secp_key))
+}
+
+fn read_docker_compose(basepath: &Path) -> Result<serde_yaml::Value> {
+    let docker_compose = read_to_string(existing_path(basepath, "docker-compose.yaml")?)?;
+    serde_yaml::from_str(&docker_compose).map_err(|e| anyhow!("Invalid docker-compose file: {e}"))
+}
+
+fn extract_service_command_flag(basepath: &Path, service: &str, flag: &str) -> Result<String> {
+    let docker_compose = read_docker_compose(basepath)?;
+    let args = docker_compose["services"][service]["command"]
+        .as_sequence()
+        .ok_or(anyhow!(
+            "docker-compose.yaml is missing command line arguments for {service}"
+        ))?
+        .iter()
+        .map(|s| {
+            s.as_str().ok_or_else(|| {
+                anyhow!("docker-compose.yaml service command line argument is not a string")
+            })
+        })
+        .collect::<Result<Vec<&str>>>()?;
+
+    let index = args
+        .iter()
+        .position(|arg| *arg == flag)
+        .ok_or_else(|| anyhow!("docker_compose: {flag} not found on {service} service"))?;
+
+    let value = args
+        .get(index + 1)
+        .ok_or_else(|| anyhow!("docker_compose: {flag} value not found"))?;
+
+    Ok(value.to_string())
+}
+
+fn extract_authrpc_port(basepath: &Path) -> Result<u16> {
+    let builder_url = extract_service_command_flag(basepath, "rollup-boost", "--builder-url")?;
+    let url = Url::parse(&builder_url).map_err(|e| anyhow!("Invalid builder-url: {e}"))?;
+    url.port().ok_or_else(|| anyhow!("missing builder-url port"))
+}
+
+fn extract_trusted_peer_port(basepath: &Path) -> Result<u16> {
+    let docker_compose = read_docker_compose(basepath)?;
+
+    // first we need to find the internal port of the op-geth service from the docker-compose.yaml
+    // command line arguments used to start the op-geth service
+
+    let Some(opgeth_args) = docker_compose["services"]["op-geth"]["command"][1].as_str() else {
+        return Err(anyhow!(
+            "docker-compose.yaml is missing command line arguments for op-geth"
+        ));
+    };
+
+    let opgeth_args = opgeth_args.split_whitespace().collect::<Vec<_>>();
+    let port_param_position = opgeth_args
+        .iter()
+        .position(|arg| *arg == "--port")
+        .ok_or_else(|| anyhow!("docker_compose: --port param not found on op-geth service"))?;
+
+    let port_value = opgeth_args
+        .get(port_param_position + 1)
+        .ok_or_else(|| anyhow!("docker_compose: --port value not found"))?;
+
+    let port_value = port_value
+        .parse::<u16>()
+        .map_err(|e| anyhow!("Invalid port value: {e}"))?;
+
+    // now we need to find the external port of the op-geth service from the docker-compose.yaml
+    // ports mapping used to start the op-geth service
+    let Some(opgeth_ports) = docker_compose["services"]["op-geth"]["ports"].as_sequence() else {
+        return Err(anyhow!(
+            "docker-compose.yaml is missing ports mapping for op-geth"
+        ));
+    };
+    let ports_mapping = opgeth_ports
+        .iter()
+        .map(|s| {
+            s.as_str().ok_or_else(|| {
+                anyhow!("docker-compose.yaml service ports mapping in op-geth is not a string")
+            })
+        })
+        .collect::<Result<Vec<&str>>>()?;
+
+    // port mappings are in the format [..., "127.0.0.1:30304:30303", ...]
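+    // (hypothetical example: if op-geth listens on internal port 30303, the mapping
+    // "127.0.0.1:30304:30303" tells us the host-side external port is 30304)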
+    // we need to find the mapping that contains the port value we found earlier
+    // and extract the external port from it
+    let port_mapping = ports_mapping
+        .iter()
+        .find(|mapping| mapping.contains(&format!(":{port_value}")))
+        .ok_or_else(|| {
+            anyhow!("docker_compose: external port mapping not found for {port_value} for op-geth")
+        })?;
+
+    // extract the external port from the mapping
+    let port_mapping = port_mapping
+        .split(':')
+        .nth(1)
+        .ok_or_else(|| anyhow!("docker_compose: external port mapping for op-geth is not valid"))?;
+
+    port_mapping
+        .parse::<u16>()
+        .map_err(|e| anyhow!("Invalid external port mapping value for op-geth: {e}"))
+}
diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs
index 6b11814..848c355 100644
--- a/crates/simulator/src/main.rs
+++ b/crates/simulator/src/main.rs
@@ -1,16 +1,14 @@
-use clap::Parser;
 use tips_simulator::{
     ListenersWithWorkers,
-    SimulatorNodeConfig,
-    MempoolListenerConfig
+    MempoolListenerConfig,
+    config::parse_config_with_playground,
 };
 use tracing::info;

-#[tokio::main]
-async fn main() -> eyre::Result<()> {
+fn main() -> eyre::Result<()> {
     dotenvy::dotenv().ok();

-    let config = SimulatorNodeConfig::parse();
+    let config = parse_config_with_playground()?;

     let exex_config: tips_simulator::types::ExExSimulationConfig = (&config).into();
     let mempool_config: MempoolListenerConfig = (&config).into();
@@ -20,6 +18,7 @@
         timeout_ms = config.simulation_timeout_ms,
         kafka_brokers = %config.kafka_brokers,
         kafka_topic = %config.kafka_topic,
+        playground = config.playground.is_some(),
         "Starting reth node with both ExEx and mempool event listeners"
     );
diff --git a/crates/simulator/src/playground.rs b/crates/simulator/src/playground.rs
new file mode 100644
index 0000000..cbbf9f7
--- /dev/null
+++ b/crates/simulator/src/playground.rs
@@ -0,0 +1,323 @@
+//! Automatic builder playground configuration for tips-simulator.
+//!
+//! This module is used to configure tips-simulator to run against a running op-builder playground.
+//!
+//! To set up the playground, check out this repository:
+//!
+//! https://github.com/flashbots/builder-playground
+//!
+//! Then run the following command:
+//!
+//! go run main.go cook opstack --external-builder http://host.docker.internal:4444
+//!
+//! Wait until the playground is up and running, then run the following command to start
+//! tips-simulator against the playground:
+//!
+//! target/debug/tips-simulator --builder.playground
+//!
+//! This will automatically try to detect the playground configuration and apply
+//! it to the tips-simulator startup settings.

+use alloy_primitives::hex;
+use anyhow::{Result, anyhow};
+use reth_chainspec::ChainSpec;
+use reth_network_peers::TrustedPeer;
+use serde_json::Value;
+use std::{
+    fs::read_to_string,
+    net::{IpAddr, Ipv4Addr, SocketAddr},
+    path::{Path, PathBuf},
+    sync::Arc,
+    time::Duration,
+};
+use url::Url;
+
+pub struct PlaygroundOptions {
+    /// Chain spec loaded from playground
+    pub chain: Arc<ChainSpec>,
+
+    /// HTTP RPC port
+    pub http_port: u16,
+
+    /// Auth RPC address
+    pub authrpc_addr: IpAddr,
+
+    /// Auth RPC port
+    pub authrpc_port: u16,
+
+    /// JWT secret path
+    pub authrpc_jwtsecret: PathBuf,
+
+    /// P2P network port
+    pub port: u16,
+
+    /// Trusted peer for the playground network
+    pub trusted_peer: TrustedPeer,
+
+    /// Chain block time
+    pub chain_block_time: Duration,
+}
+
+impl PlaygroundOptions {
+    /// Creates a new `PlaygroundOptions` instance with the specified genesis path.
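+    ///
+    /// A minimal usage sketch (the directory below is an assumed example, not a fixed path):
+    ///
+    /// ```ignore
+    /// let options = PlaygroundOptions::new(Path::new("/home/user/.playground/devnet"))?;
+    /// let extra_reth_args = options.to_cli_args();
+    /// ```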
+    pub fn new(path: &Path) -> Result<Self> {
+        if !path.exists() {
+            return Err(anyhow!(
+                "Playground data directory {} does not exist",
+                path.display()
+            ));
+        }
+
+        let genesis_path = existing_path(path, "l2-genesis.json")?;
+        let chain = load_chain_spec(&genesis_path)?;
+
+        let authrpc_addr = Ipv4Addr::UNSPECIFIED.into();
+        let http_port = pick_preferred_port(2222, 3000..9999);
+        let authrpc_jwtsecret = existing_path(path, "jwtsecret")?.into();
+        let port = pick_preferred_port(30333, 30000..65535);
+        let chain_block_time = extract_chain_block_time(path)?;
+        let authrpc_port = extract_authrpc_port(path)?;
+        let trusted_peer_port = extract_trusted_peer_port(path)?;
+        let trusted_peer_key = extract_deterministic_p2p_key(path)?;
+
+        // Create a trusted peer from the extracted information
+        let trusted_peer = create_trusted_peer(trusted_peer_port, trusted_peer_key)?;
+
+        Ok(Self {
+            chain,
+            http_port,
+            authrpc_addr,
+            authrpc_port,
+            authrpc_jwtsecret,
+            port,
+            trusted_peer,
+            chain_block_time,
+        })
+    }
+
+    /// Get command line arguments that should be applied to reth node
+    pub fn to_cli_args(&self) -> Vec<String> {
+        let mut args = vec![];
+
+        // HTTP RPC settings
+        args.push("--http".to_string());
+        args.push("--http.port".to_string());
+        args.push(self.http_port.to_string());
+
+        // Network settings
+        args.push("--port".to_string());
+        args.push(self.port.to_string());
+        args.push("--disable-discovery".to_string());
+
+        // Add trusted peer
+        args.push("--trusted-peers".to_string());
+        args.push(self.trusted_peer.to_string());
+
+        // Auth RPC settings
+        args.push("--authrpc.addr".to_string());
+        args.push(self.authrpc_addr.to_string());
+        args.push("--authrpc.port".to_string());
+        args.push(self.authrpc_port.to_string());
+        args.push("--authrpc.jwtsecret".to_string());
+        args.push(self.authrpc_jwtsecret.to_string_lossy().to_string());
+
+        args
+    }
+
+    /// Get the chain spec for use in the node builder
+    pub fn chain(&self) -> Arc<ChainSpec> {
+        Arc::clone(&self.chain)
+    }
+
+    /// Get the chain block time
+    pub fn chain_block_time(&self) -> Duration {
+        self.chain_block_time
+    }
+}
+
+fn load_chain_spec(genesis_path: &str) -> Result<Arc<ChainSpec>> {
+    // Read the genesis file
+    let genesis_content = read_to_string(genesis_path)
+        .map_err(|e| anyhow!("Failed to read genesis file: {}", e))?;
+
+    // Parse as JSON to extract chain ID
+    let genesis_json: Value = serde_json::from_str(&genesis_content)
+        .map_err(|e| anyhow!("Failed to parse genesis JSON: {}", e))?;
+
+    let _chain_id = genesis_json["config"]["chainId"]
+        .as_u64()
+        .ok_or_else(|| anyhow!("Missing chainId in genesis config"))?;
+
+    // For now, we'll create a basic chain spec with the chain ID
+    // This is a simplified approach - in production you'd want to fully parse the genesis
+    use reth_chainspec::MAINNET;
+
+    // Use mainnet spec as a base
+    // Note: In a real implementation, you'd want to create a custom ChainSpec from the genesis file
+    let spec = MAINNET.clone();
+
+    Ok(spec)
+}
+
+fn existing_path(base: &Path, relative: &str) -> Result<String> {
+    let path = base.join(relative);
+    if path.exists() {
+        Ok(path.to_string_lossy().to_string())
+    } else {
+        Err(anyhow!(
+            "Expected file {relative} is not present in playground directory {}",
+            base.display()
+        ))
+    }
+}
+
+fn pick_random_port(range: std::ops::Range<u16>) -> u16 {
+    use rand::Rng;
+    let mut rng = rand::thread_rng();
+
+    loop {
+        // Generate a random port number in the range
+        let port = rng.gen_range(range.clone());
+
+        // Check if the port is already in use
+        let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port);
+        if std::net::TcpListener::bind(socket).is_ok() {
+            return port;
+        }
+    }
+}
+
+fn pick_preferred_port(preferred: u16, fallback_range: std::ops::Range<u16>) -> u16 {
+    if !is_port_free(preferred) {
+        return pick_random_port(fallback_range);
+    }
+
+    preferred
+}
+
+fn is_port_free(port: u16) -> bool {
+    let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port);
+    std::net::TcpListener::bind(socket).is_ok()
+}
+
+fn extract_chain_block_time(basepath: &Path) -> Result<Duration> {
+    Ok(Duration::from_secs(
+        serde_json::from_str::<Value>(&read_to_string(existing_path(basepath, "rollup.json")?)?)?
+            .get("block_time")
+            .and_then(|v| v.as_u64())
+            .ok_or_else(|| anyhow!("Missing chain_block_time in rollup.json"))?,
+    ))
+}
+
+fn extract_deterministic_p2p_key(basepath: &Path) -> Result<Vec<u8>> {
+    let key = read_to_string(existing_path(basepath, "enode-key-1.txt")?)?;
+    hex::decode(key.trim()).map_err(|e| anyhow!("Invalid hex key: {e}"))
+}
+
+fn create_trusted_peer(port: u16, _key_bytes: Vec<u8>) -> Result<TrustedPeer> {
+    // For now, we'll create a simple trusted peer
+    // In a real implementation, you'd want to properly derive the peer ID from the key
+    let peer_str = format!("enode://0000000000000000000000000000000000000000000000000000000000000000@127.0.0.1:{}", port);
+    peer_str.parse().map_err(|e| anyhow!("Failed to parse trusted peer: {e}"))
+}
+
+fn read_docker_compose(basepath: &Path) -> Result<serde_yaml::Value> {
+    let docker_compose = read_to_string(existing_path(basepath, "docker-compose.yaml")?)?;
+    serde_yaml::from_str(&docker_compose).map_err(|e| anyhow!("Invalid docker-compose file: {e}"))
+}
+
+fn extract_service_command_flag(basepath: &Path, service: &str, flag: &str) -> Result<String> {
+    let docker_compose = read_docker_compose(basepath)?;
+    let args = docker_compose["services"][service]["command"]
+        .as_sequence()
+        .ok_or(anyhow!(
+            "docker-compose.yaml is missing command line arguments for {service}"
+        ))?
+        .iter()
+        .map(|s| {
+            s.as_str().ok_or_else(|| {
+                anyhow!("docker-compose.yaml service command line argument is not a string")
+            })
+        })
+        .collect::<Result<Vec<&str>>>()?;
+
+    let index = args
+        .iter()
+        .position(|arg| *arg == flag)
+        .ok_or_else(|| anyhow!("docker_compose: {flag} not found on {service} service"))?;
+
+    let value = args
+        .get(index + 1)
+        .ok_or_else(|| anyhow!("docker_compose: {flag} value not found"))?;
+
+    Ok(value.to_string())
+}
+
+fn extract_authrpc_port(basepath: &Path) -> Result<u16> {
+    let builder_url = extract_service_command_flag(basepath, "rollup-boost", "--builder-url")?;
+    let url = Url::parse(&builder_url).map_err(|e| anyhow!("Invalid builder-url: {e}"))?;
+    url.port().ok_or_else(|| anyhow!("missing builder-url port"))
+}
+
+fn extract_trusted_peer_port(basepath: &Path) -> Result<u16> {
+    let docker_compose = read_docker_compose(basepath)?;
+
+    // first we need to find the internal port of the op-geth service from the docker-compose.yaml
+    // command line arguments used to start the op-geth service
+
+    let Some(opgeth_args) = docker_compose["services"]["op-geth"]["command"][1].as_str() else {
+        return Err(anyhow!(
+            "docker-compose.yaml is missing command line arguments for op-geth"
+        ));
+    };
+
+    let opgeth_args = opgeth_args.split_whitespace().collect::<Vec<_>>();
+    let port_param_position = opgeth_args
+        .iter()
+        .position(|arg| *arg == "--port")
+        .ok_or_else(|| anyhow!("docker_compose: --port param not found on op-geth service"))?;
+
+    let port_value = opgeth_args
+        .get(port_param_position + 1)
+        .ok_or_else(|| anyhow!("docker_compose: --port value not found"))?;
+
+    let port_value = port_value
+        .parse::<u16>()
+        .map_err(|e| anyhow!("Invalid port value: {e}"))?;
+
+    // now we need to find the external port of the op-geth service from the docker-compose.yaml
+    // ports mapping used to start the op-geth service
+    let Some(opgeth_ports) = docker_compose["services"]["op-geth"]["ports"].as_sequence() else {
+        return Err(anyhow!(
+            "docker-compose.yaml is missing ports mapping for op-geth"
+        ));
+    };
+    let ports_mapping = opgeth_ports
+        .iter()
+        .map(|s| {
+            s.as_str().ok_or_else(|| {
+                anyhow!("docker-compose.yaml service ports mapping in op-geth is not a string")
+            })
+        })
+        .collect::<Result<Vec<&str>>>()?;
+
+    // port mappings are in the format [..., "127.0.0.1:30304:30303", ...]
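+    // (hypothetical example: an internal port of 30303 with the mapping
+    // "127.0.0.1:30304:30303" means the external host port is 30304)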
+ // we need to find the mapping that contains the port value we found earlier + // and extract the external port from it + let port_mapping = ports_mapping + .iter() + .find(|mapping| mapping.contains(&format!(":{port_value}"))) + .ok_or_else(|| { + anyhow!("docker_compose: external port mapping not found for {port_value} for op-geth") + })?; + + // extract the external port from the mapping + let port_mapping = port_mapping + .split(':') + .nth(1) + .ok_or_else(|| anyhow!("docker_compose: external port mapping for op-geth is not valid"))?; + + port_mapping + .parse::() + .map_err(|e| anyhow!("Invalid external port mapping value for op-geth: {e}")) +} diff --git a/justfile b/justfile index 4c959b6..4277ec2 100644 --- a/justfile +++ b/justfile @@ -89,5 +89,11 @@ maintenance: ingress-writer: cargo run --bin tips-ingress-writer +simulator: + cargo run --bin tips-simulator node + +simulator-playground dir="$HOME/.playground/devnet/": + cargo run --bin tips-simulator -- --builder.playground {{ dir }} node + ui: - cd ui && yarn dev \ No newline at end of file + cd ui && yarn dev From 4c9f8c31bf2e397b0f9968c5d48b7486f7c42b69 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Tue, 23 Sep 2025 12:40:19 -0500 Subject: [PATCH 19/39] Pass the --chain CLI arg when using playground --- Cargo.lock | 350 +++++++++++++++++++++- crates/simulator/Cargo.toml | 3 + crates/simulator/src/config/playground.rs | 45 ++- crates/simulator/src/playground.rs | 323 -------------------- justfile | 4 +- 5 files changed, 371 insertions(+), 354 deletions(-) delete mode 100644 crates/simulator/src/playground.rs diff --git a/Cargo.lock b/Cargo.lock index 22c1c86..4ec034f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6139,8 +6139,10 @@ checksum = "d9ade20c592484ba1ea538006e0454284174447a3adf9bb59fa99ed512f95493" dependencies = [ "alloy-consensus", "alloy-eips", + "alloy-network", "alloy-primitives 1.3.1", "alloy-rlp", + "alloy-rpc-types-eth", "alloy-serde", "derive_more 2.0.1", "serde", @@ -6166,6 +6168,28 @@ dependencies = [ "thiserror 2.0.16", ] +[[package]] +name = "op-alloy-flz" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a79f352fc3893dcd670172e615afef993a41798a1d3fc0db88a3e60ef2e70ecc" + +[[package]] +name = "op-alloy-network" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84741a798124ceb43979d70db654039937a00979b1341fa8bfdc48473bbd52bf" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives 1.3.1", + "alloy-provider", + "alloy-rpc-types-eth", + "alloy-signer", + "op-alloy-consensus 0.19.1", + "op-alloy-rpc-types 0.19.1", +] + [[package]] name = "op-alloy-network" version = "0.20.0" @@ -6182,6 +6206,16 @@ dependencies = [ "op-alloy-rpc-types 0.20.0", ] +[[package]] +name = "op-alloy-rpc-jsonrpsee" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa85f170bf8f914a7619e1447918781a8c5bd1041bb6629940b851e865487156" +dependencies = [ + "alloy-primitives 1.3.1", + "jsonrpsee", +] + [[package]] name = "op-alloy-rpc-types" version = "0.19.1" @@ -6231,10 +6265,12 @@ dependencies = [ "alloy-primitives 1.3.1", "alloy-rlp", "alloy-rpc-types-engine", + "alloy-serde", "derive_more 2.0.1", "ethereum_ssz", "ethereum_ssz_derive", "op-alloy-consensus 0.19.1", + "serde", "snap", "thiserror 2.0.16", ] @@ -7575,6 +7611,7 @@ dependencies = [ "reth-codecs", "reth-db-models", "reth-ethereum-primitives", + "reth-optimism-primitives", "reth-primitives-traits", 
"reth-prune-types", "reth-stages-types", @@ -7773,9 +7810,11 @@ dependencies = [ "alloy-rpc-types-engine", "eyre", "futures-util", + "op-alloy-rpc-types-engine", "reth-chainspec", "reth-engine-primitives", "reth-ethereum-engine-primitives", + "reth-optimism-chainspec", "reth-payload-builder", "reth-payload-primitives", "reth-provider", @@ -8586,6 +8625,7 @@ dependencies = [ "reth-config", "reth-consensus", "reth-consensus-debug-client", + "reth-db", "reth-db-api", "reth-db-common", "reth-downloaders", @@ -8815,6 +8855,7 @@ dependencies = [ "miniz_oxide", "op-alloy-consensus 0.19.1", "op-alloy-rpc-types 0.19.1", + "paste", "reth-chainspec", "reth-ethereum-forks", "reth-network-peers", @@ -8823,9 +8864,57 @@ dependencies = [ "reth-primitives-traits", "serde", "serde_json", + "tar-no-std", "thiserror 2.0.16", ] +[[package]] +name = "reth-optimism-cli" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "clap", + "derive_more 2.0.1", + "eyre", + "futures-util", + "op-alloy-consensus 0.19.1", + "reth-chainspec", + "reth-cli", + "reth-cli-commands", + "reth-cli-runner", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-db-common", + "reth-downloaders", + "reth-execution-types", + "reth-fs-util", + "reth-node-builder", + "reth-node-core", + "reth-node-events", + "reth-node-metrics", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-node", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-stages", + "reth-static-file", + "reth-static-file-types", + "reth-tracing", + "serde", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "reth-optimism-consensus" version = "1.7.0" @@ -8873,11 +8962,43 @@ dependencies = [ "reth-optimism-forks", "reth-optimism-primitives", "reth-primitives-traits", + "reth-rpc-eth-api", "reth-storage-errors", "revm", "thiserror 2.0.16", ] +[[package]] +name = "reth-optimism-flashblocks" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "alloy-serde", + "brotli", + "eyre", + "futures-util", + "reth-chain-state", + "reth-errors", + "reth-evm", + "reth-execution-types", + "reth-optimism-evm", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-eth-types", + "reth-storage-api", + "reth-tasks", + "serde", + "serde_json", + "tokio", + "tokio-tungstenite", + "tracing", + "url", +] + [[package]] name = "reth-optimism-forks" version = "1.7.0" @@ -8889,6 +9010,91 @@ dependencies = [ "reth-ethereum-forks", ] +[[package]] +name = "reth-optimism-node" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "clap", + "eyre", + "op-alloy-consensus 0.19.1", + "op-alloy-rpc-types-engine", + "op-revm", + "reth-chainspec", + "reth-consensus", + "reth-engine-local", + "reth-evm", + "reth-network", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-payload-builder", + "reth-optimism-primitives", + 
"reth-optimism-rpc", + "reth-optimism-storage", + "reth-optimism-txpool", + "reth-payload-builder", + "reth-primitives-traits", + "reth-provider", + "reth-rpc-api", + "reth-rpc-engine-api", + "reth-rpc-server-types", + "reth-tracing", + "reth-transaction-pool", + "reth-trie-common", + "revm", + "serde", + "tokio", + "url", +] + +[[package]] +name = "reth-optimism-payload-builder" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives 1.3.1", + "alloy-rlp", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "derive_more 2.0.1", + "op-alloy-consensus 0.19.1", + "op-alloy-rpc-types-engine", + "reth-basic-payload-builder", + "reth-chain-state", + "reth-chainspec", + "reth-evm", + "reth-execution-types", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-optimism-txpool", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-payload-util", + "reth-payload-validator", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "reth-transaction-pool", + "revm", + "serde", + "sha2 0.10.9", + "thiserror 2.0.16", + "tracing", +] + [[package]] name = "reth-optimism-primitives" version = "1.7.0" @@ -8899,6 +9105,7 @@ dependencies = [ "alloy-primitives 1.3.1", "alloy-rlp", "bytes", + "modular-bitfield", "op-alloy-consensus 0.19.1", "reth-codecs", "reth-primitives-traits", @@ -8907,6 +9114,116 @@ dependencies = [ "serde_with", ] +[[package]] +name = "reth-optimism-rpc" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-primitives 1.3.1", + "alloy-rpc-client", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-transport", + "alloy-transport-http", + "async-trait", + "derive_more 2.0.1", + "eyre", + "jsonrpsee", + "jsonrpsee-core", + "jsonrpsee-types", + "metrics", + "op-alloy-consensus 0.19.1", + "op-alloy-network 0.19.1", + "op-alloy-rpc-jsonrpsee", + "op-alloy-rpc-types 0.19.1", + "op-alloy-rpc-types-engine", + "op-revm", + "reqwest", + "reth-chainspec", + "reth-evm", + "reth-metrics", + "reth-node-api", + "reth-node-builder", + "reth-optimism-evm", + "reth-optimism-flashblocks", + "reth-optimism-forks", + "reth-optimism-payload-builder", + "reth-optimism-primitives", + "reth-optimism-txpool", + "reth-primitives-traits", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-engine-api", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "revm", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tower", + "tracing", +] + +[[package]] +name = "reth-optimism-storage" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "reth-chainspec", + "reth-db-api", + "reth-node-api", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-storage-api", +] + +[[package]] +name = "reth-optimism-txpool" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-primitives 1.3.1", + "alloy-rpc-client", + 
"alloy-rpc-types-eth", + "alloy-serde", + "c-kzg", + "derive_more 2.0.1", + "futures-util", + "metrics", + "op-alloy-consensus 0.19.1", + "op-alloy-flz", + "op-alloy-rpc-types 0.19.1", + "op-revm", + "parking_lot", + "reth-chain-state", + "reth-chainspec", + "reth-metrics", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-storage-api", + "reth-transaction-pool", + "serde", + "thiserror 2.0.16", + "tokio", + "tracing", +] + [[package]] name = "reth-payload-builder" version = "1.7.0" @@ -8958,6 +9275,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "reth-payload-util" +version = "1.7.0" +source = "git+https://github.com/paradigmxyz/reth?tag=v1.7.0#9d56da53ec0ad60e229456a0c70b338501d923a5" +dependencies = [ + "alloy-consensus", + "alloy-primitives 1.3.1", + "reth-transaction-pool", +] + [[package]] name = "reth-payload-validator" version = "1.7.0" @@ -9310,9 +9637,15 @@ dependencies = [ "alloy-rpc-types-eth", "alloy-signer", "jsonrpsee-types", + "op-alloy-consensus 0.19.1", + "op-alloy-network 0.19.1", + "op-alloy-rpc-types 0.19.1", + "op-revm", "reth-ethereum-primitives", "reth-evm", + "reth-optimism-primitives", "reth-primitives-traits", + "reth-storage-api", "revm-context", "thiserror 2.0.16", ] @@ -11480,6 +11813,17 @@ dependencies = [ "xattr", ] +[[package]] +name = "tar-no-std" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac9ee8b664c9f1740cd813fea422116f8ba29997bb7c878d1940424889802897" +dependencies = [ + "bitflags 2.9.4", + "log", + "num-traits", +] + [[package]] name = "tempfile" version = "3.22.0" @@ -11768,7 +12112,7 @@ dependencies = [ "eyre", "jsonrpsee", "op-alloy-consensus 0.20.0", - "op-alloy-network", + "op-alloy-network 0.20.0", "rdkafka", "reth-rpc-eth-types", "serde", @@ -11811,7 +12155,7 @@ dependencies = [ "anyhow", "clap", "dotenvy", - "op-alloy-network", + "op-alloy-network 0.20.0", "rdkafka", "serde_json", "tips-audit", @@ -11853,6 +12197,8 @@ dependencies = [ "reth-node-api", "reth-node-builder", "reth-node-ethereum", + "reth-optimism-chainspec", + "reth-optimism-cli", "reth-optimism-evm", "reth-optimism-primitives", "reth-primitives", diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index 3b2c5c0..d9a0723 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -45,8 +45,11 @@ reth-node-builder.workspace = true reth-node-ethereum.workspace = true reth-revm.workspace = true reth-chainspec.workspace = true +# reth-node-optimism is not available in v1.7.0, using regular node with op components reth-optimism-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-optimism-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0", features = ["serde", "serde-bincode-compat"] } +reth-optimism-chainspec = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-optimism-cli = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } # Additional dependencies for simulation std-semaphore = "0.1" diff --git a/crates/simulator/src/config/playground.rs b/crates/simulator/src/config/playground.rs index 3db94cf..779e9cb 100644 --- a/crates/simulator/src/config/playground.rs +++ b/crates/simulator/src/config/playground.rs @@ -20,7 +20,9 @@ use alloy_primitives::hex; use anyhow::{Result, anyhow}; -use reth_chainspec::ChainSpec; +use reth_optimism_chainspec::OpChainSpec; +use reth_optimism_cli::chainspec::OpChainSpecParser; +use 
reth_cli::chainspec::ChainSpecParser; use reth_network::config::SecretKey; use reth_network_peers::TrustedPeer; use serde_json::Value; @@ -36,7 +38,10 @@ use url::{Host, Url}; #[derive(Clone, Debug)] pub struct PlaygroundOptions { /// Chain spec loaded from playground - pub chain: Arc, + pub chain: Arc, + + /// Path to the genesis file + pub genesis_path: String, /// HTTP RPC port pub http_port: u16, @@ -71,7 +76,8 @@ impl PlaygroundOptions { } let genesis_path = existing_path(path, "l2-genesis.json")?; - let chain = load_chain_spec(&genesis_path)?; + let chain = OpChainSpecParser::parse(&genesis_path) + .map_err(|e| anyhow!("Failed to parse chain spec: {}", e))?; let authrpc_addr = Ipv4Addr::UNSPECIFIED.into(); let http_port = pick_preferred_port(2222, 3000..9999); @@ -92,9 +98,13 @@ impl PlaygroundOptions { trusted_peer_port, &trusted_peer_key, ); + + eprintln!("Trusted peer configured: {}", trusted_peer); + eprintln!("Chain block time: {:?}", chain_block_time); Ok(Self { chain, + genesis_path, http_port, authrpc_addr, authrpc_port, @@ -109,6 +119,10 @@ impl PlaygroundOptions { pub fn to_cli_args(&self) -> Vec { let mut args = vec![]; + // Chain configuration + args.push("--chain".to_string()); + args.push(self.genesis_path.clone()); + // HTTP RPC settings args.push("--http".to_string()); args.push("--http.port".to_string()); @@ -137,7 +151,7 @@ impl PlaygroundOptions { } /// Get the chain spec for use in the node builder - pub fn chain(&self) -> Arc { + pub fn chain(&self) -> Arc { Arc::clone(&self.chain) } @@ -147,29 +161,6 @@ impl PlaygroundOptions { } } -fn load_chain_spec(genesis_path: &str) -> Result> { - // Read the genesis file - let genesis_content = read_to_string(genesis_path) - .map_err(|e| anyhow!("Failed to read genesis file: {}", e))?; - - // Parse as JSON to extract chain ID - let genesis_json: Value = serde_json::from_str(&genesis_content) - .map_err(|e| anyhow!("Failed to parse genesis JSON: {}", e))?; - - let _chain_id = genesis_json["config"]["chainId"] - .as_u64() - .ok_or_else(|| anyhow!("Missing chainId in genesis config"))?; - - // For now, we'll create a basic chain spec with the chain ID - // This is a simplified approach - in production you'd want to fully parse the genesis - use reth_chainspec::MAINNET; - - // Use mainnet spec as a base - // Note: In a real implementation, you'd want to create a custom ChainSpec from the genesis file - let spec = MAINNET.clone(); - - Ok(spec) -} fn existing_path(base: &Path, relative: &str) -> Result { let path = base.join(relative); diff --git a/crates/simulator/src/playground.rs b/crates/simulator/src/playground.rs deleted file mode 100644 index cbbf9f7..0000000 --- a/crates/simulator/src/playground.rs +++ /dev/null @@ -1,323 +0,0 @@ -//! Automatic builder playground configuration for tips-simulator. -//! -//! This module is used to configure tips-simulator to run against a running op-builder playground. -//! -//! To setup the playground, checkout this repository: -//! -//! https://github.com/flashbots/builder-playground -//! -//! Then run the following command: -//! -//! go run main.go cook opstack --external-builder http://host.docker.internal:4444 -//! -//! Wait until the playground is up and running, then run the following command to start -//! tips-simulator against the playground: -//! -//! target/debug/tips-simulator --builder.playground -//! -//! This will automatically try to detect the playground configuration and apply -//! it to the tips-simulator startup settings. 
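The deleted module below and the config/playground.rs changes above are two halves of one fix: the old load_chain_spec helper only read the chainId out of l2-genesis.json and then cloned the mainnet spec, while the patched code parses the genesis file into a real OpChainSpec and also forwards it to the node via --chain. A minimal sketch of the new parsing path, assuming the reth v1.7.0 OpChainSpecParser and ChainSpecParser items imported by this patch (the helper name here is hypothetical, not part of the change):

```rust
use std::sync::Arc;

use anyhow::{anyhow, Result};
use reth_cli::chainspec::ChainSpecParser;
use reth_optimism_chainspec::OpChainSpec;
use reth_optimism_cli::chainspec::OpChainSpecParser;

// Hypothetical helper mirroring the new code path: build a full OpChainSpec
// from the playground's genesis file instead of cloning a mainnet spec.
fn parse_playground_chain_spec(genesis_path: &str) -> Result<Arc<OpChainSpec>> {
    // OpChainSpecParser::parse resolves either a named chain or a path to a
    // genesis JSON file, matching what the node's --chain flag accepts.
    OpChainSpecParser::parse(genesis_path)
        .map_err(|e| anyhow!("Failed to parse chain spec: {}", e))
}
```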
- -use alloy_primitives::hex; -use anyhow::{Result, anyhow}; -use reth_chainspec::ChainSpec; -use reth_network_peers::TrustedPeer; -use serde_json::Value; -use std::{ - fs::read_to_string, - net::{IpAddr, Ipv4Addr, SocketAddr}, - path::{Path, PathBuf}, - sync::Arc, - time::Duration, -}; -use url::Url; - -pub struct PlaygroundOptions { - /// Chain spec loaded from playground - pub chain: Arc, - - /// HTTP RPC port - pub http_port: u16, - - /// Auth RPC address - pub authrpc_addr: IpAddr, - - /// Auth RPC port - pub authrpc_port: u16, - - /// JWT secret path - pub authrpc_jwtsecret: PathBuf, - - /// P2P network port - pub port: u16, - - /// Trusted peer for the playground network - pub trusted_peer: TrustedPeer, - - /// Chain block time - pub chain_block_time: Duration, -} - -impl PlaygroundOptions { - /// Creates a new `PlaygroundOptions` instance with the specified genesis path. - pub fn new(path: &Path) -> Result { - if !path.exists() { - return Err(anyhow!( - "Playground data directory {} does not exist", - path.display() - )); - } - - let genesis_path = existing_path(path, "l2-genesis.json")?; - let chain = load_chain_spec(&genesis_path)?; - - let authrpc_addr = Ipv4Addr::UNSPECIFIED.into(); - let http_port = pick_preferred_port(2222, 3000..9999); - let authrpc_jwtsecret = existing_path(path, "jwtsecret")?.into(); - let port = pick_preferred_port(30333, 30000..65535); - let chain_block_time = extract_chain_block_time(path)?; - let authrpc_port = extract_authrpc_port(path)?; - let trusted_peer_port = extract_trusted_peer_port(path)?; - let trusted_peer_key = extract_deterministic_p2p_key(path)?; - - // Create a trusted peer from the extracted information - let trusted_peer = create_trusted_peer(trusted_peer_port, trusted_peer_key)?; - - Ok(Self { - chain, - http_port, - authrpc_addr, - authrpc_port, - authrpc_jwtsecret, - port, - trusted_peer, - chain_block_time, - }) - } - - /// Get command line arguments that should be applied to reth node - pub fn to_cli_args(&self) -> Vec { - let mut args = vec![]; - - // HTTP RPC settings - args.push("--http".to_string()); - args.push("--http.port".to_string()); - args.push(self.http_port.to_string()); - - // Network settings - args.push("--port".to_string()); - args.push(self.port.to_string()); - args.push("--disable-discovery".to_string()); - - // Add trusted peer - args.push("--trusted-peers".to_string()); - args.push(self.trusted_peer.to_string()); - - // Auth RPC settings - args.push("--authrpc.addr".to_string()); - args.push(self.authrpc_addr.to_string()); - args.push("--authrpc.port".to_string()); - args.push(self.authrpc_port.to_string()); - args.push("--authrpc.jwtsecret".to_string()); - args.push(self.authrpc_jwtsecret.to_string_lossy().to_string()); - - args - } - - /// Get the chain spec for use in the node builder - pub fn chain(&self) -> Arc { - Arc::clone(&self.chain) - } - - /// Get the chain block time - pub fn chain_block_time(&self) -> Duration { - self.chain_block_time - } -} - -fn load_chain_spec(genesis_path: &str) -> Result> { - // Read the genesis file - let genesis_content = read_to_string(genesis_path) - .map_err(|e| anyhow!("Failed to read genesis file: {}", e))?; - - // Parse as JSON to extract chain ID - let genesis_json: Value = serde_json::from_str(&genesis_content) - .map_err(|e| anyhow!("Failed to parse genesis JSON: {}", e))?; - - let _chain_id = genesis_json["config"]["chainId"] - .as_u64() - .ok_or_else(|| anyhow!("Missing chainId in genesis config"))?; - - // For now, we'll create a basic chain spec with the 
chain ID - // This is a simplified approach - in production you'd want to fully parse the genesis - use reth_chainspec::MAINNET; - - // Use mainnet spec as a base - // Note: In a real implementation, you'd want to create a custom ChainSpec from the genesis file - let spec = MAINNET.clone(); - - Ok(spec) -} - -fn existing_path(base: &Path, relative: &str) -> Result { - let path = base.join(relative); - if path.exists() { - Ok(path.to_string_lossy().to_string()) - } else { - Err(anyhow!( - "Expected file {relative} is not present in playground directory {}", - base.display() - )) - } -} - -fn pick_random_port(range: std::ops::Range) -> u16 { - use rand::Rng; - let mut rng = rand::thread_rng(); - - loop { - // Generate a random port number in the range - let port = rng.gen_range(range.clone()); - - // Check if the port is already in use - let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); - if std::net::TcpListener::bind(socket).is_ok() { - return port; - } - } -} - -fn pick_preferred_port(preferred: u16, fallback_range: std::ops::Range) -> u16 { - if !is_port_free(preferred) { - return pick_random_port(fallback_range); - } - - preferred -} - -fn is_port_free(port: u16) -> bool { - let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); - std::net::TcpListener::bind(socket).is_ok() -} - -fn extract_chain_block_time(basepath: &Path) -> Result { - Ok(Duration::from_secs( - serde_json::from_str::(&read_to_string(existing_path(basepath, "rollup.json")?)?)? - .get("block_time") - .and_then(|v| v.as_u64()) - .ok_or_else(|| anyhow!("Missing chain_block_time in rollup.json"))?, - )) -} - -fn extract_deterministic_p2p_key(basepath: &Path) -> Result> { - let key = read_to_string(existing_path(basepath, "enode-key-1.txt")?)?; - hex::decode(key.trim()).map_err(|e| anyhow!("Invalid hex key: {e}")) -} - -fn create_trusted_peer(port: u16, _key_bytes: Vec) -> Result { - // For now, we'll create a simple trusted peer - // In a real implementation, you'd want to properly derive the peer ID from the key - let peer_str = format!("enode://0000000000000000000000000000000000000000000000000000000000000000@127.0.0.1:{}", port); - peer_str.parse().map_err(|e| anyhow!("Failed to parse trusted peer: {e}")) -} - -fn read_docker_compose(basepath: &Path) -> Result { - let docker_compose = read_to_string(existing_path(basepath, "docker-compose.yaml")?)?; - serde_yaml::from_str(&docker_compose).map_err(|e| anyhow!("Invalid docker-compose file: {e}")) -} - -fn extract_service_command_flag(basepath: &Path, service: &str, flag: &str) -> Result { - let docker_compose = read_docker_compose(basepath)?; - let args = docker_compose["services"][service]["command"] - .as_sequence() - .ok_or(anyhow!( - "docker-compose.yaml is missing command line arguments for {service}" - ))? 
- .iter() - .map(|s| { - s.as_str().ok_or_else(|| { - anyhow!("docker-compose.yaml service command line argument is not a string") - }) - }) - .collect::>>()?; - - let index = args - .iter() - .position(|arg| *arg == flag) - .ok_or_else(|| anyhow!("docker_compose: {flag} not found on {service} service"))?; - - let value = args - .get(index + 1) - .ok_or_else(|| anyhow!("docker_compose: {flag} value not found"))?; - - Ok(value.to_string()) -} - -fn extract_authrpc_port(basepath: &Path) -> Result { - let builder_url = extract_service_command_flag(basepath, "rollup-boost", "--builder-url")?; - let url = Url::parse(&builder_url).map_err(|e| anyhow!("Invalid builder-url: {e}"))?; - url.port().ok_or_else(|| anyhow!("missing builder-url port")) -} - -fn extract_trusted_peer_port(basepath: &Path) -> Result { - let docker_compose = read_docker_compose(basepath)?; - - // first we need to find the internal port of the op-geth service from the docker-compose.yaml - // command line arguments used to start the op-geth service - - let Some(opgeth_args) = docker_compose["services"]["op-geth"]["command"][1].as_str() else { - return Err(anyhow!( - "docker-compose.yaml is missing command line arguments for op-geth" - )); - }; - - let opgeth_args = opgeth_args.split_whitespace().collect::>(); - let port_param_position = opgeth_args - .iter() - .position(|arg| *arg == "--port") - .ok_or_else(|| anyhow!("docker_compose: --port param not found on op-geth service"))?; - - let port_value = opgeth_args - .get(port_param_position + 1) - .ok_or_else(|| anyhow!("docker_compose: --port value not found"))?; - - let port_value = port_value - .parse::() - .map_err(|e| anyhow!("Invalid port value: {e}"))?; - - // now we need to find the external port of the op-geth service from the docker-compose.yaml - // ports mapping used to start the op-geth service - let Some(opgeth_ports) = docker_compose["services"]["op-geth"]["ports"].as_sequence() else { - return Err(anyhow!( - "docker-compose.yaml is missing ports mapping for op-geth" - )); - }; - let ports_mapping = opgeth_ports - .iter() - .map(|s| { - s.as_str().ok_or_else(|| { - anyhow!("docker-compose.yaml service ports mapping in op-geth is not a string") - }) - }) - .collect::>>()?; - - // port mappings is in the format [..., "127.0.0.1:30304:30303", ...] 
- // we need to find the mapping that contains the port value we found earlier - // and extract the external port from it - let port_mapping = ports_mapping - .iter() - .find(|mapping| mapping.contains(&format!(":{port_value}"))) - .ok_or_else(|| { - anyhow!("docker_compose: external port mapping not found for {port_value} for op-geth") - })?; - - // extract the external port from the mapping - let port_mapping = port_mapping - .split(':') - .nth(1) - .ok_or_else(|| anyhow!("docker_compose: external port mapping for op-geth is not valid"))?; - - port_mapping - .parse::() - .map_err(|e| anyhow!("Invalid external port mapping value for op-geth: {e}")) -} diff --git a/justfile b/justfile index 4277ec2..f98ecbc 100644 --- a/justfile +++ b/justfile @@ -92,8 +92,8 @@ ingress-writer: simulator: cargo run --bin tips-simulator node -simulator-playground dir="$HOME/.playground/devnet/": - cargo run --bin tips-simulator -- --builder.playground {{ dir }} node +simulator-playground: + cargo run --bin tips-simulator -- --builder.playground=$HOME/.playground/devnet/ node --datadir ~/.playground/devnet/tips-simulator ui: cd ui && yarn dev From 994f4d1f815c26960f3123ad2f940c0bed8cae78 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Tue, 23 Sep 2025 15:05:01 -0500 Subject: [PATCH 20/39] cargo fmt --- crates/audit/src/storage.rs | 2 +- crates/datastore/src/postgres.rs | 5 +- crates/datastore/src/traits.rs | 9 +- crates/datastore/tests/datastore.rs | 461 +++++++++++++-------- crates/simulator/src/core.rs | 13 +- crates/simulator/src/engine.rs | 53 ++- crates/simulator/src/lib.rs | 91 ++-- crates/simulator/src/listeners/exex.rs | 85 ++-- crates/simulator/src/listeners/mempool.rs | 65 +-- crates/simulator/src/publisher.rs | 62 +-- crates/simulator/src/types.rs | 40 +- crates/simulator/src/worker_pool.rs | 55 +-- crates/simulator/tests/integration_test.rs | 9 +- 13 files changed, 581 insertions(+), 369 deletions(-) diff --git a/crates/audit/src/storage.rs b/crates/audit/src/storage.rs index b9fdc9b..11b7c23 100644 --- a/crates/audit/src/storage.rs +++ b/crates/audit/src/storage.rs @@ -192,7 +192,7 @@ fn update_bundle_history_transform( gas_used: *gas_used, execution_time_us: *execution_time_us, error_reason: error_reason.clone(), - } + }, }; history.push(history_event); diff --git a/crates/datastore/src/postgres.rs b/crates/datastore/src/postgres.rs index 69353d6..6efe8dd 100644 --- a/crates/datastore/src/postgres.rs +++ b/crates/datastore/src/postgres.rs @@ -460,7 +460,10 @@ impl BundleDatastore for PostgresDatastore { } } - async fn select_bundles_with_latest_simulation(&self, filter: BundleFilter) -> Result> { + async fn select_bundles_with_latest_simulation( + &self, + filter: BundleFilter, + ) -> Result> { let base_fee = filter.base_fee.unwrap_or(0); let block_number = filter.block_number.unwrap_or(0) as i64; diff --git a/crates/datastore/src/traits.rs b/crates/datastore/src/traits.rs index 1432360..927425b 100644 --- a/crates/datastore/src/traits.rs +++ b/crates/datastore/src/traits.rs @@ -1,4 +1,6 @@ -use crate::postgres::{BundleFilter, BundleWithMetadata, BundleWithLatestSimulation, Simulation, StateDiff}; +use crate::postgres::{ + BundleFilter, BundleWithLatestSimulation, BundleWithMetadata, Simulation, StateDiff, +}; use alloy_primitives::TxHash; use alloy_rpc_types_mev::EthSendBundle; use anyhow::Result; @@ -41,5 +43,8 @@ pub trait BundleDatastore: Send + Sync { /// Select bundles with their latest simulation /// Only bundles that have at least one simulation are returned - async fn 
select_bundles_with_latest_simulation(&self, filter: BundleFilter) -> Result>; + async fn select_bundles_with_latest_simulation( + &self, + filter: BundleFilter, + ) -> Result>; } diff --git a/crates/datastore/tests/datastore.rs b/crates/datastore/tests/datastore.rs index 74d53f0..0251620 100644 --- a/crates/datastore/tests/datastore.rs +++ b/crates/datastore/tests/datastore.rs @@ -79,32 +79,33 @@ fn create_test_bundle( fn create_test_state_diff() -> StateDiff { let mut state_diff = HashMap::new(); - + // Create test account address - let account1: Address = "0x742d35cc6635c0532925a3b8d40b33dd33ad7309".parse().unwrap(); - let account2: Address = "0x24ae36512421f1d9f6e074f00ff5b8393f5dd925".parse().unwrap(); - + let account1: Address = "0x742d35cc6635c0532925a3b8d40b33dd33ad7309" + .parse() + .unwrap(); + let account2: Address = "0x24ae36512421f1d9f6e074f00ff5b8393f5dd925" + .parse() + .unwrap(); + // Create storage mappings for account1 let mut account1_storage = HashMap::new(); - account1_storage.insert( - StorageKey::ZERO, - StorageValue::from(U256::from(1)), - ); + account1_storage.insert(StorageKey::ZERO, StorageValue::from(U256::from(1))); account1_storage.insert( StorageKey::from(U256::from(1)), StorageValue::from(U256::from(2)), ); - + // Create storage mappings for account2 let mut account2_storage = HashMap::new(); account2_storage.insert( StorageKey::from(U256::from(3)), StorageValue::from(U256::from(4)), ); - + state_diff.insert(account1, account1_storage); state_diff.insert(account2, account2_storage); - + state_diff } @@ -344,34 +345,45 @@ async fn cancel_bundle_workflow() -> eyre::Result<()> { #[tokio::test] async fn insert_and_get_simulation() -> eyre::Result<()> { let harness = setup_datastore().await?; - + // First create a bundle to link the simulation to let test_bundle = create_test_bundle(12345, Some(1640995200), Some(1640995260))?; - let bundle_id = harness.data_store.insert_bundle(test_bundle).await + let bundle_id = harness + .data_store + .insert_bundle(test_bundle) + .await .map_err(|e| eyre::eyre!(e))?; - + // Create simulation data let block_number = 18500000u64; - let block_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef".to_string(); + let block_hash = + "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef".to_string(); let execution_time_us = 250000u64; let gas_used = 21000u64; let state_diff = create_test_state_diff(); - + // Insert simulation - let simulation_id = harness.data_store.insert_simulation( - bundle_id, - block_number, - block_hash.clone(), - execution_time_us, - gas_used, - state_diff.clone(), - ).await.map_err(|e| eyre::eyre!(e))?; - + let simulation_id = harness + .data_store + .insert_simulation( + bundle_id, + block_number, + block_hash.clone(), + execution_time_us, + gas_used, + state_diff.clone(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + // Retrieve simulation - let retrieved_simulation = harness.data_store.get_simulation(simulation_id).await + let retrieved_simulation = harness + .data_store + .get_simulation(simulation_id) + .await .map_err(|e| eyre::eyre!(e))?; assert!(retrieved_simulation.is_some(), "Simulation should be found"); - + let simulation = retrieved_simulation.unwrap(); assert_eq!(simulation.id, simulation_id); assert_eq!(simulation.bundle_id, bundle_id); @@ -380,260 +392,379 @@ async fn insert_and_get_simulation() -> eyre::Result<()> { assert_eq!(simulation.execution_time_us, execution_time_us); assert_eq!(simulation.gas_used, gas_used); 
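The select_bundles_with_latest_simulation API reformatted above is what these tests exercise: it returns only bundles that have at least one simulation, each paired with the simulation recorded at the highest block number. A usage sketch against the PostgresDatastore from this patch (a connected datastore is assumed, and the function itself is illustrative rather than part of the change):

```rust
use tips_datastore::{postgres::BundleFilter, BundleDatastore, PostgresDatastore};

// Illustrative consumer: list each bundle valid for block 200 together with
// its most recent simulation. Bundles with no simulations are skipped.
async fn print_latest_simulations(datastore: &PostgresDatastore) -> anyhow::Result<()> {
    let entries = datastore
        .select_bundles_with_latest_simulation(BundleFilter::new().valid_for_block(200))
        .await?;
    for entry in &entries {
        println!(
            "bundle {}: latest simulation at block {} ({} gas, {} us)",
            entry.latest_simulation.bundle_id,
            entry.latest_simulation.block_number,
            entry.latest_simulation.gas_used,
            entry.latest_simulation.execution_time_us,
        );
    }
    Ok(())
}
```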
assert_eq!(simulation.state_diff.len(), state_diff.len()); - + // Verify state diff content for (account, expected_storage) in &state_diff { - let actual_storage = simulation.state_diff.get(account) + let actual_storage = simulation + .state_diff + .get(account) .expect("Account should exist in state diff"); assert_eq!(actual_storage.len(), expected_storage.len()); for (slot, expected_value) in expected_storage { - let actual_value = actual_storage.get(slot) - .expect("Storage slot should exist"); + let actual_value = actual_storage.get(slot).expect("Storage slot should exist"); assert_eq!(actual_value, expected_value); } } - + Ok(()) } #[tokio::test] async fn simulation_with_empty_state_diff() -> eyre::Result<()> { let harness = setup_datastore().await?; - + // Create a bundle let test_bundle = create_test_bundle(12345, None, None)?; - let bundle_id = harness.data_store.insert_bundle(test_bundle).await + let bundle_id = harness + .data_store + .insert_bundle(test_bundle) + .await .map_err(|e| eyre::eyre!(e))?; - + // Create simulation with empty state diff - let simulation_id = harness.data_store.insert_simulation( - bundle_id, - 18500000, - "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890".to_string(), - 100000, - 15000, - create_empty_state_diff(), - ).await.map_err(|e| eyre::eyre!(e))?; - + let simulation_id = harness + .data_store + .insert_simulation( + bundle_id, + 18500000, + "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890".to_string(), + 100000, + 15000, + create_empty_state_diff(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + // Retrieve and verify - let simulation = harness.data_store.get_simulation(simulation_id).await + let simulation = harness + .data_store + .get_simulation(simulation_id) + .await .map_err(|e| eyre::eyre!(e))? 
.expect("Simulation should exist"); - - assert!(simulation.state_diff.is_empty(), "State diff should be empty"); - + + assert!( + simulation.state_diff.is_empty(), + "State diff should be empty" + ); + Ok(()) } #[tokio::test] async fn multiple_simulations_latest_selection() -> eyre::Result<()> { let harness = setup_datastore().await?; - + // Create a single bundle let test_bundle = create_test_bundle(12345, Some(1000), Some(2000))?; - let bundle_id = harness.data_store.insert_bundle(test_bundle).await + let bundle_id = harness + .data_store + .insert_bundle(test_bundle) + .await .map_err(|e| eyre::eyre!(e))?; - + // Insert multiple simulations with sequential block numbers let base_block = 18500000u64; let mut simulation_ids = Vec::new(); - + for i in 0..5 { let block_number = base_block + i; let block_hash = format!("0x{:064x}", block_number); // Create unique block hash let execution_time = 100000 + (i * 10000); // Varying execution times let gas_used = 21000 + (i * 1000); // Varying gas usage - - let simulation_id = harness.data_store.insert_simulation( - bundle_id, - block_number, - block_hash, - execution_time, - gas_used, - if i % 2 == 0 { create_test_state_diff() } else { create_empty_state_diff() }, - ).await.map_err(|e| eyre::eyre!(e))?; - + + let simulation_id = harness + .data_store + .insert_simulation( + bundle_id, + block_number, + block_hash, + execution_time, + gas_used, + if i % 2 == 0 { + create_test_state_diff() + } else { + create_empty_state_diff() + }, + ) + .await + .map_err(|e| eyre::eyre!(e))?; + simulation_ids.push((simulation_id, block_number, execution_time, gas_used)); } - + // Query for bundles with latest simulation - let results = harness.data_store.select_bundles_with_latest_simulation( - BundleFilter::new() - ).await.map_err(|e| eyre::eyre!(e))?; - + let results = harness + .data_store + .select_bundles_with_latest_simulation(BundleFilter::new()) + .await + .map_err(|e| eyre::eyre!(e))?; + // Should return exactly one bundle assert_eq!(results.len(), 1, "Should return exactly one bundle"); - + let bundle_with_sim = &results[0]; let latest_sim = &bundle_with_sim.latest_simulation; - + // Verify it's the latest simulation (highest block number) let expected_latest_block = base_block + 4; // Last iteration was i=4 - assert_eq!(latest_sim.block_number, expected_latest_block, "Should return simulation with highest block number"); - assert_eq!(latest_sim.bundle_id, bundle_id, "Should reference correct bundle"); - + assert_eq!( + latest_sim.block_number, expected_latest_block, + "Should return simulation with highest block number" + ); + assert_eq!( + latest_sim.bundle_id, bundle_id, + "Should reference correct bundle" + ); + // Verify the execution time and gas used match the latest simulation let expected_execution_time = 100000 + (4 * 10000); // i=4 let expected_gas_used = 21000 + (4 * 1000); // i=4 - assert_eq!(latest_sim.execution_time_us, expected_execution_time, "Execution time should match latest simulation"); - assert_eq!(latest_sim.gas_used, expected_gas_used, "Gas used should match latest simulation"); - + assert_eq!( + latest_sim.execution_time_us, expected_execution_time, + "Execution time should match latest simulation" + ); + assert_eq!( + latest_sim.gas_used, expected_gas_used, + "Gas used should match latest simulation" + ); + // Verify the latest simulation has the expected state diff (should be non-empty since i=4 is even) - assert!(!latest_sim.state_diff.is_empty(), "Latest simulation should have non-empty state diff"); - + assert!( + 
!latest_sim.state_diff.is_empty(), + "Latest simulation should have non-empty state diff" + ); + // Verify that we can still retrieve all individual simulations for (sim_id, block_num, exec_time, gas) in &simulation_ids { - let individual_sim = harness.data_store.get_simulation(*sim_id).await + let individual_sim = harness + .data_store + .get_simulation(*sim_id) + .await .map_err(|e| eyre::eyre!(e))? .expect("Individual simulation should exist"); - + assert_eq!(individual_sim.block_number, *block_num); assert_eq!(individual_sim.execution_time_us, *exec_time); assert_eq!(individual_sim.gas_used, *gas); } - + Ok(()) } #[tokio::test] async fn select_bundles_with_latest_simulation() -> eyre::Result<()> { let harness = setup_datastore().await?; - + // Create three bundles let bundle1 = create_test_bundle(100, Some(1000), Some(2000))?; let bundle2 = create_test_bundle(200, Some(1500), Some(2500))?; let bundle3 = create_test_bundle(300, None, None)?; - - let bundle1_id = harness.data_store.insert_bundle(bundle1).await + + let bundle1_id = harness + .data_store + .insert_bundle(bundle1) + .await .map_err(|e| eyre::eyre!(e))?; - let bundle2_id = harness.data_store.insert_bundle(bundle2).await + let bundle2_id = harness + .data_store + .insert_bundle(bundle2) + .await .map_err(|e| eyre::eyre!(e))?; - let _bundle3_id = harness.data_store.insert_bundle(bundle3).await + let _bundle3_id = harness + .data_store + .insert_bundle(bundle3) + .await .map_err(|e| eyre::eyre!(e))?; - + // Add multiple simulations for bundle1 (to test "latest" logic) - harness.data_store.insert_simulation( - bundle1_id, - 18500000, - "0x1111111111111111111111111111111111111111111111111111111111111111".to_string(), - 100000, - 21000, - create_test_state_diff(), - ).await.map_err(|e| eyre::eyre!(e))?; - - let latest_sim1_id = harness.data_store.insert_simulation( - bundle1_id, - 18500001, // Higher block number = later - "0x2222222222222222222222222222222222222222222222222222222222222222".to_string(), - 120000, - 22000, - create_empty_state_diff(), - ).await.map_err(|e| eyre::eyre!(e))?; - + harness + .data_store + .insert_simulation( + bundle1_id, + 18500000, + "0x1111111111111111111111111111111111111111111111111111111111111111".to_string(), + 100000, + 21000, + create_test_state_diff(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + + let latest_sim1_id = harness + .data_store + .insert_simulation( + bundle1_id, + 18500001, // Higher block number = later + "0x2222222222222222222222222222222222222222222222222222222222222222".to_string(), + 120000, + 22000, + create_empty_state_diff(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + // Add one simulation for bundle2 - let sim2_id = harness.data_store.insert_simulation( - bundle2_id, - 18500002, - "0x3333333333333333333333333333333333333333333333333333333333333333".to_string(), - 90000, - 19000, - create_test_state_diff(), - ).await.map_err(|e| eyre::eyre!(e))?; - + let sim2_id = harness + .data_store + .insert_simulation( + bundle2_id, + 18500002, + "0x3333333333333333333333333333333333333333333333333333333333333333".to_string(), + 90000, + 19000, + create_test_state_diff(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + // Bundle3 has no simulations - + // Query bundles with latest simulation (no filter) - let results = harness.data_store.select_bundles_with_latest_simulation( - BundleFilter::new() - ).await.map_err(|e| eyre::eyre!(e))?; - + let results = harness + .data_store + .select_bundles_with_latest_simulation(BundleFilter::new()) + .await + .map_err(|e| 
eyre::eyre!(e))?; + // Should return 2 bundles (bundle1 and bundle2), sorted by minimum_base_fee DESC - assert_eq!(results.len(), 2, "Should return 2 bundles that have simulations"); - + assert_eq!( + results.len(), + 2, + "Should return 2 bundles that have simulations" + ); + // Verify the results contain the correct bundles and latest simulations - let bundle1_result = results.iter().find(|r| r.bundle_with_metadata.bundle.block_number == 100); - let bundle2_result = results.iter().find(|r| r.bundle_with_metadata.bundle.block_number == 200); - + let bundle1_result = results + .iter() + .find(|r| r.bundle_with_metadata.bundle.block_number == 100); + let bundle2_result = results + .iter() + .find(|r| r.bundle_with_metadata.bundle.block_number == 200); + assert!(bundle1_result.is_some(), "Bundle1 should be in results"); assert!(bundle2_result.is_some(), "Bundle2 should be in results"); - + let bundle1_result = bundle1_result.unwrap(); let bundle2_result = bundle2_result.unwrap(); - + // Check that bundle1 has the latest simulation (block 18500001) assert_eq!(bundle1_result.latest_simulation.id, latest_sim1_id); assert_eq!(bundle1_result.latest_simulation.block_number, 18500001); assert_eq!(bundle1_result.latest_simulation.gas_used, 22000); - + // Check that bundle2 has its simulation assert_eq!(bundle2_result.latest_simulation.id, sim2_id); assert_eq!(bundle2_result.latest_simulation.block_number, 18500002); assert_eq!(bundle2_result.latest_simulation.gas_used, 19000); - + Ok(()) } #[tokio::test] async fn select_bundles_with_latest_simulation_filtered() -> eyre::Result<()> { let harness = setup_datastore().await?; - + // Create bundles with different criteria let bundle1 = create_test_bundle(100, Some(1000), Some(2000))?; // Valid for block 100, timestamp 1000-2000 let bundle2 = create_test_bundle(200, Some(1500), Some(2500))?; // Valid for block 200, timestamp 1500-2500 - - let bundle1_id = harness.data_store.insert_bundle(bundle1).await + + let bundle1_id = harness + .data_store + .insert_bundle(bundle1) + .await .map_err(|e| eyre::eyre!(e))?; - let bundle2_id = harness.data_store.insert_bundle(bundle2).await + let bundle2_id = harness + .data_store + .insert_bundle(bundle2) + .await .map_err(|e| eyre::eyre!(e))?; - + // Add simulations to both bundles - harness.data_store.insert_simulation( - bundle1_id, - 18500000, - "0x1111111111111111111111111111111111111111111111111111111111111111".to_string(), - 100000, - 21000, - create_test_state_diff(), - ).await.map_err(|e| eyre::eyre!(e))?; - - harness.data_store.insert_simulation( - bundle2_id, - 18500001, - "0x2222222222222222222222222222222222222222222222222222222222222222".to_string(), - 120000, - 22000, - create_empty_state_diff(), - ).await.map_err(|e| eyre::eyre!(e))?; - + harness + .data_store + .insert_simulation( + bundle1_id, + 18500000, + "0x1111111111111111111111111111111111111111111111111111111111111111".to_string(), + 100000, + 21000, + create_test_state_diff(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + + harness + .data_store + .insert_simulation( + bundle2_id, + 18500001, + "0x2222222222222222222222222222222222222222222222222222222222222222".to_string(), + 120000, + 22000, + create_empty_state_diff(), + ) + .await + .map_err(|e| eyre::eyre!(e))?; + // Test filtering by block number let block_filter = BundleFilter::new().valid_for_block(200); - let filtered_results = harness.data_store.select_bundles_with_latest_simulation(block_filter).await + let filtered_results = harness + .data_store + 
.select_bundles_with_latest_simulation(block_filter) + .await .map_err(|e| eyre::eyre!(e))?; - - assert_eq!(filtered_results.len(), 1, "Should return 1 bundle valid for block 200"); - assert_eq!(filtered_results[0].bundle_with_metadata.bundle.block_number, 200); - + + assert_eq!( + filtered_results.len(), + 1, + "Should return 1 bundle valid for block 200" + ); + assert_eq!( + filtered_results[0].bundle_with_metadata.bundle.block_number, + 200 + ); + // Test filtering by timestamp let timestamp_filter = BundleFilter::new().valid_for_timestamp(1200); - let timestamp_results = harness.data_store.select_bundles_with_latest_simulation(timestamp_filter).await + let timestamp_results = harness + .data_store + .select_bundles_with_latest_simulation(timestamp_filter) + .await .map_err(|e| eyre::eyre!(e))?; - - assert_eq!(timestamp_results.len(), 1, "Should return 1 bundle valid for timestamp 1200"); - assert_eq!(timestamp_results[0].bundle_with_metadata.bundle.block_number, 100); - + + assert_eq!( + timestamp_results.len(), + 1, + "Should return 1 bundle valid for timestamp 1200" + ); + assert_eq!( + timestamp_results[0] + .bundle_with_metadata + .bundle + .block_number, + 100 + ); + Ok(()) } #[tokio::test] async fn get_nonexistent_simulation() -> eyre::Result<()> { let harness = setup_datastore().await?; - + // Try to get simulation that doesn't exist let fake_id = Uuid::new_v4(); - let result = harness.data_store.get_simulation(fake_id).await + let result = harness + .data_store + .get_simulation(fake_id) + .await .map_err(|e| eyre::eyre!(e))?; - - assert!(result.is_none(), "Should return None for non-existent simulation"); - + + assert!( + result.is_none(), + "Should return None for non-existent simulation" + ); + Ok(()) } diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs index 837c098..ebaf1e3 100644 --- a/crates/simulator/src/core.rs +++ b/crates/simulator/src/core.rs @@ -7,7 +7,7 @@ use tracing::{error, info}; /// Core bundle simulator that provides shared simulation logic /// Used by both mempool event simulators and ExEx event simulators -pub struct BundleSimulator +pub struct BundleSimulator where E: SimulationEngine, P: SimulationPublisher, @@ -16,18 +16,15 @@ where publisher: P, } -impl BundleSimulator +impl BundleSimulator where E: SimulationEngine, P: SimulationPublisher, { pub fn new(engine: E, publisher: P) -> Self { - Self { - engine, - publisher, - } + Self { engine, publisher } } - + /// Process a simulation request by creating state provider from factory /// Convenience method that handles state provider creation pub async fn simulate( @@ -44,7 +41,7 @@ where let state_provider = state_provider_factory .state_by_block_hash(request.block_hash) .map_err(|e| eyre::eyre!("Failed to get state provider: {}", e))?; - + // Run the simulation match self.engine.simulate_bundle(request, &state_provider).await { Ok(result) => { diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 7c7a024..268db65 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -1,16 +1,16 @@ use crate::types::{SimulationError, SimulationRequest, SimulationResult}; use alloy_consensus::{transaction::SignerRecoverable, BlockHeader}; -use alloy_primitives::B256; use alloy_eips::eip2718::Decodable2718; +use alloy_primitives::B256; use alloy_rpc_types::BlockNumberOrTag; -use eyre::Result; use async_trait::async_trait; -use reth_node_api::FullNodeComponents; -use reth_provider::{StateProvider, StateProviderFactory, HeaderProvider}; -use 
reth_revm::{database::StateProviderDatabase, db::State}; +use eyre::Result; +use reth_evm::execute::BlockBuilder; use reth_evm::ConfigureEvm; use reth_evm::NextBlockEnvAttributes; -use reth_evm::execute::BlockBuilder; +use reth_node_api::FullNodeComponents; +use reth_provider::{HeaderProvider, StateProvider, StateProviderFactory}; +use reth_revm::{database::StateProviderDatabase, db::State}; use std::collections::HashMap; use std::sync::Arc; use std::time::Instant; @@ -54,14 +54,20 @@ where // Get the state provider at the specified block let state_provider = provider .state_by_block_number_or_tag(BlockNumberOrTag::Number(block_number)) - .map_err(|e| eyre::eyre!("Failed to get state provider at block {}: {}", block_number, e))?; - + .map_err(|e| { + eyre::eyre!( + "Failed to get state provider at block {}: {}", + block_number, + e + ) + })?; + // Get the block hash let block_hash = state_provider .block_hash(block_number) .map_err(|e| eyre::eyre!("Failed to get block hash: {}", e))? .ok_or_else(|| eyre::eyre!("Block {} not found", block_number))?; - + Ok((state_provider, block_hash)) } @@ -71,19 +77,19 @@ where /// use reth_exex::ExExContext; /// use reth_revm::database::StateProviderDatabase; /// use revm::Evm; -/// +/// /// // Get provider from ExEx context /// let provider = ctx.provider.clone(); -/// +/// /// // Prepare EVM state /// let (state_provider, block_hash) = prepare_evm_state::( /// provider.clone(), /// block_number, /// )?; -/// +/// /// // Create state database /// let db = StateProviderDatabase::new(state_provider); -/// +/// /// // Build EVM with the database /// // Note: You would configure the EVM with proper environment settings /// // based on your chain's requirements (gas limits, fork settings, etc.) @@ -113,7 +119,6 @@ where evm_config: Node::Evm, } - impl RethSimulationEngine where Node: FullNodeComponents, @@ -124,7 +129,6 @@ where evm_config, } } - } #[async_trait] @@ -162,9 +166,12 @@ where // Create the state database and builder for next block let state_provider = self.provider.state_by_block_hash(request.block_hash)?; let state_db = StateProviderDatabase::new(state_provider); - let mut db = State::builder().with_database(state_db).with_bundle_update().build(); + let mut db = State::builder() + .with_database(state_db) + .with_bundle_update() + .build(); let attributes = NextBlockEnvAttributes { - timestamp: header.timestamp() + BLOCK_TIME, // Optimism 2-second block time + timestamp: header.timestamp() + BLOCK_TIME, // Optimism 2-second block time suggested_fee_recipient: header.beneficiary(), prev_randao: B256::random(), gas_limit: header.gas_limit(), @@ -187,12 +194,14 @@ where .evm_config .builder_for_next_block(&mut db, &header, attributes) .map_err(|e| eyre::eyre!("Failed to init block builder: {}", e))?; - builder.apply_pre_execution_changes().map_err(|e| eyre::eyre!("Failed pre-exec: {}", e))?; + builder + .apply_pre_execution_changes() + .map_err(|e| eyre::eyre!("Failed pre-exec: {}", e))?; // Simulate each transaction in the bundle for (tx_index, tx_bytes) in request.bundle.txs.iter().enumerate() { // Decode bytes into the node's SignedTx type and recover the signer for execution - type NodeSignedTxTy = + type NodeSignedTxTy = <<::Types as reth_node_api::NodeTypes>::Primitives as reth_node_api::NodePrimitives>::SignedTx; let mut reader = tx_bytes.iter().as_slice(); let signed: NodeSignedTxTy = Decodable2718::decode_2718(&mut reader) @@ -207,7 +216,9 @@ where } Err(e) => { failed = true; - failure_reason = Some(SimulationError::Unknown { message: 
format!("Execution failed: {}", e) }); + failure_reason = Some(SimulationError::Unknown { + message: format!("Execution failed: {}", e), + }); break; } } @@ -237,7 +248,7 @@ where } else { // Collect the state diff let bundle = db.take_bundle(); - + // Extract storage changes from the bundle let mut modified_storage_slots = HashMap::new(); for (address, account) in bundle.state() { diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 30facad..398901c 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -7,29 +7,36 @@ pub mod types; pub mod worker_pool; use eyre::Result; +use reth_evm::{ConfigureEvm, NextBlockEnvAttributes}; use reth_exex::ExExContext; use reth_node_api::FullNodeComponents; -use reth_evm::{ConfigureEvm, NextBlockEnvAttributes}; use std::sync::Arc; -use tracing::{info, error}; +use tracing::{error, info}; pub use config::SimulatorNodeConfig; pub use core::BundleSimulator; -pub use engine::{SimulationEngine, RethSimulationEngine}; +pub use engine::{RethSimulationEngine, SimulationEngine}; pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig}; pub use publisher::{SimulationPublisher, TipsSimulationPublisher}; -pub use types::{SimulationResult, SimulationError, ExExSimulationConfig}; +pub use types::{ExExSimulationConfig, SimulationError, SimulationResult}; pub use worker_pool::SimulationWorkerPool; // Type aliases for concrete implementations -pub type TipsBundleSimulator = BundleSimulator, TipsSimulationPublisher>; -pub type TipsExExEventListener = ExExEventListener, TipsSimulationPublisher, tips_datastore::PostgresDatastore>; -pub type TipsMempoolEventListener = MempoolEventListener, TipsSimulationPublisher>; +pub type TipsBundleSimulator = + BundleSimulator, TipsSimulationPublisher>; +pub type TipsExExEventListener = ExExEventListener< + Node, + RethSimulationEngine, + TipsSimulationPublisher, + tips_datastore::PostgresDatastore, +>; +pub type TipsMempoolEventListener = + MempoolEventListener, TipsSimulationPublisher>; // Initialization functions /// Common initialization components shared across listeners -struct CommonListenerComponents +struct CommonListenerComponents where Node: FullNodeComponents, ::Evm: ConfigureEvm, @@ -42,7 +49,7 @@ where async fn init_common_components( provider: Arc, evm_config: Node::Evm, - database_url: String, + database_url: String, kafka_brokers: String, kafka_topic: String, ) -> Result> @@ -51,8 +58,9 @@ where ::Evm: ConfigureEvm, { let datastore = Arc::new( - tips_datastore::PostgresDatastore::connect(database_url).await - .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))? + tips_datastore::PostgresDatastore::connect(database_url) + .await + .map_err(|e| eyre::eyre!("Failed to connect to database: {}", e))?, ); // Create Kafka producer @@ -62,7 +70,8 @@ where .create::() .map_err(|e| eyre::eyre!("Failed to create Kafka producer: {}", e))?; - let publisher = TipsSimulationPublisher::new(Arc::clone(&datastore), kafka_producer, kafka_topic); + let publisher = + TipsSimulationPublisher::new(Arc::clone(&datastore), kafka_producer, kafka_topic); info!( kafka_brokers = %kafka_brokers, "Database publisher with Kafka initialized" @@ -81,7 +90,7 @@ where } /// Initialize ExEx event listener (ExEx) that processes committed blocks -/// +/// /// Note: The worker pool is created but NOT started. 
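An aside before the initializers below, since all of them lean on it: init_common_components above hides the rdkafka producer construction behind a map_err chain, and the configuration lines fall outside the visible hunk. Reconstructed in isolation it looks roughly like the following sketch; the two .set options are illustrative placeholders, since the patch context does not show which client settings are actually applied:

```rust
use rdkafka::config::ClientConfig;
use rdkafka::error::KafkaError;
use rdkafka::producer::FutureProducer;

// Sketch of the producer setup wrapped by init_common_components. The broker
// list arrives as a comma-separated string, as elsewhere in this patch.
fn kafka_producer(brokers: &str) -> Result<FutureProducer, KafkaError> {
    ClientConfig::new()
        .set("bootstrap.servers", brokers) // e.g. "localhost:9092"
        .set("message.timeout.ms", "5000") // illustrative, not from the patch
        .create()
}
```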
pub async fn init_exex_event_listener( ctx: ExExContext, @@ -94,7 +103,7 @@ where ::Evm: ConfigureEvm, { info!("Initializing ExEx event listener"); - + let provider = Arc::new(ctx.components.provider().clone()); let evm_config = ctx.components.evm_config().clone(); @@ -104,7 +113,8 @@ where config.database_url.clone(), kafka_brokers, kafka_topic, - ).await?; + ) + .await?; let worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), @@ -112,11 +122,7 @@ where config.max_concurrent_simulations, ); - let consensus_listener = ExExEventListener::new( - ctx, - common_components.datastore, - worker_pool, - ); + let consensus_listener = ExExEventListener::new(ctx, common_components.datastore, worker_pool); info!( max_concurrent = config.max_concurrent_simulations, @@ -127,7 +133,7 @@ where } /// Initialize mempool event listener that processes mempool transactions -/// +/// /// Note: The worker pool is created but NOT started. pub async fn init_mempool_event_listener( ctx: Arc>, @@ -148,7 +154,8 @@ where config.database_url.clone(), config.kafka_brokers.join(","), config.kafka_topic.clone(), - ).await?; + ) + .await?; let worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), @@ -156,12 +163,8 @@ where max_concurrent_simulations, ); - let mempool_listener = MempoolEventListener::new( - Arc::clone(&provider), - config, - worker_pool, - )?; - + let mempool_listener = MempoolEventListener::new(Arc::clone(&provider), config, worker_pool)?; + info!( max_concurrent = max_concurrent_simulations, "Mempool event listener initialized successfully" @@ -170,17 +173,18 @@ where Ok(mempool_listener) } - /// Encapsulates both event listeners with their shared worker pool -/// +/// /// This struct ensures that the ExEx and mempool listeners always use the same /// worker pool instance, preventing potential misconfigurations. -pub struct ListenersWithWorkers +pub struct ListenersWithWorkers where Node: FullNodeComponents, ::Evm: ConfigureEvm, { - worker_pool: Arc, TipsSimulationPublisher, Node::Provider>>, + worker_pool: Arc< + SimulationWorkerPool, TipsSimulationPublisher, Node::Provider>, + >, exex_listener: TipsExExEventListener, mempool_listener: TipsMempoolEventListener, } @@ -191,7 +195,7 @@ where ::Evm: ConfigureEvm, { /// Initialize both event listeners with a shared worker pool - /// + /// /// The worker pool is created but NOT started. Call `run()` to start /// the worker pool and begin processing events. pub async fn new( @@ -212,7 +216,8 @@ where exex_config.database_url.clone(), mempool_config.kafka_brokers.join(","), mempool_config.kafka_topic.clone(), - ).await?; + ) + .await?; let shared_worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), @@ -231,29 +236,29 @@ where mempool_config, Arc::clone(&shared_worker_pool), )?; - + info!( max_concurrent = max_concurrent_simulations, "Both ExEx and mempool event listeners initialized successfully" ); - + Ok(Self { worker_pool: shared_worker_pool, exex_listener, mempool_listener, }) } - + /// Run both listeners with lifecycle management for the shared worker pool - /// + /// /// Starts the worker pool, runs both listeners concurrently, and ensures proper shutdown pub async fn run(self) -> Result<()> { info!("Starting shared worker pool"); - + self.worker_pool.start().await; - + info!("Running listeners concurrently"); - + let result = tokio::select! 
{ res = self.exex_listener.run() => { info!("ExEx listener completed"); @@ -264,7 +269,7 @@ where res }, }; - + info!("Shutting down worker pool"); match Arc::try_unwrap(self.worker_pool) { Ok(pool) => { @@ -275,7 +280,7 @@ where error!("Failed to get ownership of worker pool for shutdown"); } } - + result } } diff --git a/crates/simulator/src/listeners/exex.rs b/crates/simulator/src/listeners/exex.rs index 90e343f..b2fd95b 100644 --- a/crates/simulator/src/listeners/exex.rs +++ b/crates/simulator/src/listeners/exex.rs @@ -1,48 +1,53 @@ -use crate::types::SimulationRequest; -use crate::worker_pool::{SimulationWorkerPool, SimulationTask}; use crate::engine::SimulationEngine; use crate::publisher::SimulationPublisher; +use crate::types::SimulationRequest; +use crate::worker_pool::{SimulationTask, SimulationWorkerPool}; use alloy_consensus::BlockHeader; use alloy_primitives::B256; use alloy_rpc_types::BlockNumHash; use alloy_rpc_types_mev::EthSendBundle; use eyre::Result; +use futures_util::StreamExt; use reth_exex::{ExExContext, ExExEvent, ExExNotification}; use reth_node_api::FullNodeComponents; -use futures_util::StreamExt; use std::sync::Arc; use tracing::{debug, error, info, warn}; use uuid::Uuid; /// Datastore-based mempool bundle provider -pub struct DatastoreBundleProvider +pub struct DatastoreBundleProvider where D: tips_datastore::BundleDatastore, { datastore: Arc, } -impl DatastoreBundleProvider +impl DatastoreBundleProvider where D: tips_datastore::BundleDatastore, { pub fn new(datastore: Arc) -> Self { Self { datastore } } - + /// Get all bundles valid for a specific block - pub async fn get_bundles_for_block(&self, block_number: u64) -> Result> { + pub async fn get_bundles_for_block( + &self, + block_number: u64, + ) -> Result> { use tips_datastore::postgres::BundleFilter; - + // Create filter for bundles valid at this block - let filter = BundleFilter::new() - .valid_for_block(block_number); - + let filter = BundleFilter::new().valid_for_block(block_number); + // Fetch bundles from datastore - let bundles_with_metadata = self.datastore.select_bundles(filter).await + let bundles_with_metadata = self + .datastore + .select_bundles(filter) + .await .map_err(|e| eyre::eyre!("Failed to select bundles: {}", e))?; - + // Convert to (Uuid, EthSendBundle) pairs // TODO: The bundle ID should be returned from the datastore query // For now, we generate new IDs for each bundle @@ -50,14 +55,14 @@ where .into_iter() .map(|bwm| (Uuid::new_v4(), bwm.bundle)) .collect(); - + Ok(result) } } /// ExEx event listener that processes chain events and queues bundle simulations /// Processes chain events (commits, reorgs, reverts) and queues simulation tasks -pub struct ExExEventListener +pub struct ExExEventListener where Node: FullNodeComponents, E: SimulationEngine + Clone + 'static, @@ -119,7 +124,12 @@ where } /// Handle ExEx notifications - async fn handle_notification(&mut self, notification: ExExNotification<<::Types as reth_node_api::NodeTypes>::Primitives>) -> Result<()> { + async fn handle_notification( + &mut self, + notification: ExExNotification< + <::Types as reth_node_api::NodeTypes>::Primitives, + >, + ) -> Result<()> { match notification { ExExNotification::ChainCommitted { new } => { info!( @@ -127,7 +137,7 @@ where num_blocks = new.blocks().len(), "Processing committed blocks" ); - + // Process each block in the committed chain for (_block_num, block) in new.blocks() { let block_hash = block.hash(); @@ -137,14 +147,17 @@ where // Notify that we've processed this notification 
self.ctx .events - .send(ExExEvent::FinishedHeight(BlockNumHash::new(new.tip().number(), new.tip().hash())))?; + .send(ExExEvent::FinishedHeight(BlockNumHash::new( + new.tip().number(), + new.tip().hash(), + )))?; } ExExNotification::ChainReorged { old: _, new } => { warn!( block_range = ?new.range(), "Chain reorg detected, processing new chain" ); - + // Process the new canonical chain for (_block_num, block) in new.blocks() { let block_hash = block.hash(); @@ -153,7 +166,10 @@ where self.ctx .events - .send(ExExEvent::FinishedHeight(BlockNumHash::new(new.tip().number(), new.tip().hash())))?; + .send(ExExEvent::FinishedHeight(BlockNumHash::new( + new.tip().number(), + new.tip().hash(), + )))?; } ExExNotification::ChainReverted { old } => { warn!( @@ -163,7 +179,10 @@ where self.ctx .events - .send(ExExEvent::FinishedHeight(BlockNumHash::new(old.tip().number(), old.tip().hash())))?; + .send(ExExEvent::FinishedHeight(BlockNumHash::new( + old.tip().number(), + old.tip().hash(), + )))?; } } @@ -171,13 +190,16 @@ where } /// Process a single block for potential bundle simulations - async fn process_block(&mut self, block: (&B256, &reth_primitives::RecoveredBlock)) -> Result<()> + async fn process_block( + &mut self, + block: (&B256, &reth_primitives::RecoveredBlock), + ) -> Result<()> where B: reth_node_api::Block, { let (block_hash, sealed_block) = block; let block_number = sealed_block.number(); - + debug!( block_number = block_number, block_hash = ?block_hash, @@ -189,9 +211,8 @@ where // Fetch all bundles valid for this block from datastore use tips_datastore::postgres::BundleFilter; - let filter = BundleFilter::new() - .valid_for_block(block_number); - + let filter = BundleFilter::new().valid_for_block(block_number); + let bundles_with_metadata = match self.datastore.select_bundles(filter).await { Ok(bundles) => bundles, Err(e) => { @@ -203,7 +224,7 @@ where return Ok(()); } }; - + info!( block_number, num_bundles = bundles_with_metadata.len(), @@ -215,7 +236,7 @@ where // TODO: The bundle ID should be returned from the datastore query // For now, we generate new IDs for each bundle let bundle_id = Uuid::new_v4(); - + // Create simulation request let request = SimulationRequest { bundle_id, @@ -223,12 +244,10 @@ where block_number, block_hash: *block_hash, }; - + // Create simulation task - let task = SimulationTask { - request, - }; - + let task = SimulationTask { request }; + // Send to worker queue if let Err(e) = self.worker_pool.queue_simulation(task).await { error!( diff --git a/crates/simulator/src/listeners/mempool.rs b/crates/simulator/src/listeners/mempool.rs index 2db289b..b6ed70c 100644 --- a/crates/simulator/src/listeners/mempool.rs +++ b/crates/simulator/src/listeners/mempool.rs @@ -2,20 +2,20 @@ use crate::engine::SimulationEngine; use crate::publisher::SimulationPublisher; use crate::types::SimulationRequest; use crate::worker_pool::{SimulationTask, SimulationWorkerPool}; +use alloy_primitives::B256; use eyre::Result; use rdkafka::{ config::ClientConfig, consumer::{Consumer, StreamConsumer}, message::Message, }; -use tokio::sync::mpsc; -use tracing::{debug, error, info}; -use std::time::Duration; -use std::sync::Arc; -use alloy_primitives::B256; -use reth_provider::{BlockNumReader, HeaderProvider}; use reth_node_api::FullNodeComponents; +use reth_provider::{BlockNumReader, HeaderProvider}; +use std::sync::Arc; +use std::time::Duration; use tips_audit::types::MempoolEvent; +use tokio::sync::mpsc; +use tracing::{debug, error, info}; /// Configuration for mempool event 
listening #[derive(Debug, Clone)] @@ -30,9 +30,8 @@ pub struct MempoolListenerConfig { pub database_url: String, } - /// Mempool event listener that processes events and queues simulations -pub struct MempoolEventListener +pub struct MempoolEventListener where Node: FullNodeComponents, E: SimulationEngine, @@ -48,7 +47,7 @@ where worker_pool: Arc>, } -impl MempoolEventListener +impl MempoolEventListener where Node: FullNodeComponents, E: SimulationEngine + Clone + 'static, @@ -72,8 +71,9 @@ where .create() .map_err(|e| eyre::eyre!("Failed to create Kafka consumer: {}", e))?; - consumer.subscribe(&[&config.kafka_topic]) - .map_err(|e| eyre::eyre!("Failed to subscribe to topic {}: {}", config.kafka_topic, e))?; + consumer.subscribe(&[&config.kafka_topic]).map_err(|e| { + eyre::eyre!("Failed to subscribe to topic {}: {}", config.kafka_topic, e) + })?; Ok(Self { provider, @@ -84,7 +84,7 @@ where } /// Run the mempool event listener - pub async fn run(self) -> Result<()> + pub async fn run(self) -> Result<()> where E: 'static, P: 'static, @@ -93,10 +93,10 @@ where topic = %self.topic, "Starting mempool event listener" ); - + // Create channel for simulation requests let (sender, mut receiver) = mpsc::channel::(1000); - + // Start Kafka listener in a separate task let consumer = self.consumer; let provider = Arc::clone(&self.provider); @@ -124,11 +124,13 @@ where // Convert mempool events that contain bundles into simulation requests match event { - MempoolEvent::Created { bundle_id, bundle } | - MempoolEvent::Updated { bundle_id, bundle } => { - let (block_number, block_hash) = match provider.best_block_number() { + MempoolEvent::Created { bundle_id, bundle } + | MempoolEvent::Updated { bundle_id, bundle } => { + let (block_number, block_hash) = match provider.best_block_number() + { Ok(num) => { - let hash = provider.sealed_header(num) + let hash = provider + .sealed_header(num) .unwrap_or_default() .map(|h| h.hash()) .unwrap_or_default(); @@ -136,7 +138,7 @@ where } Err(_) => (0, B256::ZERO), }; - + let simulation_request = SimulationRequest { bundle_id, bundle, @@ -162,7 +164,9 @@ where } // Commit the message - if let Err(e) = consumer.commit_message(&message, rdkafka::consumer::CommitMode::Async) { + if let Err(e) = + consumer.commit_message(&message, rdkafka::consumer::CommitMode::Async) + { error!(error = %e, "Failed to commit Kafka message"); } } @@ -173,7 +177,7 @@ where } } }); - + // Process simulation requests using the shared worker pool let worker_pool = Arc::clone(&self.worker_pool); let processing_handle = tokio::spawn(async move { @@ -184,12 +188,10 @@ where block_number = request.block_number, "Queuing bundle simulation for mempool event" ); - + // Create simulation task - let task = SimulationTask { - request, - }; - + let task = SimulationTask { request }; + // Queue simulation using shared worker pool if let Err(e) = worker_pool.queue_simulation(task).await { error!( @@ -200,16 +202,17 @@ where } } }); - + // Wait for both tasks to complete - let (listener_result, _processing_result) = tokio::try_join!(listener_handle, processing_handle) - .map_err(|e| eyre::eyre!("Task join error: {}", e))?; - + let (listener_result, _processing_result) = + tokio::try_join!(listener_handle, processing_handle) + .map_err(|e| eyre::eyre!("Task join error: {}", e))?; + if let Err(e) = listener_result { error!(error = %e, "Mempool listener task failed"); return Err(e); } - + info!("Mempool event listener completed"); Ok(()) } diff --git a/crates/simulator/src/publisher.rs 
b/crates/simulator/src/publisher.rs index e8c4889..bb65243 100644 --- a/crates/simulator/src/publisher.rs +++ b/crates/simulator/src/publisher.rs @@ -1,11 +1,11 @@ use crate::types::SimulationResult; -use eyre::Result; use async_trait::async_trait; +use eyre::Result; use rdkafka::producer::FutureProducer; use std::collections::HashMap; use std::sync::Arc; -use tips_audit::{MempoolEventPublisher, KafkaMempoolEventPublisher}; -use tips_datastore::{PostgresDatastore, BundleDatastore, postgres::StateDiff}; +use tips_audit::{KafkaMempoolEventPublisher, MempoolEventPublisher}; +use tips_datastore::{postgres::StateDiff, BundleDatastore, PostgresDatastore}; use tracing::{debug, error, info, warn}; #[async_trait] @@ -21,11 +21,7 @@ pub struct TipsSimulationPublisher { } impl TipsSimulationPublisher { - pub fn new( - datastore: Arc, - producer: FutureProducer, - topic: String, - ) -> Self { + pub fn new(datastore: Arc, producer: FutureProducer, topic: String) -> Self { let kafka_publisher = Arc::new(KafkaMempoolEventPublisher::new(producer, topic)); Self { datastore, @@ -45,32 +41,42 @@ impl TipsSimulationPublisher { // Convert state diff from alloy format to datastore format let state_diff = self.convert_state_diff(&result.state_diff)?; - + // Store the simulation using the datastore interface - let simulation_id = self.datastore.insert_simulation( - result.bundle_id, - result.block_number, - format!("0x{}", hex::encode(result.block_hash.as_slice())), - result.execution_time_us as u64, - result.gas_used.unwrap_or(0), - state_diff, - ).await.map_err(|e| eyre::eyre!("Failed to insert simulation: {}", e))?; - + let simulation_id = self + .datastore + .insert_simulation( + result.bundle_id, + result.block_number, + format!("0x{}", hex::encode(result.block_hash.as_slice())), + result.execution_time_us as u64, + result.gas_used.unwrap_or(0), + state_diff, + ) + .await + .map_err(|e| eyre::eyre!("Failed to insert simulation: {}", e))?; + debug!( simulation_id = %simulation_id, bundle_id = %result.bundle_id, "Successfully stored simulation result in database" ); - + Ok(()) } /// Convert state diff from simulator format to datastore format - fn convert_state_diff(&self, state_diff: &HashMap>) -> Result { + fn convert_state_diff( + &self, + state_diff: &HashMap< + alloy_primitives::Address, + HashMap, + >, + ) -> Result { // StateDiff expects HashMap> // where StorageKey is B256 and StorageValue is U256 let mut converted = HashMap::new(); - + for (address, storage) in state_diff { let mut storage_map = HashMap::new(); for (key, value) in storage { @@ -81,7 +87,7 @@ impl TipsSimulationPublisher { } converted.insert(*address, storage_map); } - + Ok(converted) } @@ -94,7 +100,7 @@ impl TipsSimulationPublisher { success = result.success, "Publishing simulation result to Kafka" ); - + let event = tips_audit::types::MempoolEvent::Simulated { bundle_id: result.bundle_id, simulation_id: result.id, @@ -104,17 +110,19 @@ impl TipsSimulationPublisher { execution_time_us: result.execution_time_us, error_reason: result.error_reason.clone(), }; - - publisher.publish(event).await + + publisher + .publish(event) + .await .map_err(|e| eyre::eyre!("Failed to publish simulation event: {}", e))?; - + debug!( simulation_id = %result.id, bundle_id = %result.bundle_id, "Successfully published simulation result to Kafka" ); } - + Ok(()) } } diff --git a/crates/simulator/src/types.rs b/crates/simulator/src/types.rs index fcf1231..cf6c964 100644 --- a/crates/simulator/src/types.rs +++ b/crates/simulator/src/types.rs @@ -50,9 +50,17 
@@ pub enum SimulationError { /// Bundle ran out of gas OutOfGas, /// Invalid nonce in one of the transactions - InvalidNonce { tx_index: usize, expected: u64, actual: u64 }, + InvalidNonce { + tx_index: usize, + expected: u64, + actual: u64, + }, /// Insufficient balance for gas payment - InsufficientBalance { tx_index: usize, required: U256, available: U256 }, + InsufficientBalance { + tx_index: usize, + required: U256, + available: U256, + }, /// State access error (RPC failure, etc.) StateAccessError { message: String }, /// Simulation timeout @@ -66,13 +74,31 @@ impl std::fmt::Display for SimulationError { match self { SimulationError::Revert { reason } => write!(f, "Bundle reverted: {}", reason), SimulationError::OutOfGas => write!(f, "Bundle ran out of gas"), - SimulationError::InvalidNonce { tx_index, expected, actual } => { - write!(f, "Invalid nonce in tx {}: expected {}, got {}", tx_index, expected, actual) + SimulationError::InvalidNonce { + tx_index, + expected, + actual, + } => { + write!( + f, + "Invalid nonce in tx {}: expected {}, got {}", + tx_index, expected, actual + ) } - SimulationError::InsufficientBalance { tx_index, required, available } => { - write!(f, "Insufficient balance in tx {}: required {}, available {}", tx_index, required, available) + SimulationError::InsufficientBalance { + tx_index, + required, + available, + } => { + write!( + f, + "Insufficient balance in tx {}: required {}, available {}", + tx_index, required, available + ) + } + SimulationError::StateAccessError { message } => { + write!(f, "State access error: {}", message) } - SimulationError::StateAccessError { message } => write!(f, "State access error: {}", message), SimulationError::Timeout => write!(f, "Simulation timed out"), SimulationError::Unknown { message } => write!(f, "Unknown error: {}", message), } diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs index 8472d01..40d4ac7 100644 --- a/crates/simulator/src/worker_pool.rs +++ b/crates/simulator/src/worker_pool.rs @@ -2,9 +2,9 @@ use crate::core::BundleSimulator; use crate::engine::SimulationEngine; use crate::publisher::SimulationPublisher; use crate::types::SimulationRequest; -use std::sync::Arc; -use std::sync::atomic::{AtomicU64, Ordering}; use reth_provider::StateProviderFactory; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::Arc; use tokio::sync::mpsc; use tokio::task::JoinSet; use tracing::{debug, info, warn}; @@ -50,7 +50,7 @@ where max_concurrent_simulations: usize, ) -> Arc { let (simulation_tx, simulation_rx) = mpsc::channel(1000); - + Arc::new(Self { simulator, state_provider_factory, @@ -66,43 +66,43 @@ where /// Returns true if workers were started, false if already running pub async fn start(self: &Arc) -> bool { let mut handles = self.worker_handles.lock().await; - + if !handles.is_empty() { debug!("Simulation workers already started"); return false; } - info!(num_workers = self.max_concurrent, "Starting simulation workers"); - + info!( + num_workers = self.max_concurrent, + "Starting simulation workers" + ); + for worker_id in 0..self.max_concurrent { let pool = Arc::clone(self); - - handles.spawn(async move { - Self::simulation_worker( - worker_id, - pool, - ).await - }); + + handles.spawn(async move { Self::simulation_worker(worker_id, pool).await }); } true } /// Queue a simulation task - pub async fn queue_simulation(&self, task: SimulationTask) -> Result<(), mpsc::error::SendError> { + pub async fn queue_simulation( + &self, + task: SimulationTask, + ) -> 
Result<(), mpsc::error::SendError> { self.simulation_tx.send(task).await } - + /// Update the latest block number being processed pub fn update_latest_block(&self, block_number: u64) { self.latest_block.store(block_number, Ordering::Release); debug!(block_number, "Updated latest block for cancellation"); } - /// Wait for all workers to complete pub async fn shutdown(self) { // Close the channel to signal workers to stop drop(self.simulation_tx); - + // Wait for workers to complete let mut handles = self.worker_handles.lock().await; while let Some(result) = handles.join_next().await { @@ -113,12 +113,9 @@ where } /// Worker task that processes simulation requests - async fn simulation_worker( - worker_id: usize, - pool: Arc, - ) { + async fn simulation_worker(worker_id: usize, pool: Arc) { debug!(worker_id, "Simulation worker started"); - + loop { // Get the next simulation task let task = { @@ -126,12 +123,12 @@ where let mut rx = pool.simulation_rx.lock().await; rx.recv().await }; - + let Some(task) = task else { debug!(worker_id, "Simulation channel closed, worker shutting down"); break; }; - + // Check if this simulation is for an old block let current_latest = pool.latest_block.load(Ordering::Acquire); if task.request.block_number < current_latest { @@ -144,9 +141,13 @@ where ); continue; } - + // Execute the simulation - match pool.simulator.simulate(&task.request, pool.state_provider_factory.as_ref()).await { + match pool + .simulator + .simulate(&task.request, pool.state_provider_factory.as_ref()) + .await + { Ok(_) => { debug!( worker_id, @@ -164,7 +165,7 @@ where } } } - + debug!(worker_id, "Simulation worker stopped"); } } diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index b7e1db2..0b1c519 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -1,7 +1,7 @@ -use tips_simulator::types::SimulationRequest; -use tips_simulator::MempoolListenerConfig; use alloy_primitives::{Bytes, B256}; use alloy_rpc_types_mev::EthSendBundle; +use tips_simulator::types::SimulationRequest; +use tips_simulator::MempoolListenerConfig; use uuid::Uuid; // Basic smoke test to ensure the core simulation types work correctly @@ -51,7 +51,10 @@ fn test_mempool_simulator_config() { assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); assert_eq!(config.kafka_topic, "mempool-events"); assert_eq!(config.kafka_group_id, "tips-simulator"); - assert_eq!(config.database_url, "postgresql://user:pass@localhost:5432/tips"); + assert_eq!( + config.database_url, + "postgresql://user:pass@localhost:5432/tips" + ); } // Future integration tests would test both: From fdfc0c7028751a37f51ffc0b70367ba6d3e955cf Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Tue, 23 Sep 2025 17:13:00 -0500 Subject: [PATCH 21/39] Implement builder-playground the way op-rbuilder does and use OpNode types in the simulator --- Cargo.lock | 2 + crates/simulator/Cargo.toml | 4 +- crates/simulator/src/config/mod.rs | 151 +++++++++++++------- crates/simulator/src/config/playground.rs | 164 +++++++++++----------- crates/simulator/src/engine.rs | 10 +- crates/simulator/src/lib.rs | 15 +- crates/simulator/src/main.rs | 52 ++++--- justfile | 10 +- 8 files changed, 229 insertions(+), 179 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4ec034f..bd8442d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12189,6 +12189,7 @@ dependencies = [ "reth", "reth-chainspec", "reth-cli", + "reth-cli-commands", "reth-evm", "reth-execution-types", 
"reth-exex", @@ -12200,6 +12201,7 @@ dependencies = [ "reth-optimism-chainspec", "reth-optimism-cli", "reth-optimism-evm", + "reth-optimism-node", "reth-optimism-primitives", "reth-primitives", "reth-provider", diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index d9a0723..6b7dbea 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -45,11 +45,13 @@ reth-node-builder.workspace = true reth-node-ethereum.workspace = true reth-revm.workspace = true reth-chainspec.workspace = true -# reth-node-optimism is not available in v1.7.0, using regular node with op components +# Optimism support for parsing chain specs reth-optimism-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-optimism-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0", features = ["serde", "serde-bincode-compat"] } reth-optimism-chainspec = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-optimism-cli = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-optimism-node = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-cli-commands = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } # Additional dependencies for simulation std-semaphore = "0.1" diff --git a/crates/simulator/src/config/mod.rs b/crates/simulator/src/config/mod.rs index 8e3885d..4e83410 100644 --- a/crates/simulator/src/config/mod.rs +++ b/crates/simulator/src/config/mod.rs @@ -1,13 +1,15 @@ pub mod playground; pub use playground::PlaygroundOptions; +pub type Cli = OpCli; -use crate::types::ExExSimulationConfig; use crate::listeners::MempoolListenerConfig; -use anyhow::{Result, anyhow}; -use clap::Parser; -use eyre; -use tracing::info; +use crate::types::ExExSimulationConfig; +use anyhow::{anyhow, Result}; +use clap::{CommandFactory, Parser}; +use playground::IsDefaultSource; +use reth_cli_commands::node::NoArgs; +use reth_optimism_cli::{chainspec::OpChainSpecParser, Cli as OpCli}; /// Combined configuration for reth node with simulator ExEx #[derive(Parser, Debug)] @@ -15,10 +17,14 @@ use tracing::info; pub struct SimulatorNodeConfig { /// Reth node arguments #[command(flatten)] - pub node: reth::cli::Cli, + pub node: Cli, /// Data directory for simulator - #[arg(long, env = "TIPS_SIMULATOR_DATADIR", default_value = "~/.tips-simulator-reth")] + #[arg( + long, + env = "TIPS_SIMULATOR_DATADIR", + default_value = "~/.tips-simulator-reth" + )] pub datadir: std::path::PathBuf, /// PostgreSQL database connection URL for simulator @@ -34,17 +40,33 @@ pub struct SimulatorNodeConfig { pub simulation_timeout_ms: u64, /// Kafka brokers for mempool events (comma-separated) - #[arg(long, env = "TIPS_SIMULATOR_KAFKA_BROKERS", default_value = "localhost:9092")] + #[arg( + long, + env = "TIPS_SIMULATOR_KAFKA_BROKERS", + default_value = "localhost:9092" + )] pub kafka_brokers: String, /// Kafka topic for mempool events - #[arg(long, env = "TIPS_SIMULATOR_KAFKA_TOPIC", default_value = "mempool-events")] + #[arg( + long, + env = "TIPS_SIMULATOR_KAFKA_TOPIC", + default_value = "mempool-events" + )] pub kafka_topic: String, /// Kafka consumer group ID - #[arg(long, env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", default_value = "tips-simulator")] + #[arg( + long, + env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", + default_value = "tips-simulator" + )] pub kafka_group_id: String, + /// Chain block time for simulator extensions + #[arg(long = "chain.block-time", default_value_t = 1000)] + pub chain_block_time: u64, + /// Path to builder 
playground to automatically start up the node connected to it #[arg( long = "builder.playground", @@ -69,7 +91,11 @@ impl From<&SimulatorNodeConfig> for ExExSimulationConfig { impl From<&SimulatorNodeConfig> for MempoolListenerConfig { fn from(config: &SimulatorNodeConfig) -> Self { Self { - kafka_brokers: config.kafka_brokers.split(',').map(|s| s.trim().to_string()).collect(), + kafka_brokers: config + .kafka_brokers + .split(',') + .map(|s| s.trim().to_string()) + .collect(), kafka_topic: config.kafka_topic.clone(), kafka_group_id: config.kafka_group_id.clone(), database_url: config.database_url.clone(), @@ -86,44 +112,71 @@ fn expand_path(s: &str) -> Result { } /// Parse CLI args with playground configuration if specified -pub fn parse_config_with_playground() -> eyre::Result { - // Debug: print raw args - eprintln!("Raw args: {:?}", std::env::args().collect::>()); - - // First, parse just to check if playground is specified - let initial_config = SimulatorNodeConfig::parse(); - - eprintln!("Parsed initial config, playground: {:?}", initial_config.playground); - - if let Some(ref playground_dir) = initial_config.playground { - eprintln!("Detected playground configuration, loading from: {}", playground_dir.display()); - - // Load playground options - let options = PlaygroundOptions::new(playground_dir) - .map_err(|e| eyre::eyre!("Failed to load playground options: {}", e))?; - - // Get original args - let mut args: Vec = std::env::args().collect(); - - // Get playground args - let playground_args = options.to_cli_args(); - eprintln!("Playground args to insert: {:?}", playground_args); - - // Find where to insert playground args (after "node" subcommand) - if let Some(node_pos) = args.iter().position(|arg| arg == "node") { - // Insert playground args right after "node" - // Insert in reverse order to maintain correct positions - for arg in playground_args.into_iter().rev() { - args.insert(node_pos + 1, arg); - } +pub trait CliExt { + /// Populates default reth node args when `--builder.playground` is provided. + fn populate_defaults(self) -> Self; + + /// Returns parsed config with defaults applied if applicable. 
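+    ///
+    /// A usage sketch mirroring the call site in `main.rs`:
+    /// `let config = SimulatorNodeConfig::parsed();`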
+ fn parsed() -> Self; +} + +impl CliExt for SimulatorNodeConfig { + fn populate_defaults(mut self) -> Self { + let Some(ref playground_dir) = self.playground else { + return self; + }; + + let options = PlaygroundOptions::new(playground_dir).unwrap_or_else(|e| exit(e)); + let matches = Self::command().get_matches(); + let matches = matches + .subcommand_matches("node") + .expect("validated that we are in the node command"); + + options.apply_to_cli(&mut self.node); + + if matches.value_source("chain_block_time").is_default() { + self.chain_block_time = options.chain_block_time().as_millis() as u64; } - - eprintln!("Final args with playground config: {:?}", args); - info!("Re-parsing with playground configuration arguments"); - - // Re-parse with playground args included - Ok(SimulatorNodeConfig::parse_from(args)) - } else { - Ok(initial_config) + + self + } + + fn parsed() -> Self { + SimulatorNodeConfig::parse().populate_defaults() + } +} + +impl SimulatorNodeConfig { + pub fn node_cli_mut(&mut self) -> &mut Cli { + &mut self.node + } + + pub fn into_cli(self) -> Cli { + self.node } + + pub fn chain_block_time(&self) -> u64 { + self.chain_block_time + } + + pub fn into_parts(self) -> (Cli, ExExSimulationConfig, MempoolListenerConfig, u64) { + let exex_config = (&self).into(); + let mempool_config = (&self).into(); + ( + self.node, + exex_config, + mempool_config, + self.chain_block_time, + ) + } + + pub fn has_playground(&self) -> bool { + self.playground.is_some() + } +} + +/// Following clap's convention, a failure to apply defaults exits non-zero. +fn exit(error: anyhow::Error) -> ! { + eprintln!("{error}"); + std::process::exit(-1); } diff --git a/crates/simulator/src/config/playground.rs b/crates/simulator/src/config/playground.rs index 779e9cb..acae198 100644 --- a/crates/simulator/src/config/playground.rs +++ b/crates/simulator/src/config/playground.rs @@ -18,20 +18,22 @@ //! This will automatically try to detect the playground configuration and apply //! it to the tips-simulator startup settings. +use super::Cli; use alloy_primitives::hex; -use anyhow::{Result, anyhow}; -use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_cli::chainspec::OpChainSpecParser; +use anyhow::{anyhow, Result}; +use clap::{parser::ValueSource, CommandFactory}; +use core::time::Duration; use reth_cli::chainspec::ChainSpecParser; -use reth_network::config::SecretKey; use reth_network_peers::TrustedPeer; +use reth_optimism_chainspec::OpChainSpec; +use reth_optimism_cli::{chainspec::OpChainSpecParser, commands::Commands}; +use secp256k1::SecretKey; use serde_json::Value; use std::{ fs::read_to_string, net::{IpAddr, Ipv4Addr, SocketAddr}, path::{Path, PathBuf}, sync::Arc, - time::Duration, }; use url::{Host, Url}; @@ -39,9 +41,6 @@ use url::{Host, Url}; pub struct PlaygroundOptions { /// Chain spec loaded from playground pub chain: Arc, - - /// Path to the genesis file - pub genesis_path: String, /// HTTP RPC port pub http_port: u16, @@ -75,36 +74,24 @@ impl PlaygroundOptions { )); } - let genesis_path = existing_path(path, "l2-genesis.json")?; - let chain = OpChainSpecParser::parse(&genesis_path) + let chain = OpChainSpecParser::parse(&existing_path(path, "l2-genesis.json")?) 
.map_err(|e| anyhow!("Failed to parse chain spec: {}", e))?; let authrpc_addr = Ipv4Addr::UNSPECIFIED.into(); let http_port = pick_preferred_port(2222, 3000..9999); - eprintln!("Selected HTTP port: {}", http_port); let authrpc_jwtsecret = existing_path(path, "jwtsecret")?.into(); let port = pick_preferred_port(30333, 30000..65535); - eprintln!("Selected P2P port: {}", port); let chain_block_time = extract_chain_block_time(path)?; let default_authrpc_port = extract_authrpc_port(path)?; let authrpc_port = pick_preferred_port(default_authrpc_port, 4000..9000); - eprintln!("Selected Auth RPC port: {}", authrpc_port); - let trusted_peer_port = extract_trusted_peer_port(path)?; - let trusted_peer_key = extract_deterministic_p2p_key(path)?; - - // Create a trusted peer from the extracted information let trusted_peer = TrustedPeer::from_secret_key( Host::Ipv4(Ipv4Addr::LOCALHOST), - trusted_peer_port, - &trusted_peer_key, + extract_trusted_peer_port(path)?, + &extract_deterministic_p2p_key(path)?, ); - - eprintln!("Trusted peer configured: {}", trusted_peer); - eprintln!("Chain block time: {:?}", chain_block_time); Ok(Self { chain, - genesis_path, http_port, authrpc_addr, authrpc_port, @@ -115,52 +102,74 @@ impl PlaygroundOptions { }) } - /// Get command line arguments that should be applied to reth node - pub fn to_cli_args(&self) -> Vec { - let mut args = vec![]; - - // Chain configuration - args.push("--chain".to_string()); - args.push(self.genesis_path.clone()); - - // HTTP RPC settings - args.push("--http".to_string()); - args.push("--http.port".to_string()); - args.push(self.http_port.to_string()); - args.push("--http.addr".to_string()); - args.push("127.0.0.1".to_string()); // Explicitly bind to localhost - - // Network settings - args.push("--port".to_string()); - args.push(self.port.to_string()); - args.push("--disable-discovery".to_string()); - - // Add trusted peer - args.push("--trusted-peers".to_string()); - args.push(self.trusted_peer.to_string()); - - // Auth RPC settings - args.push("--authrpc.addr".to_string()); - args.push(self.authrpc_addr.to_string()); - args.push("--authrpc.port".to_string()); - args.push(self.authrpc_port.to_string()); - args.push("--authrpc.jwtsecret".to_string()); - args.push(self.authrpc_jwtsecret.to_string_lossy().to_string()); - - args - } - - /// Get the chain spec for use in the node builder - pub fn chain(&self) -> Arc { - Arc::clone(&self.chain) - } - - /// Get the chain block time pub fn chain_block_time(&self) -> Duration { self.chain_block_time } + + /// Apply playground defaults to the simulator config, only where not user-provided. 
+ pub fn apply_to_cli(&self, cli: &mut Cli) { + let Commands::Node(node) = &mut cli.command else { + return; + }; + + if !node.network.trusted_peers.contains(&self.trusted_peer) { + node.network.trusted_peers.push(self.trusted_peer.clone()); + } + + let matches = Cli::command().get_matches(); + let matches = matches + .subcommand_matches("node") + .expect("validated that we are in the node command"); + + if matches.value_source("chain").is_default() { + node.chain = Arc::clone(&self.chain); + } + + if matches.value_source("http").is_default() { + node.rpc.http = true; + } + + if matches.value_source("http_port").is_default() { + node.rpc.http_port = self.http_port; + } + + if matches.value_source("port").is_default() { + node.network.port = self.port; + } + + if matches.value_source("auth_addr").is_default() { + node.rpc.auth_addr = self.authrpc_addr; + } + + if matches.value_source("auth_port").is_default() { + node.rpc.auth_port = self.authrpc_port; + } + + if matches.value_source("auth_jwtsecret").is_default() { + node.rpc.auth_jwtsecret = Some(self.authrpc_jwtsecret.clone()); + } + + if matches.value_source("disable_discovery").is_default() { + node.network.discovery.disable_discovery = true; + } + + if matches.value_source("trusted_peers").is_default() + && !node.network.trusted_peers.contains(&self.trusted_peer) + { + node.network.trusted_peers.push(self.trusted_peer.clone()); + } + } +} + +pub(crate) trait IsDefaultSource { + fn is_default(&self) -> bool; } +impl IsDefaultSource for Option { + fn is_default(&self) -> bool { + matches!(self, Some(ValueSource::DefaultValue)) || self.is_none() + } +} fn existing_path(base: &Path, relative: &str) -> Result { let path = base.join(relative); @@ -192,7 +201,6 @@ fn pick_random_port(range: std::ops::Range) -> u16 { fn pick_preferred_port(preferred: u16, fallback_range: std::ops::Range) -> u16 { if !is_port_free(preferred) { - eprintln!("Port {} is not free, picking random port from range {:?}", preferred, fallback_range); return pick_random_port(fallback_range); } @@ -200,18 +208,8 @@ fn pick_preferred_port(preferred: u16, fallback_range: std::ops::Range) -> } fn is_port_free(port: u16) -> bool { - // Check if we can bind to the port on both localhost and all interfaces - // Different services bind to different addresses - - // Check all interfaces (0.0.0.0) - used by P2P and Auth RPC - let socket_all = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), port); - let all_free = std::net::TcpListener::bind(socket_all).is_ok(); - - // Check localhost (127.0.0.1) - used by HTTP RPC - let socket_local = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); - let local_free = std::net::TcpListener::bind(socket_local).is_ok(); - - all_free && local_free + let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); + std::net::TcpListener::bind(socket).is_ok() } fn extract_chain_block_time(basepath: &Path) -> Result { @@ -226,13 +224,7 @@ fn extract_chain_block_time(basepath: &Path) -> Result { fn extract_deterministic_p2p_key(basepath: &Path) -> Result { let key = read_to_string(existing_path(basepath, "enode-key-1.txt")?)?; let key_bytes = hex::decode(key.trim()).map_err(|e| anyhow!("Invalid hex key: {e}"))?; - - // Create secp256k1 secret key first - let secp_key = secp256k1::SecretKey::from_slice(&key_bytes) - .map_err(|e| anyhow!("Invalid secret key: {e}"))?; - - // Convert to reth's SecretKey type - Ok(SecretKey::from(secp_key)) + SecretKey::from_slice(&key_bytes).map_err(|e| anyhow!("Invalid secret key: {e}")) } fn 
read_docker_compose(basepath: &Path) -> Result { @@ -270,7 +262,8 @@ fn extract_service_command_flag(basepath: &Path, service: &str, flag: &str) -> R fn extract_authrpc_port(basepath: &Path) -> Result { let builder_url = extract_service_command_flag(basepath, "rollup-boost", "--builder-url")?; let url = Url::parse(&builder_url).map_err(|e| anyhow!("Invalid builder-url: {e}"))?; - url.port().ok_or_else(|| anyhow!("missing builder-url port")) + url.port() + .ok_or_else(|| anyhow!("missing builder-url port")) } fn extract_trusted_peer_port(basepath: &Path) -> Result { @@ -335,3 +328,4 @@ fn extract_trusted_peer_port(basepath: &Path) -> Result { .parse::() .map_err(|e| anyhow!("Invalid external port mapping value for op-geth: {e}")) } + diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 268db65..edd706f 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -7,8 +7,8 @@ use async_trait::async_trait; use eyre::Result; use reth_evm::execute::BlockBuilder; use reth_evm::ConfigureEvm; -use reth_evm::NextBlockEnvAttributes; use reth_node_api::FullNodeComponents; +use reth_optimism_evm::OpNextBlockEnvAttributes; use reth_provider::{HeaderProvider, StateProvider, StateProviderFactory}; use reth_revm::{database::StateProviderDatabase, db::State}; use std::collections::HashMap; @@ -135,7 +135,7 @@ where impl SimulationEngine for RethSimulationEngine where Node: FullNodeComponents, - ::Evm: ConfigureEvm, + ::Evm: ConfigureEvm, { async fn simulate_bundle( &self, @@ -170,13 +170,13 @@ where .with_database(state_db) .with_bundle_update() .build(); - let attributes = NextBlockEnvAttributes { - timestamp: header.timestamp() + BLOCK_TIME, // Optimism 2-second block time + let attributes = OpNextBlockEnvAttributes { + timestamp: header.timestamp() + BLOCK_TIME, suggested_fee_recipient: header.beneficiary(), prev_randao: B256::random(), gas_limit: header.gas_limit(), parent_beacon_block_root: header.parent_beacon_block_root(), - withdrawals: None, + extra_data: header.extra_data().clone(), }; // Variables to track bundle execution diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 398901c..1e7f902 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -7,9 +7,10 @@ pub mod types; pub mod worker_pool; use eyre::Result; -use reth_evm::{ConfigureEvm, NextBlockEnvAttributes}; +use reth_evm::ConfigureEvm; use reth_exex::ExExContext; use reth_node_api::FullNodeComponents; +use reth_optimism_evm::OpNextBlockEnvAttributes; use std::sync::Arc; use tracing::{error, info}; @@ -39,7 +40,7 @@ pub type TipsMempoolEventListener = struct CommonListenerComponents where Node: FullNodeComponents, - ::Evm: ConfigureEvm, + ::Evm: ConfigureEvm, { datastore: Arc, simulator: BundleSimulator, TipsSimulationPublisher>, @@ -55,7 +56,7 @@ async fn init_common_components( ) -> Result> where Node: FullNodeComponents, - ::Evm: ConfigureEvm, + ::Evm: ConfigureEvm, { let datastore = Arc::new( tips_datastore::PostgresDatastore::connect(database_url) @@ -100,7 +101,7 @@ pub async fn init_exex_event_listener( ) -> Result> where Node: FullNodeComponents, - ::Evm: ConfigureEvm, + ::Evm: ConfigureEvm, { info!("Initializing ExEx event listener"); @@ -143,7 +144,7 @@ pub async fn init_mempool_event_listener( ) -> Result> where Node: FullNodeComponents, - ::Evm: ConfigureEvm, + ::Evm: ConfigureEvm, { info!("Initializing mempool event listener"); @@ -180,7 +181,7 @@ where pub struct ListenersWithWorkers where Node: FullNodeComponents, - ::Evm: 
ConfigureEvm, + ::Evm: ConfigureEvm, { worker_pool: Arc< SimulationWorkerPool, TipsSimulationPublisher, Node::Provider>, @@ -192,7 +193,7 @@ where impl ListenersWithWorkers where Node: FullNodeComponents, - ::Evm: ConfigureEvm, + ::Evm: ConfigureEvm, { /// Initialize both event listeners with a shared worker pool /// diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 848c355..95fadb2 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,51 +1,49 @@ -use tips_simulator::{ - ListenersWithWorkers, - MempoolListenerConfig, - config::parse_config_with_playground, -}; +use tips_simulator::{config::CliExt, config::SimulatorNodeConfig, ListenersWithWorkers}; use tracing::info; fn main() -> eyre::Result<()> { dotenvy::dotenv().ok(); - let config = parse_config_with_playground()?; - let exex_config: tips_simulator::types::ExExSimulationConfig = (&config).into(); - let mempool_config: MempoolListenerConfig = (&config).into(); - + let config = SimulatorNodeConfig::parsed(); + let playground_enabled = config.playground.is_some(); + let (cli, exex_config, mempool_config, chain_block_time) = config.into_parts(); + let max_concurrent_simulations = exex_config.max_concurrent_simulations; + info!( - database_url = %config.database_url, - max_concurrent = config.max_concurrent_simulations, - timeout_ms = config.simulation_timeout_ms, - kafka_brokers = %config.kafka_brokers, - kafka_topic = %config.kafka_topic, - playground = config.playground.is_some(), + database_url = %exex_config.database_url, + max_concurrent = exex_config.max_concurrent_simulations, + chain_block_time_ms = chain_block_time, + kafka_brokers = %mempool_config.kafka_brokers.join(","), + kafka_topic = %mempool_config.kafka_topic, + playground = playground_enabled, "Starting reth node with both ExEx and mempool event listeners" ); - config.node.run(|builder, _| async move { + cli.run(|builder, _| async move { let handle = builder - .node(reth_node_ethereum::EthereumNode::default()) + .node(reth_optimism_node::OpNode::default()) .install_exex("tips-simulator", move |ctx| async move { let listeners = ListenersWithWorkers::new( - ctx, - exex_config, + ctx, + exex_config, mempool_config, - config.max_concurrent_simulations, - config.simulation_timeout_ms - ).await + max_concurrent_simulations, + chain_block_time, + ) + .await .map_err(|e| eyre::eyre!("Failed to initialize listeners: {}", e))?; - + info!("Both ExEx and mempool event listeners initialized successfully"); - + Ok(listeners.run()) }) .launch() .await?; - + info!("Reth node with both listeners started successfully"); - + handle.wait_for_node_exit().await })?; - + Ok(()) } diff --git a/justfile b/justfile index f98ecbc..815ccc9 100644 --- a/justfile +++ b/justfile @@ -22,10 +22,10 @@ create-migration name: sync: deps-reset ### DATABASE ### cargo sqlx prepare -D postgresql://postgres:postgres@localhost:5432/postgres --workspace --all --no-dotenv - cd ui && npx drizzle-kit pull --dialect=postgresql --url=postgresql://postgres:postgres@localhost:5432/postgres - cd ui && mv ./drizzle/relations.ts ./src/db/ - cd ui && mv ./drizzle/schema.ts ./src/db/ - cd ui && rm -rf ./drizzle + #cd ui && npx drizzle-kit pull --dialect=postgresql --url=postgresql://postgres:postgres@localhost:5432/postgres + #cd ui && mv ./drizzle/relations.ts ./src/db/ + #cd ui && mv ./drizzle/schema.ts ./src/db/ + #cd ui && rm -rf ./drizzle ### ENV ### just sync-env ### REFORMAT ### @@ -93,7 +93,7 @@ simulator: cargo run --bin tips-simulator node 
simulator-playground: - cargo run --bin tips-simulator -- --builder.playground=$HOME/.playground/devnet/ node --datadir ~/.playground/devnet/tips-simulator + cargo run --bin tips-simulator node --builder.playground=$HOME/.playground/devnet/ --datadir ~/.playground/devnet/tips-simulator ui: cd ui && yarn dev From 66c23acedff42022079ff66ca4152d794f3ee71d Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Tue, 23 Sep 2025 18:50:07 -0500 Subject: [PATCH 22/39] Extend Cli with an Ext for Cli instead of flattening the reth args --- crates/simulator/src/config/mod.rs | 160 +++--------------- crates/simulator/src/config/playground.rs | 47 +++-- crates/simulator/src/config/simulator_node.rs | 105 ++++++++++++ crates/simulator/src/main.rs | 13 +- justfile | 2 +- 5 files changed, 168 insertions(+), 159 deletions(-) create mode 100644 crates/simulator/src/config/simulator_node.rs diff --git a/crates/simulator/src/config/mod.rs b/crates/simulator/src/config/mod.rs index 4e83410..9e67116 100644 --- a/crates/simulator/src/config/mod.rs +++ b/crates/simulator/src/config/mod.rs @@ -1,115 +1,15 @@ +mod simulator_node; pub mod playground; pub use playground::PlaygroundOptions; -pub type Cli = OpCli; +pub use simulator_node::SimulatorNodeConfig; use crate::listeners::MempoolListenerConfig; use crate::types::ExExSimulationConfig; -use anyhow::{anyhow, Result}; -use clap::{CommandFactory, Parser}; -use playground::IsDefaultSource; -use reth_cli_commands::node::NoArgs; -use reth_optimism_cli::{chainspec::OpChainSpecParser, Cli as OpCli}; +use clap::{CommandFactory, FromArgMatches}; +use reth_optimism_cli::{chainspec::OpChainSpecParser, commands::Commands, Cli as OpCli}; -/// Combined configuration for reth node with simulator ExEx -#[derive(Parser, Debug)] -#[command(author, version, about = "Reth node with Tips Simulator ExEx")] -pub struct SimulatorNodeConfig { - /// Reth node arguments - #[command(flatten)] - pub node: Cli, - - /// Data directory for simulator - #[arg( - long, - env = "TIPS_SIMULATOR_DATADIR", - default_value = "~/.tips-simulator-reth" - )] - pub datadir: std::path::PathBuf, - - /// PostgreSQL database connection URL for simulator - #[arg(long, env = "TIPS_SIMULATOR_DATABASE_URL")] - pub database_url: String, - - /// Maximum number of concurrent simulations - #[arg(long, env = "TIPS_SIMULATOR_MAX_CONCURRENT", default_value = "10")] - pub max_concurrent_simulations: usize, - - /// Timeout for individual simulations in milliseconds - #[arg(long, env = "TIPS_SIMULATOR_TIMEOUT_MS", default_value = "5000")] - pub simulation_timeout_ms: u64, - - /// Kafka brokers for mempool events (comma-separated) - #[arg( - long, - env = "TIPS_SIMULATOR_KAFKA_BROKERS", - default_value = "localhost:9092" - )] - pub kafka_brokers: String, - - /// Kafka topic for mempool events - #[arg( - long, - env = "TIPS_SIMULATOR_KAFKA_TOPIC", - default_value = "mempool-events" - )] - pub kafka_topic: String, - - /// Kafka consumer group ID - #[arg( - long, - env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", - default_value = "tips-simulator" - )] - pub kafka_group_id: String, - - /// Chain block time for simulator extensions - #[arg(long = "chain.block-time", default_value_t = 1000)] - pub chain_block_time: u64, - - /// Path to builder playground to automatically start up the node connected to it - #[arg( - long = "builder.playground", - num_args = 0..=1, - default_missing_value = "$HOME/.playground/devnet/", - value_parser = expand_path, - env = "TIPS_SIMULATOR_PLAYGROUND_DIR", - )] - pub playground: Option, -} - -impl 
From<&SimulatorNodeConfig> for ExExSimulationConfig { - fn from(config: &SimulatorNodeConfig) -> Self { - Self { - database_url: config.database_url.clone(), - max_concurrent_simulations: config.max_concurrent_simulations, - simulation_timeout_ms: config.simulation_timeout_ms, - } - } -} - -impl From<&SimulatorNodeConfig> for MempoolListenerConfig { - fn from(config: &SimulatorNodeConfig) -> Self { - Self { - kafka_brokers: config - .kafka_brokers - .split(',') - .map(|s| s.trim().to_string()) - .collect(), - kafka_topic: config.kafka_topic.clone(), - kafka_group_id: config.kafka_group_id.clone(), - database_url: config.database_url.clone(), - } - } -} - -fn expand_path(s: &str) -> Result { - shellexpand::full(s) - .map_err(|e| anyhow!("expansion error for `{s}`: {e}"))? - .into_owned() - .parse() - .map_err(|e| anyhow!("invalid path after expansion: {e}")) -} +pub type Cli = OpCli; /// Parse CLI args with playground configuration if specified pub trait CliExt { @@ -120,59 +20,43 @@ pub trait CliExt { fn parsed() -> Self; } -impl CliExt for SimulatorNodeConfig { - fn populate_defaults(mut self) -> Self { - let Some(ref playground_dir) = self.playground else { +impl CliExt for Cli { + fn populate_defaults(self) -> Self { + let Commands::Node(ref node_command) = self.command else { return self; }; - let options = PlaygroundOptions::new(playground_dir).unwrap_or_else(|e| exit(e)); - let matches = Self::command().get_matches(); - let matches = matches - .subcommand_matches("node") - .expect("validated that we are in the node command"); - - options.apply_to_cli(&mut self.node); + let Some(ref playground_dir) = node_command.ext.playground else { + return self; + }; - if matches.value_source("chain_block_time").is_default() { - self.chain_block_time = options.chain_block_time().as_millis() as u64; - } + let options = PlaygroundOptions::new(playground_dir).unwrap_or_else(|e| exit(e)); - self + options.apply(self) } fn parsed() -> Self { - SimulatorNodeConfig::parse().populate_defaults() + let matches = Cli::command().get_matches(); + Cli::from_arg_matches(&matches) + .expect("Parsing args") + .populate_defaults() } } impl SimulatorNodeConfig { - pub fn node_cli_mut(&mut self) -> &mut Cli { - &mut self.node - } - - pub fn into_cli(self) -> Cli { - self.node - } - - pub fn chain_block_time(&self) -> u64 { - self.chain_block_time - } - - pub fn into_parts(self) -> (Cli, ExExSimulationConfig, MempoolListenerConfig, u64) { + pub fn into_parts( + self, + cli: Cli, + ) -> (Cli, ExExSimulationConfig, MempoolListenerConfig, u64) { let exex_config = (&self).into(); let mempool_config = (&self).into(); ( - self.node, + cli, exex_config, mempool_config, self.chain_block_time, ) } - - pub fn has_playground(&self) -> bool { - self.playground.is_some() - } } /// Following clap's convention, a failure to apply defaults exits non-zero. diff --git a/crates/simulator/src/config/playground.rs b/crates/simulator/src/config/playground.rs index acae198..e8ae9bb 100644 --- a/crates/simulator/src/config/playground.rs +++ b/crates/simulator/src/config/playground.rs @@ -18,11 +18,13 @@ //! This will automatically try to detect the playground configuration and apply //! it to the tips-simulator startup settings. 
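+//!
+//! Detection is file-based: `l2-genesis.json`, `jwtsecret`, `enode-key-1.txt`,
+//! and the playground's docker-compose file are read to derive the chain spec,
+//! JWT secret, trusted peer, and ports (a summary of the helpers below).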
-use super::Cli; use alloy_primitives::hex; use anyhow::{anyhow, Result}; -use clap::{parser::ValueSource, CommandFactory}; -use core::time::Duration; +use clap::{CommandFactory, parser::ValueSource}; +use core::{ + net::{IpAddr, Ipv4Addr, SocketAddr}, + time::Duration, +}; use reth_cli::chainspec::ChainSpecParser; use reth_network_peers::TrustedPeer; use reth_optimism_chainspec::OpChainSpec; @@ -31,12 +33,13 @@ use secp256k1::SecretKey; use serde_json::Value; use std::{ fs::read_to_string, - net::{IpAddr, Ipv4Addr, SocketAddr}, path::{Path, PathBuf}, sync::Arc, }; use url::{Host, Url}; +use super::Cli; + #[derive(Clone, Debug)] pub struct PlaygroundOptions { /// Chain spec loaded from playground @@ -107,13 +110,14 @@ impl PlaygroundOptions { } /// Apply playground defaults to the simulator config, only where not user-provided. - pub fn apply_to_cli(&self, cli: &mut Cli) { - let Commands::Node(node) = &mut cli.command else { - return; + pub fn apply(self, cli: Cli) -> Cli { + let mut cli = cli; + let Commands::Node(ref mut node) = cli.command else { + return cli; }; if !node.network.trusted_peers.contains(&self.trusted_peer) { - node.network.trusted_peers.push(self.trusted_peer.clone()); + node.network.trusted_peers.push(self.trusted_peer); } let matches = Cli::command().get_matches(); @@ -122,7 +126,7 @@ impl PlaygroundOptions { .expect("validated that we are in the node command"); if matches.value_source("chain").is_default() { - node.chain = Arc::clone(&self.chain); + node.chain = self.chain; } if matches.value_source("http").is_default() { @@ -146,18 +150,18 @@ impl PlaygroundOptions { } if matches.value_source("auth_jwtsecret").is_default() { - node.rpc.auth_jwtsecret = Some(self.authrpc_jwtsecret.clone()); + node.rpc.auth_jwtsecret = Some(self.authrpc_jwtsecret); } if matches.value_source("disable_discovery").is_default() { node.network.discovery.disable_discovery = true; } - if matches.value_source("trusted_peers").is_default() - && !node.network.trusted_peers.contains(&self.trusted_peer) - { - node.network.trusted_peers.push(self.trusted_peer.clone()); + if matches.value_source("chain_block_time").is_default() { + node.ext.chain_block_time = self.chain_block_time.as_millis() as u64; } + + cli } } @@ -208,8 +212,19 @@ fn pick_preferred_port(preferred: u16, fallback_range: std::ops::Range) -> } fn is_port_free(port: u16) -> bool { - let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port); - std::net::TcpListener::bind(socket).is_ok() + let addrs = [ + SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), port), + SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), port), + ]; + + for addr in addrs { + match std::net::TcpListener::bind(addr) { + Ok(listener) => drop(listener), + Err(_) => return false, + } + } + + true } fn extract_chain_block_time(basepath: &Path) -> Result { diff --git a/crates/simulator/src/config/simulator_node.rs b/crates/simulator/src/config/simulator_node.rs new file mode 100644 index 0000000..24088b4 --- /dev/null +++ b/crates/simulator/src/config/simulator_node.rs @@ -0,0 +1,105 @@ +use crate::{ + listeners::MempoolListenerConfig, + types::ExExSimulationConfig, +}; +use anyhow::{anyhow, Result}; +use clap::Args; +use std::path::PathBuf; + +#[derive(Debug, Clone, Args)] +#[command(next_help_heading = "Simulator")] +pub struct SimulatorNodeConfig { + /// PostgreSQL database connection URL for simulator + #[arg(long, env = "TIPS_SIMULATOR_DATABASE_URL")] + pub database_url: String, + + /// Maximum number of concurrent simulations + #[arg(long, env = 
"TIPS_SIMULATOR_MAX_CONCURRENT", default_value = "10")] + pub max_concurrent_simulations: usize, + + /// Timeout for individual simulations in milliseconds + #[arg(long, env = "TIPS_SIMULATOR_TIMEOUT_MS", default_value = "5000")] + pub simulation_timeout_ms: u64, + + /// Kafka brokers for mempool events (comma-separated) + #[arg( + long, + env = "TIPS_SIMULATOR_KAFKA_BROKERS", + default_value = "localhost:9092" + )] + pub kafka_brokers: String, + + /// Kafka topic for mempool events + #[arg( + long, + env = "TIPS_SIMULATOR_KAFKA_TOPIC", + default_value = "mempool-events" + )] + pub kafka_topic: String, + + /// Kafka consumer group ID + #[arg( + long, + env = "TIPS_SIMULATOR_KAFKA_GROUP_ID", + default_value = "tips-simulator" + )] + pub kafka_group_id: String, + + /// Chain block time for simulator extensions + #[arg(long = "chain.block-time", default_value_t = 1000)] + pub chain_block_time: u64, + + /// Path to builder playground to automatically start up the node connected to it + #[arg( + long = "builder.playground", + num_args = 0..=1, + default_missing_value = "$HOME/.playground/devnet/", + value_parser = expand_path, + env = "TIPS_SIMULATOR_PLAYGROUND_DIR", + )] + pub playground: Option, +} + +impl From<&SimulatorNodeConfig> for ExExSimulationConfig { + fn from(config: &SimulatorNodeConfig) -> Self { + Self { + database_url: config.database_url.clone(), + max_concurrent_simulations: config.max_concurrent_simulations, + simulation_timeout_ms: config.simulation_timeout_ms, + } + } +} + +impl From<&SimulatorNodeConfig> for MempoolListenerConfig { + fn from(config: &SimulatorNodeConfig) -> Self { + Self { + kafka_brokers: config + .kafka_brokers + .split(',') + .map(|s| s.trim().to_string()) + .collect(), + kafka_topic: config.kafka_topic.clone(), + kafka_group_id: config.kafka_group_id.clone(), + database_url: config.database_url.clone(), + } + } +} + +impl SimulatorNodeConfig { + pub fn chain_block_time(&self) -> u64 { + self.chain_block_time + } + + pub fn has_playground(&self) -> bool { + self.playground.is_some() + } +} + +fn expand_path(s: &str) -> Result { + shellexpand::full(s) + .map_err(|e| anyhow!("expansion error for `{s}`: {e}"))? 
+ .into_owned() + .parse() + .map_err(|e| anyhow!("invalid path after expansion: {e}")) +} + diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 95fadb2..7e2b679 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,12 +1,17 @@ -use tips_simulator::{config::CliExt, config::SimulatorNodeConfig, ListenersWithWorkers}; +use reth_optimism_cli::commands::Commands; +use tips_simulator::{config::Cli, config::CliExt, ListenersWithWorkers}; use tracing::info; fn main() -> eyre::Result<()> { dotenvy::dotenv().ok(); - let config = SimulatorNodeConfig::parsed(); - let playground_enabled = config.playground.is_some(); - let (cli, exex_config, mempool_config, chain_block_time) = config.into_parts(); + let cli = ::parsed(); + let config = match &cli.command { + Commands::Node(node) => node.ext.clone(), + _ => eyre::bail!("tips-simulator must be run with the node command"), + }; + let playground_enabled = config.has_playground(); + let (cli, exex_config, mempool_config, chain_block_time) = config.into_parts(cli); let max_concurrent_simulations = exex_config.max_concurrent_simulations; info!( diff --git a/justfile b/justfile index 815ccc9..6f98ae7 100644 --- a/justfile +++ b/justfile @@ -93,7 +93,7 @@ simulator: cargo run --bin tips-simulator node simulator-playground: - cargo run --bin tips-simulator node --builder.playground=$HOME/.playground/devnet/ --datadir ~/.playground/devnet/tips-simulator + cargo run --bin tips-simulator node --builder.playground --datadir ~/.playground/devnet/tips-simulator ui: cd ui && yarn dev From 71d841e71b7868dc6b73293f55dca7049db57bef Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Tue, 23 Sep 2025 20:54:02 -0500 Subject: [PATCH 23/39] Disable txpool gossip --- crates/simulator/src/main.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index 7e2b679..d962f92 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -1,4 +1,5 @@ use reth_optimism_cli::commands::Commands; +use reth_optimism_node::args::RollupArgs; use tips_simulator::{config::Cli, config::CliExt, ListenersWithWorkers}; use tracing::info; @@ -25,8 +26,12 @@ fn main() -> eyre::Result<()> { ); cli.run(|builder, _| async move { + // Keep the Base mempool private. 
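+        // `disable_txpool_gossip` stops this node from gossiping pooled
+        // transactions to its peers, so bundles observed here are not
+        // leaked back out over p2p.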
+ let mut rollup_args = RollupArgs::default(); + rollup_args.disable_txpool_gossip = true; + let handle = builder - .node(reth_optimism_node::OpNode::default()) + .node(reth_optimism_node::OpNode::new(rollup_args)) .install_exex("tips-simulator", move |ctx| async move { let listeners = ListenersWithWorkers::new( ctx, From 6856e5dd4885d36e0b5dc94fcea8f63df53a1959 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Thu, 25 Sep 2025 13:11:23 -0500 Subject: [PATCH 24/39] Fix the default kafka topic --- crates/simulator/src/config/simulator_node.rs | 2 +- crates/simulator/tests/integration_test.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/simulator/src/config/simulator_node.rs b/crates/simulator/src/config/simulator_node.rs index 24088b4..45f6a7d 100644 --- a/crates/simulator/src/config/simulator_node.rs +++ b/crates/simulator/src/config/simulator_node.rs @@ -33,7 +33,7 @@ pub struct SimulatorNodeConfig { #[arg( long, env = "TIPS_SIMULATOR_KAFKA_TOPIC", - default_value = "mempool-events" + default_value = "tips-audit" )] pub kafka_topic: String, diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index 0b1c519..f0c5bb2 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -43,13 +43,13 @@ fn test_simulation_request_creation() { fn test_mempool_simulator_config() { let config = MempoolListenerConfig { kafka_brokers: vec!["localhost:9092".to_string()], - kafka_topic: "mempool-events".to_string(), + kafka_topic: "tips-audit".to_string(), kafka_group_id: "tips-simulator".to_string(), database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), }; assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); - assert_eq!(config.kafka_topic, "mempool-events"); + assert_eq!(config.kafka_topic, "tips-audit"); assert_eq!(config.kafka_group_id, "tips-simulator"); assert_eq!( config.database_url, From 941e59bd35771d83191adb5bca0831273783c29c Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Thu, 25 Sep 2025 15:58:24 -0500 Subject: [PATCH 25/39] Add a simulator container to compose --- .env.example | 4 ++++ Cargo.toml | 12 ----------- crates/simulator/Cargo.toml | 25 +++++++++++----------- crates/simulator/Dockerfile | 42 +++++++++++++++++++++++++++++++++++++ docker-compose.tips.yml | 14 ++++++++++++- justfile | 2 +- 6 files changed, 72 insertions(+), 27 deletions(-) create mode 100644 crates/simulator/Dockerfile diff --git a/.env.example b/.env.example index 68862e4..3716648 100644 --- a/.env.example +++ b/.env.example @@ -29,7 +29,11 @@ TIPS_MAINTENANCE_POLL_INTERVAL_MS=250 TIPS_MAINTENANCE_LOG_LEVEL=debug # Simulator +TIPS_SIMULATOR_DATADIR=~/.playground/devnet/tips-simulator +TIPS_SIMULATOR_BUILDER_PLAYGROUND_DIR=~/.playground/devnet TIPS_SIMULATOR_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres +TIPS_SIMULATOR_KAFKA_BROKERS=localhost:9092 +TIPS_SIMULATOR_KAFKA_TOPIC=tips-audit # TIPS UI TIPS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/postgres diff --git a/Cargo.toml b/Cargo.toml index c46a855..f70c690 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,19 +11,7 @@ tips-simulator = { path = "crates/simulator" } # Reth -reth = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-rpc-eth-types = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-exex = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-provider = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" 
} -reth-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-execution-types = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-node-api = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-node-builder = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-node-optimism = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-revm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } -reth-chainspec = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } # alloy alloy-primitives = { version = "1.3.1", default-features = false, features = [ diff --git a/crates/simulator/Cargo.toml b/crates/simulator/Cargo.toml index 6b7dbea..ef7db98 100644 --- a/crates/simulator/Cargo.toml +++ b/crates/simulator/Cargo.toml @@ -33,19 +33,18 @@ alloy-rpc-types-mev.workspace = true alloy-eips.workspace = true op-alloy-consensus.workspace = true -# Reth ExEx dependencies -reth.workspace = true -reth-exex.workspace = true -reth-provider.workspace = true -reth-primitives.workspace = true -reth-execution-types.workspace = true -reth-node-api.workspace = true -reth-evm.workspace = true -reth-node-builder.workspace = true -reth-node-ethereum.workspace = true -reth-revm.workspace = true -reth-chainspec.workspace = true -# Optimism support for parsing chain specs +# Reth +reth = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-exex = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-provider = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-execution-types = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-api = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-builder = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-revm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } +reth-chainspec = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-optimism-evm = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } reth-optimism-primitives = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0", features = ["serde", "serde-bincode-compat"] } reth-optimism-chainspec = { git = "https://github.com/paradigmxyz/reth", tag = "v1.7.0" } diff --git a/crates/simulator/Dockerfile b/crates/simulator/Dockerfile new file mode 100644 index 0000000..58e82be --- /dev/null +++ b/crates/simulator/Dockerfile @@ -0,0 +1,42 @@ +FROM rust:1-bookworm AS base + +# Add dependencies for building reth-mdbx-sys +RUN apt-get update && apt-get install -y \ + clang \ + libclang-dev \ + llvm-dev \ + pkg-config && \ + rm -rf /var/lib/apt/lists/* + +RUN cargo install cargo-chef --locked +WORKDIR /app + +FROM base AS planner +COPY . . 
+RUN cargo chef prepare --recipe-path recipe.json + +FROM base AS builder +COPY --from=planner /app/recipe.json recipe.json + +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/app/target \ + cargo chef cook --recipe-path recipe.json + +COPY . . +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/usr/local/cargo/git \ + --mount=type=cache,target=/app/target \ + cargo build --bin tips-simulator && \ + cp target/debug/tips-simulator /tmp/tips-simulator + +FROM debian:bookworm + +RUN apt-get update && apt-get install -y libssl3 ca-certificates && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +COPY --from=builder /tmp/tips-simulator /app/tips-simulator + +CMD ["/app/tips-simulator", "node", "--datadir", "/data", "--builder.playground", "/playground"] + diff --git a/docker-compose.tips.yml b/docker-compose.tips.yml index cff7d7c..030bf33 100644 --- a/docker-compose.tips.yml +++ b/docker-compose.tips.yml @@ -46,4 +46,16 @@ services: container_name: tips-ingress-writer env_file: - .env.docker - restart: unless-stopped \ No newline at end of file + restart: unless-stopped + + simulator: + build: + context: . + dockerfile: crates/simulator/Dockerfile + container_name: tips-simulator + volumes: + - ${TIPS_SIMULATOR_DATADIR}:/data + - ${TIPS_SIMULATOR_BUILDER_PLAYGROUND_DIR}:/playground + env_file: + - .env.docker + restart: unless-stopped diff --git a/justfile b/justfile index 6f98ae7..3d3c34c 100644 --- a/justfile +++ b/justfile @@ -50,7 +50,7 @@ start-all: stop-all # Start every service in docker, except the one you're currently working on. e.g. just start-except ui ingress-rpc start-except programs: stop-all #!/bin/bash - all_services=(postgres kafka kafka-setup minio minio-setup ingress-rpc ingres-writer audit maintenance ui) + all_services=(postgres kafka kafka-setup minio minio-setup ingress-rpc ingres-writer audit maintenance ui simulator) exclude_services=({{ programs }}) # Create result array with services not in exclude list From 5dd862306624ff565911a722f7558765176faf8c Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Thu, 25 Sep 2025 16:54:56 -0500 Subject: [PATCH 26/39] Connect to the trusted peer from inside a docker container --- crates/simulator/src/config/playground.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/simulator/src/config/playground.rs b/crates/simulator/src/config/playground.rs index e8ae9bb..9b6ba87 100644 --- a/crates/simulator/src/config/playground.rs +++ b/crates/simulator/src/config/playground.rs @@ -88,7 +88,7 @@ impl PlaygroundOptions { let default_authrpc_port = extract_authrpc_port(path)?; let authrpc_port = pick_preferred_port(default_authrpc_port, 4000..9000); let trusted_peer = TrustedPeer::from_secret_key( - Host::Ipv4(Ipv4Addr::LOCALHOST), + resolve_trusted_peer_host(), extract_trusted_peer_port(path)?, &extract_deterministic_p2p_key(path)?, ); @@ -344,3 +344,11 @@ fn extract_trusted_peer_port(basepath: &Path) -> Result { .map_err(|e| anyhow!("Invalid external port mapping value for op-geth: {e}")) } +fn resolve_trusted_peer_host() -> Host { + if std::fs::metadata("/.dockerenv").is_ok() { + Host::Domain("host.docker.internal".into()) + } else { + Host::Ipv4(Ipv4Addr::LOCALHOST) + } +} + From 5b24f8073124983a304ebfee1d5e8aba4dcbb689 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Thu, 25 Sep 2025 18:11:55 -0500 Subject: [PATCH 27/39] Let builder-playground reach the simulator as a builder --- 
crates/simulator/Dockerfile | 2 +- docker-compose.tips.yml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/simulator/Dockerfile b/crates/simulator/Dockerfile index 58e82be..1c0d295 100644 --- a/crates/simulator/Dockerfile +++ b/crates/simulator/Dockerfile @@ -38,5 +38,5 @@ WORKDIR /app COPY --from=builder /tmp/tips-simulator /app/tips-simulator -CMD ["/app/tips-simulator", "node", "--datadir", "/data", "--builder.playground", "/playground"] +CMD ["/app/tips-simulator", "node", "--datadir", "/data", "--http.addr", "0.0.0.0", "--builder.playground", "/playground"] diff --git a/docker-compose.tips.yml b/docker-compose.tips.yml index 030bf33..d77a2b4 100644 --- a/docker-compose.tips.yml +++ b/docker-compose.tips.yml @@ -53,6 +53,9 @@ services: context: . dockerfile: crates/simulator/Dockerfile container_name: tips-simulator + ports: + - "2222:2222" + - "4444:4444" volumes: - ${TIPS_SIMULATOR_DATADIR}:/data - ${TIPS_SIMULATOR_BUILDER_PLAYGROUND_DIR}:/playground From 9038a1ebf31e6ef3e6e99cccca64b0ab3160f206 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Thu, 25 Sep 2025 23:16:14 -0500 Subject: [PATCH 28/39] Test the simulator --- crates/simulator/src/core.rs | 51 +-- crates/simulator/src/engine.rs | 15 +- crates/simulator/src/lib.rs | 20 +- crates/simulator/src/listeners/exex.rs | 23 +- crates/simulator/src/listeners/mempool.rs | 20 +- crates/simulator/src/worker_pool.rs | 25 +- crates/simulator/tests/common/builders.rs | 342 ++++++++++++++++++ crates/simulator/tests/common/fixtures.rs | 193 ++++++++++ .../tests/common/mock_bundle_simulator.rs | 85 +++++ crates/simulator/tests/common/mocks.rs | 175 +++++++++ crates/simulator/tests/common/mod.rs | 167 +++++++++ crates/simulator/tests/integration_test.rs | 313 +++++++++++++--- crates/simulator/tests/unit/core_test.rs | 255 +++++++++++++ .../tests/unit/error_handling_test.rs | 277 ++++++++++++++ crates/simulator/tests/unit/mod.rs | 4 + crates/simulator/tests/unit/types_test.rs | 263 ++++++++++++++ 16 files changed, 2091 insertions(+), 137 deletions(-) create mode 100644 crates/simulator/tests/common/builders.rs create mode 100644 crates/simulator/tests/common/fixtures.rs create mode 100644 crates/simulator/tests/common/mock_bundle_simulator.rs create mode 100644 crates/simulator/tests/common/mocks.rs create mode 100644 crates/simulator/tests/common/mod.rs create mode 100644 crates/simulator/tests/unit/core_test.rs create mode 100644 crates/simulator/tests/unit/error_handling_test.rs create mode 100644 crates/simulator/tests/unit/mod.rs create mode 100644 crates/simulator/tests/unit/types_test.rs diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs index ebaf1e3..5f4e3ce 100644 --- a/crates/simulator/src/core.rs +++ b/crates/simulator/src/core.rs @@ -1,13 +1,20 @@ use crate::engine::SimulationEngine; use crate::publisher::SimulationPublisher; use crate::types::SimulationRequest; +use async_trait::async_trait; use eyre::Result; -use reth_provider::StateProviderFactory; use tracing::{error, info}; -/// Core bundle simulator that provides shared simulation logic -/// Used by both mempool event simulators and ExEx event simulators -pub struct BundleSimulator +/// Clean trait for bundle simulation without exposing Reth's complex types +#[async_trait] +pub trait BundleSimulator: Send + Sync { + /// Simulate a bundle execution + async fn simulate(&self, request: &SimulationRequest) -> Result<()>; +} + +/// Production bundle simulator for Reth +/// This is the Reth-specific implementation +pub struct 
RethBundleSimulator where E: SimulationEngine, P: SimulationPublisher, @@ -16,34 +23,28 @@ where publisher: P, } -impl BundleSimulator +impl RethBundleSimulator where E: SimulationEngine, P: SimulationPublisher, { pub fn new(engine: E, publisher: P) -> Self { - Self { engine, publisher } + Self { + engine, + publisher, + } } +} - /// Process a simulation request by creating state provider from factory - /// Convenience method that handles state provider creation - pub async fn simulate( - &self, - request: &SimulationRequest, - state_provider_factory: &F, - ) -> Result<()> - where - F: StateProviderFactory, - { - // Get state provider for the block - // FIXME: We probably want to get the state provider once per block rather than once per - // bundle for each block. - let state_provider = state_provider_factory - .state_by_block_hash(request.block_hash) - .map_err(|e| eyre::eyre!("Failed to get state provider: {}", e))?; - - // Run the simulation - match self.engine.simulate_bundle(request, &state_provider).await { +#[async_trait] +impl BundleSimulator for RethBundleSimulator +where + E: SimulationEngine + 'static, + P: SimulationPublisher + 'static, +{ + async fn simulate(&self, request: &SimulationRequest) -> Result<()> { + // Run the simulation - engine will get its own state provider + match self.engine.simulate_bundle(request).await { Ok(result) => { info!( bundle_id = %request.bundle_id, diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index edd706f..41e49b0 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -101,13 +101,10 @@ where #[async_trait] pub trait SimulationEngine: Send + Sync { /// Simulate a bundle execution - async fn simulate_bundle( + async fn simulate_bundle( &self, request: &SimulationRequest, - state_provider: &S, - ) -> Result - where - S: StateProvider + Send + Sync; + ) -> Result; } #[derive(Clone)] @@ -137,14 +134,10 @@ where Node: FullNodeComponents, ::Evm: ConfigureEvm, { - async fn simulate_bundle( + async fn simulate_bundle( &self, request: &SimulationRequest, - _state_provider: &S, - ) -> Result - where - S: StateProvider + Send + Sync, - { + ) -> Result { let start_time = Instant::now(); let simulation_id = Uuid::new_v4(); diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 1e7f902..b9599f4 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -15,7 +15,7 @@ use std::sync::Arc; use tracing::{error, info}; pub use config::SimulatorNodeConfig; -pub use core::BundleSimulator; +pub use core::{BundleSimulator, RethBundleSimulator}; pub use engine::{RethSimulationEngine, SimulationEngine}; pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig}; pub use publisher::{SimulationPublisher, TipsSimulationPublisher}; @@ -24,15 +24,14 @@ pub use worker_pool::SimulationWorkerPool; // Type aliases for concrete implementations pub type TipsBundleSimulator = - BundleSimulator, TipsSimulationPublisher>; + RethBundleSimulator, TipsSimulationPublisher>; pub type TipsExExEventListener = ExExEventListener< Node, - RethSimulationEngine, - TipsSimulationPublisher, + TipsBundleSimulator, tips_datastore::PostgresDatastore, >; pub type TipsMempoolEventListener = - MempoolEventListener, TipsSimulationPublisher>; + MempoolEventListener>; // Initialization functions @@ -43,7 +42,7 @@ where ::Evm: ConfigureEvm, { datastore: Arc, - simulator: BundleSimulator, TipsSimulationPublisher>, + simulator: RethBundleSimulator, TipsSimulationPublisher>, } /// 
Initialize common listener components (database, publisher, engine, core simulator) @@ -81,7 +80,7 @@ where let engine = RethSimulationEngine::new(Arc::clone(&provider), evm_config); info!("Simulation engine initialized"); - let simulator = BundleSimulator::new(engine, publisher); + let simulator = RethBundleSimulator::new(engine, publisher); info!("Core bundle simulator initialized"); Ok(CommonListenerComponents { @@ -119,7 +118,6 @@ where let worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), - Arc::clone(&provider), config.max_concurrent_simulations, ); @@ -160,7 +158,6 @@ where let worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), - Arc::clone(&provider), max_concurrent_simulations, ); @@ -183,9 +180,7 @@ where Node: FullNodeComponents, ::Evm: ConfigureEvm, { - worker_pool: Arc< - SimulationWorkerPool, TipsSimulationPublisher, Node::Provider>, - >, + worker_pool: Arc>>, exex_listener: TipsExExEventListener, mempool_listener: TipsMempoolEventListener, } @@ -222,7 +217,6 @@ where let shared_worker_pool = SimulationWorkerPool::new( Arc::new(common_components.simulator), - Arc::clone(&provider), max_concurrent_simulations, ); diff --git a/crates/simulator/src/listeners/exex.rs b/crates/simulator/src/listeners/exex.rs index b2fd95b..2b6f92e 100644 --- a/crates/simulator/src/listeners/exex.rs +++ b/crates/simulator/src/listeners/exex.rs @@ -1,5 +1,4 @@ -use crate::engine::SimulationEngine; -use crate::publisher::SimulationPublisher; +use crate::core::BundleSimulator; use crate::types::SimulationRequest; use crate::worker_pool::{SimulationTask, SimulationWorkerPool}; @@ -62,11 +61,10 @@ where /// ExEx event listener that processes chain events and queues bundle simulations /// Processes chain events (commits, reorgs, reverts) and queues simulation tasks -pub struct ExExEventListener +pub struct ExExEventListener where Node: FullNodeComponents, - E: SimulationEngine + Clone + 'static, - P: SimulationPublisher + Clone + 'static, + B: BundleSimulator + 'static, D: tips_datastore::BundleDatastore, { /// The execution extension context @@ -74,21 +72,20 @@ where /// Datastore for fetching bundles from mempool datastore: Arc, /// Shared simulation worker pool - worker_pool: Arc>, + worker_pool: Arc>, } -impl ExExEventListener +impl ExExEventListener where Node: FullNodeComponents, - E: SimulationEngine + Clone + 'static, - P: SimulationPublisher + Clone + 'static, + B: BundleSimulator + 'static, D: tips_datastore::BundleDatastore + 'static, { /// Create a new ExEx event listener pub fn new( ctx: ExExContext, datastore: Arc, - worker_pool: Arc>, + worker_pool: Arc>, ) -> Self { Self { ctx, @@ -190,12 +187,12 @@ where } /// Process a single block for potential bundle simulations - async fn process_block( + async fn process_block( &mut self, - block: (&B256, &reth_primitives::RecoveredBlock), + block: (&B256, &reth_primitives::RecoveredBlock), ) -> Result<()> where - B: reth_node_api::Block, + Block: reth_node_api::Block, { let (block_hash, sealed_block) = block; let block_number = sealed_block.number(); diff --git a/crates/simulator/src/listeners/mempool.rs b/crates/simulator/src/listeners/mempool.rs index b6ed70c..0cb4863 100644 --- a/crates/simulator/src/listeners/mempool.rs +++ b/crates/simulator/src/listeners/mempool.rs @@ -1,5 +1,4 @@ -use crate::engine::SimulationEngine; -use crate::publisher::SimulationPublisher; +use crate::core::BundleSimulator; use crate::types::SimulationRequest; use crate::worker_pool::{SimulationTask, 
SimulationWorkerPool}; use alloy_primitives::B256; @@ -31,11 +30,10 @@ pub struct MempoolListenerConfig { } /// Mempool event listener that processes events and queues simulations -pub struct MempoolEventListener +pub struct MempoolEventListener where Node: FullNodeComponents, - E: SimulationEngine, - P: SimulationPublisher, + B: BundleSimulator + 'static, { /// State provider factory for getting current block info provider: Arc, @@ -44,20 +42,19 @@ where /// Kafka topic name topic: String, /// Shared simulation worker pool - worker_pool: Arc>, + worker_pool: Arc>, } -impl MempoolEventListener +impl MempoolEventListener where Node: FullNodeComponents, - E: SimulationEngine + Clone + 'static, - P: SimulationPublisher + Clone + 'static, + B: BundleSimulator + 'static, { /// Create a new mempool event listener pub fn new( provider: Arc, config: MempoolListenerConfig, - worker_pool: Arc>, + worker_pool: Arc>, ) -> Result { let consumer: StreamConsumer = ClientConfig::new() .set("group.id", &config.kafka_group_id) @@ -86,8 +83,7 @@ where /// Run the mempool event listener pub async fn run(self) -> Result<()> where - E: 'static, - P: 'static, + B: 'static, { info!( topic = %self.topic, diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs index 40d4ac7..eab6de0 100644 --- a/crates/simulator/src/worker_pool.rs +++ b/crates/simulator/src/worker_pool.rs @@ -1,8 +1,5 @@ use crate::core::BundleSimulator; -use crate::engine::SimulationEngine; -use crate::publisher::SimulationPublisher; use crate::types::SimulationRequest; -use reth_provider::StateProviderFactory; use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::Arc; use tokio::sync::mpsc; @@ -15,16 +12,12 @@ pub struct SimulationTask { } /// Generic simulation worker pool that can be shared across different simulators -pub struct SimulationWorkerPool +pub struct SimulationWorkerPool where - E: SimulationEngine, - P: SimulationPublisher, - S: StateProviderFactory, + B: BundleSimulator, { /// Core bundle simulator - simulator: Arc>, - /// State provider factory - state_provider_factory: Arc, + simulator: Arc, /// Channel for sending simulation requests to workers simulation_tx: mpsc::Sender, /// Channel for receiving simulation requests in workers @@ -37,23 +30,19 @@ where max_concurrent: usize, } -impl SimulationWorkerPool +impl SimulationWorkerPool where - E: SimulationEngine + Clone + 'static, - P: SimulationPublisher + Clone + 'static, - S: reth_provider::StateProviderFactory + Send + Sync + 'static, + B: BundleSimulator + 'static, { /// Create a new simulation worker pool pub fn new( - simulator: Arc>, - state_provider_factory: Arc, + simulator: Arc, max_concurrent_simulations: usize, ) -> Arc { let (simulation_tx, simulation_rx) = mpsc::channel(1000); Arc::new(Self { simulator, - state_provider_factory, simulation_tx, simulation_rx: Arc::new(tokio::sync::Mutex::new(simulation_rx)), latest_block: AtomicU64::new(0), @@ -145,7 +134,7 @@ where // Execute the simulation match pool .simulator - .simulate(&task.request, pool.state_provider_factory.as_ref()) + .simulate(&task.request) .await { Ok(_) => { diff --git a/crates/simulator/tests/common/builders.rs b/crates/simulator/tests/common/builders.rs new file mode 100644 index 0000000..45e2281 --- /dev/null +++ b/crates/simulator/tests/common/builders.rs @@ -0,0 +1,342 @@ +/// Test data builders for creating complex test scenarios +use alloy_primitives::{Address, Bytes, B256, U256}; +use alloy_rpc_types_mev::EthSendBundle; +use std::collections::HashMap; +use 
tips_simulator::types::{SimulationError, SimulationRequest, SimulationResult}; +use uuid::Uuid; + +/// Builder for creating test bundles with various configurations +pub struct TestBundleBuilder { + txs: Vec, + block_number: u64, + min_timestamp: Option, + max_timestamp: Option, + reverting_tx_hashes: Vec, + replacement_uuid: Option, +} + +impl TestBundleBuilder { + pub fn new() -> Self { + Self { + txs: vec![], + block_number: 18_000_000, + min_timestamp: None, + max_timestamp: None, + reverting_tx_hashes: vec![], + replacement_uuid: None, + } + } + + pub fn with_transaction(mut self, tx: Bytes) -> Self { + self.txs.push(tx); + self + } + + pub fn with_simple_transaction(mut self, data: &[u8]) -> Self { + self.txs.push(Bytes::from(data.to_vec())); + self + } + + pub fn with_block_number(mut self, block_number: u64) -> Self { + self.block_number = block_number; + self + } + + pub fn with_timestamps(mut self, min: u64, max: u64) -> Self { + self.min_timestamp = Some(min); + self.max_timestamp = Some(max); + self + } + + pub fn with_reverting_tx(mut self, tx_hash: B256) -> Self { + self.reverting_tx_hashes.push(tx_hash); + self + } + + pub fn with_replacement_uuid(mut self, uuid: String) -> Self { + self.replacement_uuid = Some(uuid); + self + } + + pub fn build(self) -> EthSendBundle { + EthSendBundle { + txs: self.txs, + block_number: self.block_number, + min_timestamp: self.min_timestamp, + max_timestamp: self.max_timestamp, + reverting_tx_hashes: self.reverting_tx_hashes, + replacement_uuid: self.replacement_uuid, + dropping_tx_hashes: vec![], + refund_percent: None, + refund_recipient: None, + refund_tx_hashes: vec![], + extra_fields: Default::default(), + } + } +} + +/// Builder for creating simulation requests +pub struct SimulationRequestBuilder { + bundle_id: Option, + bundle: Option, + block_number: u64, + block_hash: Option, +} + +impl SimulationRequestBuilder { + pub fn new() -> Self { + Self { + bundle_id: None, + bundle: None, + block_number: 18_000_000, + block_hash: None, + } + } + + pub fn with_bundle_id(mut self, id: Uuid) -> Self { + self.bundle_id = Some(id); + self + } + + pub fn with_bundle(mut self, bundle: EthSendBundle) -> Self { + self.bundle = Some(bundle); + self + } + + pub fn with_block(mut self, number: u64, hash: B256) -> Self { + self.block_number = number; + self.block_hash = Some(hash); + self + } + + pub fn build(self) -> SimulationRequest { + SimulationRequest { + bundle_id: self.bundle_id.unwrap_or_else(Uuid::new_v4), + bundle: self.bundle.unwrap_or_else(|| { + TestBundleBuilder::new() + .with_simple_transaction(&[0x01, 0x02, 0x03]) + .build() + }), + block_number: self.block_number, + block_hash: self.block_hash.unwrap_or_else(B256::random), + } + } +} + +/// Builder for creating simulation results with specific characteristics +pub struct SimulationResultBuilder { + id: Option, + bundle_id: Option, + block_number: u64, + block_hash: Option, + success: bool, + gas_used: Option, + execution_time_us: u128, + state_diff: HashMap>, + error: Option, +} + +impl SimulationResultBuilder { + pub fn new() -> Self { + Self { + id: None, + bundle_id: None, + block_number: 18_000_000, + block_hash: None, + success: true, + gas_used: Some(150_000), + execution_time_us: 1500, + state_diff: HashMap::new(), + error: None, + } + } + + pub fn successful() -> Self { + Self::new() + } + + pub fn failed() -> Self { + let mut builder = Self::new(); + builder.success = false; + builder.gas_used = None; + builder.error = Some(SimulationError::Unknown { + message: "Test 
failure".to_string() + }); + builder + } + + pub fn with_ids(mut self, simulation_id: Uuid, bundle_id: Uuid) -> Self { + self.id = Some(simulation_id); + self.bundle_id = Some(bundle_id); + self + } + + pub fn with_block(mut self, number: u64, hash: B256) -> Self { + self.block_number = number; + self.block_hash = Some(hash); + self + } + + pub fn with_gas_used(mut self, gas: u64) -> Self { + self.gas_used = Some(gas); + self + } + + pub fn with_execution_time_us(mut self, time: u128) -> Self { + self.execution_time_us = time; + self + } + + pub fn with_state_change(mut self, address: Address, slot: U256, value: U256) -> Self { + self.state_diff + .entry(address) + .or_insert_with(HashMap::new) + .insert(slot, value); + self + } + + pub fn with_error(mut self, error: SimulationError) -> Self { + self.success = false; + self.gas_used = None; + self.error = Some(error); + self + } + + pub fn with_revert(self, reason: String) -> Self { + self.with_error(SimulationError::Revert { reason }) + } + + pub fn with_out_of_gas(self) -> Self { + self.with_error(SimulationError::OutOfGas) + } + + pub fn with_invalid_nonce(self, tx_index: usize, expected: u64, actual: u64) -> Self { + self.with_error(SimulationError::InvalidNonce { tx_index, expected, actual }) + } + + pub fn build(self) -> SimulationResult { + if self.success { + SimulationResult::success( + self.id.unwrap_or_else(Uuid::new_v4), + self.bundle_id.unwrap_or_else(Uuid::new_v4), + self.block_number, + self.block_hash.unwrap_or_else(B256::random), + self.gas_used.unwrap_or(150_000), + self.execution_time_us, + self.state_diff, + ) + } else { + SimulationResult::failure( + self.id.unwrap_or_else(Uuid::new_v4), + self.bundle_id.unwrap_or_else(Uuid::new_v4), + self.block_number, + self.block_hash.unwrap_or_else(B256::random), + self.execution_time_us, + self.error.unwrap_or(SimulationError::Unknown { + message: "Unknown error".to_string() + }), + ) + } + } +} + +/// Builder for creating test scenarios with multiple bundles +pub struct ScenarioBuilder { + bundles: Vec, + block_number: u64, + block_hash: B256, +} + +impl ScenarioBuilder { + pub fn new() -> Self { + Self { + bundles: vec![], + block_number: 18_000_000, + block_hash: B256::random(), + } + } + + pub fn with_block(mut self, number: u64, hash: B256) -> Self { + self.block_number = number; + self.block_hash = hash; + self + } + + pub fn add_bundle(mut self, bundle: EthSendBundle) -> Self { + self.bundles.push(bundle); + self + } + + pub fn add_simple_bundle(mut self, num_txs: usize) -> Self { + let mut builder = TestBundleBuilder::new() + .with_block_number(self.block_number); + + for i in 0..num_txs { + builder = builder.with_simple_transaction(&[i as u8, 0x01, 0x02]); + } + + self.bundles.push(builder.build()); + self + } + + pub fn build_requests(self) -> Vec { + self.bundles + .into_iter() + .map(|bundle| { + SimulationRequestBuilder::new() + .with_bundle(bundle) + .with_block(self.block_number, self.block_hash) + .build() + }) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_bundle_builder() { + let bundle = TestBundleBuilder::new() + .with_simple_transaction(&[0x01, 0x02]) + .with_simple_transaction(&[0x03, 0x04]) + .with_block_number(18_500_000) + .with_timestamps(1000, 2000) + .build(); + + assert_eq!(bundle.txs.len(), 2); + assert_eq!(bundle.block_number, 18_500_000); + assert_eq!(bundle.min_timestamp, Some(1000)); + assert_eq!(bundle.max_timestamp, Some(2000)); + } + + #[test] + fn test_result_builder() { + let bundle_id = 
Uuid::new_v4(); + let result = SimulationResultBuilder::successful() + .with_ids(Uuid::new_v4(), bundle_id) + .with_gas_used(200_000) + .with_state_change(Address::random(), U256::from(1), U256::from(100)) + .build(); + + assert!(result.success); + assert_eq!(result.bundle_id, bundle_id); + assert_eq!(result.gas_used, Some(200_000)); + assert!(!result.state_diff.is_empty()); + } + + #[test] + fn test_scenario_builder() { + let requests = ScenarioBuilder::new() + .with_block(19_000_000, B256::random()) + .add_simple_bundle(2) + .add_simple_bundle(3) + .build_requests(); + + assert_eq!(requests.len(), 2); + assert_eq!(requests[0].block_number, 19_000_000); + assert_eq!(requests[0].bundle.txs.len(), 2); + assert_eq!(requests[1].bundle.txs.len(), 3); + } +} diff --git a/crates/simulator/tests/common/fixtures.rs b/crates/simulator/tests/common/fixtures.rs new file mode 100644 index 0000000..b854c7d --- /dev/null +++ b/crates/simulator/tests/common/fixtures.rs @@ -0,0 +1,193 @@ +/// Test fixtures and pre-configured test data +use alloy_primitives::Bytes; +use alloy_rpc_types_mev::EthSendBundle; +use std::sync::LazyLock; + +/// Common test addresses +pub mod addresses { + use alloy_primitives::Address; + use std::sync::LazyLock; + + pub static ALICE: LazyLock
<Address> = LazyLock::new(|| "0x0000000000000000000000000000000000000001".parse().unwrap());
+    pub static BOB: LazyLock<Address> = LazyLock::new(|| "0x0000000000000000000000000000000000000002".parse().unwrap());
+    pub static CHARLIE: LazyLock<Address> = LazyLock::new(|| "0x0000000000000000000000000000000000000003".parse().unwrap());
+    pub static CONTRACT_A: LazyLock<Address> = LazyLock::new(|| "0x1000000000000000000000000000000000000001".parse().unwrap());
+    pub static CONTRACT_B: LazyLock<Address>
= LazyLock::new(|| "0x1000000000000000000000000000000000000002".parse().unwrap()); +} + +/// Common test block hashes and numbers +pub mod blocks { + use alloy_primitives::B256; + use std::sync::LazyLock; + + pub const BLOCK_18M: u64 = 18_000_000; + pub const BLOCK_18M_PLUS_1: u64 = 18_000_001; + pub const BLOCK_18M_PLUS_2: u64 = 18_000_002; + + pub static HASH_18M: LazyLock = LazyLock::new(|| B256::from_slice(&[1u8; 32])); + pub static HASH_18M_PLUS_1: LazyLock = LazyLock::new(|| B256::from_slice(&[2u8; 32])); + pub static HASH_18M_PLUS_2: LazyLock = LazyLock::new(|| B256::from_slice(&[3u8; 32])); +} + +/// Pre-built transaction fixtures +pub mod transactions { + use alloy_primitives::Bytes; + + /// Simple transfer transaction (mock data) + pub fn simple_transfer() -> Bytes { + Bytes::from(vec![ + 0x02, // EIP-1559 tx type + 0x01, 0x02, 0x03, 0x04, // Mock transaction data + 0x05, 0x06, 0x07, 0x08, + ]) + } + + /// Contract call transaction (mock data) + pub fn contract_call() -> Bytes { + Bytes::from(vec![ + 0x02, // EIP-1559 tx type + 0x10, 0x20, 0x30, 0x40, // Mock contract call data + 0x50, 0x60, 0x70, 0x80, + ]) + } + + /// Transaction that will revert (mock data) + pub fn reverting_tx() -> Bytes { + Bytes::from(vec![ + 0x02, // EIP-1559 tx type + 0xFF, 0xFF, 0xFF, 0xFF, // Mock reverting transaction + ]) + } +} + + +/// Pre-configured bundles for testing +pub mod bundles { + use super::*; + use crate::common::builders::TestBundleBuilder; + + /// Simple single transaction bundle + pub fn single_tx_bundle() -> EthSendBundle { + TestBundleBuilder::new() + .with_transaction(transactions::simple_transfer()) + .with_block_number(blocks::BLOCK_18M) + .build() + } + + /// Bundle with multiple transactions + pub fn multi_tx_bundle() -> EthSendBundle { + TestBundleBuilder::new() + .with_transaction(transactions::simple_transfer()) + .with_transaction(transactions::contract_call()) + .with_transaction(transactions::simple_transfer()) + .with_block_number(blocks::BLOCK_18M) + .build() + } + + /// Bundle with reverting transaction + pub fn reverting_bundle() -> EthSendBundle { + TestBundleBuilder::new() + .with_transaction(transactions::simple_transfer()) + .with_transaction(transactions::reverting_tx()) + .with_block_number(blocks::BLOCK_18M) + .build() + } + + /// Large bundle for stress testing + pub fn large_bundle(num_txs: usize) -> EthSendBundle { + let mut builder = TestBundleBuilder::new() + .with_block_number(blocks::BLOCK_18M); + + for i in 0..num_txs { + let tx_data = vec![0x02, i as u8, 0x01, 0x02, 0x03]; + builder = builder.with_transaction(Bytes::from(tx_data)); + } + + builder.build() + } + + /// Bundle with specific timing constraints + pub fn time_constrained_bundle() -> EthSendBundle { + TestBundleBuilder::new() + .with_transaction(transactions::simple_transfer()) + .with_block_number(blocks::BLOCK_18M) + .with_timestamps(1625097600, 1625097700) // 100 second window + .build() + } +} + +/// Test scenarios combining multiple fixtures +pub mod scenarios { + use super::*; + use tips_simulator::types::SimulationRequest; + use uuid::Uuid; + + /// Create a basic simulation scenario + pub fn basic_simulation() -> SimulationRequest { + let bundle = bundles::single_tx_bundle(); + SimulationRequest { + bundle_id: Uuid::new_v4(), + bundle, + block_number: blocks::BLOCK_18M, + block_hash: *blocks::HASH_18M, + } + } + + /// Create a contract interaction scenario + pub fn contract_interaction() -> SimulationRequest { + let bundle = bundles::multi_tx_bundle(); + SimulationRequest { + 
bundle_id: Uuid::new_v4(), + bundle, + block_number: blocks::BLOCK_18M, + block_hash: *blocks::HASH_18M, + } + } + + /// Create a large bundle scenario + pub fn large_bundle_scenario() -> SimulationRequest { + let bundle = bundles::large_bundle(100); + SimulationRequest { + bundle_id: Uuid::new_v4(), + bundle, + block_number: blocks::BLOCK_18M, + block_hash: *blocks::HASH_18M, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fixture_addresses() { + assert_ne!(*addresses::ALICE, *addresses::BOB); + assert_ne!(*addresses::CONTRACT_A, *addresses::CONTRACT_B); + } + + #[test] + fn test_fixture_bundles() { + let single = bundles::single_tx_bundle(); + assert_eq!(single.txs.len(), 1); + + let multi = bundles::multi_tx_bundle(); + assert_eq!(multi.txs.len(), 3); + + let large = bundles::large_bundle(100); + assert_eq!(large.txs.len(), 100); + } + + #[test] + fn test_fixture_scenarios() { + let request = scenarios::basic_simulation(); + assert_eq!(request.block_number, blocks::BLOCK_18M); + assert_eq!(request.bundle.txs.len(), 1); + + let interaction = scenarios::contract_interaction(); + assert_eq!(interaction.bundle.txs.len(), 3); + + let large_scenario = scenarios::large_bundle_scenario(); + assert_eq!(large_scenario.bundle.txs.len(), 100); + } +} diff --git a/crates/simulator/tests/common/mock_bundle_simulator.rs b/crates/simulator/tests/common/mock_bundle_simulator.rs new file mode 100644 index 0000000..92406f1 --- /dev/null +++ b/crates/simulator/tests/common/mock_bundle_simulator.rs @@ -0,0 +1,85 @@ +/// Mock implementation of BundleSimulator for testing +use tips_simulator::core::BundleSimulator; +use tips_simulator::engine::SimulationEngine; +use tips_simulator::publisher::SimulationPublisher; +use tips_simulator::types::SimulationRequest; +use crate::common::mocks::{MockSimulationEngine, MockSimulationPublisher}; +use async_trait::async_trait; +use eyre::Result; + +/// Mock bundle simulator for testing - no Reth dependencies! +pub struct MockBundleSimulator { + engine: MockSimulationEngine, + publisher: MockSimulationPublisher, +} + +impl MockBundleSimulator { + pub fn new(engine: MockSimulationEngine, publisher: MockSimulationPublisher) -> Self { + Self { engine, publisher } + } + + pub fn engine(&self) -> &MockSimulationEngine { + &self.engine + } + + pub fn publisher(&self) -> &MockSimulationPublisher { + &self.publisher + } +} + +#[async_trait] +impl BundleSimulator for MockBundleSimulator { + async fn simulate(&self, request: &SimulationRequest) -> Result<()> { + // Run the simulation using the mock engine - no state provider needed! 
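+        // Note that this mirrors RethBundleSimulator: the outer Result stays Ok(())
+        // even when the simulation or the publish step fails. Failures are only
+        // logged, so a worker pool draining the queue never dies on one bad bundle.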
+ match self.engine.simulate_bundle(request).await { + Ok(result) => { + tracing::info!( + bundle_id = %request.bundle_id, + simulation_id = %result.id, + success = result.success, + "Simulation completed" + ); + + if let Err(e) = self.publisher.publish_result(result).await { + tracing::error!( + error = %e, + bundle_id = %request.bundle_id, + "Failed to publish simulation result" + ); + } + } + Err(e) => { + tracing::error!( + error = %e, + bundle_id = %request.bundle_id, + "Simulation failed" + ); + } + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::common; + + #[tokio::test] + async fn test_mock_bundle_simulator() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + let bundle = common::create_test_bundle(1, 18_000_000); + let request = common::create_test_request(bundle); + + // Use the clean trait interface + let result = simulator.simulate(&request).await; + + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + } +} diff --git a/crates/simulator/tests/common/mocks.rs b/crates/simulator/tests/common/mocks.rs new file mode 100644 index 0000000..29d0ef5 --- /dev/null +++ b/crates/simulator/tests/common/mocks.rs @@ -0,0 +1,175 @@ +/// Reusable mock implementations for testing +use alloy_primitives::{Address, B256, U256}; +use async_trait::async_trait; +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; +use tips_simulator::{SimulationEngine, SimulationPublisher, SimulationError, SimulationResult}; +use tips_simulator::types::SimulationRequest; +use uuid::Uuid; + +/// Mock simulation engine with configurable behavior +#[derive(Clone)] +pub struct MockSimulationEngine { + /// Results to return for each simulation + results: Arc>>, + /// Track all simulations for verification + simulations: Arc>>, + /// Whether to fail the next simulation + fail_next: Arc>, + /// Custom error to return on failure + error: Arc>>, +} + +impl MockSimulationEngine { + pub fn new() -> Self { + Self { + results: Arc::new(Mutex::new(Vec::new())), + simulations: Arc::new(Mutex::new(Vec::new())), + fail_next: Arc::new(Mutex::new(false)), + error: Arc::new(Mutex::new(None)), + } + } + + pub fn with_result(self, result: SimulationResult) -> Self { + self.results.lock().unwrap().push(result); + self + } + + pub fn fail_next_with(self, error: SimulationError) -> Self { + *self.fail_next.lock().unwrap() = true; + *self.error.lock().unwrap() = Some(error); + self + } + + pub fn get_simulations(&self) -> Vec { + self.simulations.lock().unwrap().clone() + } + + pub fn simulation_count(&self) -> usize { + self.simulations.lock().unwrap().len() + } +} + +#[async_trait] +impl SimulationEngine for MockSimulationEngine { + async fn simulate_bundle( + &self, + request: &SimulationRequest, + ) -> eyre::Result { + // Track the simulation + self.simulations.lock().unwrap().push(request.clone()); + + // Check if we should fail + if *self.fail_next.lock().unwrap() { + *self.fail_next.lock().unwrap() = false; + let error = self.error.lock().unwrap().take() + .unwrap_or(SimulationError::Unknown { message: "Mock failure".to_string() }); + + return Ok(SimulationResult::failure( + Uuid::new_v4(), + request.bundle_id, + request.block_number, + request.block_hash, + 1000, + error, + )); + } + + // Return pre-configured result or create a default success + let mut results = self.results.lock().unwrap(); + if let 
Some(result) = results.pop() { + Ok(result) + } else { + let mut state_diff = HashMap::new(); + let address = Address::random(); + let mut storage = HashMap::new(); + storage.insert(U256::from(1), U256::from(100)); + state_diff.insert(address, storage); + + Ok(SimulationResult::success( + Uuid::new_v4(), + request.bundle_id, + request.block_number, + request.block_hash, + 150_000, + 1500, + state_diff, + )) + } + } +} + +/// Mock simulation publisher that records published results +#[derive(Clone)] +pub struct MockSimulationPublisher { + published: Arc>>, + fail_next: Arc>, +} + +impl MockSimulationPublisher { + pub fn new() -> Self { + Self { + published: Arc::new(Mutex::new(Vec::new())), + fail_next: Arc::new(Mutex::new(false)), + } + } + + pub fn fail_next(self) -> Self { + *self.fail_next.lock().unwrap() = true; + self + } + + pub fn get_published(&self) -> Vec { + self.published.lock().unwrap().clone() + } + + pub fn published_count(&self) -> usize { + self.published.lock().unwrap().len() + } + + pub fn clear_published(&self) { + self.published.lock().unwrap().clear(); + } +} + +#[async_trait] +impl SimulationPublisher for MockSimulationPublisher { + async fn publish_result(&self, result: SimulationResult) -> eyre::Result<()> { + if *self.fail_next.lock().unwrap() { + *self.fail_next.lock().unwrap() = false; + return Err(eyre::eyre!("Mock publisher failure")); + } + + self.published.lock().unwrap().push(result); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::common; + + #[tokio::test] + async fn test_mock_simulation_engine() { + let engine = MockSimulationEngine::new(); + let _request = common::create_test_request( + common::create_test_bundle(1, 18_000_000) + ); + + // Verify the engine is initialized correctly + assert_eq!(engine.simulation_count(), 0); + } + + #[tokio::test] + async fn test_mock_publisher() { + let publisher = MockSimulationPublisher::new(); + let result = common::create_success_result(Uuid::new_v4(), 100_000); + + publisher.publish_result(result.clone()).await.unwrap(); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert_eq!(published[0].id, result.id); + } +} diff --git a/crates/simulator/tests/common/mod.rs b/crates/simulator/tests/common/mod.rs new file mode 100644 index 0000000..b5f59b4 --- /dev/null +++ b/crates/simulator/tests/common/mod.rs @@ -0,0 +1,167 @@ +/// Common test utilities and infrastructure for simulator testing +pub mod builders; +pub mod fixtures; +pub mod mocks; +pub mod mock_bundle_simulator; + +use alloy_primitives::{Address, Bytes, B256, U256}; +use alloy_rpc_types_mev::EthSendBundle; +use std::collections::HashMap; +use tips_simulator::types::{SimulationRequest, SimulationResult}; +use uuid::Uuid; + +/// Test configuration that can be shared across tests +pub struct TestConfig { + pub default_block_number: u64, + pub default_gas_limit: u64, + pub simulation_timeout_ms: u64, +} + +impl Default for TestConfig { + fn default() -> Self { + Self { + default_block_number: 18_000_000, + default_gas_limit: 30_000_000, + simulation_timeout_ms: 5000, + } + } +} + +/// Helper to create a simple test bundle +pub fn create_test_bundle(num_txs: usize, block_number: u64) -> EthSendBundle { + let mut txs = Vec::new(); + for i in 0..num_txs { + // Create simple transaction bytes (not valid transactions, but good for testing) + let tx_bytes = vec![0x01, 0x02, 0x03, i as u8]; + txs.push(Bytes::from(tx_bytes)); + } + + EthSendBundle { + txs, + block_number, + min_timestamp: 
Some(1625097600), + max_timestamp: Some(1625097900), + reverting_tx_hashes: vec![], + replacement_uuid: None, + dropping_tx_hashes: vec![], + refund_percent: None, + refund_recipient: None, + refund_tx_hashes: vec![], + extra_fields: Default::default(), + } +} + +/// Helper to create a test simulation request +pub fn create_test_request(bundle: EthSendBundle) -> SimulationRequest { + SimulationRequest { + bundle_id: Uuid::new_v4(), + bundle, + block_number: 18_000_000, + block_hash: B256::random(), + } +} + +/// Helper to create a successful simulation result +pub fn create_success_result( + bundle_id: Uuid, + gas_used: u64, +) -> SimulationResult { + let mut state_diff = HashMap::new(); + let address = Address::random(); + let mut storage = HashMap::new(); + storage.insert(U256::from(1), U256::from(100)); + state_diff.insert(address, storage); + + SimulationResult::success( + Uuid::new_v4(), + bundle_id, + 18_000_000, + B256::random(), + gas_used, + 1500, // execution time in microseconds + state_diff, + ) +} + +/// Test assertion helpers +pub mod assertions { + use super::*; + + /// Assert that a simulation result is successful + pub fn assert_simulation_success(result: &SimulationResult) { + assert!(result.success, "Expected successful simulation"); + assert!(result.gas_used.is_some(), "Successful simulation should have gas_used"); + assert!(result.error_reason.is_none(), "Successful simulation should not have error"); + } + + /// Assert that a simulation result is a failure + pub fn assert_simulation_failure(result: &SimulationResult) { + assert!(!result.success, "Expected failed simulation"); + assert!(result.gas_used.is_none(), "Failed simulation should not have gas_used"); + assert!(result.error_reason.is_some(), "Failed simulation should have error reason"); + } + + /// Assert state diff contains expected changes + pub fn assert_state_diff_contains( + result: &SimulationResult, + address: Address, + slot: U256, + expected_value: U256, + ) { + let storage = result.state_diff.get(&address) + .expect("Address not found in state diff"); + let value = storage.get(&slot) + .expect("Storage slot not found"); + assert_eq!(*value, expected_value, "Unexpected storage value"); + } +} + +/// Test timing utilities +pub mod timing { + use std::time::{Duration, Instant}; + + /// Measure execution time of an async operation + pub async fn measure_async(f: F) -> (T, Duration) + where + F: std::future::Future, + { + let start = Instant::now(); + let result = f.await; + (result, start.elapsed()) + } + + /// Assert that an operation completes within a timeout + pub async fn assert_completes_within( + f: F, + timeout: Duration, + ) -> T + where + F: std::future::Future, + { + tokio::time::timeout(timeout, f) + .await + .expect("Operation timed out") + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_create_test_bundle() { + let bundle = create_test_bundle(3, 18_000_000); + assert_eq!(bundle.txs.len(), 3); + assert_eq!(bundle.block_number, 18_000_000); + } + + #[test] + fn test_create_success_result() { + let bundle_id = Uuid::new_v4(); + let result = create_success_result(bundle_id, 150_000); + + assertions::assert_simulation_success(&result); + assert_eq!(result.bundle_id, bundle_id); + assert_eq!(result.gas_used, Some(150_000)); + } +} diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index f0c5bb2..66815b3 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -1,62 +1,285 
@@ -use alloy_primitives::{Bytes, B256}; -use alloy_rpc_types_mev::EthSendBundle; -use tips_simulator::types::SimulationRequest; -use tips_simulator::MempoolListenerConfig; -use uuid::Uuid; +/// Integration tests for the simulator functionality +/// +/// Note: These tests use mock implementations because the actual StateProvider +/// trait requires complex setup. For real integration testing with actual +/// state providers, see the component tests. +mod common; +mod unit; -// Basic smoke test to ensure the core simulation types work correctly -// Tests both mempool event simulation and ExEx event simulation architectures +use common::assertions::*; +use common::builders::*; +use common::fixtures::*; +use common::mocks::*; +use common::mock_bundle_simulator::MockBundleSimulator; +use common::timing::*; -#[test] -fn test_simulation_request_creation() { - let bundle_id = Uuid::new_v4(); - let bundle = EthSendBundle { - txs: vec![ - Bytes::from_static(&[0x01, 0x02, 0x03]), // Mock transaction data - ], - block_number: 18_000_000, - min_timestamp: Some(1625097600), - max_timestamp: Some(1625097900), - reverting_tx_hashes: vec![], - replacement_uuid: None, - dropping_tx_hashes: vec![], - refund_percent: None, - refund_recipient: None, - refund_tx_hashes: vec![], - extra_fields: Default::default(), - }; +use alloy_primitives::U256; +use tips_simulator::{ + core::BundleSimulator, MempoolListenerConfig, SimulationWorkerPool, + types::ExExSimulationConfig, +}; +use std::sync::Arc; +use std::time::Duration; - let request = SimulationRequest { - bundle_id, - bundle: bundle.clone(), - block_number: 18_000_000, - block_hash: B256::ZERO, - }; +#[tokio::test] +async fn test_successful_bundle_simulation() { + // Setup + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + // Create test request + let bundle = bundles::single_tx_bundle(); + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + + // Execute + let result = simulator.simulate(&request).await; + + // Verify + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert_simulation_success(&published[0]); +} + +#[tokio::test] +async fn test_failed_bundle_simulation() { + // Setup with failing engine + let engine = MockSimulationEngine::new() + .fail_next_with(tips_simulator::types::SimulationError::OutOfGas); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + // Create test request + let bundle = bundles::single_tx_bundle(); + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + + // Execute + let result = simulator.simulate(&request).await; + + // Verify + assert!(result.is_ok()); // simulate() itself succeeds even if simulation fails + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert_simulation_failure(&published[0]); + assert!(published[0].error_reason.as_ref().unwrap().contains("out of gas")); +} + +#[tokio::test] +async fn test_publisher_failure_handling() { + // Setup with failing publisher + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new().fail_next(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + // 
Create test request + let bundle = bundles::single_tx_bundle(); + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + + // Execute - should not panic even if publisher fails + let result = simulator.simulate(&request).await; + + // Verify + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 0); // Publisher failed +} + +#[tokio::test] +async fn test_worker_pool_concurrent_simulations() { + // Setup + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + // Provider no longer needed with new architecture + + // Create worker pool with 4 workers + let pool = SimulationWorkerPool::new(simulator, 4); + pool.start().await; + + // Queue multiple simulations + let num_simulations = 20; + let mut bundle_ids = Vec::new(); + + for i in 0..num_simulations { + let bundle = TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x01, 0x02]) + .with_block_number(blocks::BLOCK_18M + i as u64) + .build(); + + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .with_block(blocks::BLOCK_18M + i as u64, alloy_primitives::B256::random()) + .build(); + + bundle_ids.push(request.bundle_id); + + let task = tips_simulator::worker_pool::SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for completion with timeout + let (_, duration) = measure_async(async { + tokio::time::sleep(Duration::from_millis(500)).await; + }).await; + + // Verify all simulations completed + assert_eq!(publisher.published_count(), num_simulations); + + // Verify all bundle IDs are present + let published = publisher.get_published(); + for bundle_id in bundle_ids { + assert!(published.iter().any(|r| r.bundle_id == bundle_id)); + } + + // Verify reasonable execution time + assert!(duration < Duration::from_secs(2), "Simulations took too long"); +} + +#[tokio::test] +async fn test_worker_pool_error_recovery() { + // Setup engine that fails every other simulation + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + // Provider no longer needed with new architecture + + // Create worker pool + let pool = SimulationWorkerPool::new(simulator, 2); + pool.start().await; + + // Queue simulations with some failures + for i in 0..10 { + let mut builder = SimulationResultBuilder::successful(); + if i % 2 == 1 { + builder = SimulationResultBuilder::failed() + .with_revert(format!("Test revert {}", i)); + } + + let _ = engine.clone().with_result(builder.build()); + + let request = SimulationRequestBuilder::new() + .with_bundle(bundles::single_tx_bundle()) + .build(); + + let task = tips_simulator::worker_pool::SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for completion + tokio::time::sleep(Duration::from_millis(300)).await; + + // Verify all simulations were attempted + let published = publisher.get_published(); + assert_eq!(published.len(), 10); + + // Verify mix of successes and failures + let successes = published.iter().filter(|r| r.success).count(); + let failures = published.iter().filter(|r| !r.success).count(); + assert!(successes > 0 && failures > 0); +} + +#[tokio::test] +async fn test_large_bundle_simulation() { + // Setup + let engine = MockSimulationEngine::new(); + let publisher = 
MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + // Create large bundle + let large_bundle = bundles::large_bundle(100); + let request = SimulationRequestBuilder::new() + .with_bundle(large_bundle) + .build(); + + + // Execute with timeout + let result = assert_completes_within( + simulator.simulate(&request), + Duration::from_secs(5), + ).await; + + // Verify + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); +} - assert_eq!(request.bundle_id, bundle_id); - assert_eq!(request.bundle.txs.len(), 1); - assert_eq!(request.block_number, 18_000_000); +#[tokio::test] +async fn test_state_diff_tracking() { + // Setup engine that returns specific state changes + let simulation_result = SimulationResultBuilder::successful() + .with_state_change(*addresses::ALICE, U256::from(0), U256::from(100)) + .with_state_change(*addresses::ALICE, U256::from(1), U256::from(200)) + .with_state_change(*addresses::BOB, U256::from(0), U256::from(300)) + .build(); + + let engine = MockSimulationEngine::new() + .with_result(simulation_result); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine, publisher.clone()); + + // Execute + let bundle = bundles::single_tx_bundle(); + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + simulator.simulate(&request).await.unwrap(); + + // Verify state diff + let published = publisher.get_published(); + assert_eq!(published.len(), 1); + + let result = &published[0]; + assert_state_diff_contains(result, *addresses::ALICE, U256::from(0), U256::from(100)); + assert_state_diff_contains(result, *addresses::ALICE, U256::from(1), U256::from(200)); + assert_state_diff_contains(result, *addresses::BOB, U256::from(0), U256::from(300)); +} + +#[test] +fn test_simulation_request_creation() { + let bundle = bundles::single_tx_bundle(); + let request = SimulationRequestBuilder::new() + .with_bundle(bundle.clone()) + .with_block(blocks::BLOCK_18M, *blocks::HASH_18M) + .build(); + + assert_eq!(request.bundle.txs.len(), bundle.txs.len()); + assert_eq!(request.block_number, blocks::BLOCK_18M); + assert_eq!(request.block_hash, *blocks::HASH_18M); } -// Test mempool simulator configuration creation #[test] -fn test_mempool_simulator_config() { +fn test_mempool_config() { let config = MempoolListenerConfig { kafka_brokers: vec!["localhost:9092".to_string()], kafka_topic: "tips-audit".to_string(), kafka_group_id: "tips-simulator".to_string(), database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), }; - + assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); assert_eq!(config.kafka_topic, "tips-audit"); - assert_eq!(config.kafka_group_id, "tips-simulator"); - assert_eq!( - config.database_url, - "postgresql://user:pass@localhost:5432/tips" - ); } -// Future integration tests would test both: -// 1. Mempool event simulation (Kafka-based) -// 2. 
ExEx event simulation +#[test] +fn test_exex_config() { + let config = ExExSimulationConfig { + database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), + max_concurrent_simulations: 10, + simulation_timeout_ms: 5000, + }; + + assert_eq!(config.max_concurrent_simulations, 10); + assert_eq!(config.simulation_timeout_ms, 5000); +} diff --git a/crates/simulator/tests/unit/core_test.rs b/crates/simulator/tests/unit/core_test.rs new file mode 100644 index 0000000..d32b469 --- /dev/null +++ b/crates/simulator/tests/unit/core_test.rs @@ -0,0 +1,255 @@ +/// Unit tests for the BundleSimulator core component +use crate::common::builders::*; +use crate::common::fixtures::*; +use crate::common::mocks::*; +use crate::common::mock_bundle_simulator::MockBundleSimulator; +use tips_simulator::{core::BundleSimulator, SimulationError}; +use std::sync::Arc; +use uuid::Uuid; + +#[tokio::test] +async fn test_bundle_simulator_creation() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine, publisher); + + // Simply verify it can be created + assert!(std::mem::size_of_val(&simulator) > 0); +} + +#[tokio::test] +async fn test_simulate_success_flow() { + // Arrange + let bundle_id = Uuid::new_v4(); + let expected_result = SimulationResultBuilder::successful() + .with_ids(Uuid::new_v4(), bundle_id) + .with_gas_used(200_000) + .build(); + + let engine = MockSimulationEngine::new() + .with_result(expected_result.clone()); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(bundle_id) + .with_bundle(bundles::single_tx_bundle()) + .build(); + + // Act - using the clean trait interface + let result = simulator.simulate(&request).await; + + // Assert + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert_eq!(published[0].bundle_id, bundle_id); + assert_eq!(published[0].gas_used, Some(200_000)); +} + +#[tokio::test] +async fn test_simulate_failure_flow() { + // Arrange + let bundle_id = Uuid::new_v4(); + let engine = MockSimulationEngine::new() + .fail_next_with(SimulationError::Revert { + reason: "Test revert".to_string() + }); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(bundle_id) + .build(); + + + // Act + let result = simulator.simulate(&request).await; + + // Assert + assert!(result.is_ok()); // simulate() succeeds even if simulation fails + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert!(!published[0].success); + assert!(published[0].error_reason.as_ref().unwrap().contains("revert")); +} + +#[tokio::test] +async fn test_publisher_error_handling() { + // Arrange + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new().fail_next(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new().build(); + + // Act - should log error but not fail + let result = simulator.simulate(&request).await; + + // Assert + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); // Engine was called + 
assert_eq!(publisher.published_count(), 0); // Publisher failed +} + +#[tokio::test] +async fn test_state_provider_factory_error() { + // This test would require a mock StateProviderFactory that fails + // For now, we'll test with an invalid block hash + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + // Request with block hash that doesn't exist in our mock state + let request = SimulationRequestBuilder::new() + .with_block(99_999_999, alloy_primitives::B256::random()) + .build(); + + + // Act + let result = simulator.simulate(&request).await; + + // Assert - in our mock, this actually succeeds, but in real implementation + // it would fail with state provider error + assert!(result.is_ok()); +} + +#[tokio::test] +async fn test_multiple_sequential_simulations() { + // Arrange + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + + // Act - simulate multiple bundles + for i in 0..5 { + let request = SimulationRequestBuilder::new() + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x01, 0x02]) + .build() + ) + .build(); + + let result = simulator.simulate(&request).await; + assert!(result.is_ok()); + } + + // Assert + assert_eq!(engine.simulation_count(), 5); + assert_eq!(publisher.published_count(), 5); +} + +#[tokio::test] +async fn test_empty_bundle_simulation() { + // Arrange + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + let empty_bundle = TestBundleBuilder::new().build(); // No transactions + let request = SimulationRequestBuilder::new() + .with_bundle(empty_bundle) + .build(); + + + // Act + let result = simulator.simulate(&request).await; + + // Assert + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); +} + +#[tokio::test] +async fn test_simulate_with_complex_state_diff() { + // Arrange + let bundle_id = Uuid::new_v4(); + let mut state_diff = std::collections::HashMap::new(); + + // Add multiple accounts with multiple storage changes + for i in 0..3 { + let addr = alloy_primitives::Address::random(); + let mut storage = std::collections::HashMap::new(); + for j in 0..5 { + storage.insert( + alloy_primitives::U256::from(j), + alloy_primitives::U256::from(i * 100 + j) + ); + } + state_diff.insert(addr, storage); + } + + let result = SimulationResultBuilder::successful() + .with_ids(Uuid::new_v4(), bundle_id) + .build(); + + // Manually set the state diff + let mut result = result; + result.state_diff = state_diff.clone(); + + let engine = MockSimulationEngine::new() + .with_result(result); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine, publisher.clone()); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(bundle_id) + .build(); + + + // Act + simulator.simulate(&request).await.unwrap(); + + // Assert + let published = publisher.get_published(); + assert_eq!(published[0].state_diff.len(), 3); + for (_, storage) in &published[0].state_diff { + assert_eq!(storage.len(), 5); + } +} + +#[tokio::test] +async fn test_concurrent_simulator_usage() { + // Test that the simulator can be used concurrently + let engine = MockSimulationEngine::new(); + let 
publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let mut handles = vec![]; + + // Spawn multiple concurrent simulations + for i in 0..10 { + let sim = Arc::clone(&simulator); + + let handle = tokio::spawn(async move { + let request = SimulationRequestBuilder::new() + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x01, 0x02]) + .build() + ) + .build(); + + sim.simulate(&request).await + }); + + handles.push(handle); + } + + // Wait for all to complete + for handle in handles { + let result = handle.await.unwrap(); + assert!(result.is_ok()); + } + + // Verify all were processed + assert_eq!(engine.simulation_count(), 10); + assert_eq!(publisher.published_count(), 10); +} diff --git a/crates/simulator/tests/unit/error_handling_test.rs b/crates/simulator/tests/unit/error_handling_test.rs new file mode 100644 index 0000000..5657f31 --- /dev/null +++ b/crates/simulator/tests/unit/error_handling_test.rs @@ -0,0 +1,277 @@ +/// Unit tests for error handling scenarios +use crate::common::builders::*; +use crate::common::fixtures::*; +use crate::common::mocks::*; +use crate::common::mock_bundle_simulator::MockBundleSimulator; +use tips_simulator::{core::BundleSimulator, SimulationError}; + +#[tokio::test] +async fn test_simulation_error_types() { + // Test all error types get properly propagated + let error_scenarios = vec![ + ( + SimulationError::Revert { reason: "Insufficient funds".to_string() }, + "Insufficient funds", + ), + ( + SimulationError::OutOfGas, + "out of gas", + ), + ( + SimulationError::InvalidNonce { tx_index: 0, expected: 5, actual: 3 }, + "Invalid nonce", + ), + ( + SimulationError::InsufficientBalance { + tx_index: 1, + required: alloy_primitives::U256::from(1000), + available: alloy_primitives::U256::from(500) + }, + "Insufficient balance", + ), + ( + SimulationError::StateAccessError { message: "RPC timeout".to_string() }, + "State access error", + ), + ( + SimulationError::Timeout, + "timed out", + ), + ( + SimulationError::Unknown { message: "Unexpected error".to_string() }, + "Unexpected error", + ), + ]; + + for (error, expected_msg) in error_scenarios { + let engine = MockSimulationEngine::new() + .fail_next_with(error.clone()); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine, publisher.clone()); + + let request = SimulationRequestBuilder::new().build(); + + // Execute + simulator.simulate(&request).await.unwrap(); + + // Verify + let published = publisher.get_published(); + assert_eq!(published.len(), 1); + assert!(!published[0].success); + assert!(published[0].error_reason.as_ref().unwrap().contains(expected_msg)); + } +} + +#[tokio::test] +async fn test_publisher_failure_recovery() { + // Test that publisher failures don't crash the simulator + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + + // First simulation succeeds + let request1 = SimulationRequestBuilder::new().build(); + simulator.simulate(&request1).await.unwrap(); + assert_eq!(publisher.published_count(), 1); + + // Configure publisher to fail next + let publisher2 = publisher.clone().fail_next(); + let simulator2 = MockBundleSimulator::new(engine.clone(), publisher2.clone()); + + // Second simulation - publisher fails but simulator continues + let request2 = SimulationRequestBuilder::new().build(); + 
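// fail_next() arms a one-shot flag shared through Arc, so the clone handed to
// simulator2 sees it too; the next publish consumes the flag and errors exactly
// once. A sketch of the consume step, mirroring the engine mock's
// read-then-clear pattern (the publisher's field name is assumed here):
//
//     if *self.fail_next.lock().unwrap() {
//         *self.fail_next.lock().unwrap() = false;
//         return Err(eyre::eyre!("mock publish failure"));
//     }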
simulator2.simulate(&request2).await.unwrap(); + assert_eq!(publisher2.published_count(), 1); // Still 1 from first simulation, second failed + + // Third simulation - publisher recovers + let request3 = SimulationRequestBuilder::new().build(); + simulator2.simulate(&request3).await.unwrap(); + assert_eq!(publisher2.published_count(), 2); // Now 2: first succeeded, second failed, third succeeded +} + +#[tokio::test] +async fn test_engine_failure_recovery() { + // Test that engine failures are handled gracefully + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + + + // Test simulation 1: Out of gas error + let engine1 = engine.clone().fail_next_with(SimulationError::OutOfGas); + let simulator1 = MockBundleSimulator::new(engine1, publisher.clone()); + let request1 = SimulationRequestBuilder::new() + .with_bundle(TestBundleBuilder::new().with_simple_transaction(&[1]).build()) + .build(); + simulator1.simulate(&request1).await.unwrap(); + + // Test simulation 2: Success + let engine2 = engine.clone().with_result(SimulationResultBuilder::successful().build()); + let simulator2 = MockBundleSimulator::new(engine2, publisher.clone()); + let request2 = SimulationRequestBuilder::new() + .with_bundle(TestBundleBuilder::new().with_simple_transaction(&[2]).build()) + .build(); + simulator2.simulate(&request2).await.unwrap(); + + // Test simulation 3: Revert error + let engine3 = engine.clone().fail_next_with(SimulationError::Revert { reason: "Test revert".to_string() }); + let simulator3 = MockBundleSimulator::new(engine3, publisher.clone()); + let request3 = SimulationRequestBuilder::new() + .with_bundle(TestBundleBuilder::new().with_simple_transaction(&[3]).build()) + .build(); + simulator3.simulate(&request3).await.unwrap(); + + // Verify all were published despite failures + let published = publisher.get_published(); + assert_eq!(published.len(), 3); + + // First should fail with out of gas + assert!(!published[0].success); + assert!(published[0].error_reason.as_ref().unwrap().contains("Bundle ran out of gas")); + + // Second should succeed (from pre-configured result) + assert!(published[1].success); + + // Third should fail with revert + assert!(!published[2].success); + assert!(published[2].error_reason.as_ref().unwrap().contains("Bundle reverted")); +} + +#[tokio::test] +async fn test_invalid_bundle_handling() { + // Test handling of various invalid bundle scenarios + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + + // Empty bundle + let empty_bundle = TestBundleBuilder::new().build(); + let request = SimulationRequestBuilder::new() + .with_bundle(empty_bundle) + .build(); + + simulator.simulate(&request).await.unwrap(); + assert_eq!(publisher.published_count(), 1); + + // Bundle with invalid block number (future block) + let future_bundle = TestBundleBuilder::new() + .with_simple_transaction(&[0x01]) + .with_block_number(99_999_999) + .build(); + let future_request = SimulationRequestBuilder::new() + .with_bundle(future_bundle) + .with_block(99_999_999, alloy_primitives::B256::random()) + .build(); + + simulator.simulate(&future_request).await.unwrap(); + assert_eq!(publisher.published_count(), 2); +} + +#[tokio::test] +async fn test_concurrent_error_handling() { + // Test error handling under concurrent load + use std::sync::Arc; + + let engine = MockSimulationEngine::new(); + let publisher = 
MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + // Provider factory no longer needed with new architecture + + let mut handles = vec![]; + + // Spawn multiple tasks, some will fail + for i in 0..10 { + let sim = Arc::clone(&simulator); + // Provider factory no longer needed + let eng = engine.clone(); + + let handle = tokio::spawn(async move { + // Every third simulation fails + if i % 3 == 0 { + let _ = eng.fail_next_with(SimulationError::Timeout); + } + + let request = SimulationRequestBuilder::new() + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[i as u8]) + .build() + ) + .build(); + + sim.simulate(&request).await + }); + + handles.push(handle); + } + + // Wait for all to complete + let mut results = vec![]; + for handle in handles { + results.push(handle.await.unwrap()); + } + + // All should complete without panicking + assert!(results.iter().all(|r: &eyre::Result<()>| r.is_ok())); + assert_eq!(publisher.published_count(), 10); + + // Verify mix of successes and failures + let published = publisher.get_published(); + let failures = published.iter().filter(|r| !r.success).count(); + assert!(failures > 0); +} + +#[test] +fn test_error_display_formatting() { + // Verify error messages are properly formatted + let errors = vec![ + ( + SimulationError::Revert { reason: "ERC20: transfer amount exceeds balance".to_string() }, + "Bundle reverted: ERC20: transfer amount exceeds balance", + ), + ( + SimulationError::InvalidNonce { tx_index: 0, expected: 10, actual: 5 }, + "Invalid nonce in tx 0: expected 10, got 5", + ), + ( + SimulationError::InsufficientBalance { + tx_index: 2, + required: alloy_primitives::U256::from(1_000_000), + available: alloy_primitives::U256::from(500_000) + }, + "Insufficient balance in tx 2: required 1000000, available 500000", + ), + ]; + + for (error, expected) in errors { + assert_eq!(error.to_string(), expected); + } +} + +#[tokio::test] +async fn test_timeout_simulation() { + // Test timeout error handling + let engine = MockSimulationEngine::new() + .fail_next_with(SimulationError::Timeout); + let publisher = MockSimulationPublisher::new(); + let simulator = MockBundleSimulator::new(engine, publisher.clone()); + + let large_bundle = bundles::large_bundle(1000); // Very large bundle + let request = SimulationRequestBuilder::new() + .with_bundle(large_bundle) + .build(); + + + // Execute + let result = simulator.simulate(&request).await; + + // Should complete successfully even with timeout + assert!(result.is_ok()); + + let published = publisher.get_published(); + assert_eq!(published.len(), 1); + assert!(!published[0].success); + assert_eq!(published[0].error_reason, Some("Simulation timed out".to_string())); +} diff --git a/crates/simulator/tests/unit/mod.rs b/crates/simulator/tests/unit/mod.rs new file mode 100644 index 0000000..4505531 --- /dev/null +++ b/crates/simulator/tests/unit/mod.rs @@ -0,0 +1,4 @@ +/// Unit tests for simulator core components +pub mod core_test; +pub mod types_test; +pub mod error_handling_test; diff --git a/crates/simulator/tests/unit/types_test.rs b/crates/simulator/tests/unit/types_test.rs new file mode 100644 index 0000000..0f3b240 --- /dev/null +++ b/crates/simulator/tests/unit/types_test.rs @@ -0,0 +1,263 @@ +/// Unit tests for simulator types +use crate::common::builders::*; +use crate::common::fixtures::*; +use tips_simulator::types::{SimulationError, SimulationRequest, SimulationResult}; +use alloy_primitives::{Address, 
B256, U256}; +use std::collections::HashMap; +use uuid::Uuid; + +#[test] +fn test_simulation_result_success_creation() { + let id = Uuid::new_v4(); + let bundle_id = Uuid::new_v4(); + let block_hash = B256::random(); + let gas_used = 150_000; + let execution_time = 1500; + + let mut state_diff = HashMap::new(); + let addr = Address::random(); + let mut storage = HashMap::new(); + storage.insert(U256::from(0), U256::from(100)); + state_diff.insert(addr, storage); + + let result = SimulationResult::success( + id, + bundle_id, + 18_000_000, + block_hash, + gas_used, + execution_time, + state_diff.clone(), + ); + + assert_eq!(result.id, id); + assert_eq!(result.bundle_id, bundle_id); + assert_eq!(result.block_number, 18_000_000); + assert_eq!(result.block_hash, block_hash); + assert!(result.success); + assert_eq!(result.gas_used, Some(gas_used)); + assert_eq!(result.execution_time_us, execution_time); + assert_eq!(result.state_diff.len(), 1); + assert!(result.error_reason.is_none()); +} + +#[test] +fn test_simulation_result_failure_creation() { + let id = Uuid::new_v4(); + let bundle_id = Uuid::new_v4(); + let block_hash = B256::random(); + let execution_time = 500; + let error = SimulationError::Revert { + reason: "Test revert".to_string(), + }; + + let result = SimulationResult::failure( + id, + bundle_id, + 18_000_000, + block_hash, + execution_time, + error.clone(), + ); + + assert_eq!(result.id, id); + assert_eq!(result.bundle_id, bundle_id); + assert!(!result.success); + assert!(result.gas_used.is_none()); + assert!(result.state_diff.is_empty()); + assert_eq!(result.error_reason, Some(error.to_string())); +} + +#[test] +fn test_simulation_error_display() { + let test_cases = vec![ + ( + SimulationError::Revert { + reason: "Invalid state".to_string(), + }, + "Bundle reverted: Invalid state", + ), + ( + SimulationError::OutOfGas, + "Bundle ran out of gas", + ), + ( + SimulationError::InvalidNonce { + tx_index: 2, + expected: 5, + actual: 3, + }, + "Invalid nonce in tx 2: expected 5, got 3", + ), + ( + SimulationError::InsufficientBalance { + tx_index: 1, + required: U256::from(1000), + available: U256::from(500), + }, + "Insufficient balance in tx 1: required 1000, available 500", + ), + ( + SimulationError::StateAccessError { + message: "RPC timeout".to_string(), + }, + "State access error: RPC timeout", + ), + ( + SimulationError::Timeout, + "Simulation timed out", + ), + ( + SimulationError::Unknown { + message: "Something went wrong".to_string(), + }, + "Unknown error: Something went wrong", + ), + ]; + + for (error, expected) in test_cases { + assert_eq!(error.to_string(), expected); + } +} + +#[test] +fn test_simulation_request_fields() { + let bundle_id = Uuid::new_v4(); + let bundle = bundles::single_tx_bundle(); + let block_number = 18_000_000; + let block_hash = B256::random(); + + let request = SimulationRequest { + bundle_id, + bundle: bundle.clone(), + block_number, + block_hash, + }; + + assert_eq!(request.bundle_id, bundle_id); + assert_eq!(request.bundle.txs.len(), bundle.txs.len()); + assert_eq!(request.block_number, block_number); + assert_eq!(request.block_hash, block_hash); +} + +#[test] +fn test_simulation_result_builder() { + // Test successful result + let success_result = SimulationResultBuilder::successful() + .with_gas_used(250_000) + .with_execution_time_us(2000) + .with_state_change(*addresses::ALICE, U256::from(0), U256::from(500)) + .build(); + + assert!(success_result.success); + assert_eq!(success_result.gas_used, Some(250_000)); + 
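// with_state_change writes through the nested state-diff map
// (address -> storage slot -> value), so ALICE appears as a top-level key
// in the contains_key assertion below.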
assert_eq!(success_result.execution_time_us, 2000); + assert!(success_result.state_diff.contains_key(&*addresses::ALICE)); + + // Test failed result with revert + let revert_result = SimulationResultBuilder::failed() + .with_revert("Insufficient funds".to_string()) + .build(); + + assert!(!revert_result.success); + assert!(revert_result.gas_used.is_none()); + assert!(revert_result.error_reason.as_ref().unwrap().contains("Insufficient funds")); + + // Test failed result with out of gas + let oog_result = SimulationResultBuilder::failed() + .with_out_of_gas() + .build(); + + assert!(!oog_result.success); + assert!(oog_result.error_reason.as_ref().unwrap().contains("out of gas")); + + // Test invalid nonce + let nonce_result = SimulationResultBuilder::failed() + .with_invalid_nonce(0, 5, 3) + .build(); + + assert!(!nonce_result.success); + assert!(nonce_result.error_reason.as_ref().unwrap().contains("Invalid nonce")); +} + +#[test] +fn test_simulation_result_timestamp() { + let result = SimulationResultBuilder::successful().build(); + + // Check that timestamp is recent (within last minute) + let now = chrono::Utc::now(); + let created_timestamp = result.created_at.timestamp(); + let now_timestamp = now.timestamp(); + let diff = now_timestamp - created_timestamp; + assert!(diff < 60); +} + +#[test] +fn test_large_state_diff() { + let mut builder = SimulationResultBuilder::successful(); + + // Add many state changes + for i in 0..100 { + let addr = Address::random(); + for j in 0..10 { + builder = builder.with_state_change( + addr, + U256::from(j), + U256::from(i * 1000 + j) + ); + } + } + + let result = builder.build(); + assert_eq!(result.state_diff.len(), 100); + + // Verify each account has 10 storage slots + for (_, storage) in &result.state_diff { + assert_eq!(storage.len(), 10); + } +} + +#[test] +fn test_error_serialization() { + // Verify that errors can be converted to strings and back + let errors = vec![ + SimulationError::Revert { reason: "test".to_string() }, + SimulationError::OutOfGas, + SimulationError::InvalidNonce { tx_index: 1, expected: 2, actual: 3 }, + SimulationError::Timeout, + ]; + + for error in errors { + let error_string = error.to_string(); + assert!(!error_string.is_empty()); + + // Create a result with this error + let result = SimulationResult::failure( + Uuid::new_v4(), + Uuid::new_v4(), + 18_000_000, + B256::random(), + 1000, + error, + ); + + assert_eq!(result.error_reason, Some(error_string)); + } +} + +#[test] +fn test_simulation_result_gas_used_bounds() { + // Test with maximum gas + let max_gas_result = SimulationResultBuilder::successful() + .with_gas_used(30_000_000) // 30M gas + .build(); + + assert_eq!(max_gas_result.gas_used, Some(30_000_000)); + + // Test with zero gas (edge case) + let zero_gas_result = SimulationResultBuilder::successful() + .with_gas_used(0) + .build(); + + assert_eq!(zero_gas_result.gas_used, Some(0)); +} From 903e6f486197536a4b6e786455c15b7945dc3543 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Thu, 25 Sep 2025 23:44:27 -0500 Subject: [PATCH 29/39] just fix --- crates/simulator/src/config/mod.rs | 14 +- crates/simulator/src/config/playground.rs | 3 +- crates/simulator/src/config/simulator_node.rs | 12 +- crates/simulator/src/core.rs | 5 +- crates/simulator/src/engine.rs | 10 +- crates/simulator/src/lib.rs | 10 +- crates/simulator/src/listeners/exex.rs | 4 +- crates/simulator/src/main.rs | 6 +- crates/simulator/src/worker_pool.rs | 11 +- crates/simulator/tests/common/builders.rs | 21 +- 
crates/simulator/tests/common/fixtures.rs | 80 +++++--- .../tests/common/mock_bundle_simulator.rs | 20 +- crates/simulator/tests/common/mocks.rs | 29 +-- crates/simulator/tests/common/mod.rs | 41 ++-- crates/simulator/tests/integration_test.rs | 137 +++++++------ crates/simulator/tests/unit/core_test.rs | 106 +++++----- .../tests/unit/error_handling_test.rs | 185 +++++++++++------- crates/simulator/tests/unit/mod.rs | 2 +- crates/simulator/tests/unit/types_test.rs | 100 +++++----- 19 files changed, 412 insertions(+), 384 deletions(-) diff --git a/crates/simulator/src/config/mod.rs b/crates/simulator/src/config/mod.rs index 9e67116..2d27283 100644 --- a/crates/simulator/src/config/mod.rs +++ b/crates/simulator/src/config/mod.rs @@ -1,5 +1,5 @@ -mod simulator_node; pub mod playground; +mod simulator_node; pub use playground::PlaygroundOptions; pub use simulator_node::SimulatorNodeConfig; @@ -44,18 +44,10 @@ impl CliExt for Cli { } impl SimulatorNodeConfig { - pub fn into_parts( - self, - cli: Cli, - ) -> (Cli, ExExSimulationConfig, MempoolListenerConfig, u64) { + pub fn into_parts(self, cli: Cli) -> (Cli, ExExSimulationConfig, MempoolListenerConfig, u64) { let exex_config = (&self).into(); let mempool_config = (&self).into(); - ( - cli, - exex_config, - mempool_config, - self.chain_block_time, - ) + (cli, exex_config, mempool_config, self.chain_block_time) } } diff --git a/crates/simulator/src/config/playground.rs b/crates/simulator/src/config/playground.rs index 9b6ba87..2f3f4ee 100644 --- a/crates/simulator/src/config/playground.rs +++ b/crates/simulator/src/config/playground.rs @@ -20,7 +20,7 @@ use alloy_primitives::hex; use anyhow::{anyhow, Result}; -use clap::{CommandFactory, parser::ValueSource}; +use clap::{parser::ValueSource, CommandFactory}; use core::{ net::{IpAddr, Ipv4Addr, SocketAddr}, time::Duration, @@ -351,4 +351,3 @@ fn resolve_trusted_peer_host() -> Host { Host::Ipv4(Ipv4Addr::LOCALHOST) } } - diff --git a/crates/simulator/src/config/simulator_node.rs b/crates/simulator/src/config/simulator_node.rs index 45f6a7d..13f334c 100644 --- a/crates/simulator/src/config/simulator_node.rs +++ b/crates/simulator/src/config/simulator_node.rs @@ -1,7 +1,4 @@ -use crate::{ - listeners::MempoolListenerConfig, - types::ExExSimulationConfig, -}; +use crate::{listeners::MempoolListenerConfig, types::ExExSimulationConfig}; use anyhow::{anyhow, Result}; use clap::Args; use std::path::PathBuf; @@ -30,11 +27,7 @@ pub struct SimulatorNodeConfig { pub kafka_brokers: String, /// Kafka topic for mempool events - #[arg( - long, - env = "TIPS_SIMULATOR_KAFKA_TOPIC", - default_value = "tips-audit" - )] + #[arg(long, env = "TIPS_SIMULATOR_KAFKA_TOPIC", default_value = "tips-audit")] pub kafka_topic: String, /// Kafka consumer group ID @@ -102,4 +95,3 @@ fn expand_path(s: &str) -> Result { .parse() .map_err(|e| anyhow!("invalid path after expansion: {e}")) } - diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs index 5f4e3ce..5a7392c 100644 --- a/crates/simulator/src/core.rs +++ b/crates/simulator/src/core.rs @@ -29,10 +29,7 @@ where P: SimulationPublisher, { pub fn new(engine: E, publisher: P) -> Self { - Self { - engine, - publisher, - } + Self { engine, publisher } } } diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 41e49b0..88c0714 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -101,10 +101,7 @@ where #[async_trait] pub trait SimulationEngine: Send + Sync { /// Simulate a bundle execution - async fn 
simulate_bundle( - &self, - request: &SimulationRequest, - ) -> Result; + async fn simulate_bundle(&self, request: &SimulationRequest) -> Result; } #[derive(Clone)] @@ -134,10 +131,7 @@ where Node: FullNodeComponents, ::Evm: ConfigureEvm, { - async fn simulate_bundle( - &self, - request: &SimulationRequest, - ) -> Result { + async fn simulate_bundle(&self, request: &SimulationRequest) -> Result { let start_time = Instant::now(); let simulation_id = Uuid::new_v4(); diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index b9599f4..565f6b7 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -25,13 +25,9 @@ pub use worker_pool::SimulationWorkerPool; // Type aliases for concrete implementations pub type TipsBundleSimulator = RethBundleSimulator, TipsSimulationPublisher>; -pub type TipsExExEventListener = ExExEventListener< - Node, - TipsBundleSimulator, - tips_datastore::PostgresDatastore, ->; -pub type TipsMempoolEventListener = - MempoolEventListener>; +pub type TipsExExEventListener = + ExExEventListener, tips_datastore::PostgresDatastore>; +pub type TipsMempoolEventListener = MempoolEventListener>; // Initialization functions diff --git a/crates/simulator/src/listeners/exex.rs b/crates/simulator/src/listeners/exex.rs index 2b6f92e..db5bcf9 100644 --- a/crates/simulator/src/listeners/exex.rs +++ b/crates/simulator/src/listeners/exex.rs @@ -136,7 +136,7 @@ where ); // Process each block in the committed chain - for (_block_num, block) in new.blocks() { + for block in new.blocks().values() { let block_hash = block.hash(); self.process_block((&block_hash, block)).await?; } @@ -156,7 +156,7 @@ where ); // Process the new canonical chain - for (_block_num, block) in new.blocks() { + for block in new.blocks().values() { let block_hash = block.hash(); self.process_block((&block_hash, block)).await?; } diff --git a/crates/simulator/src/main.rs b/crates/simulator/src/main.rs index d962f92..7b5e191 100644 --- a/crates/simulator/src/main.rs +++ b/crates/simulator/src/main.rs @@ -27,8 +27,10 @@ fn main() -> eyre::Result<()> { cli.run(|builder, _| async move { // Keep the Base mempool private. 
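// (Txpool gossip is what would leak those transactions to peers; the change
// below only swaps field mutation for struct-update syntax, so the effect of
// disable_txpool_gossip = true is unchanged.)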
- let mut rollup_args = RollupArgs::default(); - rollup_args.disable_txpool_gossip = true; + let rollup_args = RollupArgs { + disable_txpool_gossip: true, + ..Default::default() + }; let handle = builder .node(reth_optimism_node::OpNode::new(rollup_args)) diff --git a/crates/simulator/src/worker_pool.rs b/crates/simulator/src/worker_pool.rs index eab6de0..f19a154 100644 --- a/crates/simulator/src/worker_pool.rs +++ b/crates/simulator/src/worker_pool.rs @@ -35,10 +35,7 @@ where B: BundleSimulator + 'static, { /// Create a new simulation worker pool - pub fn new( - simulator: Arc, - max_concurrent_simulations: usize, - ) -> Arc { + pub fn new(simulator: Arc, max_concurrent_simulations: usize) -> Arc { let (simulation_tx, simulation_rx) = mpsc::channel(1000); Arc::new(Self { @@ -132,11 +129,7 @@ where } // Execute the simulation - match pool - .simulator - .simulate(&task.request) - .await - { + match pool.simulator.simulate(&task.request).await { Ok(_) => { debug!( worker_id, diff --git a/crates/simulator/tests/common/builders.rs b/crates/simulator/tests/common/builders.rs index 45e2281..5890983 100644 --- a/crates/simulator/tests/common/builders.rs +++ b/crates/simulator/tests/common/builders.rs @@ -159,8 +159,8 @@ impl SimulationResultBuilder { let mut builder = Self::new(); builder.success = false; builder.gas_used = None; - builder.error = Some(SimulationError::Unknown { - message: "Test failure".to_string() + builder.error = Some(SimulationError::Unknown { + message: "Test failure".to_string(), }); builder } @@ -211,7 +211,11 @@ impl SimulationResultBuilder { } pub fn with_invalid_nonce(self, tx_index: usize, expected: u64, actual: u64) -> Self { - self.with_error(SimulationError::InvalidNonce { tx_index, expected, actual }) + self.with_error(SimulationError::InvalidNonce { + tx_index, + expected, + actual, + }) } pub fn build(self) -> SimulationResult { @@ -232,8 +236,8 @@ impl SimulationResultBuilder { self.block_number, self.block_hash.unwrap_or_else(B256::random), self.execution_time_us, - self.error.unwrap_or(SimulationError::Unknown { - message: "Unknown error".to_string() + self.error.unwrap_or(SimulationError::Unknown { + message: "Unknown error".to_string(), }), ) } @@ -268,13 +272,12 @@ impl ScenarioBuilder { } pub fn add_simple_bundle(mut self, num_txs: usize) -> Self { - let mut builder = TestBundleBuilder::new() - .with_block_number(self.block_number); - + let mut builder = TestBundleBuilder::new().with_block_number(self.block_number); + for i in 0..num_txs { builder = builder.with_simple_transaction(&[i as u8, 0x01, 0x02]); } - + self.bundles.push(builder.build()); self } diff --git a/crates/simulator/tests/common/fixtures.rs b/crates/simulator/tests/common/fixtures.rs index b854c7d..ed4643d 100644 --- a/crates/simulator/tests/common/fixtures.rs +++ b/crates/simulator/tests/common/fixtures.rs @@ -7,23 +7,43 @@ use std::sync::LazyLock; pub mod addresses { use alloy_primitives::Address; use std::sync::LazyLock; - - pub static ALICE: LazyLock
= LazyLock::new(|| "0x0000000000000000000000000000000000000001".parse().unwrap()); - pub static BOB: LazyLock
= LazyLock::new(|| "0x0000000000000000000000000000000000000002".parse().unwrap()); - pub static CHARLIE: LazyLock
= LazyLock::new(|| "0x0000000000000000000000000000000000000003".parse().unwrap()); - pub static CONTRACT_A: LazyLock
= LazyLock::new(|| "0x1000000000000000000000000000000000000001".parse().unwrap()); - pub static CONTRACT_B: LazyLock
= LazyLock::new(|| "0x1000000000000000000000000000000000000002".parse().unwrap()); + + pub static ALICE: LazyLock
= LazyLock::new(|| { + "0x0000000000000000000000000000000000000001" + .parse() + .unwrap() + }); + pub static BOB: LazyLock
= LazyLock::new(|| { + "0x0000000000000000000000000000000000000002" + .parse() + .unwrap() + }); + pub static CHARLIE: LazyLock
= LazyLock::new(|| { + "0x0000000000000000000000000000000000000003" + .parse() + .unwrap() + }); + pub static CONTRACT_A: LazyLock
= LazyLock::new(|| { + "0x1000000000000000000000000000000000000001" + .parse() + .unwrap() + }); + pub static CONTRACT_B: LazyLock
= LazyLock::new(|| { + "0x1000000000000000000000000000000000000002" + .parse() + .unwrap() + }); } /// Common test block hashes and numbers pub mod blocks { use alloy_primitives::B256; use std::sync::LazyLock; - + pub const BLOCK_18M: u64 = 18_000_000; pub const BLOCK_18M_PLUS_1: u64 = 18_000_001; pub const BLOCK_18M_PLUS_2: u64 = 18_000_002; - + pub static HASH_18M: LazyLock = LazyLock::new(|| B256::from_slice(&[1u8; 32])); pub static HASH_18M_PLUS_1: LazyLock = LazyLock::new(|| B256::from_slice(&[2u8; 32])); pub static HASH_18M_PLUS_2: LazyLock = LazyLock::new(|| B256::from_slice(&[3u8; 32])); @@ -32,7 +52,7 @@ pub mod blocks { /// Pre-built transaction fixtures pub mod transactions { use alloy_primitives::Bytes; - + /// Simple transfer transaction (mock data) pub fn simple_transfer() -> Bytes { Bytes::from(vec![ @@ -41,7 +61,7 @@ pub mod transactions { 0x05, 0x06, 0x07, 0x08, ]) } - + /// Contract call transaction (mock data) pub fn contract_call() -> Bytes { Bytes::from(vec![ @@ -50,7 +70,7 @@ pub mod transactions { 0x50, 0x60, 0x70, 0x80, ]) } - + /// Transaction that will revert (mock data) pub fn reverting_tx() -> Bytes { Bytes::from(vec![ @@ -60,12 +80,11 @@ pub mod transactions { } } - /// Pre-configured bundles for testing pub mod bundles { use super::*; use crate::common::builders::TestBundleBuilder; - + /// Simple single transaction bundle pub fn single_tx_bundle() -> EthSendBundle { TestBundleBuilder::new() @@ -73,7 +92,7 @@ pub mod bundles { .with_block_number(blocks::BLOCK_18M) .build() } - + /// Bundle with multiple transactions pub fn multi_tx_bundle() -> EthSendBundle { TestBundleBuilder::new() @@ -83,7 +102,7 @@ pub mod bundles { .with_block_number(blocks::BLOCK_18M) .build() } - + /// Bundle with reverting transaction pub fn reverting_bundle() -> EthSendBundle { TestBundleBuilder::new() @@ -92,20 +111,19 @@ pub mod bundles { .with_block_number(blocks::BLOCK_18M) .build() } - + /// Large bundle for stress testing pub fn large_bundle(num_txs: usize) -> EthSendBundle { - let mut builder = TestBundleBuilder::new() - .with_block_number(blocks::BLOCK_18M); - + let mut builder = TestBundleBuilder::new().with_block_number(blocks::BLOCK_18M); + for i in 0..num_txs { let tx_data = vec![0x02, i as u8, 0x01, 0x02, 0x03]; builder = builder.with_transaction(Bytes::from(tx_data)); } - + builder.build() } - + /// Bundle with specific timing constraints pub fn time_constrained_bundle() -> EthSendBundle { TestBundleBuilder::new() @@ -121,7 +139,7 @@ pub mod scenarios { use super::*; use tips_simulator::types::SimulationRequest; use uuid::Uuid; - + /// Create a basic simulation scenario pub fn basic_simulation() -> SimulationRequest { let bundle = bundles::single_tx_bundle(); @@ -132,7 +150,7 @@ pub mod scenarios { block_hash: *blocks::HASH_18M, } } - + /// Create a contract interaction scenario pub fn contract_interaction() -> SimulationRequest { let bundle = bundles::multi_tx_bundle(); @@ -143,7 +161,7 @@ pub mod scenarios { block_hash: *blocks::HASH_18M, } } - + /// Create a large bundle scenario pub fn large_bundle_scenario() -> SimulationRequest { let bundle = bundles::large_bundle(100); @@ -159,34 +177,34 @@ pub mod scenarios { #[cfg(test)] mod tests { use super::*; - + #[test] fn test_fixture_addresses() { assert_ne!(*addresses::ALICE, *addresses::BOB); assert_ne!(*addresses::CONTRACT_A, *addresses::CONTRACT_B); } - + #[test] fn test_fixture_bundles() { let single = bundles::single_tx_bundle(); assert_eq!(single.txs.len(), 1); - + let multi = bundles::multi_tx_bundle(); 
assert_eq!(multi.txs.len(), 3); - + let large = bundles::large_bundle(100); assert_eq!(large.txs.len(), 100); } - + #[test] fn test_fixture_scenarios() { let request = scenarios::basic_simulation(); assert_eq!(request.block_number, blocks::BLOCK_18M); assert_eq!(request.bundle.txs.len(), 1); - + let interaction = scenarios::contract_interaction(); assert_eq!(interaction.bundle.txs.len(), 3); - + let large_scenario = scenarios::large_bundle_scenario(); assert_eq!(large_scenario.bundle.txs.len(), 100); } diff --git a/crates/simulator/tests/common/mock_bundle_simulator.rs b/crates/simulator/tests/common/mock_bundle_simulator.rs index 92406f1..fa28a80 100644 --- a/crates/simulator/tests/common/mock_bundle_simulator.rs +++ b/crates/simulator/tests/common/mock_bundle_simulator.rs @@ -1,11 +1,11 @@ +use crate::common::mocks::{MockSimulationEngine, MockSimulationPublisher}; +use async_trait::async_trait; +use eyre::Result; /// Mock implementation of BundleSimulator for testing use tips_simulator::core::BundleSimulator; use tips_simulator::engine::SimulationEngine; use tips_simulator::publisher::SimulationPublisher; use tips_simulator::types::SimulationRequest; -use crate::common::mocks::{MockSimulationEngine, MockSimulationPublisher}; -use async_trait::async_trait; -use eyre::Result; /// Mock bundle simulator for testing - no Reth dependencies! pub struct MockBundleSimulator { @@ -17,11 +17,11 @@ impl MockBundleSimulator { pub fn new(engine: MockSimulationEngine, publisher: MockSimulationPublisher) -> Self { Self { engine, publisher } } - + pub fn engine(&self) -> &MockSimulationEngine { &self.engine } - + pub fn publisher(&self) -> &MockSimulationPublisher { &self.publisher } @@ -56,7 +56,7 @@ impl BundleSimulator for MockBundleSimulator { ); } } - + Ok(()) } } @@ -65,19 +65,19 @@ impl BundleSimulator for MockBundleSimulator { mod tests { use super::*; use crate::common; - + #[tokio::test] async fn test_mock_bundle_simulator() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + let bundle = common::create_test_bundle(1, 18_000_000); let request = common::create_test_request(bundle); - + // Use the clean trait interface let result = simulator.simulate(&request).await; - + assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); diff --git a/crates/simulator/tests/common/mocks.rs b/crates/simulator/tests/common/mocks.rs index 29d0ef5..9767b28 100644 --- a/crates/simulator/tests/common/mocks.rs +++ b/crates/simulator/tests/common/mocks.rs @@ -3,8 +3,8 @@ use alloy_primitives::{Address, B256, U256}; use async_trait::async_trait; use std::collections::HashMap; use std::sync::{Arc, Mutex}; -use tips_simulator::{SimulationEngine, SimulationPublisher, SimulationError, SimulationResult}; use tips_simulator::types::SimulationRequest; +use tips_simulator::{SimulationEngine, SimulationError, SimulationPublisher, SimulationResult}; use uuid::Uuid; /// Mock simulation engine with configurable behavior @@ -52,19 +52,22 @@ impl MockSimulationEngine { #[async_trait] impl SimulationEngine for MockSimulationEngine { - async fn simulate_bundle( - &self, - request: &SimulationRequest, - ) -> eyre::Result { + async fn simulate_bundle(&self, request: &SimulationRequest) -> eyre::Result { // Track the simulation self.simulations.lock().unwrap().push(request.clone()); // Check if we should fail if *self.fail_next.lock().unwrap() { 
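// One-shot failure: clear the armed flag first so only this call fails, then
// fall through to return the canned failure result below.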
*self.fail_next.lock().unwrap() = false; - let error = self.error.lock().unwrap().take() - .unwrap_or(SimulationError::Unknown { message: "Mock failure".to_string() }); - + let error = self + .error + .lock() + .unwrap() + .take() + .unwrap_or(SimulationError::Unknown { + message: "Mock failure".to_string(), + }); + return Ok(SimulationResult::failure( Uuid::new_v4(), request.bundle_id, @@ -153,10 +156,8 @@ mod tests { #[tokio::test] async fn test_mock_simulation_engine() { let engine = MockSimulationEngine::new(); - let _request = common::create_test_request( - common::create_test_bundle(1, 18_000_000) - ); - + let _request = common::create_test_request(common::create_test_bundle(1, 18_000_000)); + // Verify the engine is initialized correctly assert_eq!(engine.simulation_count(), 0); } @@ -165,10 +166,10 @@ mod tests { async fn test_mock_publisher() { let publisher = MockSimulationPublisher::new(); let result = common::create_success_result(Uuid::new_v4(), 100_000); - + publisher.publish_result(result.clone()).await.unwrap(); assert_eq!(publisher.published_count(), 1); - + let published = publisher.get_published(); assert_eq!(published[0].id, result.id); } diff --git a/crates/simulator/tests/common/mod.rs b/crates/simulator/tests/common/mod.rs index b5f59b4..c711855 100644 --- a/crates/simulator/tests/common/mod.rs +++ b/crates/simulator/tests/common/mod.rs @@ -1,8 +1,8 @@ /// Common test utilities and infrastructure for simulator testing pub mod builders; pub mod fixtures; -pub mod mocks; pub mod mock_bundle_simulator; +pub mod mocks; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types_mev::EthSendBundle; @@ -62,10 +62,7 @@ pub fn create_test_request(bundle: EthSendBundle) -> SimulationRequest { } /// Helper to create a successful simulation result -pub fn create_success_result( - bundle_id: Uuid, - gas_used: u64, -) -> SimulationResult { +pub fn create_success_result(bundle_id: Uuid, gas_used: u64) -> SimulationResult { let mut state_diff = HashMap::new(); let address = Address::random(); let mut storage = HashMap::new(); @@ -90,15 +87,27 @@ pub mod assertions { /// Assert that a simulation result is successful pub fn assert_simulation_success(result: &SimulationResult) { assert!(result.success, "Expected successful simulation"); - assert!(result.gas_used.is_some(), "Successful simulation should have gas_used"); - assert!(result.error_reason.is_none(), "Successful simulation should not have error"); + assert!( + result.gas_used.is_some(), + "Successful simulation should have gas_used" + ); + assert!( + result.error_reason.is_none(), + "Successful simulation should not have error" + ); } /// Assert that a simulation result is a failure pub fn assert_simulation_failure(result: &SimulationResult) { assert!(!result.success, "Expected failed simulation"); - assert!(result.gas_used.is_none(), "Failed simulation should not have gas_used"); - assert!(result.error_reason.is_some(), "Failed simulation should have error reason"); + assert!( + result.gas_used.is_none(), + "Failed simulation should not have gas_used" + ); + assert!( + result.error_reason.is_some(), + "Failed simulation should have error reason" + ); } /// Assert state diff contains expected changes @@ -108,10 +117,11 @@ pub mod assertions { slot: U256, expected_value: U256, ) { - let storage = result.state_diff.get(&address) + let storage = result + .state_diff + .get(&address) .expect("Address not found in state diff"); - let value = storage.get(&slot) - .expect("Storage slot not found"); + let value = 
storage.get(&slot).expect("Storage slot not found"); assert_eq!(*value, expected_value, "Unexpected storage value"); } } @@ -131,10 +141,7 @@ pub mod timing { } /// Assert that an operation completes within a timeout - pub async fn assert_completes_within( - f: F, - timeout: Duration, - ) -> T + pub async fn assert_completes_within(f: F, timeout: Duration) -> T where F: std::future::Future, { @@ -159,7 +166,7 @@ mod tests { fn test_create_success_result() { let bundle_id = Uuid::new_v4(); let result = create_success_result(bundle_id, 150_000); - + assertions::assert_simulation_success(&result); assert_eq!(result.bundle_id, bundle_id); assert_eq!(result.gas_used, Some(150_000)); diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs index 66815b3..e6d0ae0 100644 --- a/crates/simulator/tests/integration_test.rs +++ b/crates/simulator/tests/integration_test.rs @@ -1,5 +1,5 @@ /// Integration tests for the simulator functionality -/// +/// /// Note: These tests use mock implementations because the actual StateProvider /// trait requires complex setup. For real integration testing with actual /// state providers, see the component tests. @@ -9,17 +9,16 @@ mod unit; use common::assertions::*; use common::builders::*; use common::fixtures::*; -use common::mocks::*; use common::mock_bundle_simulator::MockBundleSimulator; +use common::mocks::*; use common::timing::*; use alloy_primitives::U256; -use tips_simulator::{ - core::BundleSimulator, MempoolListenerConfig, SimulationWorkerPool, - types::ExExSimulationConfig, -}; use std::sync::Arc; use std::time::Duration; +use tips_simulator::{ + core::BundleSimulator, types::ExExSimulationConfig, MempoolListenerConfig, SimulationWorkerPool, +}; #[tokio::test] async fn test_successful_bundle_simulation() { @@ -27,21 +26,19 @@ async fn test_successful_bundle_simulation() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + // Create test request let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); - + let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); + // Execute let result = simulator.simulate(&request).await; - + // Verify assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); - + let published = publisher.get_published(); assert_simulation_success(&published[0]); } @@ -53,24 +50,26 @@ async fn test_failed_bundle_simulation() { .fail_next_with(tips_simulator::types::SimulationError::OutOfGas); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + // Create test request let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); - + let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); + // Execute let result = simulator.simulate(&request).await; - + // Verify assert!(result.is_ok()); // simulate() itself succeeds even if simulation fails assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); - + let published = publisher.get_published(); assert_simulation_failure(&published[0]); - assert!(published[0].error_reason.as_ref().unwrap().contains("out of gas")); + assert!(published[0] + .error_reason + .as_ref() + .unwrap() + .contains("out of gas")); } 
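// Design note: simulate() deliberately returns Ok(()) when the *simulation*
// fails. A failed simulation is still a valid result that gets published
// (success = false plus an error_reason); only infrastructure errors surface
// as Err. A minimal sketch of the control flow these tests assume (names
// follow the mocks in tests/common; illustrative, not the production code):
//
//     async fn simulate(&self, request: &SimulationRequest) -> eyre::Result<()> {
//         let result = self.engine.simulate_bundle(request).await?;
//         if let Err(e) = self.publisher.publish_result(result).await {
//             warn!("failed to publish simulation result: {e}");
//         }
//         Ok(())
//     }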
#[tokio::test] @@ -79,16 +78,14 @@ async fn test_publisher_failure_handling() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new().fail_next(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + // Create test request let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); - + let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); + // Execute - should not panic even if publisher fails let result = simulator.simulate(&request).await; - + // Verify assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); @@ -102,48 +99,55 @@ async fn test_worker_pool_concurrent_simulations() { let publisher = MockSimulationPublisher::new(); let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); // Provider no longer needed with new architecture - + // Create worker pool with 4 workers let pool = SimulationWorkerPool::new(simulator, 4); pool.start().await; - + // Queue multiple simulations let num_simulations = 20; let mut bundle_ids = Vec::new(); - + for i in 0..num_simulations { let bundle = TestBundleBuilder::new() .with_simple_transaction(&[i as u8, 0x01, 0x02]) .with_block_number(blocks::BLOCK_18M + i as u64) .build(); - + let request = SimulationRequestBuilder::new() .with_bundle(bundle) - .with_block(blocks::BLOCK_18M + i as u64, alloy_primitives::B256::random()) + .with_block( + blocks::BLOCK_18M + i as u64, + alloy_primitives::B256::random(), + ) .build(); - + bundle_ids.push(request.bundle_id); - + let task = tips_simulator::worker_pool::SimulationTask { request }; pool.queue_simulation(task).await.unwrap(); } - + // Wait for completion with timeout let (_, duration) = measure_async(async { tokio::time::sleep(Duration::from_millis(500)).await; - }).await; - + }) + .await; + // Verify all simulations completed assert_eq!(publisher.published_count(), num_simulations); - + // Verify all bundle IDs are present let published = publisher.get_published(); for bundle_id in bundle_ids { assert!(published.iter().any(|r| r.bundle_id == bundle_id)); } - + // Verify reasonable execution time - assert!(duration < Duration::from_secs(2), "Simulations took too long"); + assert!( + duration < Duration::from_secs(2), + "Simulations took too long" + ); } #[tokio::test] @@ -153,36 +157,35 @@ async fn test_worker_pool_error_recovery() { let publisher = MockSimulationPublisher::new(); let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); // Provider no longer needed with new architecture - + // Create worker pool let pool = SimulationWorkerPool::new(simulator, 2); pool.start().await; - + // Queue simulations with some failures for i in 0..10 { let mut builder = SimulationResultBuilder::successful(); if i % 2 == 1 { - builder = SimulationResultBuilder::failed() - .with_revert(format!("Test revert {}", i)); + builder = SimulationResultBuilder::failed().with_revert(format!("Test revert {}", i)); } - + let _ = engine.clone().with_result(builder.build()); - + let request = SimulationRequestBuilder::new() .with_bundle(bundles::single_tx_bundle()) .build(); - + let task = tips_simulator::worker_pool::SimulationTask { request }; pool.queue_simulation(task).await.unwrap(); } - + // Wait for completion tokio::time::sleep(Duration::from_millis(300)).await; - + // Verify all simulations were attempted let published = publisher.get_published(); assert_eq!(published.len(), 10); - + // Verify mix of 
successes and failures let successes = published.iter().filter(|r| r.success).count(); let failures = published.iter().filter(|r| !r.success).count(); @@ -195,20 +198,17 @@ async fn test_large_bundle_simulation() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + // Create large bundle let large_bundle = bundles::large_bundle(100); let request = SimulationRequestBuilder::new() .with_bundle(large_bundle) .build(); - - + // Execute with timeout - let result = assert_completes_within( - simulator.simulate(&request), - Duration::from_secs(5), - ).await; - + let result = + assert_completes_within(simulator.simulate(&request), Duration::from_secs(5)).await; + // Verify assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); @@ -223,23 +223,20 @@ async fn test_state_diff_tracking() { .with_state_change(*addresses::ALICE, U256::from(1), U256::from(200)) .with_state_change(*addresses::BOB, U256::from(0), U256::from(300)) .build(); - - let engine = MockSimulationEngine::new() - .with_result(simulation_result); + + let engine = MockSimulationEngine::new().with_result(simulation_result); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine, publisher.clone()); - + // Execute let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); + let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); simulator.simulate(&request).await.unwrap(); - + // Verify state diff let published = publisher.get_published(); assert_eq!(published.len(), 1); - + let result = &published[0]; assert_state_diff_contains(result, *addresses::ALICE, U256::from(0), U256::from(100)); assert_state_diff_contains(result, *addresses::ALICE, U256::from(1), U256::from(200)); @@ -253,7 +250,7 @@ fn test_simulation_request_creation() { .with_bundle(bundle.clone()) .with_block(blocks::BLOCK_18M, *blocks::HASH_18M) .build(); - + assert_eq!(request.bundle.txs.len(), bundle.txs.len()); assert_eq!(request.block_number, blocks::BLOCK_18M); assert_eq!(request.block_hash, *blocks::HASH_18M); @@ -267,7 +264,7 @@ fn test_mempool_config() { kafka_group_id: "tips-simulator".to_string(), database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), }; - + assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); assert_eq!(config.kafka_topic, "tips-audit"); } @@ -279,7 +276,7 @@ fn test_exex_config() { max_concurrent_simulations: 10, simulation_timeout_ms: 5000, }; - + assert_eq!(config.max_concurrent_simulations, 10); assert_eq!(config.simulation_timeout_ms, 5000); } diff --git a/crates/simulator/tests/unit/core_test.rs b/crates/simulator/tests/unit/core_test.rs index d32b469..3ec9ee7 100644 --- a/crates/simulator/tests/unit/core_test.rs +++ b/crates/simulator/tests/unit/core_test.rs @@ -1,10 +1,10 @@ /// Unit tests for the BundleSimulator core component use crate::common::builders::*; use crate::common::fixtures::*; -use crate::common::mocks::*; use crate::common::mock_bundle_simulator::MockBundleSimulator; -use tips_simulator::{core::BundleSimulator, SimulationError}; +use crate::common::mocks::*; use std::sync::Arc; +use tips_simulator::{core::BundleSimulator, SimulationError}; use uuid::Uuid; #[tokio::test] @@ -12,7 +12,7 @@ async fn test_bundle_simulator_creation() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = 
MockBundleSimulator::new(engine, publisher); - + // Simply verify it can be created assert!(std::mem::size_of_val(&simulator) > 0); } @@ -25,25 +25,24 @@ async fn test_simulate_success_flow() { .with_ids(Uuid::new_v4(), bundle_id) .with_gas_used(200_000) .build(); - - let engine = MockSimulationEngine::new() - .with_result(expected_result.clone()); + + let engine = MockSimulationEngine::new().with_result(expected_result.clone()); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + let request = SimulationRequestBuilder::new() .with_bundle_id(bundle_id) .with_bundle(bundles::single_tx_bundle()) .build(); - + // Act - using the clean trait interface let result = simulator.simulate(&request).await; - + // Assert assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); - + let published = publisher.get_published(); assert_eq!(published[0].bundle_id, bundle_id); assert_eq!(published[0].gas_used, Some(200_000)); @@ -53,29 +52,31 @@ async fn test_simulate_success_flow() { async fn test_simulate_failure_flow() { // Arrange let bundle_id = Uuid::new_v4(); - let engine = MockSimulationEngine::new() - .fail_next_with(SimulationError::Revert { - reason: "Test revert".to_string() - }); + let engine = MockSimulationEngine::new().fail_next_with(SimulationError::Revert { + reason: "Test revert".to_string(), + }); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + let request = SimulationRequestBuilder::new() .with_bundle_id(bundle_id) .build(); - - + // Act let result = simulator.simulate(&request).await; - + // Assert assert!(result.is_ok()); // simulate() succeeds even if simulation fails assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); - + let published = publisher.get_published(); assert!(!published[0].success); - assert!(published[0].error_reason.as_ref().unwrap().contains("revert")); + assert!(published[0] + .error_reason + .as_ref() + .unwrap() + .contains("revert")); } #[tokio::test] @@ -84,12 +85,12 @@ async fn test_publisher_error_handling() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new().fail_next(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + let request = SimulationRequestBuilder::new().build(); - + // Act - should log error but not fail let result = simulator.simulate(&request).await; - + // Assert assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); // Engine was called @@ -103,16 +104,15 @@ async fn test_state_provider_factory_error() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + // Request with block hash that doesn't exist in our mock state let request = SimulationRequestBuilder::new() .with_block(99_999_999, alloy_primitives::B256::random()) .build(); - - + // Act let result = simulator.simulate(&request).await; - + // Assert - in our mock, this actually succeeds, but in real implementation // it would fail with state provider error assert!(result.is_ok()); @@ -124,22 +124,21 @@ async fn test_multiple_sequential_simulations() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - + // Act - simulate multiple 
bundles for i in 0..5 { let request = SimulationRequestBuilder::new() .with_bundle( TestBundleBuilder::new() .with_simple_transaction(&[i as u8, 0x01, 0x02]) - .build() + .build(), ) .build(); - + let result = simulator.simulate(&request).await; assert!(result.is_ok()); } - + // Assert assert_eq!(engine.simulation_count(), 5); assert_eq!(publisher.published_count(), 5); @@ -151,16 +150,15 @@ async fn test_empty_bundle_simulation() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - + let empty_bundle = TestBundleBuilder::new().build(); // No transactions let request = SimulationRequestBuilder::new() .with_bundle(empty_bundle) .build(); - - + // Act let result = simulator.simulate(&request).await; - + // Assert assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); @@ -172,7 +170,7 @@ async fn test_simulate_with_complex_state_diff() { // Arrange let bundle_id = Uuid::new_v4(); let mut state_diff = std::collections::HashMap::new(); - + // Add multiple accounts with multiple storage changes for i in 0..3 { let addr = alloy_primitives::Address::random(); @@ -180,33 +178,31 @@ async fn test_simulate_with_complex_state_diff() { for j in 0..5 { storage.insert( alloy_primitives::U256::from(j), - alloy_primitives::U256::from(i * 100 + j) + alloy_primitives::U256::from(i * 100 + j), ); } state_diff.insert(addr, storage); } - + let result = SimulationResultBuilder::successful() .with_ids(Uuid::new_v4(), bundle_id) .build(); - + // Manually set the state diff let mut result = result; result.state_diff = state_diff.clone(); - - let engine = MockSimulationEngine::new() - .with_result(result); + + let engine = MockSimulationEngine::new().with_result(result); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine, publisher.clone()); - + let request = SimulationRequestBuilder::new() .with_bundle_id(bundle_id) .build(); - - + // Act simulator.simulate(&request).await.unwrap(); - + // Assert let published = publisher.get_published(); assert_eq!(published[0].state_diff.len(), 3); @@ -221,34 +217,34 @@ async fn test_concurrent_simulator_usage() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); - + let mut handles = vec![]; - + // Spawn multiple concurrent simulations for i in 0..10 { let sim = Arc::clone(&simulator); - + let handle = tokio::spawn(async move { let request = SimulationRequestBuilder::new() .with_bundle( TestBundleBuilder::new() .with_simple_transaction(&[i as u8, 0x01, 0x02]) - .build() + .build(), ) .build(); - + sim.simulate(&request).await }); - + handles.push(handle); } - + // Wait for all to complete for handle in handles { let result = handle.await.unwrap(); assert!(result.is_ok()); } - + // Verify all were processed assert_eq!(engine.simulation_count(), 10); assert_eq!(publisher.published_count(), 10); diff --git a/crates/simulator/tests/unit/error_handling_test.rs b/crates/simulator/tests/unit/error_handling_test.rs index 5657f31..9066d61 100644 --- a/crates/simulator/tests/unit/error_handling_test.rs +++ b/crates/simulator/tests/unit/error_handling_test.rs @@ -1,8 +1,8 @@ /// Unit tests for error handling scenarios use crate::common::builders::*; use crate::common::fixtures::*; -use crate::common::mocks::*; use crate::common::mock_bundle_simulator::MockBundleSimulator; +use 
crate::common::mocks::*; use tips_simulator::{core::BundleSimulator, SimulationError}; #[tokio::test] @@ -10,55 +10,62 @@ async fn test_simulation_error_types() { // Test all error types get properly propagated let error_scenarios = vec![ ( - SimulationError::Revert { reason: "Insufficient funds".to_string() }, + SimulationError::Revert { + reason: "Insufficient funds".to_string(), + }, "Insufficient funds", ), + (SimulationError::OutOfGas, "out of gas"), ( - SimulationError::OutOfGas, - "out of gas", - ), - ( - SimulationError::InvalidNonce { tx_index: 0, expected: 5, actual: 3 }, + SimulationError::InvalidNonce { + tx_index: 0, + expected: 5, + actual: 3, + }, "Invalid nonce", ), ( - SimulationError::InsufficientBalance { - tx_index: 1, - required: alloy_primitives::U256::from(1000), - available: alloy_primitives::U256::from(500) + SimulationError::InsufficientBalance { + tx_index: 1, + required: alloy_primitives::U256::from(1000), + available: alloy_primitives::U256::from(500), }, "Insufficient balance", ), ( - SimulationError::StateAccessError { message: "RPC timeout".to_string() }, + SimulationError::StateAccessError { + message: "RPC timeout".to_string(), + }, "State access error", ), + (SimulationError::Timeout, "timed out"), ( - SimulationError::Timeout, - "timed out", - ), - ( - SimulationError::Unknown { message: "Unexpected error".to_string() }, + SimulationError::Unknown { + message: "Unexpected error".to_string(), + }, "Unexpected error", ), ]; for (error, expected_msg) in error_scenarios { - let engine = MockSimulationEngine::new() - .fail_next_with(error.clone()); + let engine = MockSimulationEngine::new().fail_next_with(error.clone()); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine, publisher.clone()); - + let request = SimulationRequestBuilder::new().build(); - + // Execute simulator.simulate(&request).await.unwrap(); - + // Verify let published = publisher.get_published(); assert_eq!(published.len(), 1); assert!(!published[0].success); - assert!(published[0].error_reason.as_ref().unwrap().contains(expected_msg)); + assert!(published[0] + .error_reason + .as_ref() + .unwrap() + .contains(expected_msg)); } } @@ -68,22 +75,21 @@ async fn test_publisher_failure_recovery() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - + // First simulation succeeds let request1 = SimulationRequestBuilder::new().build(); simulator.simulate(&request1).await.unwrap(); assert_eq!(publisher.published_count(), 1); - + // Configure publisher to fail next let publisher2 = publisher.clone().fail_next(); let simulator2 = MockBundleSimulator::new(engine.clone(), publisher2.clone()); - + // Second simulation - publisher fails but simulator continues let request2 = SimulationRequestBuilder::new().build(); simulator2.simulate(&request2).await.unwrap(); assert_eq!(publisher2.published_count(), 1); // Still 1 from first simulation, second failed - + // Third simulation - publisher recovers let request3 = SimulationRequestBuilder::new().build(); simulator2.simulate(&request3).await.unwrap(); @@ -95,46 +101,69 @@ async fn test_engine_failure_recovery() { // Test that engine failures are handled gracefully let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); - - + // Test simulation 1: Out of gas error let engine1 = engine.clone().fail_next_with(SimulationError::OutOfGas); let simulator1 = 
MockBundleSimulator::new(engine1, publisher.clone()); let request1 = SimulationRequestBuilder::new() - .with_bundle(TestBundleBuilder::new().with_simple_transaction(&[1]).build()) + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[1]) + .build(), + ) .build(); simulator1.simulate(&request1).await.unwrap(); - + // Test simulation 2: Success - let engine2 = engine.clone().with_result(SimulationResultBuilder::successful().build()); + let engine2 = engine + .clone() + .with_result(SimulationResultBuilder::successful().build()); let simulator2 = MockBundleSimulator::new(engine2, publisher.clone()); let request2 = SimulationRequestBuilder::new() - .with_bundle(TestBundleBuilder::new().with_simple_transaction(&[2]).build()) + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[2]) + .build(), + ) .build(); simulator2.simulate(&request2).await.unwrap(); - + // Test simulation 3: Revert error - let engine3 = engine.clone().fail_next_with(SimulationError::Revert { reason: "Test revert".to_string() }); + let engine3 = engine.clone().fail_next_with(SimulationError::Revert { + reason: "Test revert".to_string(), + }); let simulator3 = MockBundleSimulator::new(engine3, publisher.clone()); let request3 = SimulationRequestBuilder::new() - .with_bundle(TestBundleBuilder::new().with_simple_transaction(&[3]).build()) + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[3]) + .build(), + ) .build(); simulator3.simulate(&request3).await.unwrap(); - + // Verify all were published despite failures let published = publisher.get_published(); assert_eq!(published.len(), 3); - + // First should fail with out of gas assert!(!published[0].success); - assert!(published[0].error_reason.as_ref().unwrap().contains("Bundle ran out of gas")); - + assert!(published[0] + .error_reason + .as_ref() + .unwrap() + .contains("Bundle ran out of gas")); + // Second should succeed (from pre-configured result) assert!(published[1].success); - + // Third should fail with revert assert!(!published[2].success); - assert!(published[2].error_reason.as_ref().unwrap().contains("Bundle reverted")); + assert!(published[2] + .error_reason + .as_ref() + .unwrap() + .contains("Bundle reverted")); } #[tokio::test] @@ -143,17 +172,16 @@ async fn test_invalid_bundle_handling() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - + // Empty bundle let empty_bundle = TestBundleBuilder::new().build(); let request = SimulationRequestBuilder::new() .with_bundle(empty_bundle) .build(); - + simulator.simulate(&request).await.unwrap(); assert_eq!(publisher.published_count(), 1); - + // Bundle with invalid block number (future block) let future_bundle = TestBundleBuilder::new() .with_simple_transaction(&[0x01]) @@ -163,7 +191,7 @@ async fn test_invalid_bundle_handling() { .with_bundle(future_bundle) .with_block(99_999_999, alloy_primitives::B256::random()) .build(); - + simulator.simulate(&future_request).await.unwrap(); assert_eq!(publisher.published_count(), 2); } @@ -172,51 +200,51 @@ async fn test_invalid_bundle_handling() { async fn test_concurrent_error_handling() { // Test error handling under concurrent load use std::sync::Arc; - + let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); - + // Provider factory no longer needed with new architecture - + let 
mut handles = vec![]; - + // Spawn multiple tasks, some will fail for i in 0..10 { let sim = Arc::clone(&simulator); // Provider factory no longer needed let eng = engine.clone(); - + let handle = tokio::spawn(async move { // Every third simulation fails if i % 3 == 0 { let _ = eng.fail_next_with(SimulationError::Timeout); } - + let request = SimulationRequestBuilder::new() .with_bundle( TestBundleBuilder::new() .with_simple_transaction(&[i as u8]) - .build() + .build(), ) .build(); - + sim.simulate(&request).await }); - + handles.push(handle); } - + // Wait for all to complete let mut results = vec![]; for handle in handles { results.push(handle.await.unwrap()); } - + // All should complete without panicking assert!(results.iter().all(|r: &eyre::Result<()>| r.is_ok())); assert_eq!(publisher.published_count(), 10); - + // Verify mix of successes and failures let published = publisher.get_published(); let failures = published.iter().filter(|r| !r.success).count(); @@ -228,23 +256,29 @@ fn test_error_display_formatting() { // Verify error messages are properly formatted let errors = vec![ ( - SimulationError::Revert { reason: "ERC20: transfer amount exceeds balance".to_string() }, + SimulationError::Revert { + reason: "ERC20: transfer amount exceeds balance".to_string(), + }, "Bundle reverted: ERC20: transfer amount exceeds balance", ), ( - SimulationError::InvalidNonce { tx_index: 0, expected: 10, actual: 5 }, + SimulationError::InvalidNonce { + tx_index: 0, + expected: 10, + actual: 5, + }, "Invalid nonce in tx 0: expected 10, got 5", ), ( - SimulationError::InsufficientBalance { - tx_index: 2, - required: alloy_primitives::U256::from(1_000_000), - available: alloy_primitives::U256::from(500_000) + SimulationError::InsufficientBalance { + tx_index: 2, + required: alloy_primitives::U256::from(1_000_000), + available: alloy_primitives::U256::from(500_000), }, "Insufficient balance in tx 2: required 1000000, available 500000", ), ]; - + for (error, expected) in errors { assert_eq!(error.to_string(), expected); } @@ -253,25 +287,26 @@ fn test_error_display_formatting() { #[tokio::test] async fn test_timeout_simulation() { // Test timeout error handling - let engine = MockSimulationEngine::new() - .fail_next_with(SimulationError::Timeout); + let engine = MockSimulationEngine::new().fail_next_with(SimulationError::Timeout); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine, publisher.clone()); - + let large_bundle = bundles::large_bundle(1000); // Very large bundle let request = SimulationRequestBuilder::new() .with_bundle(large_bundle) .build(); - - + // Execute let result = simulator.simulate(&request).await; - + // Should complete successfully even with timeout assert!(result.is_ok()); - + let published = publisher.get_published(); assert_eq!(published.len(), 1); assert!(!published[0].success); - assert_eq!(published[0].error_reason, Some("Simulation timed out".to_string())); + assert_eq!( + published[0].error_reason, + Some("Simulation timed out".to_string()) + ); } diff --git a/crates/simulator/tests/unit/mod.rs b/crates/simulator/tests/unit/mod.rs index 4505531..2cf9635 100644 --- a/crates/simulator/tests/unit/mod.rs +++ b/crates/simulator/tests/unit/mod.rs @@ -1,4 +1,4 @@ /// Unit tests for simulator core components pub mod core_test; -pub mod types_test; pub mod error_handling_test; +pub mod types_test; diff --git a/crates/simulator/tests/unit/types_test.rs b/crates/simulator/tests/unit/types_test.rs index 0f3b240..b5518d7 100644 --- 
a/crates/simulator/tests/unit/types_test.rs +++ b/crates/simulator/tests/unit/types_test.rs @@ -1,9 +1,9 @@ /// Unit tests for simulator types use crate::common::builders::*; use crate::common::fixtures::*; -use tips_simulator::types::{SimulationError, SimulationRequest, SimulationResult}; use alloy_primitives::{Address, B256, U256}; use std::collections::HashMap; +use tips_simulator::types::{SimulationError, SimulationRequest, SimulationResult}; use uuid::Uuid; #[test] @@ -13,13 +13,13 @@ fn test_simulation_result_success_creation() { let block_hash = B256::random(); let gas_used = 150_000; let execution_time = 1500; - + let mut state_diff = HashMap::new(); let addr = Address::random(); let mut storage = HashMap::new(); storage.insert(U256::from(0), U256::from(100)); state_diff.insert(addr, storage); - + let result = SimulationResult::success( id, bundle_id, @@ -29,7 +29,7 @@ fn test_simulation_result_success_creation() { execution_time, state_diff.clone(), ); - + assert_eq!(result.id, id); assert_eq!(result.bundle_id, bundle_id); assert_eq!(result.block_number, 18_000_000); @@ -50,7 +50,7 @@ fn test_simulation_result_failure_creation() { let error = SimulationError::Revert { reason: "Test revert".to_string(), }; - + let result = SimulationResult::failure( id, bundle_id, @@ -59,7 +59,7 @@ fn test_simulation_result_failure_creation() { execution_time, error.clone(), ); - + assert_eq!(result.id, id); assert_eq!(result.bundle_id, bundle_id); assert!(!result.success); @@ -77,10 +77,7 @@ fn test_simulation_error_display() { }, "Bundle reverted: Invalid state", ), - ( - SimulationError::OutOfGas, - "Bundle ran out of gas", - ), + (SimulationError::OutOfGas, "Bundle ran out of gas"), ( SimulationError::InvalidNonce { tx_index: 2, @@ -103,10 +100,7 @@ fn test_simulation_error_display() { }, "State access error: RPC timeout", ), - ( - SimulationError::Timeout, - "Simulation timed out", - ), + (SimulationError::Timeout, "Simulation timed out"), ( SimulationError::Unknown { message: "Something went wrong".to_string(), @@ -114,7 +108,7 @@ fn test_simulation_error_display() { "Unknown error: Something went wrong", ), ]; - + for (error, expected) in test_cases { assert_eq!(error.to_string(), expected); } @@ -126,14 +120,14 @@ fn test_simulation_request_fields() { let bundle = bundles::single_tx_bundle(); let block_number = 18_000_000; let block_hash = B256::random(); - + let request = SimulationRequest { bundle_id, bundle: bundle.clone(), block_number, block_hash, }; - + assert_eq!(request.bundle_id, bundle_id); assert_eq!(request.bundle.txs.len(), bundle.txs.len()); assert_eq!(request.block_number, block_number); @@ -148,42 +142,52 @@ fn test_simulation_result_builder() { .with_execution_time_us(2000) .with_state_change(*addresses::ALICE, U256::from(0), U256::from(500)) .build(); - + assert!(success_result.success); assert_eq!(success_result.gas_used, Some(250_000)); assert_eq!(success_result.execution_time_us, 2000); assert!(success_result.state_diff.contains_key(&*addresses::ALICE)); - + // Test failed result with revert let revert_result = SimulationResultBuilder::failed() .with_revert("Insufficient funds".to_string()) .build(); - + assert!(!revert_result.success); assert!(revert_result.gas_used.is_none()); - assert!(revert_result.error_reason.as_ref().unwrap().contains("Insufficient funds")); - + assert!(revert_result + .error_reason + .as_ref() + .unwrap() + .contains("Insufficient funds")); + // Test failed result with out of gas - let oog_result = SimulationResultBuilder::failed() - 
.with_out_of_gas() - .build(); - + let oog_result = SimulationResultBuilder::failed().with_out_of_gas().build(); + assert!(!oog_result.success); - assert!(oog_result.error_reason.as_ref().unwrap().contains("out of gas")); - + assert!(oog_result + .error_reason + .as_ref() + .unwrap() + .contains("out of gas")); + // Test invalid nonce let nonce_result = SimulationResultBuilder::failed() .with_invalid_nonce(0, 5, 3) .build(); - + assert!(!nonce_result.success); - assert!(nonce_result.error_reason.as_ref().unwrap().contains("Invalid nonce")); + assert!(nonce_result + .error_reason + .as_ref() + .unwrap() + .contains("Invalid nonce")); } #[test] fn test_simulation_result_timestamp() { let result = SimulationResultBuilder::successful().build(); - + // Check that timestamp is recent (within last minute) let now = chrono::Utc::now(); let created_timestamp = result.created_at.timestamp(); @@ -195,22 +199,18 @@ fn test_simulation_result_timestamp() { #[test] fn test_large_state_diff() { let mut builder = SimulationResultBuilder::successful(); - + // Add many state changes for i in 0..100 { let addr = Address::random(); for j in 0..10 { - builder = builder.with_state_change( - addr, - U256::from(j), - U256::from(i * 1000 + j) - ); + builder = builder.with_state_change(addr, U256::from(j), U256::from(i * 1000 + j)); } } - + let result = builder.build(); assert_eq!(result.state_diff.len(), 100); - + // Verify each account has 10 storage slots for (_, storage) in &result.state_diff { assert_eq!(storage.len(), 10); @@ -221,16 +221,22 @@ fn test_large_state_diff() { fn test_error_serialization() { // Verify that errors can be converted to strings and back let errors = vec![ - SimulationError::Revert { reason: "test".to_string() }, + SimulationError::Revert { + reason: "test".to_string(), + }, SimulationError::OutOfGas, - SimulationError::InvalidNonce { tx_index: 1, expected: 2, actual: 3 }, + SimulationError::InvalidNonce { + tx_index: 1, + expected: 2, + actual: 3, + }, SimulationError::Timeout, ]; - + for error in errors { let error_string = error.to_string(); assert!(!error_string.is_empty()); - + // Create a result with this error let result = SimulationResult::failure( Uuid::new_v4(), @@ -240,7 +246,7 @@ fn test_error_serialization() { 1000, error, ); - + assert_eq!(result.error_reason, Some(error_string)); } } @@ -251,13 +257,13 @@ fn test_simulation_result_gas_used_bounds() { let max_gas_result = SimulationResultBuilder::successful() .with_gas_used(30_000_000) // 30M gas .build(); - + assert_eq!(max_gas_result.gas_used, Some(30_000_000)); - + // Test with zero gas (edge case) let zero_gas_result = SimulationResultBuilder::successful() .with_gas_used(0) .build(); - + assert_eq!(zero_gas_result.gas_used, Some(0)); } From f2689b345248132eb433f2b399355ed7a0511992 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 11:44:54 -0500 Subject: [PATCH 30/39] Remove and rename functions --- crates/simulator/src/lib.rs | 105 ++++-------------------------------- 1 file changed, 11 insertions(+), 94 deletions(-) diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 565f6b7..1a53736 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -31,24 +31,23 @@ pub type TipsMempoolEventListener = MempoolEventListener +/// Dependencies shared across listeners +struct ListenerDependencies where - Node: FullNodeComponents, - ::Evm: ConfigureEvm, + B: BundleSimulator, { datastore: Arc, - simulator: RethBundleSimulator, TipsSimulationPublisher>, + simulator: B, } 
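The hunk above narrows `ListenerDependencies` to a single `B: BundleSimulator` bound instead of naming the concrete engine and publisher types. A minimal sketch of the pattern, assuming a simplified `simulate()` signature (the real trait takes a `&SimulationRequest`) and omitting the datastore field; `StubSimulator` is a hypothetical test double:

    use async_trait::async_trait;
    use eyre::Result;

    // Simplified stand-in for the crate's BundleSimulator trait; the real
    // trait's simulate() takes a &SimulationRequest.
    #[async_trait]
    trait BundleSimulator: Send + Sync {
        async fn simulate(&self) -> Result<()>;
    }

    // Hypothetical stub, used only to show the injection point.
    struct StubSimulator;

    #[async_trait]
    impl BundleSimulator for StubSimulator {
        async fn simulate(&self) -> Result<()> {
            Ok(())
        }
    }

    // Same shape as ListenerDependencies (datastore field omitted):
    // generic over B, so production and test wiring share one code path.
    struct ListenerDependencies<B: BundleSimulator> {
        simulator: B,
    }

    #[tokio::main]
    async fn main() -> Result<()> {
        let deps = ListenerDependencies { simulator: StubSimulator };
        deps.simulator.simulate().await
    }

Any implementor, the production `RethBundleSimulator` or a test stub, now plugs into the same field.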
-/// Initialize common listener components (database, publisher, engine, core simulator) -async fn init_common_components( +/// Initialize listener dependencies (database, publisher, engine, core simulator) +async fn init_dependencies( provider: Arc, evm_config: Node::Evm, database_url: String, kafka_brokers: String, kafka_topic: String, -) -> Result> +) -> Result, TipsSimulationPublisher>>> where Node: FullNodeComponents, ::Evm: ConfigureEvm, @@ -79,94 +78,12 @@ where let simulator = RethBundleSimulator::new(engine, publisher); info!("Core bundle simulator initialized"); - Ok(CommonListenerComponents { + Ok(ListenerDependencies { datastore, simulator, }) } -/// Initialize ExEx event listener (ExEx) that processes committed blocks -/// -/// Note: The worker pool is created but NOT started. -pub async fn init_exex_event_listener( - ctx: ExExContext, - config: ExExSimulationConfig, - kafka_brokers: String, - kafka_topic: String, -) -> Result> -where - Node: FullNodeComponents, - ::Evm: ConfigureEvm, -{ - info!("Initializing ExEx event listener"); - - let provider = Arc::new(ctx.components.provider().clone()); - let evm_config = ctx.components.evm_config().clone(); - - let common_components = init_common_components( - Arc::clone(&provider), - evm_config, - config.database_url.clone(), - kafka_brokers, - kafka_topic, - ) - .await?; - - let worker_pool = SimulationWorkerPool::new( - Arc::new(common_components.simulator), - config.max_concurrent_simulations, - ); - - let consensus_listener = ExExEventListener::new(ctx, common_components.datastore, worker_pool); - - info!( - max_concurrent = config.max_concurrent_simulations, - "ExEx event listener initialized successfully" - ); - - Ok(consensus_listener) -} - -/// Initialize mempool event listener that processes mempool transactions -/// -/// Note: The worker pool is created but NOT started. 
-pub async fn init_mempool_event_listener( - ctx: Arc>, - provider: Arc, - config: MempoolListenerConfig, - max_concurrent_simulations: usize, -) -> Result> -where - Node: FullNodeComponents, - ::Evm: ConfigureEvm, -{ - info!("Initializing mempool event listener"); - - let evm_config = ctx.components.evm_config().clone(); - let common_components = init_common_components( - Arc::clone(&provider), - evm_config, - config.database_url.clone(), - config.kafka_brokers.join(","), - config.kafka_topic.clone(), - ) - .await?; - - let worker_pool = SimulationWorkerPool::new( - Arc::new(common_components.simulator), - max_concurrent_simulations, - ); - - let mempool_listener = MempoolEventListener::new(Arc::clone(&provider), config, worker_pool)?; - - info!( - max_concurrent = max_concurrent_simulations, - "Mempool event listener initialized successfully" - ); - - Ok(mempool_listener) -} - /// Encapsulates both event listeners with their shared worker pool /// /// This struct ensures that the ExEx and mempool listeners always use the same @@ -202,7 +119,7 @@ where let provider = Arc::new(exex_ctx.components.provider().clone()); let evm_config = exex_ctx.components.evm_config().clone(); - let common_components = init_common_components( + let dependencies = init_dependencies( Arc::clone(&provider), evm_config, exex_config.database_url.clone(), @@ -212,13 +129,13 @@ where .await?; let shared_worker_pool = SimulationWorkerPool::new( - Arc::new(common_components.simulator), + Arc::new(dependencies.simulator), max_concurrent_simulations, ); let exex_listener = ExExEventListener::new( exex_ctx, - common_components.datastore, + dependencies.datastore, Arc::clone(&shared_worker_pool), ); From 13c8d48230cda1a13b2b68513565012b41aab908 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 11:48:10 -0500 Subject: [PATCH 31/39] Remove unused test utilities --- crates/simulator/tests/common/builders.rs | 18 ---------- crates/simulator/tests/common/fixtures.rs | 33 ------------------- .../tests/common/mock_bundle_simulator.rs | 6 ---- crates/simulator/tests/common/mocks.rs | 8 +---- crates/simulator/tests/common/mod.rs | 16 --------- 5 files changed, 1 insertion(+), 80 deletions(-) diff --git a/crates/simulator/tests/common/builders.rs b/crates/simulator/tests/common/builders.rs index 5890983..5992d23 100644 --- a/crates/simulator/tests/common/builders.rs +++ b/crates/simulator/tests/common/builders.rs @@ -48,15 +48,7 @@ impl TestBundleBuilder { self } - pub fn with_reverting_tx(mut self, tx_hash: B256) -> Self { - self.reverting_tx_hashes.push(tx_hash); - self - } - pub fn with_replacement_uuid(mut self, uuid: String) -> Self { - self.replacement_uuid = Some(uuid); - self - } pub fn build(self) -> EthSendBundle { EthSendBundle { @@ -171,12 +163,6 @@ impl SimulationResultBuilder { self } - pub fn with_block(mut self, number: u64, hash: B256) -> Self { - self.block_number = number; - self.block_hash = Some(hash); - self - } - pub fn with_gas_used(mut self, gas: u64) -> Self { self.gas_used = Some(gas); self @@ -266,10 +252,6 @@ impl ScenarioBuilder { self } - pub fn add_bundle(mut self, bundle: EthSendBundle) -> Self { - self.bundles.push(bundle); - self - } pub fn add_simple_bundle(mut self, num_txs: usize) -> Self { let mut builder = TestBundleBuilder::new().with_block_number(self.block_number); diff --git a/crates/simulator/tests/common/fixtures.rs b/crates/simulator/tests/common/fixtures.rs index ed4643d..f31f831 100644 --- a/crates/simulator/tests/common/fixtures.rs +++ 
b/crates/simulator/tests/common/fixtures.rs @@ -1,7 +1,6 @@ /// Test fixtures and pre-configured test data use alloy_primitives::Bytes; use alloy_rpc_types_mev::EthSendBundle; -use std::sync::LazyLock; /// Common test addresses pub mod addresses { @@ -18,11 +17,6 @@ pub mod addresses { .parse() .unwrap() }); - pub static CHARLIE: LazyLock
<Address> = LazyLock::new(|| { - "0x0000000000000000000000000000000000000003" - .parse() - .unwrap() - }); pub static CONTRACT_A: LazyLock<Address>
= LazyLock::new(|| { "0x1000000000000000000000000000000000000001" .parse() @@ -41,12 +35,8 @@ pub mod blocks { use std::sync::LazyLock; pub const BLOCK_18M: u64 = 18_000_000; - pub const BLOCK_18M_PLUS_1: u64 = 18_000_001; - pub const BLOCK_18M_PLUS_2: u64 = 18_000_002; pub static HASH_18M: LazyLock = LazyLock::new(|| B256::from_slice(&[1u8; 32])); - pub static HASH_18M_PLUS_1: LazyLock = LazyLock::new(|| B256::from_slice(&[2u8; 32])); - pub static HASH_18M_PLUS_2: LazyLock = LazyLock::new(|| B256::from_slice(&[3u8; 32])); } /// Pre-built transaction fixtures @@ -71,13 +61,6 @@ pub mod transactions { ]) } - /// Transaction that will revert (mock data) - pub fn reverting_tx() -> Bytes { - Bytes::from(vec![ - 0x02, // EIP-1559 tx type - 0xFF, 0xFF, 0xFF, 0xFF, // Mock reverting transaction - ]) - } } /// Pre-configured bundles for testing @@ -103,14 +86,6 @@ pub mod bundles { .build() } - /// Bundle with reverting transaction - pub fn reverting_bundle() -> EthSendBundle { - TestBundleBuilder::new() - .with_transaction(transactions::simple_transfer()) - .with_transaction(transactions::reverting_tx()) - .with_block_number(blocks::BLOCK_18M) - .build() - } /// Large bundle for stress testing pub fn large_bundle(num_txs: usize) -> EthSendBundle { @@ -124,14 +99,6 @@ pub mod bundles { builder.build() } - /// Bundle with specific timing constraints - pub fn time_constrained_bundle() -> EthSendBundle { - TestBundleBuilder::new() - .with_transaction(transactions::simple_transfer()) - .with_block_number(blocks::BLOCK_18M) - .with_timestamps(1625097600, 1625097700) // 100 second window - .build() - } } /// Test scenarios combining multiple fixtures diff --git a/crates/simulator/tests/common/mock_bundle_simulator.rs b/crates/simulator/tests/common/mock_bundle_simulator.rs index fa28a80..e7ab473 100644 --- a/crates/simulator/tests/common/mock_bundle_simulator.rs +++ b/crates/simulator/tests/common/mock_bundle_simulator.rs @@ -18,13 +18,7 @@ impl MockBundleSimulator { Self { engine, publisher } } - pub fn engine(&self) -> &MockSimulationEngine { - &self.engine - } - pub fn publisher(&self) -> &MockSimulationPublisher { - &self.publisher - } } #[async_trait] diff --git a/crates/simulator/tests/common/mocks.rs b/crates/simulator/tests/common/mocks.rs index 9767b28..2ee412e 100644 --- a/crates/simulator/tests/common/mocks.rs +++ b/crates/simulator/tests/common/mocks.rs @@ -1,5 +1,5 @@ /// Reusable mock implementations for testing -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{Address, U256}; use async_trait::async_trait; use std::collections::HashMap; use std::sync::{Arc, Mutex}; @@ -41,9 +41,6 @@ impl MockSimulationEngine { self } - pub fn get_simulations(&self) -> Vec { - self.simulations.lock().unwrap().clone() - } pub fn simulation_count(&self) -> usize { self.simulations.lock().unwrap().len() @@ -130,9 +127,6 @@ impl MockSimulationPublisher { self.published.lock().unwrap().len() } - pub fn clear_published(&self) { - self.published.lock().unwrap().clear(); - } } #[async_trait] diff --git a/crates/simulator/tests/common/mod.rs b/crates/simulator/tests/common/mod.rs index c711855..5780ba3 100644 --- a/crates/simulator/tests/common/mod.rs +++ b/crates/simulator/tests/common/mod.rs @@ -10,22 +10,6 @@ use std::collections::HashMap; use tips_simulator::types::{SimulationRequest, SimulationResult}; use uuid::Uuid; -/// Test configuration that can be shared across tests -pub struct TestConfig { - pub default_block_number: u64, - pub default_gas_limit: u64, - pub simulation_timeout_ms: 
u64, -} - -impl Default for TestConfig { - fn default() -> Self { - Self { - default_block_number: 18_000_000, - default_gas_limit: 30_000_000, - simulation_timeout_ms: 5000, - } - } -} /// Helper to create a simple test bundle pub fn create_test_bundle(num_txs: usize, block_number: u64) -> EthSendBundle { From cf5ac0bc69b02238bacd3c75c1b0dae46e84d791 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 12:29:10 -0500 Subject: [PATCH 32/39] Avoid starting the simulator container by default and fail if the ports are in use --- docker-compose.tips.yml | 7 +++++-- justfile | 10 +++++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/docker-compose.tips.yml b/docker-compose.tips.yml index d77a2b4..bf71f4c 100644 --- a/docker-compose.tips.yml +++ b/docker-compose.tips.yml @@ -54,11 +54,14 @@ services: dockerfile: crates/simulator/Dockerfile container_name: tips-simulator ports: - - "2222:2222" - - "4444:4444" + # Listen on the loopback interface to fail fast if the ports are already in use by op-rbuilder + - "127.0.0.1:2222:2222" + - "127.0.0.1:4444:4444" volumes: - ${TIPS_SIMULATOR_DATADIR}:/data - ${TIPS_SIMULATOR_BUILDER_PLAYGROUND_DIR}:/playground env_file: - .env.docker restart: unless-stopped + profiles: + - simulator diff --git a/justfile b/justfile index 3d3c34c..931c979 100644 --- a/justfile +++ b/justfile @@ -47,10 +47,18 @@ stop-all: start-all: stop-all export COMPOSE_FILE=docker-compose.yml:docker-compose.tips.yml && mkdir -p data/postgres data/kafka data/minio && docker compose build && docker compose up -d +# Stop only the specified service without stopping the other services or removing the data directories +stop-only program: + export COMPOSE_FILE=docker-compose.yml:docker-compose.tips.yml && docker compose down {{ program }} + +# Start only the specified service without stopping the other services or removing the data directories +start-only program: + export COMPOSE_FILE=docker-compose.yml:docker-compose.tips.yml && mkdir -p data/postgres data/kafka data/minio && docker compose build && docker compose up -d {{ program }} + # Start every service in docker, except the one you're currently working on. e.g. 
just start-except ui ingress-rpc start-except programs: stop-all #!/bin/bash - all_services=(postgres kafka kafka-setup minio minio-setup ingress-rpc ingres-writer audit maintenance ui simulator) + all_services=(postgres kafka kafka-setup minio minio-setup ingress-rpc ingres-writer audit maintenance ui) exclude_services=({{ programs }}) # Create result array with services not in exclude list From 1c7d570aa91387576993a56df4bac2b201d1329a Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 14:02:39 -0500 Subject: [PATCH 33/39] Clean up tests --- crates/simulator/src/core.rs | 10 +- crates/simulator/src/lib.rs | 8 +- .../tests/bundle_simulator_impl_test.rs | 178 +++++++ crates/simulator/tests/common/builders.rs | 51 +- crates/simulator/tests/common/fixtures.rs | 38 +- .../tests/common/mock_bundle_simulator.rs | 24 +- crates/simulator/tests/common/mocks.rs | 30 +- crates/simulator/tests/common/mod.rs | 75 +-- .../tests/common_tests/builders_test.rs | 97 ++++ .../tests/common_tests/fixtures_test.rs | 32 ++ .../mock_bundle_simulator_test.rs} | 59 +-- crates/simulator/tests/common_tests/mod.rs | 3 + .../tests/{unit => }/error_handling_test.rs | 10 +- crates/simulator/tests/integration_test.rs | 282 ----------- crates/simulator/tests/publisher_test.rs | 146 ++++++ crates/simulator/tests/run_common_tests.rs | 2 + crates/simulator/tests/unit/mod.rs | 4 - crates/simulator/tests/unit/types_test.rs | 269 ---------- crates/simulator/tests/worker_pool_test.rs | 463 ++++++++++++++++++ 19 files changed, 968 insertions(+), 813 deletions(-) create mode 100644 crates/simulator/tests/bundle_simulator_impl_test.rs create mode 100644 crates/simulator/tests/common_tests/builders_test.rs create mode 100644 crates/simulator/tests/common_tests/fixtures_test.rs rename crates/simulator/tests/{unit/core_test.rs => common_tests/mock_bundle_simulator_test.rs} (80%) create mode 100644 crates/simulator/tests/common_tests/mod.rs rename crates/simulator/tests/{unit => }/error_handling_test.rs (98%) delete mode 100644 crates/simulator/tests/integration_test.rs create mode 100644 crates/simulator/tests/publisher_test.rs create mode 100644 crates/simulator/tests/run_common_tests.rs delete mode 100644 crates/simulator/tests/unit/mod.rs delete mode 100644 crates/simulator/tests/unit/types_test.rs create mode 100644 crates/simulator/tests/worker_pool_test.rs diff --git a/crates/simulator/src/core.rs b/crates/simulator/src/core.rs index 5a7392c..01711e9 100644 --- a/crates/simulator/src/core.rs +++ b/crates/simulator/src/core.rs @@ -12,9 +12,9 @@ pub trait BundleSimulator: Send + Sync { async fn simulate(&self, request: &SimulationRequest) -> Result<()>; } -/// Production bundle simulator for Reth -/// This is the Reth-specific implementation -pub struct RethBundleSimulator +/// Production bundle simulator implementation +/// Wires the engine and publisher together for bundle simulation +pub struct BundleSimulatorImpl where E: SimulationEngine, P: SimulationPublisher, @@ -23,7 +23,7 @@ where publisher: P, } -impl RethBundleSimulator +impl BundleSimulatorImpl where E: SimulationEngine, P: SimulationPublisher, @@ -34,7 +34,7 @@ where } #[async_trait] -impl BundleSimulator for RethBundleSimulator +impl BundleSimulator for BundleSimulatorImpl where E: SimulationEngine + 'static, P: SimulationPublisher + 'static, diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index 1a53736..c7f1d5f 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -15,7 +15,7 @@ use std::sync::Arc; use 
tracing::{error, info}; pub use config::SimulatorNodeConfig; -pub use core::{BundleSimulator, RethBundleSimulator}; +pub use core::{BundleSimulator, BundleSimulatorImpl}; pub use engine::{RethSimulationEngine, SimulationEngine}; pub use listeners::{ExExEventListener, MempoolEventListener, MempoolListenerConfig}; pub use publisher::{SimulationPublisher, TipsSimulationPublisher}; @@ -24,7 +24,7 @@ pub use worker_pool::SimulationWorkerPool; // Type aliases for concrete implementations pub type TipsBundleSimulator = - RethBundleSimulator, TipsSimulationPublisher>; + BundleSimulatorImpl, TipsSimulationPublisher>; pub type TipsExExEventListener = ExExEventListener, tips_datastore::PostgresDatastore>; pub type TipsMempoolEventListener = MempoolEventListener>; @@ -47,7 +47,7 @@ async fn init_dependencies( database_url: String, kafka_brokers: String, kafka_topic: String, -) -> Result, TipsSimulationPublisher>>> +) -> Result, TipsSimulationPublisher>>> where Node: FullNodeComponents, ::Evm: ConfigureEvm, @@ -75,7 +75,7 @@ where let engine = RethSimulationEngine::new(Arc::clone(&provider), evm_config); info!("Simulation engine initialized"); - let simulator = RethBundleSimulator::new(engine, publisher); + let simulator = BundleSimulatorImpl::new(engine, publisher); info!("Core bundle simulator initialized"); Ok(ListenerDependencies { diff --git a/crates/simulator/tests/bundle_simulator_impl_test.rs b/crates/simulator/tests/bundle_simulator_impl_test.rs new file mode 100644 index 0000000..781932d --- /dev/null +++ b/crates/simulator/tests/bundle_simulator_impl_test.rs @@ -0,0 +1,178 @@ +// Tests for the concrete BundleSimulator implementation +mod common; + +use common::builders::*; +use common::fixtures::*; +use common::mocks::*; +use tips_simulator::{core::BundleSimulator, core::BundleSimulatorImpl, SimulationError}; +use uuid::Uuid; + +#[tokio::test] +async fn test_bundle_simulator_impl_successful_flow() { + // Setup - exercise the concrete BundleSimulator implementation + let bundle_id = Uuid::new_v4(); + let expected_result = SimulationResultBuilder::successful() + .with_ids(Uuid::new_v4(), bundle_id) + .with_gas_used(250_000) + .with_execution_time_us(2500) + .build(); + + let engine = MockSimulationEngine::new().with_result(expected_result.clone()); + let publisher = MockSimulationPublisher::new(); + let simulator = BundleSimulatorImpl::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(bundle_id) + .with_bundle(bundles::single_tx_bundle()) + .build(); + + // Act - test the actual BundleSimulator trait implementation + let result = simulator.simulate(&request).await; + + // Assert + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert_eq!(published[0].bundle_id, bundle_id); + assert_eq!(published[0].gas_used, Some(250_000)); + assert!(published[0].success); +} + +#[tokio::test] +async fn test_bundle_simulator_impl_engine_failure() { + // Test that the concrete BundleSimulator handles engine failures + let bundle_id = Uuid::new_v4(); + let engine = MockSimulationEngine::new().fail_next_with(SimulationError::OutOfGas); + let publisher = MockSimulationPublisher::new(); + let simulator = BundleSimulatorImpl::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(bundle_id) + .build(); + + // Act + let result = simulator.simulate(&request).await; + + // Assert - simulate() 
should succeed even if the engine simulation fails + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); + + let published = publisher.get_published(); + assert!(!published[0].success); + assert!(published[0].error_reason.is_some()); +} + +#[tokio::test] +async fn test_bundle_simulator_impl_publisher_failure() { + // Test that the concrete BundleSimulator handles publisher failures gracefully + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new().fail_next(); + let simulator = BundleSimulatorImpl::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new().build(); + + // Act - should complete without error even if publisher fails + let result = simulator.simulate(&request).await; + + // Assert + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 0); // Publisher failed +} + +#[tokio::test] +async fn test_bundle_simulator_impl_multiple_simulations() { + // Test the concrete BundleSimulator with multiple sequential simulations + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = BundleSimulatorImpl::new(engine.clone(), publisher.clone()); + + // Run multiple simulations with different bundle IDs + for i in 0..3 { + let bundle_id = Uuid::new_v4(); + let result = SimulationResultBuilder::successful() + .with_ids(Uuid::new_v4(), bundle_id) + .with_gas_used(100_000 + i * 50_000) + .build(); + + let _ = engine.clone().with_result(result); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(bundle_id) + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x01, 0x02]) + .build(), + ) + .build(); + + let sim_result = simulator.simulate(&request).await; + assert!(sim_result.is_ok()); + } + + // Verify all simulations were processed + assert_eq!(engine.simulation_count(), 3); + assert_eq!(publisher.published_count(), 3); + + let published = publisher.get_published(); + assert_eq!(published.len(), 3); + for (i, result) in published.iter().enumerate() { + assert!(result.success); + assert_eq!(result.gas_used, Some(100_000 + i as u64 * 50_000)); + } +} + +#[tokio::test] +async fn test_bundle_simulator_impl_various_error_types() { + // Test the concrete BundleSimulator with different types of simulation errors + let errors = vec![ + SimulationError::Revert { + reason: "Contract reverted".to_string(), + }, + SimulationError::InvalidNonce { + tx_index: 1, + expected: 10, + actual: 5, + }, + SimulationError::InsufficientBalance { + tx_index: 0, + required: alloy_primitives::U256::from(1000000), + available: alloy_primitives::U256::from(500000), + }, + SimulationError::StateAccessError { + message: "RPC timeout".to_string(), + }, + SimulationError::Timeout, + ]; + + for (_i, error) in errors.into_iter().enumerate() { + let engine = MockSimulationEngine::new().fail_next_with(error.clone()); + let publisher = MockSimulationPublisher::new(); + let simulator = BundleSimulatorImpl::new(engine.clone(), publisher.clone()); + + let request = SimulationRequestBuilder::new() + .with_bundle_id(Uuid::new_v4()) + .build(); + + let result = simulator.simulate(&request).await; + assert!(result.is_ok()); + + let published = publisher.get_published(); + assert_eq!(published.len(), 1); + assert!(!published[0].success); + + let error_msg = published[0].error_reason.as_ref().unwrap(); + match error { + SimulationError::Revert { .. 
} => assert!(error_msg.contains("reverted")), + SimulationError::InvalidNonce { .. } => assert!(error_msg.contains("nonce")), + SimulationError::InsufficientBalance { .. } => assert!(error_msg.contains("balance")), + SimulationError::StateAccessError { .. } => assert!(error_msg.contains("State access")), + SimulationError::Timeout => assert!(error_msg.contains("timed out")), + _ => {} + } + } +} + diff --git a/crates/simulator/tests/common/builders.rs b/crates/simulator/tests/common/builders.rs index 5992d23..31bf7d7 100644 --- a/crates/simulator/tests/common/builders.rs +++ b/crates/simulator/tests/common/builders.rs @@ -1,4 +1,7 @@ +#![allow(dead_code)] + /// Test data builders for creating complex test scenarios + use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types_mev::EthSendBundle; use std::collections::HashMap; @@ -277,51 +280,3 @@ impl ScenarioBuilder { } } -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_bundle_builder() { - let bundle = TestBundleBuilder::new() - .with_simple_transaction(&[0x01, 0x02]) - .with_simple_transaction(&[0x03, 0x04]) - .with_block_number(18_500_000) - .with_timestamps(1000, 2000) - .build(); - - assert_eq!(bundle.txs.len(), 2); - assert_eq!(bundle.block_number, 18_500_000); - assert_eq!(bundle.min_timestamp, Some(1000)); - assert_eq!(bundle.max_timestamp, Some(2000)); - } - - #[test] - fn test_result_builder() { - let bundle_id = Uuid::new_v4(); - let result = SimulationResultBuilder::successful() - .with_ids(Uuid::new_v4(), bundle_id) - .with_gas_used(200_000) - .with_state_change(Address::random(), U256::from(1), U256::from(100)) - .build(); - - assert!(result.success); - assert_eq!(result.bundle_id, bundle_id); - assert_eq!(result.gas_used, Some(200_000)); - assert!(!result.state_diff.is_empty()); - } - - #[test] - fn test_scenario_builder() { - let requests = ScenarioBuilder::new() - .with_block(19_000_000, B256::random()) - .add_simple_bundle(2) - .add_simple_bundle(3) - .build_requests(); - - assert_eq!(requests.len(), 2); - assert_eq!(requests[0].block_number, 19_000_000); - assert_eq!(requests[0].bundle.txs.len(), 2); - assert_eq!(requests[1].bundle.txs.len(), 3); - } -} diff --git a/crates/simulator/tests/common/fixtures.rs b/crates/simulator/tests/common/fixtures.rs index f31f831..b3620cd 100644 --- a/crates/simulator/tests/common/fixtures.rs +++ b/crates/simulator/tests/common/fixtures.rs @@ -1,4 +1,7 @@ +#![allow(dead_code)] + /// Test fixtures and pre-configured test data + use alloy_primitives::Bytes; use alloy_rpc_types_mev::EthSendBundle; @@ -141,38 +144,3 @@ pub mod scenarios { } } -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_fixture_addresses() { - assert_ne!(*addresses::ALICE, *addresses::BOB); - assert_ne!(*addresses::CONTRACT_A, *addresses::CONTRACT_B); - } - - #[test] - fn test_fixture_bundles() { - let single = bundles::single_tx_bundle(); - assert_eq!(single.txs.len(), 1); - - let multi = bundles::multi_tx_bundle(); - assert_eq!(multi.txs.len(), 3); - - let large = bundles::large_bundle(100); - assert_eq!(large.txs.len(), 100); - } - - #[test] - fn test_fixture_scenarios() { - let request = scenarios::basic_simulation(); - assert_eq!(request.block_number, blocks::BLOCK_18M); - assert_eq!(request.bundle.txs.len(), 1); - - let interaction = scenarios::contract_interaction(); - assert_eq!(interaction.bundle.txs.len(), 3); - - let large_scenario = scenarios::large_bundle_scenario(); - assert_eq!(large_scenario.bundle.txs.len(), 100); - } -} diff --git 
a/crates/simulator/tests/common/mock_bundle_simulator.rs b/crates/simulator/tests/common/mock_bundle_simulator.rs index e7ab473..08637d7 100644 --- a/crates/simulator/tests/common/mock_bundle_simulator.rs +++ b/crates/simulator/tests/common/mock_bundle_simulator.rs @@ -1,3 +1,5 @@ +#![allow(dead_code)] + use crate::common::mocks::{MockSimulationEngine, MockSimulationPublisher}; use async_trait::async_trait; use eyre::Result; @@ -55,25 +57,3 @@ impl BundleSimulator for MockBundleSimulator { } } -#[cfg(test)] -mod tests { - use super::*; - use crate::common; - - #[tokio::test] - async fn test_mock_bundle_simulator() { - let engine = MockSimulationEngine::new(); - let publisher = MockSimulationPublisher::new(); - let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - let bundle = common::create_test_bundle(1, 18_000_000); - let request = common::create_test_request(bundle); - - // Use the clean trait interface - let result = simulator.simulate(&request).await; - - assert!(result.is_ok()); - assert_eq!(engine.simulation_count(), 1); - assert_eq!(publisher.published_count(), 1); - } -} diff --git a/crates/simulator/tests/common/mocks.rs b/crates/simulator/tests/common/mocks.rs index 2ee412e..e234a01 100644 --- a/crates/simulator/tests/common/mocks.rs +++ b/crates/simulator/tests/common/mocks.rs @@ -1,4 +1,7 @@ +#![allow(dead_code)] + /// Reusable mock implementations for testing + use alloy_primitives::{Address, U256}; use async_trait::async_trait; use std::collections::HashMap; @@ -141,30 +144,3 @@ impl SimulationPublisher for MockSimulationPublisher { Ok(()) } } - -#[cfg(test)] -mod tests { - use super::*; - use crate::common; - - #[tokio::test] - async fn test_mock_simulation_engine() { - let engine = MockSimulationEngine::new(); - let _request = common::create_test_request(common::create_test_bundle(1, 18_000_000)); - - // Verify the engine is initialized correctly - assert_eq!(engine.simulation_count(), 0); - } - - #[tokio::test] - async fn test_mock_publisher() { - let publisher = MockSimulationPublisher::new(); - let result = common::create_success_result(Uuid::new_v4(), 100_000); - - publisher.publish_result(result.clone()).await.unwrap(); - assert_eq!(publisher.published_count(), 1); - - let published = publisher.get_published(); - assert_eq!(published[0].id, result.id); - } -} diff --git a/crates/simulator/tests/common/mod.rs b/crates/simulator/tests/common/mod.rs index 5780ba3..e0eec56 100644 --- a/crates/simulator/tests/common/mod.rs +++ b/crates/simulator/tests/common/mod.rs @@ -1,4 +1,7 @@ +#![allow(dead_code)] + /// Common test utilities and infrastructure for simulator testing + pub mod builders; pub mod fixtures; pub mod mock_bundle_simulator; @@ -81,78 +84,6 @@ pub mod assertions { ); } - /// Assert that a simulation result is a failure - pub fn assert_simulation_failure(result: &SimulationResult) { - assert!(!result.success, "Expected failed simulation"); - assert!( - result.gas_used.is_none(), - "Failed simulation should not have gas_used" - ); - assert!( - result.error_reason.is_some(), - "Failed simulation should have error reason" - ); - } - /// Assert state diff contains expected changes - pub fn assert_state_diff_contains( - result: &SimulationResult, - address: Address, - slot: U256, - expected_value: U256, - ) { - let storage = result - .state_diff - .get(&address) - .expect("Address not found in state diff"); - let value = storage.get(&slot).expect("Storage slot not found"); - assert_eq!(*value, expected_value, "Unexpected storage value"); - 
} } -/// Test timing utilities -pub mod timing { - use std::time::{Duration, Instant}; - - /// Measure execution time of an async operation - pub async fn measure_async(f: F) -> (T, Duration) - where - F: std::future::Future, - { - let start = Instant::now(); - let result = f.await; - (result, start.elapsed()) - } - - /// Assert that an operation completes within a timeout - pub async fn assert_completes_within(f: F, timeout: Duration) -> T - where - F: std::future::Future, - { - tokio::time::timeout(timeout, f) - .await - .expect("Operation timed out") - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_create_test_bundle() { - let bundle = create_test_bundle(3, 18_000_000); - assert_eq!(bundle.txs.len(), 3); - assert_eq!(bundle.block_number, 18_000_000); - } - - #[test] - fn test_create_success_result() { - let bundle_id = Uuid::new_v4(); - let result = create_success_result(bundle_id, 150_000); - - assertions::assert_simulation_success(&result); - assert_eq!(result.bundle_id, bundle_id); - assert_eq!(result.gas_used, Some(150_000)); - } -} diff --git a/crates/simulator/tests/common_tests/builders_test.rs b/crates/simulator/tests/common_tests/builders_test.rs new file mode 100644 index 0000000..1882a93 --- /dev/null +++ b/crates/simulator/tests/common_tests/builders_test.rs @@ -0,0 +1,97 @@ +use crate::common::builders::*; +use alloy_primitives::{Address, B256, U256}; +use uuid::Uuid; + +#[test] +fn test_bundle_builder() { + let bundle = TestBundleBuilder::new() + .with_simple_transaction(&[0x01, 0x02]) + .with_simple_transaction(&[0x03, 0x04]) + .with_block_number(18_500_000) + .with_timestamps(1000, 2000) + .build(); + + assert_eq!(bundle.txs.len(), 2); + assert_eq!(bundle.block_number, 18_500_000); + assert_eq!(bundle.min_timestamp, Some(1000)); + assert_eq!(bundle.max_timestamp, Some(2000)); +} + +#[test] +fn test_result_builder() { + let bundle_id = Uuid::new_v4(); + let result = SimulationResultBuilder::successful() + .with_ids(Uuid::new_v4(), bundle_id) + .with_gas_used(200_000) + .with_state_change(Address::random(), U256::from(1), U256::from(100)) + .build(); + + assert!(result.success); + assert_eq!(result.bundle_id, bundle_id); + assert_eq!(result.gas_used, Some(200_000)); + assert!(!result.state_diff.is_empty()); +} + +#[test] +fn test_simulation_result_builder_comprehensive() { + // Test successful result + let success_result = SimulationResultBuilder::successful() + .with_gas_used(250_000) + .with_execution_time_us(2000) + .with_state_change(Address::random(), U256::from(0), U256::from(500)) + .build(); + + assert!(success_result.success); + assert_eq!(success_result.gas_used, Some(250_000)); + assert_eq!(success_result.execution_time_us, 2000); + assert!(!success_result.state_diff.is_empty()); + + // Test failed result with revert + let revert_result = SimulationResultBuilder::failed() + .with_revert("Insufficient funds".to_string()) + .build(); + + assert!(!revert_result.success); + assert!(revert_result.gas_used.is_none()); + assert!(revert_result + .error_reason + .as_ref() + .unwrap() + .contains("Insufficient funds")); + + // Test failed result with out of gas + let oog_result = SimulationResultBuilder::failed().with_out_of_gas().build(); + + assert!(!oog_result.success); + assert!(oog_result + .error_reason + .as_ref() + .unwrap() + .contains("out of gas")); + + // Test invalid nonce + let nonce_result = SimulationResultBuilder::failed() + .with_invalid_nonce(0, 5, 3) + .build(); + + assert!(!nonce_result.success); + assert!(nonce_result + 
.error_reason + .as_ref() + .unwrap() + .contains("Invalid nonce")); +} + +#[test] +fn test_scenario_builder() { + let requests = ScenarioBuilder::new() + .with_block(19_000_000, B256::random()) + .add_simple_bundle(2) + .add_simple_bundle(3) + .build_requests(); + + assert_eq!(requests.len(), 2); + assert_eq!(requests[0].block_number, 19_000_000); + assert_eq!(requests[0].bundle.txs.len(), 2); + assert_eq!(requests[1].bundle.txs.len(), 3); +} diff --git a/crates/simulator/tests/common_tests/fixtures_test.rs b/crates/simulator/tests/common_tests/fixtures_test.rs new file mode 100644 index 0000000..226c07c --- /dev/null +++ b/crates/simulator/tests/common_tests/fixtures_test.rs @@ -0,0 +1,32 @@ +use crate::common::fixtures::*; + +#[test] +fn test_fixture_addresses() { + assert_ne!(*addresses::ALICE, *addresses::BOB); + assert_ne!(*addresses::CONTRACT_A, *addresses::CONTRACT_B); +} + +#[test] +fn test_fixture_bundles() { + let single = bundles::single_tx_bundle(); + assert_eq!(single.txs.len(), 1); + + let multi = bundles::multi_tx_bundle(); + assert_eq!(multi.txs.len(), 3); + + let large = bundles::large_bundle(100); + assert_eq!(large.txs.len(), 100); +} + +#[test] +fn test_fixture_scenarios() { + let request = scenarios::basic_simulation(); + assert_eq!(request.block_number, blocks::BLOCK_18M); + assert_eq!(request.bundle.txs.len(), 1); + + let interaction = scenarios::contract_interaction(); + assert_eq!(interaction.bundle.txs.len(), 3); + + let large_scenario = scenarios::large_bundle_scenario(); + assert_eq!(large_scenario.bundle.txs.len(), 100); +} diff --git a/crates/simulator/tests/unit/core_test.rs b/crates/simulator/tests/common_tests/mock_bundle_simulator_test.rs similarity index 80% rename from crates/simulator/tests/unit/core_test.rs rename to crates/simulator/tests/common_tests/mock_bundle_simulator_test.rs index 3ec9ee7..7f90a9c 100644 --- a/crates/simulator/tests/unit/core_test.rs +++ b/crates/simulator/tests/common_tests/mock_bundle_simulator_test.rs @@ -1,25 +1,31 @@ -/// Unit tests for the BundleSimulator core component use crate::common::builders::*; use crate::common::fixtures::*; use crate::common::mock_bundle_simulator::MockBundleSimulator; -use crate::common::mocks::*; +use crate::common::mocks::{MockSimulationEngine, MockSimulationPublisher}; +use crate::common::{create_test_bundle, create_test_request}; use std::sync::Arc; -use tips_simulator::{core::BundleSimulator, SimulationError}; +use tips_simulator::core::BundleSimulator; +use tips_simulator::SimulationError; use uuid::Uuid; #[tokio::test] -async fn test_bundle_simulator_creation() { +async fn test_mock_bundle_simulator() { let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); - let simulator = MockBundleSimulator::new(engine, publisher); + let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); + + let bundle = create_test_bundle(1, 18_000_000); + let request = create_test_request(bundle); - // Simply verify it can be created - assert!(std::mem::size_of_val(&simulator) > 0); + let result = simulator.simulate(&request).await; + + assert!(result.is_ok()); + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); } #[tokio::test] async fn test_simulate_success_flow() { - // Arrange let bundle_id = Uuid::new_v4(); let expected_result = SimulationResultBuilder::successful() .with_ids(Uuid::new_v4(), bundle_id) @@ -35,10 +41,8 @@ async fn test_simulate_success_flow() { .with_bundle(bundles::single_tx_bundle()) .build(); - // 
Act - using the clean trait interface let result = simulator.simulate(&request).await; - // Assert assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); @@ -50,7 +54,6 @@ async fn test_simulate_success_flow() { #[tokio::test] async fn test_simulate_failure_flow() { - // Arrange let bundle_id = Uuid::new_v4(); let engine = MockSimulationEngine::new().fail_next_with(SimulationError::Revert { reason: "Test revert".to_string(), @@ -62,11 +65,9 @@ async fn test_simulate_failure_flow() { .with_bundle_id(bundle_id) .build(); - // Act let result = simulator.simulate(&request).await; - // Assert - assert!(result.is_ok()); // simulate() succeeds even if simulation fails + assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); @@ -81,51 +82,40 @@ async fn test_simulate_failure_flow() { #[tokio::test] async fn test_publisher_error_handling() { - // Arrange let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new().fail_next(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); let request = SimulationRequestBuilder::new().build(); - // Act - should log error but not fail let result = simulator.simulate(&request).await; - // Assert assert!(result.is_ok()); - assert_eq!(engine.simulation_count(), 1); // Engine was called - assert_eq!(publisher.published_count(), 0); // Publisher failed + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 0); } #[tokio::test] async fn test_state_provider_factory_error() { - // This test would require a mock StateProviderFactory that fails - // For now, we'll test with an invalid block hash let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - // Request with block hash that doesn't exist in our mock state let request = SimulationRequestBuilder::new() .with_block(99_999_999, alloy_primitives::B256::random()) .build(); - // Act let result = simulator.simulate(&request).await; - // Assert - in our mock, this actually succeeds, but in real implementation - // it would fail with state provider error assert!(result.is_ok()); } #[tokio::test] async fn test_multiple_sequential_simulations() { - // Arrange let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - // Act - simulate multiple bundles for i in 0..5 { let request = SimulationRequestBuilder::new() .with_bundle( @@ -139,27 +129,23 @@ async fn test_multiple_sequential_simulations() { assert!(result.is_ok()); } - // Assert assert_eq!(engine.simulation_count(), 5); assert_eq!(publisher.published_count(), 5); } #[tokio::test] async fn test_empty_bundle_simulation() { - // Arrange let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - let empty_bundle = TestBundleBuilder::new().build(); // No transactions + let empty_bundle = TestBundleBuilder::new().build(); let request = SimulationRequestBuilder::new() .with_bundle(empty_bundle) .build(); - // Act let result = simulator.simulate(&request).await; - // Assert assert!(result.is_ok()); assert_eq!(engine.simulation_count(), 1); assert_eq!(publisher.published_count(), 1); @@ -167,11 +153,9 @@ async fn test_empty_bundle_simulation() { 
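The test that follows builds the nested state-diff map the simulator publishes: account address to storage slot to new value. A minimal sketch of constructing and querying that shape, assuming `alloy-primitives` with its default `rand` feature for `Address::random()`:

    use alloy_primitives::{Address, U256};
    use std::collections::HashMap;

    // Account address -> (storage slot -> value written by the bundle).
    type StateDiff = HashMap<Address, HashMap<U256, U256>>;

    fn main() {
        let mut diff: StateDiff = HashMap::new();
        let account = Address::random();

        // Record a write of 100 to storage slot 0 of `account`.
        diff.entry(account)
            .or_default()
            .insert(U256::ZERO, U256::from(100));

        // Reads of untouched accounts or slots simply return None.
        let slot0 = diff.get(&account).and_then(|slots| slots.get(&U256::ZERO));
        assert_eq!(slot0, Some(&U256::from(100)));
        assert!(diff.get(&Address::ZERO).is_none());
    }

Nesting per account is what lets the assertions below check the account count and the per-account slot count independently.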
#[tokio::test] async fn test_simulate_with_complex_state_diff() { - // Arrange let bundle_id = Uuid::new_v4(); let mut state_diff = std::collections::HashMap::new(); - // Add multiple accounts with multiple storage changes for i in 0..3 { let addr = alloy_primitives::Address::random(); let mut storage = std::collections::HashMap::new(); @@ -188,7 +172,6 @@ async fn test_simulate_with_complex_state_diff() { .with_ids(Uuid::new_v4(), bundle_id) .build(); - // Manually set the state diff let mut result = result; result.state_diff = state_diff.clone(); @@ -200,10 +183,8 @@ async fn test_simulate_with_complex_state_diff() { .with_bundle_id(bundle_id) .build(); - // Act simulator.simulate(&request).await.unwrap(); - // Assert let published = publisher.get_published(); assert_eq!(published[0].state_diff.len(), 3); for (_, storage) in &published[0].state_diff { @@ -213,14 +194,12 @@ async fn test_simulate_with_complex_state_diff() { #[tokio::test] async fn test_concurrent_simulator_usage() { - // Test that the simulator can be used concurrently let engine = MockSimulationEngine::new(); let publisher = MockSimulationPublisher::new(); let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); let mut handles = vec![]; - // Spawn multiple concurrent simulations for i in 0..10 { let sim = Arc::clone(&simulator); @@ -239,13 +218,11 @@ async fn test_concurrent_simulator_usage() { handles.push(handle); } - // Wait for all to complete for handle in handles { let result = handle.await.unwrap(); assert!(result.is_ok()); } - // Verify all were processed assert_eq!(engine.simulation_count(), 10); assert_eq!(publisher.published_count(), 10); } diff --git a/crates/simulator/tests/common_tests/mod.rs b/crates/simulator/tests/common_tests/mod.rs new file mode 100644 index 0000000..25bd631 --- /dev/null +++ b/crates/simulator/tests/common_tests/mod.rs @@ -0,0 +1,3 @@ +pub mod builders_test; +pub mod fixtures_test; +pub mod mock_bundle_simulator_test; diff --git a/crates/simulator/tests/unit/error_handling_test.rs b/crates/simulator/tests/error_handling_test.rs similarity index 98% rename from crates/simulator/tests/unit/error_handling_test.rs rename to crates/simulator/tests/error_handling_test.rs index 9066d61..c168695 100644 --- a/crates/simulator/tests/unit/error_handling_test.rs +++ b/crates/simulator/tests/error_handling_test.rs @@ -1,8 +1,10 @@ /// Unit tests for error handling scenarios -use crate::common::builders::*; -use crate::common::fixtures::*; -use crate::common::mock_bundle_simulator::MockBundleSimulator; -use crate::common::mocks::*; +mod common; + +use common::builders::*; +use common::fixtures::*; +use common::mock_bundle_simulator::MockBundleSimulator; +use common::mocks::*; use tips_simulator::{core::BundleSimulator, SimulationError}; #[tokio::test] diff --git a/crates/simulator/tests/integration_test.rs b/crates/simulator/tests/integration_test.rs deleted file mode 100644 index e6d0ae0..0000000 --- a/crates/simulator/tests/integration_test.rs +++ /dev/null @@ -1,282 +0,0 @@ -/// Integration tests for the simulator functionality -/// -/// Note: These tests use mock implementations because the actual StateProvider -/// trait requires complex setup. For real integration testing with actual -/// state providers, see the component tests. 
-mod common; -mod unit; - -use common::assertions::*; -use common::builders::*; -use common::fixtures::*; -use common::mock_bundle_simulator::MockBundleSimulator; -use common::mocks::*; -use common::timing::*; - -use alloy_primitives::U256; -use std::sync::Arc; -use std::time::Duration; -use tips_simulator::{ - core::BundleSimulator, types::ExExSimulationConfig, MempoolListenerConfig, SimulationWorkerPool, -}; - -#[tokio::test] -async fn test_successful_bundle_simulation() { - // Setup - let engine = MockSimulationEngine::new(); - let publisher = MockSimulationPublisher::new(); - let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - // Create test request - let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); - - // Execute - let result = simulator.simulate(&request).await; - - // Verify - assert!(result.is_ok()); - assert_eq!(engine.simulation_count(), 1); - assert_eq!(publisher.published_count(), 1); - - let published = publisher.get_published(); - assert_simulation_success(&published[0]); -} - -#[tokio::test] -async fn test_failed_bundle_simulation() { - // Setup with failing engine - let engine = MockSimulationEngine::new() - .fail_next_with(tips_simulator::types::SimulationError::OutOfGas); - let publisher = MockSimulationPublisher::new(); - let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - // Create test request - let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); - - // Execute - let result = simulator.simulate(&request).await; - - // Verify - assert!(result.is_ok()); // simulate() itself succeeds even if simulation fails - assert_eq!(engine.simulation_count(), 1); - assert_eq!(publisher.published_count(), 1); - - let published = publisher.get_published(); - assert_simulation_failure(&published[0]); - assert!(published[0] - .error_reason - .as_ref() - .unwrap() - .contains("out of gas")); -} - -#[tokio::test] -async fn test_publisher_failure_handling() { - // Setup with failing publisher - let engine = MockSimulationEngine::new(); - let publisher = MockSimulationPublisher::new().fail_next(); - let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - // Create test request - let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); - - // Execute - should not panic even if publisher fails - let result = simulator.simulate(&request).await; - - // Verify - assert!(result.is_ok()); - assert_eq!(engine.simulation_count(), 1); - assert_eq!(publisher.published_count(), 0); // Publisher failed -} - -#[tokio::test] -async fn test_worker_pool_concurrent_simulations() { - // Setup - let engine = MockSimulationEngine::new(); - let publisher = MockSimulationPublisher::new(); - let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); - // Provider no longer needed with new architecture - - // Create worker pool with 4 workers - let pool = SimulationWorkerPool::new(simulator, 4); - pool.start().await; - - // Queue multiple simulations - let num_simulations = 20; - let mut bundle_ids = Vec::new(); - - for i in 0..num_simulations { - let bundle = TestBundleBuilder::new() - .with_simple_transaction(&[i as u8, 0x01, 0x02]) - .with_block_number(blocks::BLOCK_18M + i as u64) - .build(); - - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .with_block( - blocks::BLOCK_18M + i as 
u64, - alloy_primitives::B256::random(), - ) - .build(); - - bundle_ids.push(request.bundle_id); - - let task = tips_simulator::worker_pool::SimulationTask { request }; - pool.queue_simulation(task).await.unwrap(); - } - - // Wait for completion with timeout - let (_, duration) = measure_async(async { - tokio::time::sleep(Duration::from_millis(500)).await; - }) - .await; - - // Verify all simulations completed - assert_eq!(publisher.published_count(), num_simulations); - - // Verify all bundle IDs are present - let published = publisher.get_published(); - for bundle_id in bundle_ids { - assert!(published.iter().any(|r| r.bundle_id == bundle_id)); - } - - // Verify reasonable execution time - assert!( - duration < Duration::from_secs(2), - "Simulations took too long" - ); -} - -#[tokio::test] -async fn test_worker_pool_error_recovery() { - // Setup engine that fails every other simulation - let engine = MockSimulationEngine::new(); - let publisher = MockSimulationPublisher::new(); - let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); - // Provider no longer needed with new architecture - - // Create worker pool - let pool = SimulationWorkerPool::new(simulator, 2); - pool.start().await; - - // Queue simulations with some failures - for i in 0..10 { - let mut builder = SimulationResultBuilder::successful(); - if i % 2 == 1 { - builder = SimulationResultBuilder::failed().with_revert(format!("Test revert {}", i)); - } - - let _ = engine.clone().with_result(builder.build()); - - let request = SimulationRequestBuilder::new() - .with_bundle(bundles::single_tx_bundle()) - .build(); - - let task = tips_simulator::worker_pool::SimulationTask { request }; - pool.queue_simulation(task).await.unwrap(); - } - - // Wait for completion - tokio::time::sleep(Duration::from_millis(300)).await; - - // Verify all simulations were attempted - let published = publisher.get_published(); - assert_eq!(published.len(), 10); - - // Verify mix of successes and failures - let successes = published.iter().filter(|r| r.success).count(); - let failures = published.iter().filter(|r| !r.success).count(); - assert!(successes > 0 && failures > 0); -} - -#[tokio::test] -async fn test_large_bundle_simulation() { - // Setup - let engine = MockSimulationEngine::new(); - let publisher = MockSimulationPublisher::new(); - let simulator = MockBundleSimulator::new(engine.clone(), publisher.clone()); - - // Create large bundle - let large_bundle = bundles::large_bundle(100); - let request = SimulationRequestBuilder::new() - .with_bundle(large_bundle) - .build(); - - // Execute with timeout - let result = - assert_completes_within(simulator.simulate(&request), Duration::from_secs(5)).await; - - // Verify - assert!(result.is_ok()); - assert_eq!(engine.simulation_count(), 1); - assert_eq!(publisher.published_count(), 1); -} - -#[tokio::test] -async fn test_state_diff_tracking() { - // Setup engine that returns specific state changes - let simulation_result = SimulationResultBuilder::successful() - .with_state_change(*addresses::ALICE, U256::from(0), U256::from(100)) - .with_state_change(*addresses::ALICE, U256::from(1), U256::from(200)) - .with_state_change(*addresses::BOB, U256::from(0), U256::from(300)) - .build(); - - let engine = MockSimulationEngine::new().with_result(simulation_result); - let publisher = MockSimulationPublisher::new(); - let simulator = MockBundleSimulator::new(engine, publisher.clone()); - - // Execute - let bundle = bundles::single_tx_bundle(); - let request = 
SimulationRequestBuilder::new().with_bundle(bundle).build(); - simulator.simulate(&request).await.unwrap(); - - // Verify state diff - let published = publisher.get_published(); - assert_eq!(published.len(), 1); - - let result = &published[0]; - assert_state_diff_contains(result, *addresses::ALICE, U256::from(0), U256::from(100)); - assert_state_diff_contains(result, *addresses::ALICE, U256::from(1), U256::from(200)); - assert_state_diff_contains(result, *addresses::BOB, U256::from(0), U256::from(300)); -} - -#[test] -fn test_simulation_request_creation() { - let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new() - .with_bundle(bundle.clone()) - .with_block(blocks::BLOCK_18M, *blocks::HASH_18M) - .build(); - - assert_eq!(request.bundle.txs.len(), bundle.txs.len()); - assert_eq!(request.block_number, blocks::BLOCK_18M); - assert_eq!(request.block_hash, *blocks::HASH_18M); -} - -#[test] -fn test_mempool_config() { - let config = MempoolListenerConfig { - kafka_brokers: vec!["localhost:9092".to_string()], - kafka_topic: "tips-audit".to_string(), - kafka_group_id: "tips-simulator".to_string(), - database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), - }; - - assert_eq!(config.kafka_brokers, vec!["localhost:9092"]); - assert_eq!(config.kafka_topic, "tips-audit"); -} - -#[test] -fn test_exex_config() { - let config = ExExSimulationConfig { - database_url: "postgresql://user:pass@localhost:5432/tips".to_string(), - max_concurrent_simulations: 10, - simulation_timeout_ms: 5000, - }; - - assert_eq!(config.max_concurrent_simulations, 10); - assert_eq!(config.simulation_timeout_ms, 5000); -} diff --git a/crates/simulator/tests/publisher_test.rs b/crates/simulator/tests/publisher_test.rs new file mode 100644 index 0000000..16447d1 --- /dev/null +++ b/crates/simulator/tests/publisher_test.rs @@ -0,0 +1,146 @@ +/// Unit tests for the SimulationPublisher implementation +mod common; + +use common::builders::*; +use alloy_primitives::{Address, B256, U256}; +use std::collections::HashMap; + +// These tests focus on the logic that can be tested without requiring +// complex mocking of Kafka and PostgreSQL infrastructure + +#[tokio::test] +async fn test_state_diff_conversion_logic() { + // Test the state diff conversion logic that TipsSimulationPublisher uses + let mut original_state_diff = HashMap::new(); + + // Create test data with multiple accounts and storage slots + for i in 0..3 { + let addr = Address::random(); + let mut storage = HashMap::new(); + + for j in 0..5 { + storage.insert(U256::from(i * 10 + j), U256::from((i + 1) * 100 + j)); + } + + original_state_diff.insert(addr, storage); + } + + // Convert as TipsSimulationPublisher would + let mut converted = HashMap::new(); + for (address, storage) in &original_state_diff { + let mut storage_map = HashMap::new(); + for (key, value) in storage { + let key_bytes = key.to_be_bytes::<32>(); + let storage_key = B256::from(key_bytes); + storage_map.insert(storage_key, *value); + } + converted.insert(*address, storage_map); + } + + // Verify conversion + assert_eq!(converted.len(), original_state_diff.len()); + + for (address, original_storage) in &original_state_diff { + assert!(converted.contains_key(address)); + let converted_storage = &converted[address]; + assert_eq!(converted_storage.len(), original_storage.len()); + + for (key, value) in original_storage { + let key_bytes = key.to_be_bytes::<32>(); + let storage_key = B256::from(key_bytes); + assert_eq!(converted_storage[&storage_key], *value); + } 
+ } +} + + +#[test] +fn test_large_state_diff_handling() { + // Test handling of large state diffs + let mut large_state_diff = HashMap::new(); + + // Create a large state diff with many accounts and storage slots + for i in 0..100 { + let addr = Address::random(); + let mut storage = HashMap::new(); + + for j in 0..50 { + storage.insert(U256::from(i * 1000 + j), U256::from(j * 12345)); + } + + large_state_diff.insert(addr, storage); + } + + // Convert as TipsSimulationPublisher would + let mut converted = HashMap::new(); + for (address, storage) in &large_state_diff { + let mut storage_map = HashMap::new(); + for (key, value) in storage { + let key_bytes = key.to_be_bytes::<32>(); + let storage_key = B256::from(key_bytes); + storage_map.insert(storage_key, *value); + } + converted.insert(*address, storage_map); + } + + // Verify large state diff conversion + assert_eq!(converted.len(), 100); + for (_, storage) in &converted { + assert_eq!(storage.len(), 50); + } +} + + + +#[test] +fn test_execution_time_bounds() { + // Test execution time edge cases + let test_cases = vec![ + (1_u128, "Minimum execution time"), + (1000_u128, "Typical execution time"), + (1_000_000_u128, "Long execution time"), + (u64::MAX as u128, "Maximum practical time"), + ]; + + for (execution_time, description) in test_cases { + let result = SimulationResultBuilder::successful() + .with_execution_time_us(execution_time) + .build(); + + assert_eq!(result.execution_time_us, execution_time, "Failed for: {}", description); + } +} + + + +#[test] +fn test_multiple_addresses_same_storage() { + // Test multiple addresses with the same storage patterns + let addresses = vec![Address::random(), Address::random(), Address::random()]; + let mut state_diff = HashMap::new(); + + for addr in &addresses { + let mut storage = HashMap::new(); + storage.insert(U256::from(1), U256::from(100)); + storage.insert(U256::from(2), U256::from(200)); + state_diff.insert(*addr, storage); + } + + // Convert + let mut converted = HashMap::new(); + for (address, storage) in &state_diff { + let mut storage_map = HashMap::new(); + for (key, value) in storage { + let key_bytes = key.to_be_bytes::<32>(); + let storage_key = B256::from(key_bytes); + storage_map.insert(storage_key, *value); + } + converted.insert(*address, storage_map); + } + + assert_eq!(converted.len(), 3); + for addr in &addresses { + assert!(converted.contains_key(addr)); + assert_eq!(converted[addr].len(), 2); + } +} diff --git a/crates/simulator/tests/run_common_tests.rs b/crates/simulator/tests/run_common_tests.rs new file mode 100644 index 0000000..0664139 --- /dev/null +++ b/crates/simulator/tests/run_common_tests.rs @@ -0,0 +1,2 @@ +mod common; +mod common_tests; diff --git a/crates/simulator/tests/unit/mod.rs b/crates/simulator/tests/unit/mod.rs deleted file mode 100644 index 2cf9635..0000000 --- a/crates/simulator/tests/unit/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -/// Unit tests for simulator core components -pub mod core_test; -pub mod error_handling_test; -pub mod types_test; diff --git a/crates/simulator/tests/unit/types_test.rs b/crates/simulator/tests/unit/types_test.rs deleted file mode 100644 index b5518d7..0000000 --- a/crates/simulator/tests/unit/types_test.rs +++ /dev/null @@ -1,269 +0,0 @@ -/// Unit tests for simulator types -use crate::common::builders::*; -use crate::common::fixtures::*; -use alloy_primitives::{Address, B256, U256}; -use std::collections::HashMap; -use tips_simulator::types::{SimulationError, SimulationRequest, SimulationResult}; -use uuid::Uuid; - 
-#[test] -fn test_simulation_result_success_creation() { - let id = Uuid::new_v4(); - let bundle_id = Uuid::new_v4(); - let block_hash = B256::random(); - let gas_used = 150_000; - let execution_time = 1500; - - let mut state_diff = HashMap::new(); - let addr = Address::random(); - let mut storage = HashMap::new(); - storage.insert(U256::from(0), U256::from(100)); - state_diff.insert(addr, storage); - - let result = SimulationResult::success( - id, - bundle_id, - 18_000_000, - block_hash, - gas_used, - execution_time, - state_diff.clone(), - ); - - assert_eq!(result.id, id); - assert_eq!(result.bundle_id, bundle_id); - assert_eq!(result.block_number, 18_000_000); - assert_eq!(result.block_hash, block_hash); - assert!(result.success); - assert_eq!(result.gas_used, Some(gas_used)); - assert_eq!(result.execution_time_us, execution_time); - assert_eq!(result.state_diff.len(), 1); - assert!(result.error_reason.is_none()); -} - -#[test] -fn test_simulation_result_failure_creation() { - let id = Uuid::new_v4(); - let bundle_id = Uuid::new_v4(); - let block_hash = B256::random(); - let execution_time = 500; - let error = SimulationError::Revert { - reason: "Test revert".to_string(), - }; - - let result = SimulationResult::failure( - id, - bundle_id, - 18_000_000, - block_hash, - execution_time, - error.clone(), - ); - - assert_eq!(result.id, id); - assert_eq!(result.bundle_id, bundle_id); - assert!(!result.success); - assert!(result.gas_used.is_none()); - assert!(result.state_diff.is_empty()); - assert_eq!(result.error_reason, Some(error.to_string())); -} - -#[test] -fn test_simulation_error_display() { - let test_cases = vec![ - ( - SimulationError::Revert { - reason: "Invalid state".to_string(), - }, - "Bundle reverted: Invalid state", - ), - (SimulationError::OutOfGas, "Bundle ran out of gas"), - ( - SimulationError::InvalidNonce { - tx_index: 2, - expected: 5, - actual: 3, - }, - "Invalid nonce in tx 2: expected 5, got 3", - ), - ( - SimulationError::InsufficientBalance { - tx_index: 1, - required: U256::from(1000), - available: U256::from(500), - }, - "Insufficient balance in tx 1: required 1000, available 500", - ), - ( - SimulationError::StateAccessError { - message: "RPC timeout".to_string(), - }, - "State access error: RPC timeout", - ), - (SimulationError::Timeout, "Simulation timed out"), - ( - SimulationError::Unknown { - message: "Something went wrong".to_string(), - }, - "Unknown error: Something went wrong", - ), - ]; - - for (error, expected) in test_cases { - assert_eq!(error.to_string(), expected); - } -} - -#[test] -fn test_simulation_request_fields() { - let bundle_id = Uuid::new_v4(); - let bundle = bundles::single_tx_bundle(); - let block_number = 18_000_000; - let block_hash = B256::random(); - - let request = SimulationRequest { - bundle_id, - bundle: bundle.clone(), - block_number, - block_hash, - }; - - assert_eq!(request.bundle_id, bundle_id); - assert_eq!(request.bundle.txs.len(), bundle.txs.len()); - assert_eq!(request.block_number, block_number); - assert_eq!(request.block_hash, block_hash); -} - -#[test] -fn test_simulation_result_builder() { - // Test successful result - let success_result = SimulationResultBuilder::successful() - .with_gas_used(250_000) - .with_execution_time_us(2000) - .with_state_change(*addresses::ALICE, U256::from(0), U256::from(500)) - .build(); - - assert!(success_result.success); - assert_eq!(success_result.gas_used, Some(250_000)); - assert_eq!(success_result.execution_time_us, 2000); - 
assert!(success_result.state_diff.contains_key(&*addresses::ALICE)); - - // Test failed result with revert - let revert_result = SimulationResultBuilder::failed() - .with_revert("Insufficient funds".to_string()) - .build(); - - assert!(!revert_result.success); - assert!(revert_result.gas_used.is_none()); - assert!(revert_result - .error_reason - .as_ref() - .unwrap() - .contains("Insufficient funds")); - - // Test failed result with out of gas - let oog_result = SimulationResultBuilder::failed().with_out_of_gas().build(); - - assert!(!oog_result.success); - assert!(oog_result - .error_reason - .as_ref() - .unwrap() - .contains("out of gas")); - - // Test invalid nonce - let nonce_result = SimulationResultBuilder::failed() - .with_invalid_nonce(0, 5, 3) - .build(); - - assert!(!nonce_result.success); - assert!(nonce_result - .error_reason - .as_ref() - .unwrap() - .contains("Invalid nonce")); -} - -#[test] -fn test_simulation_result_timestamp() { - let result = SimulationResultBuilder::successful().build(); - - // Check that timestamp is recent (within last minute) - let now = chrono::Utc::now(); - let created_timestamp = result.created_at.timestamp(); - let now_timestamp = now.timestamp(); - let diff = now_timestamp - created_timestamp; - assert!(diff < 60); -} - -#[test] -fn test_large_state_diff() { - let mut builder = SimulationResultBuilder::successful(); - - // Add many state changes - for i in 0..100 { - let addr = Address::random(); - for j in 0..10 { - builder = builder.with_state_change(addr, U256::from(j), U256::from(i * 1000 + j)); - } - } - - let result = builder.build(); - assert_eq!(result.state_diff.len(), 100); - - // Verify each account has 10 storage slots - for (_, storage) in &result.state_diff { - assert_eq!(storage.len(), 10); - } -} - -#[test] -fn test_error_serialization() { - // Verify that errors can be converted to strings and back - let errors = vec![ - SimulationError::Revert { - reason: "test".to_string(), - }, - SimulationError::OutOfGas, - SimulationError::InvalidNonce { - tx_index: 1, - expected: 2, - actual: 3, - }, - SimulationError::Timeout, - ]; - - for error in errors { - let error_string = error.to_string(); - assert!(!error_string.is_empty()); - - // Create a result with this error - let result = SimulationResult::failure( - Uuid::new_v4(), - Uuid::new_v4(), - 18_000_000, - B256::random(), - 1000, - error, - ); - - assert_eq!(result.error_reason, Some(error_string)); - } -} - -#[test] -fn test_simulation_result_gas_used_bounds() { - // Test with maximum gas - let max_gas_result = SimulationResultBuilder::successful() - .with_gas_used(30_000_000) // 30M gas - .build(); - - assert_eq!(max_gas_result.gas_used, Some(30_000_000)); - - // Test with zero gas (edge case) - let zero_gas_result = SimulationResultBuilder::successful() - .with_gas_used(0) - .build(); - - assert_eq!(zero_gas_result.gas_used, Some(0)); -} diff --git a/crates/simulator/tests/worker_pool_test.rs b/crates/simulator/tests/worker_pool_test.rs new file mode 100644 index 0000000..f68e42c --- /dev/null +++ b/crates/simulator/tests/worker_pool_test.rs @@ -0,0 +1,463 @@ +/// Unit tests for the SimulationWorkerPool implementation +mod common; + +use common::builders::*; +use common::fixtures::*; +use common::mock_bundle_simulator::MockBundleSimulator; +use common::mocks::*; +use std::sync::Arc; +use std::time::Duration; +use tips_simulator::worker_pool::{SimulationTask, SimulationWorkerPool}; + + +#[tokio::test] +async fn test_worker_pool_start_and_shutdown() { + let engine = 
MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine, publisher)); + + let pool = SimulationWorkerPool::new(simulator, 2); + + // Start the pool + let started = pool.start().await; + assert!(started); // Should return true on first start + + // Starting again should return false + let started_again = pool.start().await; + assert!(!started_again); + + // Test shutdown - pool will shutdown when dropped +} + +#[tokio::test] +async fn test_worker_pool_single_simulation() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 1); + pool.start().await; + + // Queue a single simulation + let bundle = bundles::single_tx_bundle(); + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + + let task = SimulationTask { request }; + + let queue_result = pool.queue_simulation(task).await; + assert!(queue_result.is_ok()); + + // Wait for processing + tokio::time::sleep(Duration::from_millis(100)).await; + + // Verify simulation was processed + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 1); +} + +#[tokio::test] +async fn test_worker_pool_multiple_simulations() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 3); + pool.start().await; + + // Queue multiple simulations + let num_simulations = 10; + for i in 0..num_simulations { + let bundle = TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x01, 0x02]) + .with_block_number(18_000_000 + i as u64) + .build(); + + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .with_block(18_000_000 + i as u64, alloy_primitives::B256::random()) + .build(); + + let task = SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for processing + tokio::time::sleep(Duration::from_millis(500)).await; + + // Verify all simulations were processed + assert_eq!(engine.simulation_count(), num_simulations); + assert_eq!(publisher.published_count(), num_simulations); +} + +#[tokio::test] +async fn test_worker_pool_concurrent_workers() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + // Create pool with multiple workers + let num_workers = 5; + let pool = SimulationWorkerPool::new(simulator, num_workers); + pool.start().await; + + // Queue many simulations quickly + let num_simulations = 20; + let mut tasks = vec![]; + + for i in 0..num_simulations { + let bundle = TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x03, 0x04]) + .build(); + + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + + tasks.push(SimulationTask { request }); + } + + // Queue all tasks + for task in tasks { + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for all to process + tokio::time::sleep(Duration::from_millis(800)).await; + + // Verify all were processed + assert_eq!(engine.simulation_count(), num_simulations); + assert_eq!(publisher.published_count(), num_simulations); +} + +#[tokio::test] +async fn 
test_worker_pool_simulation_failures() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 2); + pool.start().await; + + // Queue simulations with some that will fail + for i in 0..5 { + // Configure engine to fail odd-numbered simulations + if i % 2 == 1 { + let _ = engine.clone().fail_next_with(tips_simulator::SimulationError::OutOfGas); + } else { + let result = SimulationResultBuilder::successful() + .with_gas_used(100_000 + i * 10_000) + .build(); + let _ = engine.clone().with_result(result); + } + + let request = SimulationRequestBuilder::new() + .with_bundle(bundles::single_tx_bundle()) + .build(); + + let task = SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for processing + tokio::time::sleep(Duration::from_millis(300)).await; + + // Verify all simulations were attempted (both successes and failures) + assert_eq!(engine.simulation_count(), 5); + assert_eq!(publisher.published_count(), 5); + + // Verify mix of success and failure results + let published = publisher.get_published(); + let successes = published.iter().filter(|r| r.success).count(); + let failures = published.iter().filter(|r| !r.success).count(); + assert!(successes > 0); + assert!(failures > 0); +} + +#[tokio::test] +async fn test_worker_pool_publisher_failures() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 1); + pool.start().await; + + // Configure publisher to fail + let _ = publisher.clone().fail_next(); + + let request = SimulationRequestBuilder::new().build(); + let task = SimulationTask { request }; + + pool.queue_simulation(task).await.unwrap(); + + // Wait for processing + tokio::time::sleep(Duration::from_millis(100)).await; + + // Engine should still be called even if publisher fails + assert_eq!(engine.simulation_count(), 1); + assert_eq!(publisher.published_count(), 0); // Publisher failed +} + +#[tokio::test] +async fn test_worker_pool_block_cancellation() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 2); + pool.start().await; + + // Queue simulations for different blocks + let old_block = 18_000_000; + let new_block = 18_000_010; + + // Queue simulation for old block + let old_request = SimulationRequestBuilder::new() + .with_block(old_block, alloy_primitives::B256::random()) + .build(); + let old_task = SimulationTask { request: old_request }; + + // Queue simulation for new block + let new_request = SimulationRequestBuilder::new() + .with_block(new_block, alloy_primitives::B256::random()) + .build(); + let new_task = SimulationTask { request: new_request }; + + // Update latest block to the new block (should cancel old simulations) + pool.update_latest_block(new_block); + + // Queue both tasks (old should be cancelled, new should proceed) + pool.queue_simulation(old_task).await.unwrap(); + pool.queue_simulation(new_task).await.unwrap(); + + // Wait for processing + tokio::time::sleep(Duration::from_millis(200)).await; + + // Only the new block simulation should have been processed + // Note: Due to 
timing, both might be processed or the old one might be skipped + // We check that at least one was processed + assert!(engine.simulation_count() >= 1); + assert!(publisher.published_count() >= 1); +} + +#[tokio::test] +async fn test_worker_pool_heavy_load() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 4); + pool.start().await; + + // Queue a large number of simulations + let num_simulations = 50; + + for i in 0..num_simulations { + let bundle = TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x05, 0x06]) + .build(); + + let request = SimulationRequestBuilder::new() + .with_bundle(bundle) + .build(); + + let task = SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for processing with a reasonable timeout + tokio::time::sleep(Duration::from_secs(2)).await; + + // Verify all simulations were processed + assert_eq!(engine.simulation_count(), num_simulations); + assert_eq!(publisher.published_count(), num_simulations); +} + +#[tokio::test] +async fn test_worker_pool_empty_queue_shutdown() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine, publisher)); + + let pool = SimulationWorkerPool::new(simulator, 2); + pool.start().await; + + // Shutdown immediately without queuing any tasks - pool will shutdown when dropped +} + +#[tokio::test] +async fn test_worker_pool_large_bundles() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 2); + pool.start().await; + + // Queue simulations with large bundles + for i in 0..3 { + let large_bundle = bundles::large_bundle(20 + i * 10); // 20, 30, 40 transactions + let request = SimulationRequestBuilder::new() + .with_bundle(large_bundle) + .build(); + + let task = SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for processing + tokio::time::sleep(Duration::from_millis(400)).await; + + // Verify all large bundles were processed + assert_eq!(engine.simulation_count(), 3); + assert_eq!(publisher.published_count(), 3); +} + +#[tokio::test] +async fn test_worker_pool_queue_full_behavior() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + // Create pool with small queue capacity (we can't easily control this with current API) + let pool = SimulationWorkerPool::new(simulator, 1); + pool.start().await; + + // Queue many simulations rapidly + let mut queue_results = vec![]; + for i in 0..20 { + let request = SimulationRequestBuilder::new() + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[i as u8]) + .build(), + ) + .build(); + + let task = SimulationTask { request }; + let result = pool.queue_simulation(task).await; + queue_results.push(result); + } + + // All should succeed with current implementation (large default queue size) + for result in queue_results { + assert!(result.is_ok()); + } + + // Wait for processing + tokio::time::sleep(Duration::from_millis(800)).await; + + // Verify processing + assert_eq!(engine.simulation_count(), 20); 
+ assert_eq!(publisher.published_count(), 20); +} + +#[tokio::test] +async fn test_worker_pool_mixed_block_numbers() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 3); + pool.start().await; + + // Queue simulations for various block numbers + let block_numbers = vec![18_000_000, 18_000_005, 18_000_002, 18_000_008, 18_000_001]; + + for (i, block_num) in block_numbers.iter().enumerate() { + let request = SimulationRequestBuilder::new() + .with_block(*block_num, alloy_primitives::B256::random()) + .with_bundle( + TestBundleBuilder::new() + .with_simple_transaction(&[i as u8, 0x07, 0x08]) + .build(), + ) + .build(); + + let task = SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Update to latest block to potentially cancel some older simulations + pool.update_latest_block(18_000_008); + + // Wait for processing + tokio::time::sleep(Duration::from_millis(300)).await; + + // Some simulations should have been processed + assert!(engine.simulation_count() > 0); + assert!(publisher.published_count() > 0); + assert!(engine.simulation_count() <= block_numbers.len()); +} + +#[tokio::test] +async fn test_worker_pool_rapid_block_updates() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 2); + pool.start().await; + + // Rapidly update block numbers + for i in 0..10 { + pool.update_latest_block(18_000_000 + i); + + // Queue a simulation for an older block (should be cancelled) + let request = SimulationRequestBuilder::new() + .with_block(18_000_000 + i - 1, alloy_primitives::B256::random()) + .build(); + + let task = SimulationTask { request }; + let _ = pool.queue_simulation(task).await; + } + + // Wait for processing + tokio::time::sleep(Duration::from_millis(200)).await; + + // Most simulations should have been cancelled due to rapid block updates + // The exact count depends on timing, but should be less than 10 + assert!(engine.simulation_count() <= 10); + assert!(publisher.published_count() <= 10); +} + +#[tokio::test] +async fn test_worker_pool_simulation_timing() { + let engine = MockSimulationEngine::new(); + let publisher = MockSimulationPublisher::new(); + let simulator = Arc::new(MockBundleSimulator::new(engine.clone(), publisher.clone())); + + let pool = SimulationWorkerPool::new(simulator, 1); + pool.start().await; + + let start_time = std::time::Instant::now(); + + // Queue a few simulations + for _i in 0..3 { + let request = SimulationRequestBuilder::new() + .with_bundle(bundles::single_tx_bundle()) + .build(); + + let task = SimulationTask { request }; + pool.queue_simulation(task).await.unwrap(); + } + + // Wait for all to complete + while engine.simulation_count() < 3 { + tokio::time::sleep(Duration::from_millis(10)).await; + + // Prevent infinite loop with timeout + if start_time.elapsed() > Duration::from_secs(5) { + break; + } + } + + let elapsed = start_time.elapsed(); + + // Verify timing is reasonable (should complete quickly with mocks) + assert!(elapsed < Duration::from_secs(2)); + assert_eq!(engine.simulation_count(), 3); + assert_eq!(publisher.published_count(), 3); +} From d653aa0de2e805fdb32ec6bb0cd77babc35e2b8e Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 
26 Sep 2025 14:08:53 -0500 Subject: [PATCH 34/39] just fix --- crates/simulator/src/lib.rs | 10 ++-- .../tests/bundle_simulator_impl_test.rs | 1 - crates/simulator/tests/common/builders.rs | 5 -- crates/simulator/tests/common/fixtures.rs | 5 -- .../tests/common/mock_bundle_simulator.rs | 3 - crates/simulator/tests/common/mocks.rs | 3 - crates/simulator/tests/common/mod.rs | 5 -- crates/simulator/tests/publisher_test.rs | 37 ++++++------ crates/simulator/tests/worker_pool_test.rs | 57 +++++++++---------- 9 files changed, 51 insertions(+), 75 deletions(-) diff --git a/crates/simulator/src/lib.rs b/crates/simulator/src/lib.rs index c7f1d5f..5d9c565 100644 --- a/crates/simulator/src/lib.rs +++ b/crates/simulator/src/lib.rs @@ -47,7 +47,9 @@ async fn init_dependencies( database_url: String, kafka_brokers: String, kafka_topic: String, -) -> Result, TipsSimulationPublisher>>> +) -> Result< + ListenerDependencies, TipsSimulationPublisher>>, +> where Node: FullNodeComponents, ::Evm: ConfigureEvm, @@ -128,10 +130,8 @@ where ) .await?; - let shared_worker_pool = SimulationWorkerPool::new( - Arc::new(dependencies.simulator), - max_concurrent_simulations, - ); + let shared_worker_pool = + SimulationWorkerPool::new(Arc::new(dependencies.simulator), max_concurrent_simulations); let exex_listener = ExExEventListener::new( exex_ctx, diff --git a/crates/simulator/tests/bundle_simulator_impl_test.rs b/crates/simulator/tests/bundle_simulator_impl_test.rs index 781932d..9ad0722 100644 --- a/crates/simulator/tests/bundle_simulator_impl_test.rs +++ b/crates/simulator/tests/bundle_simulator_impl_test.rs @@ -175,4 +175,3 @@ async fn test_bundle_simulator_impl_various_error_types() { } } } - diff --git a/crates/simulator/tests/common/builders.rs b/crates/simulator/tests/common/builders.rs index 31bf7d7..2333630 100644 --- a/crates/simulator/tests/common/builders.rs +++ b/crates/simulator/tests/common/builders.rs @@ -1,7 +1,6 @@ #![allow(dead_code)] /// Test data builders for creating complex test scenarios - use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types_mev::EthSendBundle; use std::collections::HashMap; @@ -51,8 +50,6 @@ impl TestBundleBuilder { self } - - pub fn build(self) -> EthSendBundle { EthSendBundle { txs: self.txs, @@ -255,7 +252,6 @@ impl ScenarioBuilder { self } - pub fn add_simple_bundle(mut self, num_txs: usize) -> Self { let mut builder = TestBundleBuilder::new().with_block_number(self.block_number); @@ -279,4 +275,3 @@ impl ScenarioBuilder { .collect() } } - diff --git a/crates/simulator/tests/common/fixtures.rs b/crates/simulator/tests/common/fixtures.rs index b3620cd..f937f3d 100644 --- a/crates/simulator/tests/common/fixtures.rs +++ b/crates/simulator/tests/common/fixtures.rs @@ -1,7 +1,6 @@ #![allow(dead_code)] /// Test fixtures and pre-configured test data - use alloy_primitives::Bytes; use alloy_rpc_types_mev::EthSendBundle; @@ -63,7 +62,6 @@ pub mod transactions { 0x50, 0x60, 0x70, 0x80, ]) } - } /// Pre-configured bundles for testing @@ -89,7 +87,6 @@ pub mod bundles { .build() } - /// Large bundle for stress testing pub fn large_bundle(num_txs: usize) -> EthSendBundle { let mut builder = TestBundleBuilder::new().with_block_number(blocks::BLOCK_18M); @@ -101,7 +98,6 @@ pub mod bundles { builder.build() } - } /// Test scenarios combining multiple fixtures @@ -143,4 +139,3 @@ pub mod scenarios { } } } - diff --git a/crates/simulator/tests/common/mock_bundle_simulator.rs b/crates/simulator/tests/common/mock_bundle_simulator.rs index 08637d7..2c353b3 100644 --- 
a/crates/simulator/tests/common/mock_bundle_simulator.rs +++ b/crates/simulator/tests/common/mock_bundle_simulator.rs @@ -19,8 +19,6 @@ impl MockBundleSimulator { pub fn new(engine: MockSimulationEngine, publisher: MockSimulationPublisher) -> Self { Self { engine, publisher } } - - } #[async_trait] @@ -56,4 +54,3 @@ impl BundleSimulator for MockBundleSimulator { Ok(()) } } - diff --git a/crates/simulator/tests/common/mocks.rs b/crates/simulator/tests/common/mocks.rs index e234a01..d4741dc 100644 --- a/crates/simulator/tests/common/mocks.rs +++ b/crates/simulator/tests/common/mocks.rs @@ -1,7 +1,6 @@ #![allow(dead_code)] /// Reusable mock implementations for testing - use alloy_primitives::{Address, U256}; use async_trait::async_trait; use std::collections::HashMap; @@ -44,7 +43,6 @@ impl MockSimulationEngine { self } - pub fn simulation_count(&self) -> usize { self.simulations.lock().unwrap().len() } @@ -129,7 +127,6 @@ impl MockSimulationPublisher { pub fn published_count(&self) -> usize { self.published.lock().unwrap().len() } - } #[async_trait] diff --git a/crates/simulator/tests/common/mod.rs b/crates/simulator/tests/common/mod.rs index e0eec56..574734d 100644 --- a/crates/simulator/tests/common/mod.rs +++ b/crates/simulator/tests/common/mod.rs @@ -1,7 +1,6 @@ #![allow(dead_code)] /// Common test utilities and infrastructure for simulator testing - pub mod builders; pub mod fixtures; pub mod mock_bundle_simulator; @@ -13,7 +12,6 @@ use std::collections::HashMap; use tips_simulator::types::{SimulationRequest, SimulationResult}; use uuid::Uuid; - /// Helper to create a simple test bundle pub fn create_test_bundle(num_txs: usize, block_number: u64) -> EthSendBundle { let mut txs = Vec::new(); @@ -83,7 +81,4 @@ pub mod assertions { "Successful simulation should not have error" ); } - - } - diff --git a/crates/simulator/tests/publisher_test.rs b/crates/simulator/tests/publisher_test.rs index 16447d1..5567bda 100644 --- a/crates/simulator/tests/publisher_test.rs +++ b/crates/simulator/tests/publisher_test.rs @@ -1,8 +1,8 @@ /// Unit tests for the SimulationPublisher implementation mod common; -use common::builders::*; use alloy_primitives::{Address, B256, U256}; +use common::builders::*; use std::collections::HashMap; // These tests focus on the logic that can be tested without requiring @@ -12,16 +12,16 @@ use std::collections::HashMap; async fn test_state_diff_conversion_logic() { // Test the state diff conversion logic that TipsSimulationPublisher uses let mut original_state_diff = HashMap::new(); - + // Create test data with multiple accounts and storage slots for i in 0..3 { let addr = Address::random(); let mut storage = HashMap::new(); - + for j in 0..5 { storage.insert(U256::from(i * 10 + j), U256::from((i + 1) * 100 + j)); } - + original_state_diff.insert(addr, storage); } @@ -39,12 +39,12 @@ async fn test_state_diff_conversion_logic() { // Verify conversion assert_eq!(converted.len(), original_state_diff.len()); - + for (address, original_storage) in &original_state_diff { assert!(converted.contains_key(address)); let converted_storage = &converted[address]; assert_eq!(converted_storage.len(), original_storage.len()); - + for (key, value) in original_storage { let key_bytes = key.to_be_bytes::<32>(); let storage_key = B256::from(key_bytes); @@ -53,21 +53,20 @@ async fn test_state_diff_conversion_logic() { } } - #[test] fn test_large_state_diff_handling() { // Test handling of large state diffs let mut large_state_diff = HashMap::new(); - + // Create a large state diff with many 
accounts and storage slots for i in 0..100 { let addr = Address::random(); let mut storage = HashMap::new(); - + for j in 0..50 { storage.insert(U256::from(i * 1000 + j), U256::from(j * 12345)); } - + large_state_diff.insert(addr, storage); } @@ -90,8 +89,6 @@ fn test_large_state_diff_handling() { } } - - #[test] fn test_execution_time_bounds() { // Test execution time edge cases @@ -106,26 +103,28 @@ fn test_execution_time_bounds() { let result = SimulationResultBuilder::successful() .with_execution_time_us(execution_time) .build(); - - assert_eq!(result.execution_time_us, execution_time, "Failed for: {}", description); + + assert_eq!( + result.execution_time_us, execution_time, + "Failed for: {}", + description + ); } } - - #[test] fn test_multiple_addresses_same_storage() { // Test multiple addresses with the same storage patterns let addresses = vec![Address::random(), Address::random(), Address::random()]; let mut state_diff = HashMap::new(); - + for addr in &addresses { let mut storage = HashMap::new(); storage.insert(U256::from(1), U256::from(100)); storage.insert(U256::from(2), U256::from(200)); state_diff.insert(*addr, storage); } - + // Convert let mut converted = HashMap::new(); for (address, storage) in &state_diff { @@ -137,7 +136,7 @@ fn test_multiple_addresses_same_storage() { } converted.insert(*address, storage_map); } - + assert_eq!(converted.len(), 3); for addr in &addresses { assert!(converted.contains_key(addr)); diff --git a/crates/simulator/tests/worker_pool_test.rs b/crates/simulator/tests/worker_pool_test.rs index f68e42c..ee4099e 100644 --- a/crates/simulator/tests/worker_pool_test.rs +++ b/crates/simulator/tests/worker_pool_test.rs @@ -9,7 +9,6 @@ use std::sync::Arc; use std::time::Duration; use tips_simulator::worker_pool::{SimulationTask, SimulationWorkerPool}; - #[tokio::test] async fn test_worker_pool_start_and_shutdown() { let engine = MockSimulationEngine::new(); @@ -17,15 +16,15 @@ async fn test_worker_pool_start_and_shutdown() { let simulator = Arc::new(MockBundleSimulator::new(engine, publisher)); let pool = SimulationWorkerPool::new(simulator, 2); - + // Start the pool let started = pool.start().await; assert!(started); // Should return true on first start - + // Starting again should return false let started_again = pool.start().await; assert!(!started_again); - + // Test shutdown - pool will shutdown when dropped } @@ -40,12 +39,10 @@ async fn test_worker_pool_single_simulation() { // Queue a single simulation let bundle = bundles::single_tx_bundle(); - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); - + let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); + let task = SimulationTask { request }; - + let queue_result = pool.queue_simulation(task).await; assert!(queue_result.is_ok()); @@ -73,7 +70,7 @@ async fn test_worker_pool_multiple_simulations() { .with_simple_transaction(&[i as u8, 0x01, 0x02]) .with_block_number(18_000_000 + i as u64) .build(); - + let request = SimulationRequestBuilder::new() .with_bundle(bundle) .with_block(18_000_000 + i as u64, alloy_primitives::B256::random()) @@ -105,15 +102,13 @@ async fn test_worker_pool_concurrent_workers() { // Queue many simulations quickly let num_simulations = 20; let mut tasks = vec![]; - + for i in 0..num_simulations { let bundle = TestBundleBuilder::new() .with_simple_transaction(&[i as u8, 0x03, 0x04]) .build(); - - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); + + let request = 
SimulationRequestBuilder::new().with_bundle(bundle).build(); tasks.push(SimulationTask { request }); } @@ -144,7 +139,9 @@ async fn test_worker_pool_simulation_failures() { for i in 0..5 { // Configure engine to fail odd-numbered simulations if i % 2 == 1 { - let _ = engine.clone().fail_next_with(tips_simulator::SimulationError::OutOfGas); + let _ = engine + .clone() + .fail_next_with(tips_simulator::SimulationError::OutOfGas); } else { let result = SimulationResultBuilder::successful() .with_gas_used(100_000 + i * 10_000) @@ -189,7 +186,7 @@ async fn test_worker_pool_publisher_failures() { let request = SimulationRequestBuilder::new().build(); let task = SimulationTask { request }; - + pool.queue_simulation(task).await.unwrap(); // Wait for processing @@ -217,13 +214,17 @@ async fn test_worker_pool_block_cancellation() { let old_request = SimulationRequestBuilder::new() .with_block(old_block, alloy_primitives::B256::random()) .build(); - let old_task = SimulationTask { request: old_request }; - + let old_task = SimulationTask { + request: old_request, + }; + // Queue simulation for new block let new_request = SimulationRequestBuilder::new() .with_block(new_block, alloy_primitives::B256::random()) .build(); - let new_task = SimulationTask { request: new_request }; + let new_task = SimulationTask { + request: new_request, + }; // Update latest block to the new block (should cancel old simulations) pool.update_latest_block(new_block); @@ -253,15 +254,13 @@ async fn test_worker_pool_heavy_load() { // Queue a large number of simulations let num_simulations = 50; - + for i in 0..num_simulations { let bundle = TestBundleBuilder::new() .with_simple_transaction(&[i as u8, 0x05, 0x06]) .build(); - - let request = SimulationRequestBuilder::new() - .with_bundle(bundle) - .build(); + + let request = SimulationRequestBuilder::new().with_bundle(bundle).build(); let task = SimulationTask { request }; pool.queue_simulation(task).await.unwrap(); @@ -365,7 +364,7 @@ async fn test_worker_pool_mixed_block_numbers() { // Queue simulations for various block numbers let block_numbers = vec![18_000_000, 18_000_005, 18_000_002, 18_000_008, 18_000_001]; - + for (i, block_num) in block_numbers.iter().enumerate() { let request = SimulationRequestBuilder::new() .with_block(*block_num, alloy_primitives::B256::random()) @@ -404,12 +403,12 @@ async fn test_worker_pool_rapid_block_updates() { // Rapidly update block numbers for i in 0..10 { pool.update_latest_block(18_000_000 + i); - + // Queue a simulation for an older block (should be cancelled) let request = SimulationRequestBuilder::new() .with_block(18_000_000 + i - 1, alloy_primitives::B256::random()) .build(); - + let task = SimulationTask { request }; let _ = pool.queue_simulation(task).await; } @@ -447,7 +446,7 @@ async fn test_worker_pool_simulation_timing() { // Wait for all to complete while engine.simulation_count() < 3 { tokio::time::sleep(Duration::from_millis(10)).await; - + // Prevent infinite loop with timeout if start_time.elapsed() > Duration::from_secs(5) { break; From 2311a6b56752505f8e4e63e6594e5d14370f59fc Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 14:13:27 -0500 Subject: [PATCH 35/39] Remove unused function --- crates/simulator/src/engine.rs | 78 ---------------------------------- 1 file changed, 78 deletions(-) diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 88c0714..59f6845 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -20,84 +20,6 @@ use 
uuid::Uuid; // FIXME: The block time should be retrieved from the reth node. const BLOCK_TIME: u64 = 2; -/// Create state provider from ExEx context -/// -/// This function prepares the necessary components for EVM simulation: -/// 1. Creates a StateProvider at a specific block using the Provider from ExEx context -/// 2. Validates that the block exists and retrieves its hash -/// 3. Returns the state provider that can be used for EVM database initialization -/// -/// # Arguments -/// * `provider` - The state provider factory from the ExEx context (e.g., ctx.provider) -/// * `block_number` - The block number to create the state at -/// -/// # Returns -/// A tuple of (StateProvider, block_hash) ready for EVM initialization -/// -/// # Usage in ExEx -/// When implementing an ExEx that needs to simulate transactions, you can use this -/// function to get a state provider that implements the Client interface. This state -/// provider can then be used with reth's EvmConfig to create an EVM instance. -/// -/// The typical flow is: -/// 1. Get the provider from ExExContext: `ctx.provider` -/// 2. Call this function to get a state provider at a specific block -/// 3. Use the state provider with reth_revm::database::StateProviderDatabase -/// 4. Configure the EVM with the appropriate EvmConfig from your node -pub fn prepare_evm_state<P>( - provider: Arc<P>, - block_number: u64, -) -> Result<(Box<dyn StateProvider>, B256)> -where - P: StateProviderFactory, -{ - // Get the state provider at the specified block - let state_provider = provider - .state_by_block_number_or_tag(BlockNumberOrTag::Number(block_number)) - .map_err(|e| { - eyre::eyre!( - "Failed to get state provider at block {}: {}", - block_number, - e - ) - })?; - - // Get the block hash - let block_hash = state_provider - .block_hash(block_number) - .map_err(|e| eyre::eyre!("Failed to get block hash: {}", e))? - .ok_or_else(|| eyre::eyre!("Block {} not found", block_number))?; - - Ok((state_provider, block_hash)) -} - -/// Example usage within an ExEx: -/// ```ignore -/// // In your ExEx implementation -/// use reth_exex::ExExContext; -/// use reth_revm::database::StateProviderDatabase; -/// use revm::Evm; -/// -/// // Get provider from ExEx context -/// let provider = ctx.provider.clone(); -/// -/// // Prepare EVM state -/// let (state_provider, block_hash) = prepare_evm_state( -/// provider.clone(), -/// block_number, -/// )?; -/// -/// // Create state database -/// let db = StateProviderDatabase::new(state_provider); -/// -/// // Build EVM with the database -/// // Note: You would configure the EVM with proper environment settings -/// // based on your chain's requirements (gas limits, fork settings, etc.) -/// let evm = Evm::builder() -/// .with_db(db) -/// .build(); -/// ``` - #[async_trait] pub trait SimulationEngine: Send + Sync { /// Simulate a bundle execution From 7d534842ea28dfc06e5182cac740ce8c386bc3db Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 14:20:57 -0500 Subject: [PATCH 36/39] Load .env.example when building the containers in Github Actions --- .github/workflows/docker.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index ac22ab7..4fa7d34 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -19,5 +19,12 @@ jobs: egress-policy: audit - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + + - name: Load environment variables + uses: xom9ikk/dotenv@v2 + with: + path: .
+ mode: example + - run: cp .env.example .env.docker - - run: docker compose -f docker-compose.tips.yml build \ No newline at end of file + - run: docker compose -f docker-compose.tips.yml build From d0da65c95c1ce281a8937610bea24ca5d6f75e00 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 14:46:55 -0500 Subject: [PATCH 37/39] Remove unused imports --- crates/simulator/src/engine.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/simulator/src/engine.rs b/crates/simulator/src/engine.rs index 59f6845..74d055e 100644 --- a/crates/simulator/src/engine.rs +++ b/crates/simulator/src/engine.rs @@ -2,14 +2,13 @@ use crate::types::{SimulationError, SimulationRequest, SimulationResult}; use alloy_consensus::{transaction::SignerRecoverable, BlockHeader}; use alloy_eips::eip2718::Decodable2718; use alloy_primitives::B256; -use alloy_rpc_types::BlockNumberOrTag; use async_trait::async_trait; use eyre::Result; use reth_evm::execute::BlockBuilder; use reth_evm::ConfigureEvm; use reth_node_api::FullNodeComponents; use reth_optimism_evm::OpNextBlockEnvAttributes; -use reth_provider::{HeaderProvider, StateProvider, StateProviderFactory}; +use reth_provider::{HeaderProvider, StateProviderFactory}; use reth_revm::{database::StateProviderDatabase, db::State}; use std::collections::HashMap; use std::sync::Arc; From 0f8ea908b4c995a0c40dd4491d4e7423c24426f5 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 14:52:13 -0500 Subject: [PATCH 38/39] Add reth-mdbx-sys build dependencies to all cargo chef Dockerfiles --- crates/audit/Dockerfile | 10 +++++++++- crates/ingress-rpc/Dockerfile | 10 +++++++++- crates/ingress-writer/Dockerfile | 10 +++++++++- crates/maintenance/Dockerfile | 10 +++++++++- 4 files changed, 36 insertions(+), 4 deletions(-) diff --git a/crates/audit/Dockerfile b/crates/audit/Dockerfile index b09a0fd..455a21f 100644 --- a/crates/audit/Dockerfile +++ b/crates/audit/Dockerfile @@ -1,5 +1,13 @@ FROM rust:1-bookworm AS base +# Add dependencies for building reth-mdbx-sys (needed by workspace dependencies) +RUN apt-get update && apt-get install -y \ + clang \ + libclang-dev \ + llvm-dev \ + pkg-config && \ + rm -rf /var/lib/apt/lists/* + RUN cargo install cargo-chef --locked WORKDIR /app @@ -32,4 +40,4 @@ COPY --from=builder /tmp/tips-audit /app/tips-audit EXPOSE 3001 -CMD ["/app/tips-audit"] \ No newline at end of file +CMD ["/app/tips-audit"] diff --git a/crates/ingress-rpc/Dockerfile b/crates/ingress-rpc/Dockerfile index 97c27d0..ce91dc9 100644 --- a/crates/ingress-rpc/Dockerfile +++ b/crates/ingress-rpc/Dockerfile @@ -1,5 +1,13 @@ FROM rust:1-bookworm AS base +# Add dependencies for building reth-mdbx-sys (needed by workspace dependencies) +RUN apt-get update && apt-get install -y \ + clang \ + libclang-dev \ + llvm-dev \ + pkg-config && \ + rm -rf /var/lib/apt/lists/* + RUN cargo install cargo-chef --locked WORKDIR /app @@ -32,4 +40,4 @@ COPY --from=builder /tmp/tips-ingress-rpc /app/tips-ingress-rpc EXPOSE 3000 -CMD ["/app/tips-ingress-rpc"] \ No newline at end of file +CMD ["/app/tips-ingress-rpc"] diff --git a/crates/ingress-writer/Dockerfile b/crates/ingress-writer/Dockerfile index d44fe80..8a69ef9 100644 --- a/crates/ingress-writer/Dockerfile +++ b/crates/ingress-writer/Dockerfile @@ -1,5 +1,13 @@ FROM rust:1-bookworm AS base +# Add dependencies for building reth-mdbx-sys (needed by workspace dependencies) +RUN apt-get update && apt-get install -y \ + clang \ + libclang-dev \ + llvm-dev \ + pkg-config && \ + rm -rf 
/var/lib/apt/lists/* + RUN cargo install cargo-chef --locked WORKDIR /app @@ -30,4 +38,4 @@ WORKDIR /app COPY --from=builder /tmp/tips-ingress-writer /app/tips-ingress-writer -CMD ["/app/tips-ingress-writer"] \ No newline at end of file +CMD ["/app/tips-ingress-writer"] diff --git a/crates/maintenance/Dockerfile b/crates/maintenance/Dockerfile index b3e080f..a799e93 100644 --- a/crates/maintenance/Dockerfile +++ b/crates/maintenance/Dockerfile @@ -1,5 +1,13 @@ FROM rust:1-bookworm AS base +# Add dependencies for building reth-mdbx-sys (needed by workspace dependencies) +RUN apt-get update && apt-get install -y \ + clang \ + libclang-dev \ + llvm-dev \ + pkg-config && \ + rm -rf /var/lib/apt/lists/* + RUN cargo install cargo-chef --locked WORKDIR /app @@ -30,4 +38,4 @@ WORKDIR /app COPY --from=builder /tmp/tips-maintenance /app/tips-maintenance -CMD ["/app/tips-maintenance"] \ No newline at end of file +CMD ["/app/tips-maintenance"] From a0395eb557c9bdff3b77192bb6a163c66569ae69 Mon Sep 17 00:00:00 2001 From: Niran Babalola Date: Fri, 26 Sep 2025 16:28:39 -0500 Subject: [PATCH 39/39] Uncomment the ui sync commands --- justfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/justfile b/justfile index 931c979..cc52d89 100644 --- a/justfile +++ b/justfile @@ -22,10 +22,10 @@ create-migration name: sync: deps-reset ### DATABASE ### cargo sqlx prepare -D postgresql://postgres:postgres@localhost:5432/postgres --workspace --all --no-dotenv - #cd ui && npx drizzle-kit pull --dialect=postgresql --url=postgresql://postgres:postgres@localhost:5432/postgres - #cd ui && mv ./drizzle/relations.ts ./src/db/ - #cd ui && mv ./drizzle/schema.ts ./src/db/ - #cd ui && rm -rf ./drizzle + cd ui && npx drizzle-kit pull --dialect=postgresql --url=postgresql://postgres:postgres@localhost:5432/postgres + cd ui && mv ./drizzle/relations.ts ./src/db/ + cd ui && mv ./drizzle/schema.ts ./src/db/ + cd ui && rm -rf ./drizzle ### ENV ### just sync-env ### REFORMAT ###