Skip to content

Commit

Permalink
build(node): add option to compile the node without rpc
Browse files Browse the repository at this point in the history
  • Loading branch information
ShahakShama committed Feb 14, 2024
1 parent 04ec5b5 commit e13238b
Show file tree
Hide file tree
Showing 9 changed files with 167 additions and 19 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,25 @@ jobs:
target/release/papyrus_node --base_layer.node_url ${{ secrets.CI_BASE_LAYER_NODE_URL }}
& sleep 30 ; kill $!
executable-run-no-rpc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- uses: Noelware/setup-protoc@1.1.0
with:
version: ${{env.PROTOC_VERSION}}
- run: mkdir data

- name: Build node
run: cargo build -r --no-default-features

- name: Run executable
run: >
target/release/papyrus_node --base_layer.node_url ${{ secrets.CI_BASE_LAYER_NODE_URL }}
& sleep 30 ; kill $!
test:
runs-on: ubuntu-latest
steps:
Expand Down Expand Up @@ -88,6 +107,19 @@ jobs:
cargo test -r --test '*' -- --include-ignored --skip test_gw_integration_testnet;
cargo run -r -p papyrus_node --bin central_source_integration_test
test-no-rpc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2

- run: |
cargo test -p papyrus_node --no-default-features
env:
SEED: 0
rustfmt:
runs-on: ubuntu-latest
steps:
Expand Down
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions commitlint.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ const Configuration = {
'load_test',
'monitoring',
'network',
'node',
'release',
'starknet_client',
'storage',
Expand Down
11 changes: 9 additions & 2 deletions crates/papyrus_node/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@ license-file.workspace = true
[package.metadata.cargo-udeps.ignore]
normal = ["papyrus_base_layer"]

[features]
# The JSON-RPC server is compiled in by default; build with
# `--no-default-features` to produce a node without it.
default = ["rpc"]
# Enables the JSON-RPC server by activating the optional `papyrus_rpc` dependency.
rpc = ["papyrus_rpc"]

[dependencies]
anyhow.workspace = true
async-stream.workspace = true
Expand All @@ -22,9 +26,9 @@ papyrus_base_layer = { path = "../papyrus_base_layer" }
papyrus_config = { path = "../papyrus_config", version = "0.3.0-rc.0" }
papyrus_common = { path = "../papyrus_common", version = "0.3.0-rc.0" }
papyrus_monitoring_gateway = { path = "../papyrus_monitoring_gateway" }
papyrus_rpc = { path = "../papyrus_rpc" }
papyrus_rpc = { path = "../papyrus_rpc", version = "0.3.0-rc.0", optional = true }
papyrus_storage = { path = "../papyrus_storage", version = "0.3.0-rc.0" }
papyrus_sync = { path = "../papyrus_sync" }
papyrus_sync = { path = "../papyrus_sync", version = "0.3.0-rc.0" }
reqwest = { workspace = true, features = ["json", "blocking"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
Expand All @@ -38,6 +42,9 @@ tracing.workspace = true
url.workspace = true
validator = { workspace = true, features = ["derive"] }

# `futures` is only used by the no-rpc code path in main.rs. Cargo does NOT
# support `feature = "..."` predicates inside `[target.'cfg(...)']` dependency
# tables (see the Cargo reference on platform-specific dependencies), so the
# original feature-gated target section never activates and a
# `--no-default-features` build cannot resolve `futures`. Declare it as a
# regular dependency instead — key/value pairs here continue the
# [dependencies] table above.
# NOTE(review): this leaves `futures` unused when the `rpc` feature is on;
# consider adding it to the cargo-udeps ignore list if that lint complains.
futures.workspace = true

[dev-dependencies]
metrics-exporter-prometheus.workspace = true
pretty_assertions.workspace = true
Expand Down
33 changes: 33 additions & 0 deletions crates/papyrus_node/src/bin/dump_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ use papyrus_config::{ParamPath, SerializedParam};
use papyrus_node::config::{NodeConfig, DEFAULT_CONFIG_PATH};
use starknet_api::core::ChainId;

#[cfg(feature = "rpc")]
lazy_static! {
/// Returns vector of (pointer target name, pointer target serialized param, vec<pointer param path>)
/// to be applied on the dumped node config.
Expand Down Expand Up @@ -39,6 +40,38 @@ lazy_static! {
)];
}

#[cfg(not(feature = "rpc"))]
lazy_static! {
    /// Returns vector of (pointer target name, pointer target serialized param, vec<pointer param path>)
    /// to be applied on the dumped node config.
    /// The config updates will be performed on the shared pointer targets, and finally, the values
    /// will be propagated to the pointer params.
    // NOTE(review): this is the no-rpc variant of CONFIG_POINTERS. It must be
    // kept in sync with the `#[cfg(feature = "rpc")]` variant defined above,
    // minus any rpc-only pointer targets — confirm on every change to either.
    static ref CONFIG_POINTERS: Vec<((ParamPath, SerializedParam), Vec<ParamPath>)> = vec![(
        // Shared chain id, propagated to the storage config.
        ser_pointer_target_param(
            "chain_id",
            &ChainId("SN_MAIN".to_string()),
            "The chain to follow. For more details see https://docs.starknet.io/documentation/architecture_and_concepts/Blocks/transactions/#chain-id.",
        ),
        vec!["storage.db_config.chain_id".to_owned()],
    ),
    (
        // Shared gateway URL, propagated to the central source and the
        // monitoring gateway configs.
        ser_pointer_target_param(
            "starknet_url",
            &"https://alpha-mainnet.starknet.io/".to_string(),
            "The URL of a centralized Starknet gateway.",
        ),
        vec!["central.url".to_owned(), "monitoring_gateway.starknet_url".to_owned()],
    ),
    (
        // Shared metrics flag, propagated to the monitoring gateway config.
        ser_pointer_target_param(
            "collect_metrics",
            &false,
            "If true, collect metrics for the node.",
        ),
        vec!["monitoring_gateway.collect_metrics".to_owned()],
    )];
}

/// Updates the default config file by:
/// cargo run --bin dump_config -q
#[cfg_attr(coverage_nightly, coverage_attribute)]
Expand Down
13 changes: 10 additions & 3 deletions crates/papyrus_node/src/config/config_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,9 @@ fn load_http_headers() {
assert_eq!(config.central.http_headers.unwrap(), target_http_headers);
}

// insta doesn't work well with features, so if the output between two features are different we
// can only test one of them. We chose to test rpc over testing not(rpc).
#[cfg(feature = "rpc")]
#[test]
// Regression test which checks that the default config dumping hasn't changed.
fn test_dump_default_config() {
Expand All @@ -95,11 +98,15 @@ fn test_default_config_process() {

#[test]
fn test_update_dumped_config_by_command() {
let args =
get_args(vec!["--rpc.max_events_keys", "1234", "--storage.db_config.path_prefix", "/abc"]);
let args = get_args(vec![
"--central.retry_config.retry_max_delay_millis",
"1234",
"--storage.db_config.path_prefix",
"/abc",
]);
env::set_current_dir(get_absolute_path("")).expect("Couldn't set working dir.");
let config = NodeConfig::load_and_process(args).unwrap();

assert_eq!(config.rpc.max_events_keys, 1234);
assert_eq!(config.central.retry_config.retry_max_delay_millis, 1234);
assert_eq!(config.storage.db_config.path_prefix.to_str(), Some("/abc"));
}
26 changes: 26 additions & 0 deletions crates/papyrus_node/src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,15 @@ use std::{env, fs, io};
use clap::{arg, value_parser, Arg, ArgMatches, Command};
use itertools::{chain, Itertools};
use papyrus_base_layer::ethereum_base_layer_contract::EthereumBaseLayerConfig;
#[cfg(not(feature = "rpc"))]
use papyrus_config::dumping::ser_param;
use papyrus_config::dumping::{append_sub_config_name, ser_optional_sub_config, SerializeConfig};
use papyrus_config::loading::load_and_process_config;
#[cfg(not(feature = "rpc"))]
use papyrus_config::ParamPrivacyInput;
use papyrus_config::{ConfigError, ParamPath, SerializedParam};
use papyrus_monitoring_gateway::MonitoringGatewayConfig;
#[cfg(feature = "rpc")]
use papyrus_rpc::RpcConfig;
use papyrus_storage::db::DbConfig;
use papyrus_storage::StorageConfig;
Expand Down Expand Up @@ -90,3 +95,24 @@ pub fn node_command() -> Command {
.version(VERSION_FULL)
.about("Papyrus is a StarkNet full node written in Rust.")
}

// TODO(shahak): Try to make this config empty.
#[cfg(not(feature = "rpc"))]
#[derive(Debug, Default, Deserialize, Serialize, Clone, PartialEq, Validate)]
/// Stand-in for `papyrus_rpc::RpcConfig`, used when the node is compiled
/// without the `rpc` feature so the rest of the config code can keep
/// referring to a type with this name.
pub struct RpcConfig {
    // We need to add some field because empty configs are not supported, and that field needs to
    // be one that exists in the real RpcConfig.
    pub collect_metrics: bool,
}

#[cfg(not(feature = "rpc"))]
impl SerializeConfig for RpcConfig {
    /// Serializes the single placeholder field of the no-rpc stub config.
    fn dump(&self) -> BTreeMap<ParamPath, SerializedParam> {
        let collect_metrics = ser_param(
            "collect_metrics",
            &self.collect_metrics,
            "If true, collect metrics for the rpc.",
            ParamPrivacyInput::Public,
        );
        [collect_metrics].into_iter().collect()
    }
}
49 changes: 40 additions & 9 deletions crates/papyrus_node/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
mod main_test;

use std::env::args;
use std::future::{self, pending};
use std::future::{self, pending, Future};
use std::process::exit;
use std::sync::Arc;
use std::time::Duration;
Expand All @@ -15,19 +15,19 @@ use papyrus_config::ConfigError;
use papyrus_monitoring_gateway::MonitoringServer;
use papyrus_node::config::NodeConfig;
use papyrus_node::version::VERSION_FULL;
#[cfg(feature = "rpc")]
use papyrus_rpc::run_server;
use papyrus_storage::{open_storage, update_storage_metrics, StorageReader, StorageWriter};
use papyrus_sync::sources::base_layer::{BaseLayerSourceError, EthereumBaseLayerSource};
use papyrus_sync::sources::central::{CentralError, CentralSource};
use papyrus_sync::sources::pending::PendingSource;
use papyrus_sync::{StateSync, StateSyncError};
use starknet_api::block::BlockHash;
use starknet_api::hash::{StarkFelt, GENESIS_HASH};
use starknet_api::stark_felt;
use starknet_api::hash::GENESIS_HASH;
use starknet_client::reader::objects::pending_data::{PendingBlock, PendingBlockOrDeprecated};
use starknet_client::reader::PendingData;
use tokio::sync::RwLock;
use tokio::task::JoinHandle;
use tokio::task::{JoinError, JoinHandle};
use tracing::metadata::LevelFilter;
use tracing::{debug_span, error, info, warn, Instrument};
use tracing_subscriber::prelude::*;
Expand All @@ -40,6 +40,37 @@ const DEFAULT_LEVEL: LevelFilter = LevelFilter::INFO;
// Duration between updates to the storage metrics (those in the collect_storage_metrics function).
const STORAGE_METRICS_UPDATE_INTERVAL: Duration = Duration::from_secs(10);

#[cfg(feature = "rpc")]
/// Starts the JSON-RPC server and returns a future that resolves when the
/// spawned server task finishes.
async fn create_rpc_server_future(
    config: &NodeConfig,
    shared_highest_block: Arc<RwLock<Option<BlockHashAndNumber>>>,
    pending_data: Arc<RwLock<PendingData>>,
    pending_classes: Arc<RwLock<PendingClasses>>,
    storage_reader: StorageReader,
) -> anyhow::Result<impl Future<Output = Result<(), JoinError>>> {
    // The bound address is not needed here; only the handle is kept.
    let (_, server_handle) = run_server(
        &config.rpc,
        shared_highest_block,
        pending_data,
        pending_classes,
        storage_reader,
        VERSION_FULL,
    )
    .await?;
    let server_stopped = server_handle.stopped();
    Ok(tokio::spawn(server_stopped))
}

#[cfg(not(feature = "rpc"))]
/// No-rpc stand-in: there is no server to run, so hand back a future that
/// never resolves — callers selecting over it are effectively unaffected.
async fn create_rpc_server_future(
    _config: &NodeConfig,
    _shared_highest_block: Arc<RwLock<Option<BlockHashAndNumber>>>,
    _pending_data: Arc<RwLock<PendingData>>,
    _pending_classes: Arc<RwLock<PendingClasses>>,
    _storage_reader: StorageReader,
) -> anyhow::Result<impl Future<Output = Result<(), JoinError>>> {
    let never = futures::future::pending::<Result<(), JoinError>>();
    Ok(never)
}

async fn run_threads(config: NodeConfig) -> anyhow::Result<()> {
let (storage_reader, storage_writer) = open_storage(config.storage.clone())?;

Expand All @@ -65,24 +96,24 @@ async fn run_threads(config: NodeConfig) -> anyhow::Result<()> {
// The pending data might change later to DeprecatedPendingBlock, depending on the response
// from the feeder gateway.
block: PendingBlockOrDeprecated::Current(PendingBlock {
parent_block_hash: BlockHash(stark_felt!(GENESIS_HASH)),
parent_block_hash: BlockHash(
GENESIS_HASH.try_into().expect("Failed converting genesis hash to StarkHash"),
),
..Default::default()
}),
..Default::default()
}));
let pending_classes = Arc::new(RwLock::new(PendingClasses::default()));

// JSON-RPC server.
let (_, server_handle) = run_server(
&config.rpc,
let server_handle_future = create_rpc_server_future(
&config,
shared_highest_block.clone(),
pending_data.clone(),
pending_classes.clone(),
storage_reader.clone(),
VERSION_FULL,
)
.await?;
let server_handle_future = tokio::spawn(server_handle.stopped());

// Sync task.
let sync_future = run_sync(
Expand Down
20 changes: 15 additions & 5 deletions crates/papyrus_node/src/main_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,26 @@ use std::time::Duration;

use metrics_exporter_prometheus::PrometheusBuilder;
use papyrus_node::config::NodeConfig;
#[cfg(feature = "rpc")]
use papyrus_rpc::RpcConfig;
use papyrus_storage::{open_storage, StorageConfig};
use tempfile::TempDir;
use test_utils::{get_absolute_path, prometheus_is_contained};
#[cfg(feature = "rpc")]
use test_utils::get_absolute_path;
use test_utils::prometheus_is_contained;

use crate::{run_threads, spawn_storage_metrics_collector};

#[cfg(feature = "rpc")]
/// Rewrites the rpc execution-config path in `config` to an absolute path
/// (via `get_absolute_path`), derived from the default `RpcConfig`.
fn fix_execution_config_path(config: &mut NodeConfig) {
    let default_execution_config_path = RpcConfig::default().execution_config;
    config.rpc.execution_config = get_absolute_path(
        default_execution_config_path
            .to_str()
            // State the invariant instead of a bare unwrap: the bundled
            // default path is expected to be valid UTF-8.
            .expect("Default execution config path is not valid UTF-8"),
    );
}

#[cfg(not(feature = "rpc"))]
/// No-op when the node is built without the `rpc` feature: the stub config
/// carries no execution-config path to rewrite.
fn fix_execution_config_path(_config: &mut NodeConfig) {}

// The mission of this test is to ensure that if an error is returned from one of the spawned tasks,
// the node will stop, and this error will be returned. This is done by checking the case of an
// illegal central URL, which will cause the sync task to return an error.
Expand All @@ -18,10 +31,7 @@ async fn run_threads_stop() {
let temp_dir = TempDir::new().unwrap();
config.storage.db_config.path_prefix = temp_dir.path().into();

// Fix the path to the execution config.
let default_execution_config_path = RpcConfig::default().execution_config;
config.rpc.execution_config =
get_absolute_path(default_execution_config_path.to_str().unwrap());
fix_execution_config_path(&mut config);

// Error when not supplying legal central URL.
config.central.url = "_not_legal_url".to_string();
Expand Down

0 comments on commit e13238b

Please sign in to comment.