Skip to content
This repository was archived by the owner on May 27, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,11 @@ jobs:
steps:
- uses: actions/checkout@v4

- name: Install Redis for tests
run: |
sudo apt-get update
sudo apt-get install -y redis

- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@master
with:
Expand Down
140 changes: 114 additions & 26 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,12 @@ metrics-derive = "0.1"
thiserror = "2.0.11"
serde_json = "1.0.138"
hostname = "0.4.0"
redis = "0.24.0"
redis-test = { version = "0.9.0", optional = true }


[dependencies.ring]
version = "0.17.12"

[features]
integration = []
integration = ["redis-test"]
29 changes: 28 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ You can build and test the project using [Cargo](https://doc.rust-lang.org/cargo
# Build the project
cargo build

# Run all the tests
# Run all the tests (requires a local Redis server to be installed)
cargo test --all-features
```

Expand All @@ -35,3 +35,30 @@ You can see a full list of parameters by running:

`docker run ghcr.io/base/flashblocks-websocket-proxy:master --help`

### Redis Integration

The proxy supports distributed rate limiting with Redis. This is useful when running multiple instances of the proxy behind a load balancer, as it allows rate limits to be enforced across all instances.

To enable Redis integration, use the following parameters:

- `--redis-url` - Redis connection URL (e.g., `redis://localhost:6379`)
- `--redis-key-prefix` - Prefix for Redis keys (default: `flashblocks`)

Example:

```bash
docker run ghcr.io/base/flashblocks-websocket-proxy:master \
--upstream-ws wss://your-sequencer-endpoint \
--redis-url redis://redis:6379 \
--global-connections-limit 1000 \
--per-ip-connections-limit 10
```

When Redis is enabled, the following features are available:

- Distributed rate limiting across multiple proxy instances
- Connection tracking that persists even if a proxy instance restarts
- More accurate global connection limiting in multi-instance deployments

If the Redis connection fails, the proxy will automatically fall back to in-memory rate limiting.

56 changes: 51 additions & 5 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,15 @@ mod server;
mod subscriber;

use crate::metrics::Metrics;
use crate::rate_limit::InMemoryRateLimit;
use crate::rate_limit::{InMemoryRateLimit, RateLimit};
use crate::registry::Registry;
use crate::server::Server;
use crate::subscriber::WebsocketSubscriber;
use axum::http::Uri;
use clap::Parser;
use dotenvy::dotenv;
use metrics_exporter_prometheus::PrometheusBuilder;
use rate_limit::RedisRateLimit;
use std::net::SocketAddr;
use std::sync::Arc;
use tokio::signal::unix::{signal, SignalKind};
Expand Down Expand Up @@ -96,6 +97,21 @@ struct Args {
/// Maximum backoff allowed for upstream connections
#[arg(long, env, default_value = "20")]
subscriber_max_interval: u64,

#[arg(
long,
env,
help = "Redis URL for distributed rate limiting (e.g., redis://localhost:6379). If not provided, in-memory rate limiting will be used."
)]
redis_url: Option<String>,

#[arg(
long,
env,
default_value = "flashblocks",
help = "Prefix for Redis keys"
)]
redis_key_prefix: String,
}

#[tokio::main]
Expand Down Expand Up @@ -203,10 +219,40 @@ async fn main() {

let registry = Registry::new(sender, metrics.clone());

let rate_limiter = Arc::new(InMemoryRateLimit::new(
args.global_connections_limit,
args.per_ip_connections_limit,
));
let rate_limiter = match &args.redis_url {
Some(redis_url) => {
info!(message = "Using Redis rate limiter", redis_url = redis_url);
match RedisRateLimit::new(
redis_url,
args.global_connections_limit,
args.per_ip_connections_limit,
&args.redis_key_prefix,
) {
Ok(limiter) => {
info!(message = "Connected to Redis successfully");
Arc::new(limiter) as Arc<dyn RateLimit>
}
Err(e) => {
error!(
message =
"Failed to connect to Redis, falling back to in-memory rate limiting",
error = e.to_string()
);
Arc::new(InMemoryRateLimit::new(
args.global_connections_limit,
args.per_ip_connections_limit,
)) as Arc<dyn RateLimit>
}
}
}
None => {
info!(message = "Using in-memory rate limiter");
Arc::new(InMemoryRateLimit::new(
args.global_connections_limit,
args.per_ip_connections_limit,
)) as Arc<dyn RateLimit>
}
};

let server = Server::new(
args.listen_addr,
Expand Down
Loading