crypto: Use sui read/write keypair logic in narwhal (#7278)
#5195 (reopened since the benchmarks are still using the narwhal commands)

Tested using `fab local` (all changes are mirrored in
full_demo/remote/local). @arun-koshy, let me know if there is any CI I should run
to ensure correctness.
```
fab local 
Starting local benchmark
Setting up testbed...
About to run ['cargo', 'build', '--quiet', '--release', '--features', 'benchmark']...
Running benchmark (60 sec)...
Parsing logs...
WARN: Clients missed their target rate 288 time(s)

-----------------------------------------
 SUMMARY:
-----------------------------------------
 + CONFIG:
 Faults: 0 node(s)
 Committee size: 4 node(s)
 Worker(s) per node: 1 worker(s)
 Collocate primary and workers: True
 Input rate: 50,000 tx/s
 Transaction size: 512 B
 Execution time: 57 s

 Header number of batches threshold: 32 digests
 Header maximum number of batches: 1,000 digests
 Max header delay: 2,000 ms
 GC depth: 50 round(s)
 Sync retry delay: 10,000 ms
 Sync retry nodes: 3 node(s)
 batch size: 500,000 B
 Max batch delay: 200 ms
 Max concurrent requests: 500,000

 + RESULTS:
 Batch creation avg latency: 74 ms
 Header creation avg latency: 2,168 ms
 	Batch to header avg latency: 1,392 ms
 Header to certificate avg latency: 8 ms
 	Request vote outbound avg latency: 4 ms
 Certificate commit avg latency: 2,930 ms

 Consensus TPS: 46,317 tx/s
 Consensus BPS: 23,714,522 B/s
 Consensus latency: 3,037 ms

 End-to-end TPS: 45,509 tx/s
 End-to-end BPS: 23,300,412 B/s
 End-to-end latency: 4,483 ms
-----------------------------------------
```
joyqvq committed Jan 10, 2023
1 parent 492365e commit f9ddd9d
Showing 9 changed files with 100 additions and 68 deletions.
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

10 changes: 5 additions & 5 deletions crates/sui-keys/src/keypair_file.rs
@@ -5,7 +5,7 @@ use anyhow::anyhow;
use fastcrypto::traits::EncodeDecodeBase64;
use sui_types::crypto::{AuthorityKeyPair, NetworkKeyPair, SuiKeyPair};

/// Write Base64 encoded `flag || privkey || pubkey` to file
/// Write Base64 encoded `flag || privkey` to file.
pub fn write_keypair_to_file<P: AsRef<std::path::Path>>(
keypair: &SuiKeyPair,
path: P,
@@ -15,7 +15,7 @@ pub fn write_keypair_to_file<P: AsRef<std::path::Path>>(
Ok(())
}

/// Write Base64 encoded `privkey || pubkey` to file
/// Write Base64 encoded `privkey` to file.
pub fn write_authority_keypair_to_file<P: AsRef<std::path::Path>>(
keypair: &AuthorityKeyPair,
path: P,
@@ -25,21 +25,21 @@ pub fn write_authority_keypair_to_file<P: AsRef<std::path::Path>>(
Ok(())
}

/// Read from file as Base64 encoded `privkey || pubkey` and return AuthorityKeyPair
/// Read from file as Base64 encoded `privkey` and return an AuthorityKeyPair.
pub fn read_authority_keypair_from_file<P: AsRef<std::path::Path>>(
path: P,
) -> anyhow::Result<AuthorityKeyPair> {
let contents = std::fs::read_to_string(path)?;
AuthorityKeyPair::decode_base64(contents.as_str().trim()).map_err(|e| anyhow!(e))
}

/// Read from file as Base64 encoded `flag || privkey || pubkey` and return SuiKeyapir
/// Read from file as Base64 encoded `flag || privkey` and return a SuiKeyPair.
pub fn read_keypair_from_file<P: AsRef<std::path::Path>>(path: P) -> anyhow::Result<SuiKeyPair> {
let contents = std::fs::read_to_string(path)?;
SuiKeyPair::decode_base64(contents.as_str().trim()).map_err(|e| anyhow!(e))
}

/// Read from file as Base64 encoded `flag || privkey || pubkey` and return NetworkKeyPair using the SuiKeyPair scheme enum.
/// Read from file as Base64 encoded `flag || privkey` and return a NetworkKeyPair.
pub fn read_network_keypair_from_file<P: AsRef<std::path::Path>>(
path: P,
) -> anyhow::Result<NetworkKeyPair> {
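The helpers above make the on-disk formats explicit: authority (BLS12-381) keys are stored as Base64 `privkey` only, while `SuiKeyPair`-encoded network keys carry a leading scheme flag. A minimal round-trip sketch (not part of this commit) of how they compose; it assumes a caller with the same dependencies narwhal/node pulls in below (sui-keys, sui-types, fastcrypto, rand, anyhow), and the file names `primary.key` / `network.key` are illustrative only:

```rust
use anyhow::Result;
use fastcrypto::traits::KeyPair as _;
use sui_keys::keypair_file::{
    read_authority_keypair_from_file, read_network_keypair_from_file,
    write_authority_keypair_to_file, write_keypair_to_file,
};
use sui_types::crypto::{get_key_pair_from_rng, AuthorityKeyPair, NetworkKeyPair, SuiKeyPair};

fn main() -> Result<()> {
    // Authority (BLS12-381) keypair: file holds Base64 `privkey` only.
    let authority: AuthorityKeyPair = get_key_pair_from_rng(&mut rand::rngs::OsRng).1;
    write_authority_keypair_to_file(&authority, "primary.key")?;
    let restored = read_authority_keypair_from_file("primary.key")?;
    assert_eq!(authority.public(), restored.public());

    // Network (Ed25519) keypair: file holds Base64 `flag || privkey`, written
    // through the SuiKeyPair enum so the scheme flag is included.
    let network: NetworkKeyPair = get_key_pair_from_rng(&mut rand::rngs::OsRng).1;
    let network_pub = network.public().clone();
    write_keypair_to_file(&SuiKeyPair::Ed25519(network), "network.key")?;
    let restored = read_network_keypair_from_file("network.key")?;
    assert_eq!(network_pub, *restored.public());
    Ok(())
}
```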
7 changes: 6 additions & 1 deletion narwhal/benchmark/benchmark/commands.py
@@ -33,11 +33,16 @@ def generate_key(filename):
assert isinstance(filename, str)
return f'./narwhal-node generate_keys --filename {filename}'

@staticmethod
def get_pub_key(filename):
assert isinstance(filename, str)
return f'./narwhal-node get_pub_key --filename {filename}'

@staticmethod
def generate_network_key(filename):
assert isinstance(filename, str)
return f'./narwhal-node generate_network_keys --filename {filename}'

@staticmethod
def run_primary(primary_keys, primary_network_keys, worker_keys, committee, workers, store, parameters, debug=False):
assert isinstance(primary_keys, str)
14 changes: 0 additions & 14 deletions narwhal/benchmark/benchmark/config.py
@@ -9,20 +9,6 @@
class ConfigError(Exception):
pass


class Key:
def __init__(self, name, secret):
self.name = name
self.secret = secret

@classmethod
def from_file(cls, filename):
assert isinstance(filename, str)
with open(filename, 'r') as f:
data = load(f)
return cls(data['name'], data['secret'])


class WorkerCache:
''' The worker cache looks as follows:
"workers: {
23 changes: 13 additions & 10 deletions narwhal/benchmark/benchmark/full_demo.py
@@ -8,7 +8,7 @@

from benchmark.commands import CommandMaker
from benchmark.logs import ParseError, LogGrpcParser
from benchmark.config import Key, LocalCommittee, LocalWorkerCache, NodeParameters, BenchParameters, ConfigError
from benchmark.config import LocalCommittee, LocalWorkerCache, NodeParameters, BenchParameters, ConfigError
from benchmark.utils import Print, BenchError, PathMaker


@@ -74,35 +74,38 @@ def run(self, debug=False):
subprocess.run([cmd], shell=True)

# Generate configuration files.
primary_keys = []
primary_names = []
primary_key_files = [
PathMaker.primary_key_file(i) for i in range(nodes)]
for filename in primary_key_files:
cmd = CommandMaker.generate_key(filename).split()
subprocess.run(cmd, check=True)
primary_keys += [Key.from_file(filename)]
primary_names = [x.name for x in primary_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
primary_names += [pk]

primary_network_keys = []
primary_network_names = []
primary_network_key_files = [
PathMaker.primary_network_key_file(i) for i in range(nodes)]
for filename in primary_network_key_files:
cmd = CommandMaker.generate_network_key(filename).split()
subprocess.run(cmd, check=True)
primary_network_keys += [Key.from_file(filename)]
primary_network_names = [x.name for x in primary_network_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
primary_network_names += [pk]

committee = LocalCommittee(primary_names, primary_network_names, self.BASE_PORT)
committee.print(PathMaker.committee_file())

worker_keys = []
worker_names = []
worker_key_files = [PathMaker.worker_key_file(
i) for i in range(self.workers*nodes)]
for filename in worker_key_files:
cmd = CommandMaker.generate_network_key(filename).split()
subprocess.run(cmd, check=True)
worker_keys += [Key.from_file(filename)]
worker_names = [x.name for x in worker_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
worker_names += [pk]

# 2 ports used per authority so add 2 * num authorities to base port
worker_cache = LocalWorkerCache(
24 changes: 13 additions & 11 deletions narwhal/benchmark/benchmark/local.py
@@ -7,7 +7,7 @@
from time import sleep

from benchmark.commands import CommandMaker
from benchmark.config import Key, LocalCommittee, NodeParameters, LocalWorkerCache, BenchParameters, ConfigError
from benchmark.config import LocalCommittee, NodeParameters, LocalWorkerCache, BenchParameters, ConfigError
from benchmark.logs import LogParser, ParseError
from benchmark.utils import Print, BenchError, PathMaker

@@ -63,36 +63,38 @@ def run(self, debug=False):
subprocess.run([cmd], shell=True)

# Generate configuration files.
primary_keys = []
primary_names = []
primary_key_files = [
PathMaker.primary_key_file(i) for i in range(nodes)]
for filename in primary_key_files:
cmd = CommandMaker.generate_key(filename).split()
subprocess.run(cmd, check=True)
primary_keys += [Key.from_file(filename)]
primary_names = [x.name for x in primary_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
primary_names += [pk]

primary_network_keys = []
primary_network_names = []
primary_network_key_files = [
PathMaker.primary_network_key_file(i) for i in range(nodes)]
for filename in primary_network_key_files:
cmd = CommandMaker.generate_network_key(filename).split()
subprocess.run(cmd, check=True)
primary_network_keys += [Key.from_file(filename)]
primary_network_names = [x.name for x in primary_network_keys]

cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
primary_network_names += [pk]
committee = LocalCommittee(
primary_names, primary_network_names, self.BASE_PORT)
committee.print(PathMaker.committee_file())

worker_keys = []
worker_names = []
worker_key_files = [PathMaker.worker_key_file(
i) for i in range(self.workers*nodes)]
for filename in worker_key_files:
cmd = CommandMaker.generate_network_key(filename).split()
subprocess.run(cmd, check=True)
worker_keys += [Key.from_file(filename)]
worker_names = [x.name for x in worker_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
worker_names += [pk]

# 2 ports used per authority so add 2 * num authorities to base port
worker_cache = LocalWorkerCache(
23 changes: 13 additions & 10 deletions narwhal/benchmark/benchmark/remote.py
@@ -12,7 +12,7 @@
from copy import deepcopy
import subprocess

from benchmark.config import Committee, Key, NodeParameters, WorkerCache, BenchParameters, ConfigError
from benchmark.config import Committee, NodeParameters, WorkerCache, BenchParameters, ConfigError
from benchmark.utils import BenchError, Print, PathMaker, progress_bar
from benchmark.commands import CommandMaker
from benchmark.logs import LogParser, ParseError
@@ -181,23 +181,25 @@ def _config(self, hosts, node_parameters, bench_parameters):
subprocess.run([cmd], shell=True)

# Generate configuration files.
primary_keys = []
primary_names = []
primary_key_files = [PathMaker.primary_key_file(
i) for i in range(len(hosts))]
for filename in primary_key_files:
cmd = CommandMaker.generate_key(filename).split()
subprocess.run(cmd, check=True)
primary_keys += [Key.from_file(filename)]
primary_names = [x.name for x in primary_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
primary_names += [pk]

primary_network_keys = []
primary_network_names = []
primary_network_key_files = [PathMaker.primary_network_key_file(
i) for i in range(len(hosts))]
for filename in primary_network_key_files:
cmd = CommandMaker.generate_network_key(filename).split()
subprocess.run(cmd, check=True)
primary_network_keys += [Key.from_file(filename)]
primary_network_names = [x.name for x in primary_network_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
primary_network_names += [pk]

if bench_parameters.collocate:
addresses = OrderedDict(
@@ -210,14 +212,15 @@ def _config(self, hosts, node_parameters, bench_parameters):
committee = Committee(addresses, self.settings.base_port)
committee.print(PathMaker.committee_file())

worker_keys = []
worker_names = []
worker_key_files = [PathMaker.worker_key_file(
i) for i in range(bench_parameters.workers*len(hosts))]
for filename in worker_key_files:
cmd = CommandMaker.generate_network_key(filename).split()
subprocess.run(cmd, check=True)
worker_keys += [Key.from_file(filename)]
worker_names = [x.name for x in worker_keys]
cmd_pk = CommandMaker.get_pub_key(filename).split()
pk = subprocess.check_output(cmd_pk, encoding='utf-8').strip()
worker_names += [pk]

if bench_parameters.collocate:
workers = OrderedDict(
2 changes: 2 additions & 0 deletions narwhal/node/Cargo.toml
@@ -39,6 +39,8 @@ storage = { path = "../storage", package = "narwhal-storage" }
types = { path = "../types", package = "narwhal-types" }
worker = { path = "../worker", package = "narwhal-worker" }
eyre = "0.6.8"
sui-keys = { path = "../../crates/sui-keys" }
sui-types = { path = "../../crates/sui-types" }

mysten-metrics = { path = "../../crates/mysten-metrics" }
store = { path = "../../crates/typed-store", package = "typed-store" }
63 changes: 46 additions & 17 deletions narwhal/node/src/main.rs
@@ -13,7 +13,7 @@ use clap::{crate_name, crate_version, App, AppSettings, ArgMatches, SubCommand};
use config::{Committee, Import, Parameters, WorkerCache, WorkerId};
use crypto::{KeyPair, NetworkKeyPair};
use eyre::Context;
use fastcrypto::{generate_production_keypair, traits::KeyPair as _};
use fastcrypto::traits::KeyPair as _;
use mysten_metrics::RegistryService;
use narwhal_node as node;
use narwhal_node::primary_node::PrimaryNode;
@@ -25,6 +25,11 @@ use node::{
use prometheus::Registry;
use std::sync::Arc;
use storage::NodeStorage;
use sui_keys::keypair_file::{
read_authority_keypair_from_file, read_network_keypair_from_file,
write_authority_keypair_to_file, write_keypair_to_file,
};
use sui_types::crypto::{get_key_pair_from_rng, AuthorityKeyPair, SuiKeyPair};
use telemetry_subscribers::TelemetryGuards;
use tokio::sync::mpsc::channel;
#[cfg(feature = "benchmark")]
@@ -42,13 +47,18 @@
.args_from_usage("-v... 'Sets the level of verbosity'")
.subcommand(
SubCommand::with_name("generate_keys")
.about("Print a fresh key pair to file")
.args_from_usage("--filename=<FILE> 'The file where to print the new key pair'"),
.about("Save an encoded bls12381 keypair (Base64 encoded `privkey`) to file")
.args_from_usage("--filename=<FILE> 'The file where to save the encoded authority key pair'"),
)
.subcommand(
SubCommand::with_name("generate_network_keys")
.about("Print a fresh network key pair (ed25519) to file")
.args_from_usage("--filename=<FILE> 'The file where to print the new network key pair'"),
.about("Save an encoded ed25519 network keypair (Base64 encoded `flag || privkey`) to file")
.args_from_usage("--filename=<FILE> 'The file where to save the encoded network key pair'"),
)
.subcommand(
SubCommand::with_name("get_pub_key")
.about("Get the public key from a keypair file")
.args_from_usage("--filename=<FILE> 'The file where the keypair is stored'"),
)
.subcommand(
SubCommand::with_name("run")
@@ -94,26 +104,45 @@
match matches.subcommand() {
("generate_keys", Some(sub_matches)) => {
let _guard = setup_telemetry(tracing_level, network_tracing_level, None);
let kp = generate_production_keypair::<KeyPair>();
config::Export::export(&kp, sub_matches.value_of("filename").unwrap())
.context("Failed to generate key pair")?;
let key_file = sub_matches.value_of("filename").unwrap();
let keypair: AuthorityKeyPair = get_key_pair_from_rng(&mut rand::rngs::OsRng).1;
write_authority_keypair_to_file(&keypair, key_file).unwrap();
}
("generate_network_keys", Some(sub_matches)) => {
let _guard = setup_telemetry(tracing_level, network_tracing_level, None);
let network_kp = generate_production_keypair::<NetworkKeyPair>();
config::Export::export(&network_kp, sub_matches.value_of("filename").unwrap())
.context("Failed to generate network key pair")?
let network_key_file = sub_matches.value_of("filename").unwrap();
let network_keypair: NetworkKeyPair = get_key_pair_from_rng(&mut rand::rngs::OsRng).1;
write_keypair_to_file(&SuiKeyPair::Ed25519(network_keypair), network_key_file).unwrap();
}
("get_pub_key", Some(sub_matches)) => {
let _guard = setup_telemetry(tracing_level, network_tracing_level, None);
let file = sub_matches.value_of("filename").unwrap();
match read_network_keypair_from_file(file) {
Ok(keypair) => {
// Network keypair file is stored as `flag || privkey`.
println!("{:?}", keypair.public())
}
Err(_) => {
// Authority keypair file is stored as `privkey`.
match read_authority_keypair_from_file(file) {
Ok(kp) => println!("{:?}", kp.public()),
Err(e) => {
println!("Failed to read keypair at path {:?} err: {:?}", file, e)
}
}
}
}
}
("run", Some(sub_matches)) => {
let primary_key_file = sub_matches.value_of("primary-keys").unwrap();
let primary_keypair = KeyPair::import(primary_key_file)
.context("Failed to load the node's primary keypair")?;
let primary_keypair = read_authority_keypair_from_file(primary_key_file)
.expect("Failed to load the node's primary keypair");
let primary_network_key_file = sub_matches.value_of("primary-network-keys").unwrap();
let primary_network_keypair = NetworkKeyPair::import(primary_network_key_file)
.context("Failed to load the node's primary network keypair")?;
let primary_network_keypair = read_network_keypair_from_file(primary_network_key_file)
.expect("Failed to load the node's primary network keypair");
let worker_key_file = sub_matches.value_of("worker-keys").unwrap();
let worker_keypair = NetworkKeyPair::import(worker_key_file)
.context("Failed to load the node's worker keypair")?;
let worker_keypair = read_network_keypair_from_file(worker_key_file)
.expect("Failed to load the node's worker keypair");
let registry = match sub_matches.subcommand() {
("primary", _) => primary_metrics_registry(primary_keypair.public().clone()),
("worker", Some(worker_matches)) => {
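For reference, the resolution rule behind the new `get_pub_key` subcommand, written as a standalone sketch (not part of this commit): the network-key reader (Base64 `flag || privkey`) is tried first, and on failure the file is read as an authority key (Base64 `privkey`). The helper name and its string return value are illustrative only.

```rust
use anyhow::Result;
use fastcrypto::traits::KeyPair as _;
use sui_keys::keypair_file::{read_authority_keypair_from_file, read_network_keypair_from_file};

/// Resolve the public key for either keypair file format, mirroring the
/// try-network-then-authority order used by `get_pub_key` above.
fn pub_key_string(file: &str) -> Result<String> {
    match read_network_keypair_from_file(file) {
        // Ed25519 network keypair: file holds Base64 `flag || privkey`.
        Ok(kp) => Ok(format!("{:?}", kp.public())),
        // Fall back to a BLS12-381 authority keypair: Base64 `privkey` only.
        Err(_) => read_authority_keypair_from_file(file).map(|kp| format!("{:?}", kp.public())),
    }
}
```

The benchmark scripts do not call such a helper directly; they shell out to `./narwhal-node get_pub_key --filename <FILE>` via `CommandMaker.get_pub_key`, as shown in the Python diffs above.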

1 comment on commit f9ddd9d

@github-actions

Owned Transactions Benchmark Results

Benchmark Report:
```
+-------------+-----+--------+-----+-----+-----+-----+-----+-----+-------+-----+
| duration(s) | tps | error% | min | p25 | p50 | p75 | p90 | p99 | p99.9 | max |
+==============================================================================+
| 60          | 100 | 0      | 12  | 56  | 58  | 60  | 97  | 102 | 104   | 104 |
```

Shared Transactions Benchmark Results

Benchmark Report:
```
+-------------+-----+--------+-----+-----+-----+-----+-----+-----+-------+------+
| duration(s) | tps | error% | min | p25 | p50 | p75 | p90 | p99 | p99.9 | max  |
+===============================================================================+
| 60          | 99  | 0      | 18  | 420 | 513 | 610 | 677 | 883 | 1259  | 1289 |
```

Narwhal Benchmark Results

```
 SUMMARY:
-----------------------------------------
 + CONFIG:
 Faults: 0 node(s)
 Committee size: 4 node(s)
 Worker(s) per node: 1 worker(s)
 Collocate primary and workers: True
 Input rate: 50,000 tx/s
 Transaction size: 512 B
 Execution time: 58 s

 Header number of batches threshold: 32 digests
 Header maximum number of batches: 1,000 digests
 Max header delay: 2,000 ms
 GC depth: 50 round(s)
 Sync retry delay: 10,000 ms
 Sync retry nodes: 3 node(s)
 batch size: 500,000 B
 Max batch delay: 200 ms
 Max concurrent requests: 500,000 

 + RESULTS:
 Batch creation avg latency: 77 ms
 Header creation avg latency: 1,756 ms
 	Batch to header avg latency: 954 ms
 Header to certificate avg latency: 7 ms
 	Request vote outbound avg latency: 3 ms
 Certificate commit avg latency: 3,473 ms

 Consensus TPS: 49,713 tx/s
 Consensus BPS: 25,453,107 B/s
 Consensus latency: 3,600 ms

 End-to-end TPS: 48,032 tx/s
 End-to-end BPS: 24,592,422 B/s
 End-to-end latency: 4,595 ms
-----------------------------------------
```
