
Commit

Added tests.
xnuter committed Sep 23, 2020
1 parent 36532d7 commit d4db9bd
Showing 6 changed files with 300 additions and 33 deletions.
14 changes: 14 additions & 0 deletions .github/actions-rs/grcov.yml
@@ -0,0 +1,14 @@
output-type: lcov
output-path: ./lcov.info
source-dir: ./src
ignore-dir:
- "*.cargo/*"
ignore:
- "*.cargo*"
- "*rust*"
- "*configuration.rs"
- "*main.rs"
ignore-not-existing: true
llvm: true
excl-start: (.*)begin-ignore-line(.*)
excl-stop: (.*)end-ignore-line(.*)
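
The `excl-start` and `excl-stop` patterns define region markers for excluding lines from the coverage report. A minimal sketch (hypothetical code, not from this repository) of how the markers would be placed in Rust source:

fn covered() {
    println!("this line is counted in the coverage report");
}

// begin-ignore-line
fn excluded() {
    // Every line between the two markers is skipped by grcov.
    println!("this line is not counted");
}
// end-ignore-line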
62 changes: 62 additions & 0 deletions .github/workflows/grcov.yml
@@ -0,0 +1,62 @@
on: [push, pull_request]

name: Code coverage with grcov

jobs:
  grcov:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os:
          - ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: Install toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
          profile: minimal

      - name: Execute tests
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: --all
        env:
          CARGO_INCREMENTAL: 0
          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests"

      - name: Gather coverage data
        id: coverage
        uses: actions-rs/grcov@v0.1

      - name: Pre-installing rust-covfix
        uses: actions-rs/install@v0.1
        with:
          crate: rust-covfix
          use-tool-cache: true

      - name: Fix coverage data
        id: fix-coverage
        continue-on-error: true
        run: rust-covfix lcov.info -o lcov.info

      - name: Coveralls upload
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          parallel: true
          path-to-lcov: lcov.info

  grcov_finalize:
    runs-on: ubuntu-latest
    needs: grcov
    steps:
      - name: Coveralls finalization
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          parallel-finished: true
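
One subtlety in the RUSTFLAGS above: `-Cpanic=abort` alone would break the test harness's ability to observe panics, which is why `-Zpanic_abort_tests` is paired with it (it isolates each test in its own process so an aborting panic is still recorded as a failure or an expected panic). A hypothetical test of the kind that depends on this pairing, not from this repository:

#[test]
#[should_panic(expected = "boom")]
fn aborting_panic_is_still_reported() {
    // Under -Cpanic=abort a panic aborts the process; -Zpanic_abort_tests
    // keeps the harness alive so this expected panic still passes.
    panic!("boom");
}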
18 changes: 18 additions & 0 deletions .github/workflows/tests.yml
@@ -0,0 +1,18 @@
on: [push, pull_request]
name: Tests

env:
  CARGO_TERM_COLOR: always

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Build
        run: cargo build --verbose
      - name: Run tests
        run: cargo test --verbose

7 changes: 5 additions & 2 deletions Cargo.toml
@@ -26,9 +26,12 @@ serde = { version = "1.0", features = ["derive"] }
 serde_derive = "1.0"
 serde_yaml = "0.8"
 serde_json = "1.0"
-leaky-bucket="0.7"
+leaky-bucket = "0.7"
 async-trait = "0.1"
-#hyper="0.13"
+
+[dev-dependencies]
+mockito = "0.27"
+tokio-test = "0.2"
 
 [profile.dev]
 opt-level = 0
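
The new `[dev-dependencies]` serve the tests added in this commit: `mockito` provides a local HTTP mock server, and `tokio-test` offers utilities for driving async code from synchronous tests. A minimal sketch (hypothetical test, not part of this commit) of what `tokio-test` enables:

#[cfg(test)]
mod example {
    // tokio_test::block_on runs a future to completion on a lightweight
    // runtime, so a plain #[test] can exercise async code.
    #[test]
    fn awaits_a_future() {
        let value = tokio_test::block_on(async { 21 * 2 });
        assert_eq!(42, value);
    }
}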
159 changes: 128 additions & 31 deletions src/bench_session.rs
@@ -77,43 +77,140 @@ impl fmt::Display for BenchRun {
             self.total_bytes as f64 / elapsed.as_secs_f64()
         )?;
 
-        writeln!(f)?;
-
-        let mut pairs: Vec<(String, i32)> =
-            self.summary.iter().map(|(k, v)| (k.clone(), *v)).collect();
-
-        pairs.sort_by(|a, b| {
-            let d = b.1 - a.1;
-            match d {
-                _ if d > 0 => cmp::Ordering::Greater,
-                _ if d < 0 => cmp::Ordering::Less,
-                _ => a.0.cmp(&b.0),
-            }
-        });
-
-        writeln!(f, "Summary:")?;
-        for pair in pairs {
-            writeln!(f, "{}: {}", pair.0, pair.1)?;
-        }
-
-        writeln!(f)?;
-
-        writeln!(
-            f,
-            "Percentiles: p50: {}µs p90: {}µs p99: {}µs p99.9: {} µs",
-            self.latencies.percentile(50.0).unwrap(),
-            self.latencies.percentile(90.0).unwrap(),
-            self.latencies.percentile(99.0).unwrap(),
-            self.latencies.percentile(99.9).unwrap(),
-        )?;
-
-        writeln!(
-            f,
-            "Latency (µs): Min: {}µs Avg: {}µs Max: {}µs StdDev: {}µs",
-            self.latencies.minimum().unwrap(),
-            self.latencies.mean().unwrap(),
-            self.latencies.maximum().unwrap(),
-            self.latencies.stddev().unwrap(),
-        )
+        if !self.summary.is_empty() {
+            writeln!(f)?;
+
+            let mut pairs: Vec<(String, i32)> =
+                self.summary.iter().map(|(k, v)| (k.clone(), *v)).collect();
+
+            pairs.sort_by(|a, b| {
+                let d = b.1 - a.1;
+                match d {
+                    1..=0x7fffffff => cmp::Ordering::Greater,
+                    0 => a.0.cmp(&b.0),
+                    _ => cmp::Ordering::Less,
+                }
+            });
+
+            writeln!(f, "Summary:")?;
+            for pair in pairs {
+                writeln!(f, "{}: {}", pair.0, pair.1)?;
+            }
+        }
+
+        if self.latencies.entries() > 0 {
+            writeln!(f)?;
+            writeln!(
+                f,
+                "Percentiles: p50: {}µs p90: {}µs p99: {}µs p99.9: {}µs",
+                self.latencies.percentile(50.0).unwrap(),
+                self.latencies.percentile(90.0).unwrap(),
+                self.latencies.percentile(99.0).unwrap(),
+                self.latencies.percentile(99.9).unwrap(),
+            )?;
+
+            writeln!(
+                f,
+                "Latency (µs): Min: {}µs Avg: {}µs Max: {}µs StdDev: {}µs",
+                self.latencies.minimum().unwrap(),
+                self.latencies.mean().unwrap(),
+                self.latencies.maximum().unwrap(),
+                self.latencies.stddev().unwrap(),
+            )
+        } else {
+            writeln!(f)
+        }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::bench_session::BenchRun;
+
+    #[test]
+    fn test_codes() {
+        let mut bench_run = BenchRun::new();
+        bench_run.increment("200 OK".to_string());
+        bench_run.increment("200 OK".to_string());
+        bench_run.increment("400 BAD_REQUEST".to_string());
+        bench_run.increment("502 BAD_GATEWAY".to_string());
+        bench_run.increment("502 BAD_GATEWAY".to_string());
+        bench_run.increment("502 BAD_GATEWAY".to_string());
+
+        let as_str = bench_run.to_string();
+        assert!(as_str.contains("400 BAD_REQUEST: 1"));
+        assert!(as_str.contains("200 OK: 2"));
+        assert!(as_str.contains("502 BAD_GATEWAY: 3"));
+    }
+
+    #[test]
+    fn test_latencies() {
+        let mut bench_run = BenchRun::new();
+        for i in 0..1000 {
+            bench_run.report_latency(i).expect("Shouldn't fail");
+        }
+
+        let as_str = bench_run.to_string();
+
+        println!("{}", as_str);
+
+        assert!(as_str.contains("p50: 500µs "));
+        assert!(as_str.contains("p90: 900µs "));
+        assert!(as_str.contains("p99: 990µs "));
+        assert!(as_str.contains("p99.9: 999µs"));
+
+        assert!(as_str.contains("Min: 0µs "));
+        assert!(as_str.contains("Avg: 500µs "));
+        assert!(as_str.contains("Max: 999µs "));
+        assert!(as_str.contains("StdDev: 289µs"));
+    }
+
+    #[test]
+    fn test_merge() {
+        let mut b1 = BenchRun::new();
+        let mut b2 = BenchRun::new();
+
+        b1.total_bytes += 1;
+        b2.total_bytes += 10;
+
+        b1.total_requests += 1;
+        b2.total_requests += 10;
+
+        for i in 0..500 {
+            b1.report_latency(i).expect("Shouldn't fail");
+        }
+        for i in 500..1000 {
+            b2.report_latency(i).expect("Shouldn't fail");
+        }
+
+        b1.increment("200 OK".to_string());
+        b2.increment("200 OK".to_string());
+        b2.increment("400 BAD_REQUEST".to_string());
+        b2.increment("502 BAD_GATEWAY".to_string());
+        b1.increment("502 BAD_GATEWAY".to_string());
+        b2.increment("502 BAD_GATEWAY".to_string());
+
+        b1.merge(&b2);
+
+        let as_str = b1.to_string();
+
+        println!("{}", as_str);
+
+        assert!(as_str.contains("Total bytes: 11."));
+        assert!(as_str.contains("Bytes per request: 1.000."));
+
+        assert!(as_str.contains("400 BAD_REQUEST: 1"));
+        assert!(as_str.contains("200 OK: 2"));
+        assert!(as_str.contains("502 BAD_GATEWAY: 3"));
+
+        assert!(as_str.contains("p50: 500µs "));
+        assert!(as_str.contains("p90: 900µs "));
+        assert!(as_str.contains("p99: 990µs "));
+        assert!(as_str.contains("p99.9: 999µs"));
+
+        assert!(as_str.contains("Min: 0µs "));
+        assert!(as_str.contains("Avg: 500µs "));
+        assert!(as_str.contains("Max: 999µs "));
+        assert!(as_str.contains("StdDev: 289µs"));
+    }
+}
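
The `test_codes` assertions above exercise the comparator inside `fmt::Display`. A standalone sketch (illustrative only, using std alone) showing that it orders status counts in descending order, breaking ties alphabetically by status:

use std::cmp;

fn main() {
    let mut pairs = vec![
        ("200 OK".to_string(), 2),
        ("502 BAD_GATEWAY".to_string(), 3),
        ("400 BAD_REQUEST".to_string(), 1),
    ];
    // d > 0 means b has the higher count, so a sorts after b:
    // larger counts come first; equal counts fall back to key order.
    pairs.sort_by(|a, b| {
        let d = b.1 - a.1;
        match d {
            1..=0x7fffffff => cmp::Ordering::Greater,
            0 => a.0.cmp(&b.0),
            _ => cmp::Ordering::Less,
        }
    });
    // Prints 502 BAD_GATEWAY: 3, then 200 OK: 2, then 400 BAD_REQUEST: 1.
    for (status, count) in pairs {
        println!("{}: {}", status, count);
    }
}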
73 changes: 73 additions & 0 deletions src/http_bench_session.rs
@@ -73,3 +73,76 @@ impl BenchClient for HttpBenchmark {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::bench_session::BenchClient;
+    use crate::http_bench_session::{HttpBenchmark, HttpBenchmarkBuilder};
+    use mockito::mock;
+
+    #[tokio::test]
+    async fn test_success_request() {
+        let body = "world";
+
+        let _m = mock("GET", "/1")
+            .with_status(200)
+            .with_header("content-type", "text/plain")
+            .with_body(body)
+            .create();
+
+        let url = mockito::server_url().to_string();
+        println!("Url: {}", url);
+        let http_bench: HttpBenchmark = HttpBenchmarkBuilder::default()
+            .url(format!("{}/1", url))
+            .tunnel(None)
+            .ignore_cert(true)
+            .conn_reuse(true)
+            .store_cookies(true)
+            .http2_only(false)
+            .verbose(false)
+            .build()
+            .unwrap();
+
+        let client = http_bench.build_client().expect("Client is built");
+        let result = http_bench.send_request(&client).await;
+
+        assert!(result.is_ok());
+        let stats = result.unwrap();
+        println!("{:?}", stats);
+        assert_eq!(body.len(), stats.bytes_processed);
+        assert_eq!("200 OK".to_string(), stats.status);
+    }
+
+    #[tokio::test]
+    async fn test_failed_request() {
+        let body = "world";
+
+        let _m = mock("GET", "/1")
+            .with_status(500)
+            .with_header("content-type", "text/plain")
+            .with_body(body)
+            .create();
+
+        let url = mockito::server_url().to_string();
+        println!("Url: {}", url);
+        let http_bench: HttpBenchmark = HttpBenchmarkBuilder::default()
+            .url(format!("{}/1", url))
+            .tunnel(None)
+            .ignore_cert(true)
+            .conn_reuse(true)
+            .store_cookies(true)
+            .http2_only(false)
+            .verbose(false)
+            .build()
+            .unwrap();
+
+        let client = http_bench.build_client().expect("Client is built");
+        let result = http_bench.send_request(&client).await;
+
+        assert!(result.is_ok());
+        let stats = result.unwrap();
+        println!("{:?}", stats);
+        assert_eq!(body.len(), stats.bytes_processed);
+        assert_eq!("500 Internal Server Error".to_string(), stats.status);
+    }
+}
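
Both tests follow the same mockito pattern: register a canned route, point the client at `mockito::server_url()`, then assert on the parsed response. A standalone sketch of that pattern (the `reqwest` client here is purely illustrative and is not one of this crate's dependencies):

use mockito::mock;

#[tokio::test]
async fn mock_server_roundtrip() {
    // The returned guard keeps the mock registered until it is dropped.
    let _m = mock("GET", "/hello")
        .with_status(200)
        .with_body("world")
        .create();

    let url = format!("{}/hello", mockito::server_url());
    let response = reqwest::get(&url).await.expect("request should succeed");

    assert_eq!(200, response.status().as_u16());
    assert_eq!("world", response.text().await.expect("readable body"));
}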
