Skip to content

Commit

Permalink
Merge pull request #50 from florian1345/more-benches
Browse files Browse the repository at this point in the history
Add benches for ejecting insert and ejecting/non-ejecting mutate
  • Loading branch information
florian1345 committed Aug 7, 2023
2 parents 0d7c099 + c07f523 commit bfd3e00
Show file tree
Hide file tree
Showing 8 changed files with 151 additions and 49 deletions.
109 changes: 72 additions & 37 deletions benches/bench/bencher_extensions.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use std::iter::{self, Empty, Once};
use std::iter::{self, Once};
use std::time::{Duration, Instant};
use std::array::IntoIter as ArrayIntoIter;
use std::vec::IntoIter as VecIntoIter;

use criterion::BenchmarkGroup;
Expand All @@ -14,14 +15,6 @@ pub(crate) trait KeysToRestore {
fn keys(self) -> Self::KeyIter;
}

impl KeysToRestore for () {
type KeyIter = Empty<String>;

fn keys(self) -> Empty<String> {
iter::empty()
}
}

impl KeysToRestore for String {
type KeyIter = Once<String>;

Expand All @@ -30,6 +23,14 @@ impl KeysToRestore for String {
}
}

/// A fixed-size batch of keys to restore: every element of the array is a
/// key that must be regenerated after an iteration.
impl<const N: usize> KeysToRestore for [String; N] {
    type KeyIter = ArrayIntoIter<String, N>;

    fn keys(self) -> ArrayIntoIter<String, N> {
        IntoIterator::into_iter(self)
    }
}

impl KeysToRestore for Vec<String> {
type KeyIter = VecIntoIter<String>;

Expand All @@ -56,21 +57,33 @@ pub(crate) trait CacheBenchmarkGroup {
/// with `size` elements on each iteration. As a second argument, a slice of
/// all initial keys is provided. The cache is not repaired in any way after
/// an iteration, so it is the routine's responsibility not to change the key
/// set.
fn bench_with_cache<O, R>(&mut self, routine: R, size: usize)
/// set. The max capacity is set to the current capacity after filling it
/// up.
fn bench_with_capped_cache<O, R>(&mut self, routine: R, size: usize)
where
R: FnMut(&mut LruCache<String, String>, &[String]) -> O;

/// Benchmarks the given `routine`, which is supplied with a mutable
/// reference to the same `LruCache` on each iteration. Initially, the cache
/// is filled to the given `size`. After each iteration, every removed key,
/// as indicated by the [KeysToRestore] return value of the routine, is
/// restored. As a second argument, the routine gets a slice of all keys.
/// is filled to the given `size`. After each iteration, every removed key
/// or key whose entry was altered in a way that requires restoration, as
/// indicated by the [KeysToRestore] return value of the routine, is
/// regenerated. As a second argument, the routine gets a slice of all keys.
fn bench_with_refilled_cache<O, R>(&mut self, routine: R, size: usize)
where
O: KeysToRestore,
R: FnMut(&mut LruCache<String, String>, &[String]) -> O;

/// Same as [CacheBenchmarkGroup::bench_with_refilled_cache], but in
/// addition, the cache is capped. That means that the max capacity is set
/// to the current capacity after filling it up. Any expansion/insertion
/// will lead to LRU ejection. This has to be considered when deciding
/// which keys to report for restoration via the [KeysToRestore] return value.
fn bench_with_refilled_capped_cache<O, R>(&mut self, routine: R, size: usize)
where
O: KeysToRestore,
R: FnMut(&mut LruCache<String, String>, &[String]) -> O;

/// Benchmarks the given `routine`, which is supplied with a mutable
/// reference to the same `LruCache` on each iteration. Should the size of
/// the cache be greater than `max_size` after any iteration, it will be
Expand Down Expand Up @@ -122,6 +135,40 @@ where
}
}

/// Shared implementation behind the refilled-cache benchmark methods.
///
/// Builds an `LruCache` with `size` entries and, if `cap` is set, caps its
/// max size at the current size so that expanding operations eject entries.
/// `routine` is then timed repeatedly; the time spent restoring the keys it
/// reports (via [KeysToRestore]) is excluded from the measurement.
fn bench_with_refilled_cache<O, R>(group: &mut BenchmarkGroup<'_, WallTime>,
        mut routine: R, size: usize, cap: bool)
where
    O: KeysToRestore,
    R: FnMut(&mut LruCache<String, String>, &[String]) -> O
{
    let id = crate::get_id(size);
    let mut cache = LruCache::with_capacity(usize::MAX, size);
    fill_to_size(&mut cache, size);
    let keys = cache.keys().cloned().collect::<Vec<_>>();

    if cap {
        // Cap at the exact filled size: any growth now causes LRU ejection.
        cache.set_max_size(cache.current_size());
    }

    group.bench_function(id, |bencher| bencher.iter_custom(|iter_count| {
        let mut elapsed = Duration::ZERO;
        let mut iterations = 0;

        loop {
            // Only the routine itself is timed; restoration happens outside
            // the measured window.
            let start = Instant::now();
            let to_restore = routine(&mut cache, &keys);
            elapsed += start.elapsed();

            restore_keys(&mut cache, to_restore);
            iterations += 1;

            if iterations >= iter_count {
                break elapsed;
            }
        }
    }));
}

impl<'a> CacheBenchmarkGroup for BenchmarkGroup<'a, WallTime> {

fn bench_with_reset_cache<O, Rou, Res>(&mut self, mut routine: Rou,
Expand Down Expand Up @@ -149,45 +196,33 @@ impl<'a> CacheBenchmarkGroup for BenchmarkGroup<'a, WallTime> {
}));
}

fn bench_with_cache<O, R>(&mut self, mut routine: R, size: usize)
fn bench_with_capped_cache<O, R>(&mut self, mut routine: R, size: usize)
where
R: FnMut(&mut LruCache<String, String>, &[String]) -> O
{
let id = crate::get_id(size);
let mut cache = LruCache::with_capacity(usize::MAX, size);
fill_to_size(&mut cache, size);
cache.set_max_size(cache.current_size());
let keys = cache.keys().cloned().collect::<Vec<_>>();

self.bench_function(id, |group| group.iter(|| routine(&mut cache, &keys)));
}

fn bench_with_refilled_cache<O, R>(&mut self, mut routine: R, size: usize)
fn bench_with_refilled_cache<O, R>(&mut self, routine: R, size: usize)
where
O: KeysToRestore,
R: FnMut(&mut LruCache<String, String>, &[String]) -> O
{
let id = crate::get_id(size);
let mut cache = LruCache::with_capacity(usize::MAX, size);
fill_to_size(&mut cache, size);
let keys = cache.keys().cloned().collect::<Vec<_>>();

self.bench_function(id, |bencher| bencher.iter_custom(|iter_count| {
let mut completed = 0;
let mut total = Duration::ZERO;

loop {
let before = Instant::now();
let keys_to_restore = routine(&mut cache, &keys);
total += before.elapsed();

restore_keys(&mut cache, keys_to_restore);
completed += 1;
bench_with_refilled_cache(self, routine, size, false)
}

if completed >= iter_count {
return total;
}
}
}));
fn bench_with_refilled_capped_cache<O, R>(&mut self, routine: R, size: usize)
where
O: KeysToRestore,
R: FnMut(&mut LruCache<String, String>, &[String]) -> O
{
bench_with_refilled_cache(self, routine, size, true)
}

fn bench_with_depleted_cache<O, R>(&mut self, mut routine: R,
Expand Down
2 changes: 1 addition & 1 deletion benches/bench/clone.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ pub(crate) fn clone_benchmark(c: &mut Criterion) {
let mut group = crate::make_group(c, "clone");

for &size in crate::LINEAR_TIME_SIZES {
group.bench_with_cache(|cache, _| {
group.bench_with_capped_cache(|cache, _| {
black_box(cache.clone());
}, size);
}
Expand Down
2 changes: 1 addition & 1 deletion benches/bench/get.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ pub(crate) fn get_benchmark(c: &mut Criterion) {
let mut rng = rand::thread_rng();

for &size in crate::CONSTANT_TIME_SIZES {
group.bench_with_cache(|cache, keys| {
group.bench_with_capped_cache(|cache, keys| {
let key_index = rng.gen_range(0..keys.len());
cache.get(&keys[key_index]);
}, size);
Expand Down
31 changes: 24 additions & 7 deletions benches/bench/insert.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,33 @@
use criterion::Criterion;
use lru_mem::LruCache;

use crate::bencher_extensions::CacheBenchmarkGroup;

pub(crate) fn insert_benchmark(c: &mut Criterion) {
let mut group = crate::make_group(c, "insert");
/// Inserts a fresh, zero-padded 12-digit hexadecimal key with an empty value
/// into the given cache, then advances the key counter so the next call
/// produces a distinct key.
#[inline]
fn insert_and_increment(cache: &mut LruCache<String, String>, key_idx: &mut u64) {
    let key = format!("{:012x}", *key_idx);
    cache.insert(key, String::new()).unwrap();
    *key_idx += 1;
}

/// Benchmarks insertion into a cache that still has spare capacity, so no
/// LRU ejection occurs. The cache is drained back down whenever it exceeds
/// `size`, keeping it between 7/8 of `size` and `size` entries.
pub(crate) fn insert_no_eject_benchmark(c: &mut Criterion) {
    let mut group = crate::make_group(c, "insert-no-eject");

    for &size in crate::CONSTANT_TIME_SIZES {
        let lower_bound = size * 7 / 8;
        let mut next_key: u64 = 0;

        group.bench_with_depleted_cache(
            |cache| insert_and_increment(cache, &mut next_key),
            lower_bound, size);
    }
}

/// Benchmarks insertion into a cache capped at its filled size, so every
/// insert triggers LRU ejection of the least-recently-used entry.
pub(crate) fn insert_eject_benchmark(c: &mut Criterion) {
    let mut group = crate::make_group(c, "insert-eject");

    for &size in crate::CONSTANT_TIME_SIZES {
        // u64 counter: the key space must not wrap within a benchmark run.
        let mut key_idx: u64 = 0;

        group.bench_with_capped_cache(
            |cache, _| insert_and_increment(cache, &mut key_idx), size);
    }
}
2 changes: 1 addition & 1 deletion benches/bench/iter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ pub(crate) fn iter_benchmark(c: &mut Criterion) {
let mut group = crate::make_group(c, "iter");

for &size in crate::LINEAR_TIME_SIZES {
group.bench_with_cache(|cache, _| {
group.bench_with_capped_cache(|cache, _| {
for entry in cache.iter() {
criterion::black_box(entry);
}
Expand Down
6 changes: 5 additions & 1 deletion benches/bench/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ mod get;
mod heap_size;
mod insert;
mod iter;
mod mutate;
mod peek;
mod remove;
mod retain;
Expand Down Expand Up @@ -59,8 +60,11 @@ criterion::criterion_group!(benches,
drain::drain_benchmark,
get::get_benchmark,
heap_size::heap_size_benchmark,
insert::insert_benchmark,
insert::insert_no_eject_benchmark,
insert::insert_eject_benchmark,
iter::iter_benchmark,
mutate::mutate_no_eject_benchmark,
mutate::mutate_eject_benchmark,
peek::peek_benchmark,
remove::remove_benchmark,
retain::retain_benchmark,
Expand Down
46 changes: 46 additions & 0 deletions benches/bench/mutate.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
use criterion::Criterion;
use rand::Rng;

use crate::bencher_extensions::CacheBenchmarkGroup;

/// Swaps the last character of `string` for a different one without changing
/// the string's length (and hence its heap size): the final byte `c` is
/// replaced by `b'0' + b'f' - c`, an involution that maps '0' <-> 'f',
/// 'a' <-> '5', etc. Panics if `string` is empty.
fn mutate_in_place(string: &mut String) {
    let last = string.pop().unwrap();
    let mirrored = char::from(b'0' + b'f' - last as u8);
    string.push(mirrored);
}

/// Appends '0' characters to `string` until its capacity grows beyond the
/// initial one, forcing a reallocation — i.e. a heap-size increase that can
/// trigger ejection when the containing cache is capped.
fn mutate_expanding(string: &mut String) {
    let initial_capacity = string.capacity();

    loop {
        if string.capacity() > initial_capacity {
            break;
        }

        string.push('0');
    }
}

/// Benchmarks in-place mutation of a random entry in a capped cache. The
/// mutation keeps the entry's size constant, so no ejection occurs and the
/// key set is left unchanged.
pub(crate) fn mutate_no_eject_benchmark(c: &mut Criterion) {
    let mut group = crate::make_group(c, "mutate-no-eject");
    let mut rng = rand::thread_rng();

    for &size in crate::CONSTANT_TIME_SIZES {
        group.bench_with_capped_cache(|cache, keys| {
            let index = rng.gen_range(0..keys.len());
            let key = &keys[index];
            cache.mutate(key, mutate_in_place).unwrap();
        }, size);
    }
}

/// Benchmarks an expanding mutation of a random entry in a capped cache.
/// Growing the entry ejects the LRU entry, so both the ejected key and the
/// mutated key are reported for restoration after each iteration.
pub(crate) fn mutate_eject_benchmark(c: &mut Criterion) {
    let mut group = crate::make_group(c, "mutate-eject");
    let mut rng = rand::thread_rng();

    for &size in crate::CONSTANT_TIME_SIZES {
        group.bench_with_refilled_capped_cache(|cache, keys| {
            let target = keys[rng.gen_range(0..keys.len())].clone();
            // Capture the LRU key before mutating: it is the entry that the
            // expansion will eject.
            let ejected = cache.peek_lru().unwrap().0.clone();
            cache.mutate(&target, mutate_expanding).unwrap();
            [ejected, target]
        }, size);
    }
}
2 changes: 1 addition & 1 deletion benches/bench/peek.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ pub(crate) fn peek_benchmark(c: &mut Criterion) {
let mut rng = rand::thread_rng();

for &size in crate::CONSTANT_TIME_SIZES {
group.bench_with_cache(|cache, keys| {
group.bench_with_capped_cache(|cache, keys| {
let key_index = rng.gen_range(0..keys.len());
cache.peek(&keys[key_index]);
}, size);
Expand Down

0 comments on commit bfd3e00

Please sign in to comment.