Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
util, finagle: Port allocation benchmarks to JMH
Motivation: There are a handful of allocation benchmarks that we had written on an internal test tool before JMH supported allocation profiling. These are more valuable as JMH tests. Solution: Port them to util-benchmark and finagle-benchmark. RB_ID=784722
- Loading branch information
1 parent
5217d54
commit a1f925e
Showing
3 changed files
with
185 additions
and
57 deletions.
There are no files selected for viewing
54 changes: 54 additions & 0 deletions
54
util-benchmark/src/main/scala/com/twitter/concurrent/AsyncStreamBenchmark.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
package com.twitter.concurrent | ||
|
||
import com.twitter.conversions.time._ | ||
import com.twitter.util.{Await, StdBenchAnnotations} | ||
import org.openjdk.jmh.annotations._ | ||
|
||
/**
 * Measures allocation/latency of common [[AsyncStream]] combinators.
 *
 * Each benchmark fully evaluates the stream via `toSeq()` and blocks on the
 * result so the work is actually performed inside the measured call.
 */
@State(Scope.Benchmark)
class AsyncStreamBenchmark extends StdBenchAnnotations {

  /** Number of elements in the AsyncStream */
  @Param(Array("10"))
  var size: Int = _

  // Upper bound on every Await below so a wedged stream fails the run
  // instead of hanging the benchmark harness forever.
  private[this] val timeout = 5.seconds

  private[this] var as: AsyncStream[Int] = _

  // Rebuilt per iteration so no benchmark observes a partially-consumed stream.
  @Setup(Level.Iteration)
  def setup(): Unit =
    as = AsyncStream.fromSeq(0.until(size))

  @Benchmark
  def baseline(): Seq[Int] =
    Await.result(as.toSeq(), timeout)

  // Functions are pre-allocated vals so the benchmarks measure the
  // combinator itself, not closure allocation at the call site.
  private[this] val MapFn: Int => Int =
    x => x + 1

  @Benchmark
  def map(): Seq[Int] =
    Await.result(as.map(MapFn).toSeq(), timeout)

  private[this] val FlatMapFn: Int => AsyncStream[Int] =
    x => AsyncStream(x)

  @Benchmark
  def flatMap(): Seq[Int] =
    Await.result(as.flatMap(FlatMapFn).toSeq(), timeout)

  private[this] val FilterFn: Int => Boolean =
    x => x % 2 == 0

  @Benchmark
  def filter(): Seq[Int] =
    Await.result(as.filter(FilterFn).toSeq(), timeout)

  private[this] val TakeWhileFn: Int => Boolean =
    x => x < size / 2

  @Benchmark
  def takeWhile(): Seq[Int] =
    Await.result(as.takeWhile(TakeWhileFn).toSeq(), timeout)

}
164 changes: 131 additions & 33 deletions
164
util-benchmark/src/main/scala/com/twitter/io/BufBenchmark.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,50 +1,148 @@ | ||
package com.twitter.io | ||
|
||
import java.util.concurrent.TimeUnit | ||
import com.twitter.util.StdBenchAnnotations | ||
import java.nio | ||
import org.openjdk.jmh.annotations._ | ||
import org.openjdk.jmh.infra.Blackhole | ||
import scala.util.Random | ||
|
||
/**
 * Measures the core [[Buf]] operations (equality, hashCode, slice, concat,
 * and conversions to/from `ByteBuffer`, `Array[Byte]`, and UTF-8 `String`)
 * across the three main Buf representations: byte-array backed, ByteBuffer
 * backed, and concatenated.
 */
@State(Scope.Benchmark)
class BufBenchmark extends StdBenchAnnotations {

  /** Number of bytes in each Buf under test. */
  @Param(Array("1000"))
  var size: Int = 1000

  private[this] var byteArrayBuf: Buf = _
  private[this] var byteBufferBuf: Buf = _
  private[this] var concatBuf: Buf = _
  private[this] var all: Array[Buf] = _

  private[this] var string: String = _
  private[this] var stringBuf: Buf = _

  @Setup(Level.Iteration)
  def setup(): Unit = {
    // Back the Bufs with a window into a larger array so the offset/length
    // code paths are exercised, not just the zero-offset fast path.
    val cap = size * 2
    val start = cap / 4
    val end = start + size
    val raw = 0.until(cap).map(_.toByte).toArray

    val bb = java.nio.ByteBuffer.wrap(raw, start, size)

    byteArrayBuf = Buf.ByteArray.Owned(raw, start, end)
    byteBufferBuf = Buf.ByteBuffer.Owned(bb)
    concatBuf = byteArrayBuf.slice(0, size / 2).concat(byteArrayBuf.slice(size / 2, size))
    all = Array(byteArrayBuf, byteBufferBuf, concatBuf)

    // Fixed seed keeps the string (and thus the work) identical across runs.
    val rnd = new Random(120412421512L)
    string = rnd.nextString(size)
    stringBuf = Buf.Utf8(string)
  }

  // Compares `buf` against every representation so each equality benchmark
  // covers same-type and cross-type comparisons.
  private[this] def equality(buf: Buf, hole: Blackhole): Unit = {
    var i = 0
    while (i < all.length) {
      hole.consume(buf == all(i))
      i += 1
    }
  }

  @Benchmark
  def equalityByteArrayBuf(hole: Blackhole): Unit =
    equality(byteArrayBuf, hole)

  @Benchmark
  def equalityByteBufferBuf(hole: Blackhole): Unit =
    equality(byteBufferBuf, hole)

  @Benchmark
  def equalityConcatBuf(hole: Blackhole): Unit =
    equality(concatBuf, hole)

  private[this] def hash(buf: Buf): Int = buf.hashCode()

  @Benchmark
  def hashCodeByteArrayBuf(): Int =
    hash(byteArrayBuf)

  @Benchmark
  def hashCodeByteBufferBuf(): Int =
    hash(byteBufferBuf)

  @Benchmark
  def hashCodeConcatBuf(): Int =
    hash(concatBuf)

  // Takes the middle half of the Buf: begins and ends away from the
  // boundaries so it is a "real" slice, not a no-op or empty slice.
  private[this] def slice(buf: Buf): Buf =
    buf.slice(size / 4, size / 4 + size / 2)

  @Benchmark
  def sliceByteArrayBuf(): Buf =
    slice(byteArrayBuf)

  @Benchmark
  def sliceByteBufferBuf(): Buf =
    slice(byteBufferBuf)

  @Benchmark
  def sliceConcatBuf(): Buf =
    slice(concatBuf)

  private[this] def concat(buf: Buf): Buf =
    buf.concat(buf)

  @Benchmark
  def concatByteArrayBuf(): Buf =
    concat(byteArrayBuf)

  @Benchmark
  def concatByteBufferBuf(): Buf =
    concat(byteBufferBuf)

  @Benchmark
  def concatConcatBuf(): Buf =
    concat(concatBuf)

  private[this] def asByteBuffer(buf: Buf): nio.ByteBuffer =
    Buf.ByteBuffer.Owned.extract(buf)

  @Benchmark
  def asByteBufferByteArrayBuf(): nio.ByteBuffer =
    asByteBuffer(byteArrayBuf)

  @Benchmark
  def asByteBufferByteBufferBuf(): nio.ByteBuffer =
    asByteBuffer(byteBufferBuf)

  @Benchmark
  def asByteBufferConcatBuf(): nio.ByteBuffer =
    asByteBuffer(concatBuf)

  private[this] def asByteArray(buf: Buf): Array[Byte] =
    Buf.ByteArray.Owned.extract(buf)

  @Benchmark
  def asByteArrayByteArrayBuf(): Array[Byte] =
    asByteArray(byteArrayBuf)

  @Benchmark
  def asByteArrayByteBufferBuf(): Array[Byte] =
    asByteArray(byteBufferBuf)

  @Benchmark
  def asByteArrayConcatBuf(): Array[Byte] =
    asByteArray(concatBuf)

  @Benchmark
  def stringToUtf8Buf(): Buf =
    Buf.Utf8(string)

  @Benchmark
  def utf8BufToString(): String = {
    val Buf.Utf8(str) = stringBuf
    str
  }

}
24 changes: 0 additions & 24 deletions
24
util-benchmark/src/main/scala/com/twitter/util/events/SinkBenchmark.scala
This file was deleted.
(The contents of the deleted file could not be displayed on this page.)