Initial build capability supporting benchmark creation.
Signed-off-by: Simeon H.K. fitch <fitch@astraea.io>
metasim authored and echeipesh committed Sep 29, 2017
1 parent 9ca19e3 commit b1ae13f
Showing 6 changed files with 67 additions and 1 deletion.
9 changes: 9 additions & 0 deletions bench/README.md
@@ -0,0 +1,9 @@
# GeoTrellis Performance Benchmarking

To run from the project root directory:

```
sbt bench/bench
```

Results will be in `bench/target/jmh-results-<datestamp>.csv`.
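
The `bench` task defined in `bench/build.sbt` (next file) just forwards to sbt-jmh's `jmh:run`. As a rough sketch of the equivalent manual invocation (the benchmark regex and output path here are illustrative, not taken from the commit):

```
sbt "bench/jmh:run -rf csv -rff bench/target/jmh-results.csv .*Bench.*"
```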
27 changes: 27 additions & 0 deletions bench/build.sbt
@@ -0,0 +1,27 @@
import java.text.SimpleDateFormat
import java.util.Date

import scala.util.matching.Regex

enablePlugins(JmhPlugin)

val jmhOutputFormat = settingKey[String]("Output format: {text|csv|scsv|json|latex}")
jmhOutputFormat := "csv"

val jmhFileRegex = settingKey[Regex]("Filename regular expression for selecting files to benchmark")
jmhFileRegex := ".*Bench.*".r

val jmhRun = Def.taskDyn {
val rf = jmhOutputFormat.value
def timestamp = new SimpleDateFormat("yyyyMMdd").format(new Date())
val rff = target.value / s"jmh-results-$timestamp.${jmhOutputFormat.value}"
val pat = jmhFileRegex.value.toString

val args = s" -rf $rf -rff $rff $pat"
(run in Jmh).toTask(args)
}

val bench = taskKey[Unit]("Run JMH")
bench := jmhRun.value
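
Because `jmhOutputFormat` and `jmhFileRegex` are ordinary sbt settings, they can be overridden without touching the `bench` task itself. A minimal sketch, assuming the overrides live in `bench/build.sbt` (the values below are illustrative, not part of the commit):

```
// Illustrative overrides: emit JSON results and benchmark only the
// classes matched by the filename regex.
jmhOutputFormat := "json"
jmhFileRegex := ".*MergeQueueBench.*".r
```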


@@ -0,0 +1,24 @@
package geotrellis.spark.io.index


import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Warmup(iterations = 5)
@Measurement(iterations = 10)
@Fork(1)
@Threads(1)
@State(Scope.Thread)
class MergeQueueBench {
  val queue = new MergeQueue()

  @Benchmark
  def mergeOrderedDense = {
    for(i <- -10000 to 10000) {
      queue += (i, i + 10)
    }
    queue
  }
}
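
Note that the benchmark above reuses the same `queue` instance across invocations, so later measurements run against an already-populated MergeQueue. A possible variant, sketched here on the assumption that per-invocation setup cost is acceptable (the class name `MergeQueueResetBench` is hypothetical and not part of the commit), resets the state with JMH's `@Setup(Level.Invocation)`:

```
package geotrellis.spark.io.index

import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Warmup(iterations = 5)
@Measurement(iterations = 10)
@Fork(1)
@Threads(1)
@State(Scope.Thread)
class MergeQueueResetBench {
  var queue: MergeQueue = _

  // Hypothetical variant: rebuild the queue before every invocation so each
  // measurement starts from an empty MergeQueue rather than one that already
  // holds ranges from earlier iterations.
  @Setup(Level.Invocation)
  def reset(): Unit = {
    queue = new MergeQueue()
  }

  @Benchmark
  def mergeOrderedDense = {
    for(i <- -10000 to 10000) {
      queue += (i, i + 10)
    }
    queue
  }
}
```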
4 changes: 4 additions & 0 deletions build.sbt
@@ -235,3 +235,7 @@ lazy val util = project
lazy val `doc-examples` = project
.dependsOn(spark, s3, accumulo, cassandra, hbase, spark, `spark-testkit`)
.settings(commonSettings)

lazy val bench = project
.dependsOn(spark)
.settings(commonSettings)
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
sbt.version=0.13.15
sbt.version=0.13.16
2 changes: 2 additions & 0 deletions project/plugins.sbt
@@ -16,3 +16,5 @@ addSbtPlugin("me.lessis" % "ls-sbt" % "0.1.3")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.0")

addSbtPlugin("de.heikoseeberger" % "sbt-header" % "1.7.0")

addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27")
