-
Notifications
You must be signed in to change notification settings - Fork 0
/
Benchmark.scala
115 lines (102 loc) · 3.73 KB
/
Benchmark.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
package net.houzuo.aurinko2.test.logic
import scala.util.Random
import org.scalatest.FunSuite
import TemporaryFactory.collection
import net.houzuo.aurinko2.logic.Query
import net.houzuo.aurinko2.test.TimedExecution.average
import net.houzuo.aurinko2.test.TimedExecution.time
class Benchmark extends FunSuite {
  val random = new Random
  /*
   * This benchmark spawns multiple threads to simulate concurrent read and write operations.
   */
  test("collection performance benchmark") {
    val numThreads = Runtime.getRuntime.availableProcessors * 4
    val iterations = 200000
    val col = collection
    col.index(List("i1", "i2"), 14, 200)
    col.index(List("j1", "j2"), 14, 200)
    col.index(List("k1", "k2"), 14, 200)

    // Run exactly `total` invocations of `op`, spread across `numThreads` threads,
    // and report the average time per operation via `average(total)`.
    // The remainder of `total / numThreads` is given to the first threads so the
    // aggregate count is exact — the previous copy-pasted loops silently dropped
    // `total % numThreads` operations, which both skewed the reported average and
    // let `random nextInt iterations` index past the end of `positions` below.
    def parallel(total: Int)(op: => Unit) {
      val threads = for (i <- 1 to numThreads) yield new Thread {
        override def run() {
          val share = total / numThreads + (if (i <= total % numThreads) 1 else 0)
          for (j <- 1 to share) op
        }
      }
      average(total) {
        threads foreach { _.start() }
        threads foreach { _.join() }
      }
    }

    println("Insert 200k documents")
    parallel(iterations) {
      col.insert(
        <root>
          <i1><i2>{ random nextInt iterations }</i2></i1>
          <j1><j2>{ random nextInt iterations }</j2></j1>
          <k1><k2>{ random nextInt iterations }</k2></k1>
        </root>)
    }

    println("Read 200k documents")
    val positions = col.all toArray

    // Index by the actual array length (equal to the number of successful inserts)
    // rather than `iterations`, so a shortfall can never cause an out-of-bounds read.
    parallel(iterations) {
      col.read(positions(random nextInt positions.length))
    }

    println("Update 200k documents")
    parallel(iterations) {
      col.update(positions(random nextInt positions.length),
        <root>
          <i1><i2>{ random nextInt iterations }</i2></i1>
          <j1><j2>{ random nextInt iterations }</j2></j1>
          <k1><k2>{ random nextInt iterations }</k2></k1>
        </root>)
    }

    println("200k queries")
    parallel(iterations) {
      new Query(col).eval(<root>
                            <diff>
                              <intersect>
                                <eq limit={ random nextInt 100 toString }><to><i2>{ random nextInt iterations }</i2></to><in><path>i1</path><path>i2</path></in></eq>
                                <eq limit={ random nextInt 100 toString }><to><j2>{ random nextInt iterations }</j2></to><in><path>j1</path><path>j2</path></in></eq>
                              </intersect>
                              <eq limit={ random nextInt 100 toString }><to><k2>{ random nextInt iterations }</k2></to><in><path>k1</path><path>k2</path></in></eq>
                            </diff>
                          </root>)
    }

    println("Delete 200k documents")
    parallel(iterations) {
      col.delete(positions(random nextInt positions.length))
    }

    println("Get all 200k document IDs 10 times")
    time(10) {
      col.all
    }

    println("Index 200k documents")
    col.unindex(List("i1", "i2"))
    time(1) { col.index(List("i1", "i2"), 14, 200) }
  }
}