reduce use of guava
Removes some of the simple uses of guava.
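
For illustration, the replacement pattern looks roughly like this
(a sketch mirroring the Colors.scala change in the diff below; the
Streams helper methods are used as they appear there):

```scala
import java.nio.charset.Charset
import com.google.common.io.Resources
import com.netflix.atlas.core.util.Streams

object GuavaToStreamsExample {

  // Before: guava Resources plus JavaConversions to read a classpath resource.
  def loadLinesWithGuava(name: String): List[String] = {
    import scala.collection.JavaConversions._
    val url = Resources.getResource(name)
    Resources.readLines(url, Charset.forName("UTF-8")).toList
  }

  // After: the project's own Streams helper; scope closes the stream when done.
  // Helper signatures here are taken from the diff below.
  def loadLinesWithStreams(name: String): List[String] = {
    Streams.scope(Streams.resource(name)) { in =>
      Streams.lines(in).toList
    }
  }
}
```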

Not sure why, but something about the
TimeSeriesBuffer class triggers a
ClassNotFoundException with the
equalsverifier library:

```
[info]   Cause: java.lang.NoClassDefFoundError: nl/jqno/equalsverifier/internal/cglib/proxy/Factory
[info]   at java.lang.ClassLoader.defineClass1(Native Method)
[info]   at java.lang.ClassLoader.defineClass(ClassLoader.java:760)
[info]   at sun.reflect.GeneratedMethodAccessor7.invoke(Unknown Source)
[info]   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[info]   at java.lang.reflect.Method.invoke(Method.java:483)
[info]   at nl.jqno.equalsverifier.internal.cglib.core.ReflectUtils.defineClass(ReflectUtils.java:384)
[info]   at nl.jqno.equalsverifier.internal.cglib.core.AbstractClassGenerator.create(AbstractClassGenerator.java:219)
[info]   at nl.jqno.equalsverifier.internal.cglib.proxy.Enhancer.createHelper(Enhancer.java:377)
[info]   at nl.jqno.equalsverifier.internal.cglib.proxy.Enhancer.createClass(Enhancer.java:317)
[info]   at nl.jqno.equalsverifier.util.Instantiator.createDynamicSubclass(Instantiator.java:104)
[info]   ...
[info]   Cause: java.lang.ClassNotFoundException: nl.jqno.equalsverifier.internal.cglib.proxy.Factory
[info]   at java.net.URLClassLoader$1.run(URLClassLoader.java:372)
[info]   at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
[info]   at java.security.AccessController.doPrivileged(Native Method)
[info]   at java.net.URLClassLoader.findClass(URLClassLoader.java:360)
[info]   at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
[info]   at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
[info]   at java.lang.ClassLoader.defineClass1(Native Method)
[info]   at java.lang.ClassLoader.defineClass(ClassLoader.java:760)
[info]   at sun.reflect.GeneratedMethodAccessor7.invoke(Unknown Source)
[info]   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[info]   ...
```

The current workaround is to set `fork in Test := true`
for the sbt build.
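
For reference, a minimal sketch of an sbt 0.13 Build.scala with the
workaround applied; the project name and layout here are illustrative,
only the forking setting is taken from this commit:

```scala
// Sketch only: project name and surrounding settings are illustrative.
import sbt._
import Keys._

object ExampleBuild extends Build {
  lazy val root = Project("example", file("."))
    .settings(
      // Run tests in a forked JVM so the equalsverifier/cglib classes
      // resolve, avoiding the ClassNotFoundException above.
      fork in Test := true
    )
}
```

Forking runs the tests in a separate JVM with a plain classpath, which is
presumably what sidesteps the cglib class loading failure under sbt's
layered class loaders.
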
brharrington committed Jan 8, 2015
1 parent a055139 commit 0183c43
Showing 8 changed files with 68 additions and 54 deletions.
10 changes: 4 additions & 6 deletions atlas-chart/src/main/scala/com/netflix/atlas/chart/Colors.scala
@@ -16,9 +16,8 @@
package com.netflix.atlas.chart

import java.awt.Color
import java.nio.charset.Charset

import com.google.common.io.Resources
import com.netflix.atlas.core.util.Streams
import com.netflix.atlas.core.util.Strings


@@ -27,9 +26,8 @@ object Colors {
* Load a list of colors from a resource file.
*/
def load(name: String): List[Color] = {
import scala.collection.JavaConversions._
val url = Resources.getResource(name)
val data = Resources.readLines(url, Charset.forName("UTF-8"))
data.toList.map(Strings.parseColor)
Streams.scope(Streams.resource(name)) { in =>
Streams.lines(in).map(Strings.parseColor).toList
}
}
}
@@ -21,8 +21,7 @@ import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.InputStream

import com.google.common.io.ByteStreams
import com.google.common.io.Files
import com.netflix.atlas.core.util.Streams
import org.scalatest.Assertions


@@ -40,8 +39,9 @@ class GraphAssertions(goldenDir: String, targetDir: String) extends Assertions {
}

private def getString(file: String): String = {
val in = getInputStream(file)
try new String(ByteStreams.toByteArray(in), "UTF-8") finally in.close()
Streams.scope(getInputStream(file)) { in =>
new String(Streams.byteArray(in), "UTF-8")
}
}

def generateReport(clazz: Class[_]) {
@@ -68,7 +68,9 @@ class GraphAssertions(goldenDir: String, targetDir: String) extends Assertions {
} </body>
</html>

Files.write(report.toString.getBytes("UTF-8"), new File(s"$targetDir/report.html"))
Streams.scope(Streams.fileOut(new File(s"$targetDir/report.html"))) { out =>
out.write(report.toString.getBytes("UTF-8"))
}
}

def assertEquals(v1: Double, v2: Double, delta: Double) {
@@ -17,7 +17,7 @@ package com.netflix.atlas.chart

import java.io.InputStream

import com.google.common.io.Resources
import com.netflix.atlas.core.util.Streams
import org.scalatest.FunSuite


@@ -42,7 +42,7 @@ class PngImageSuite extends FunSuite {
""".stripMargin

def getInputStream(file: String): InputStream = {
Resources.getResource("pngimage/" + file).openStream()
Streams.resource("pngimage/" + file)
}

def getImage(file: String): PngImage = {
@@ -17,7 +17,6 @@ package com.netflix.atlas.core.db

import java.util

import com.google.common.base.Objects
import com.netflix.atlas.core.model.ArrayTimeSeq
import com.netflix.atlas.core.model.Block
import com.netflix.atlas.core.model.ConsolidationFunction
@@ -32,6 +31,10 @@ import com.netflix.atlas.core.util.Math

object TimeSeriesBuffer {

def apply(tags: Map[String, String], step: Long, start: Long, vs: Array[Double]): TimeSeriesBuffer = {
new TimeSeriesBuffer(tags, new ArrayTimeSeq(DsType.Gauge, start / step * step, step, vs))
}

def apply(tags: Map[String, String], step: Long, start: Long, end: Long): TimeSeriesBuffer = {
val s = start / step
val e = end / step
@@ -116,13 +119,9 @@
/**
* Mutable buffer for efficiently manipulating metric data.
*/
class TimeSeriesBuffer(var tags: Map[String, String], val data: ArrayTimeSeq)
final class TimeSeriesBuffer(var tags: Map[String, String], val data: ArrayTimeSeq)
extends TimeSeries with TimeSeq with LazyTaggedItem {

def this(tags: Map[String, String], step: Long, start: Long, vs: Array[Double]) = {
this(tags, new ArrayTimeSeq(DsType.Gauge, start / step * step, step, vs))
}

def label: String = TimeSeries.toLabel(tags)
def dsType: DsType = data.dsType
def step: Long = data.step
@@ -482,17 +481,18 @@ class TimeSeriesBuffer(var tags: Map[String, String], val data: ArrayTimeSeq)
// Follows guidelines from: http://www.artima.com/pins1ed/object-equality.html#28.4
other match {
case that: TimeSeriesBuffer =>
that.canEqual(this) &&
tags == that.tags &&
step == that.step &&
start == that.start &&
util.Arrays.equals(values, that.values)
that.canEqual(this) && tags == that.tags && data == that.data
case _ => false
}
}

override def hashCode: Int = {
Objects.hashCode(tags, java.lang.Long.valueOf(step), java.lang.Long.valueOf(start), values)
import java.lang.{Long => JLong}
val prime = 31
var hc = prime
hc = hc * prime + tags.hashCode()
hc = hc * prime + data.hashCode()
hc
}

def canEqual(other: Any): Boolean = {
@@ -20,6 +20,8 @@ import com.netflix.atlas.core.model.Block
import com.netflix.atlas.core.model.ConsolidationFunction
import com.netflix.atlas.core.model.ConstantBlock
import com.netflix.atlas.core.util.Math
import nl.jqno.equalsverifier.EqualsVerifier
import nl.jqno.equalsverifier.Warning
import org.scalatest.FunSuite
import org.scalactic.Tolerance._

@@ -30,12 +32,14 @@ class TimeSeriesBufferSuite extends FunSuite {

import java.lang.{Double => JDouble}

private val emptyTags = Map.empty[String, String]

private def newBuffer(v: Double, start: Long = 0L) = {
new TimeSeriesBuffer(Map.empty, 60000, start, Array.fill(1)(v))
TimeSeriesBuffer(emptyTags, 60000, start, Array.fill(1)(v))
}

private def newBufferN(v: Double, n: Int, start: Long = 0L) = {
new TimeSeriesBuffer(Map.empty, 60000, start, Array.fill(n)(v))
TimeSeriesBuffer(emptyTags, 60000, start, Array.fill(n)(v))
}

private def newBlock(start: Long, size: Int): Block = {
@@ -55,7 +59,7 @@
}

test("apply List[Block]") {
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 60000L
val blocks = List(
ConstantBlock(0 * step, 6, 1.0),
@@ -73,7 +77,7 @@
}

test("add Block") {
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 60000L
val blocks = List(
ConstantBlock(0 * step, 6, 1.0),
@@ -92,7 +96,7 @@
}

test("add Block with cf 6") {
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 60000L
val blocks = List(
ConstantBlock(0 * step, 6, 1.0),
@@ -111,7 +115,7 @@
}

test("add Block with step 10s cf 6m") {
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 10000L
val blockSize = 6 * 60
val blocks = (0 until 1000).map(i => ConstantBlock(i * blockSize * step, blockSize, 4.0)).toList
@@ -127,7 +131,7 @@
}

ignore("cf with start") {
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 60000L
val block = ArrayBlock(0L, 60)
(8 until 60).foreach { i => block.buffer(i) = 1.0 }
@@ -147,7 +151,7 @@
case (name, af) =>
test(s"$name: consolidate then aggregate === aggregate then consolidate") {
val cf = name
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 60000L
val blocks = (0 until 1000).map(_ => newBlock(0, 60))
@@ -166,7 +170,7 @@
test(s"$name with NaN: consolidate then aggregate === aggregate then consolidate") {
val cf = name
val tags = Map.empty[String, String]
val tags = emptyTags
val step = 60000L
val blocks = (0 until 1000).map(_ => newBlockWithNaN(0, 60))
@@ -188,8 +192,8 @@
val common = Map("a" -> "b", "c" -> "d")
val t1 = common + ("c" -> "e")
val t2 = common + ("z" -> "y")
val b1 = new TimeSeriesBuffer(t1, 60000, 0, Array.fill(1)(0.0))
val b2 = new TimeSeriesBuffer(t2, 60000, 0, Array.fill(1)(0.0))
val b1 = TimeSeriesBuffer(t1, 60000, 0, Array.fill(1)(0.0))
val b2 = TimeSeriesBuffer(t2, 60000, 0, Array.fill(1)(0.0))
b1.add(b2)
assert(b1.tags === Map("a" -> "b"))
}
@@ -476,30 +480,30 @@ class TimeSeriesBufferSuite extends FunSuite {

test("consolidate") {
val start = 1366746900000L
val b = new TimeSeriesBuffer(Map.empty, 60000, start, Array(1.0, 2.0, 3.0, 4.0, 5.0))
val b = TimeSeriesBuffer(emptyTags, 60000, start, Array(1.0, 2.0, 3.0, 4.0, 5.0))

val b2 = new TimeSeriesBuffer(Map.empty, 120000, start, Array(1.0, 5.0, 9.0))
val b2 = TimeSeriesBuffer(emptyTags, 120000, start, Array(1.0, 5.0, 9.0))
assert(b.consolidate(2, ConsolidationFunction.Sum) === b2)

val b3 = new TimeSeriesBuffer(Map.empty, 180000, start, Array(3.0, 12.0))
val b3 = TimeSeriesBuffer(emptyTags, 180000, start, Array(3.0, 12.0))
assert(b.consolidate(3, ConsolidationFunction.Sum) === b3)

val b4 = new TimeSeriesBuffer(Map.empty, 240000, start, Array(1.0, 14.0))
val b4 = TimeSeriesBuffer(emptyTags, 240000, start, Array(1.0, 14.0))
assert(b.consolidate(4, ConsolidationFunction.Sum) === b4)

val b5 = new TimeSeriesBuffer(Map.empty, 300000, start, Array(15.0))
val b5 = TimeSeriesBuffer(emptyTags, 300000, start, Array(15.0))
assert(b.consolidate(5, ConsolidationFunction.Sum) === b5)
}

test("normalize") {
val start = 1366746900000L
val b1 = new TimeSeriesBuffer(Map.empty, 60000, start, Array(1.0, 2.0, 3.0, 4.0, 5.0))
val b1e = new TimeSeriesBuffer(Map.empty, 120000, start, Array(1.0, 2.5, 4.5))
val b1 = TimeSeriesBuffer(emptyTags, 60000, start, Array(1.0, 2.0, 3.0, 4.0, 5.0))
val b1e = TimeSeriesBuffer(emptyTags, 120000, start, Array(1.0, 2.5, 4.5))
assert(b1.normalize(60000, start, 5) === b1)
assert(b1.normalize(120000, start, 3) === b1e)

val b2 = new TimeSeriesBuffer(Map.empty, 120000, start, Array(3.0, 7.0))
val b2e = new TimeSeriesBuffer(Map.empty, 60000, start, Array(3.0, 7.0, 7.0, Double.NaN, Double.NaN))
val b2 = TimeSeriesBuffer(emptyTags, 120000, start, Array(3.0, 7.0))
val b2e = TimeSeriesBuffer(emptyTags, 60000, start, Array(3.0, 7.0, 7.0, Double.NaN, Double.NaN))
assert(b2.normalize(60000, start, 5) === b2e)
}

@@ -513,9 +517,17 @@
val step = 300000L
val bufStart = start + step * 4
val end = bufStart + step * 12
val buffer = TimeSeriesBuffer(Map.empty, step, bufStart, end)
val buffer = TimeSeriesBuffer(emptyTags, step, bufStart, end)

buffer.aggrBlock(Map.empty, block, Block.Sum, ConsolidationFunction.Avg, 5, Math.addNaN)
buffer.aggrBlock(emptyTags, block, Block.Sum, ConsolidationFunction.Avg, 5, Math.addNaN)
buffer.values.foreach { v => assert(v.isNaN || v <= 0.0) }
}

test("equals") {
EqualsVerifier
.forClass(classOf[TimeSeriesBuffer])
.suppress(Warning.NULL_FIELDS)
.suppress(Warning.NONFINAL_FIELDS)
.verify()
}
}
@@ -21,9 +21,8 @@ import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.InputStream

import com.google.common.io.ByteStreams
import com.google.common.io.Files
import com.netflix.atlas.chart.PngImage
import com.netflix.atlas.core.util.Streams
import org.scalatest.Assertions


@@ -41,8 +40,9 @@ class GraphAssertions(goldenDir: String, targetDir: String) extends Assertions {
}

private def getString(file: String): String = {
val in = getInputStream(file)
try new String(ByteStreams.toByteArray(in), "UTF-8") finally in.close()
Streams.scope(getInputStream(file)) { in =>
new String(Streams.byteArray(in), "UTF-8")
}
}

def generateReport(clazz: Class[_]) {
@@ -69,7 +69,9 @@ class GraphAssertions(goldenDir: String, targetDir: String) extends Assertions {
} </body>
</html>

Files.write(report.toString.getBytes("UTF-8"), new File(s"$targetDir/report.html"))
Streams.scope(Streams.fileOut(new File(s"$targetDir/report.html"))) { out =>
out.write(report.toString.getBytes("UTF-8"))
}
}

def assertEquals(v1: Double, v2: Double, delta: Double) {
@@ -134,4 +136,3 @@ class GraphAssertions(goldenDir: String, targetDir: String) extends Assertions {
try stream.write(s.getBytes("UTF-8")) finally stream.close()
}
}

1 change: 1 addition & 0 deletions project/Build.scala
@@ -17,6 +17,7 @@ object MainBuild extends Build {
scalacOptions ++= BuildSettings.compilerFlags,
crossPaths := false,
sourcesInBase := false,
fork in Test := true, // Needed to avoid ClassNotFoundException with equalsverifier
exportJars := true, // Needed for one-jar, with multi-project
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += "rrd4j" at "https://raw.githubusercontent.com/brharrington/rrd4j/repo",
2 changes: 1 addition & 1 deletion project/Dependencies.scala
@@ -21,7 +21,7 @@ object Dependencies {
val awsCore = "com.amazonaws" % "aws-java-sdk-core" % aws
val awsEC2 = "com.amazonaws" % "aws-java-sdk-ec2" % aws
val awsS3 = "com.amazonaws" % "aws-java-sdk-s3" % aws
val equalsVerifier = "nl.jqno.equalsverifier" % "equalsverifier" % "1.4.1"
val equalsVerifier = "nl.jqno.equalsverifier" % "equalsverifier" % "1.5.1"
val eureka = "com.netflix.eureka" % "eureka-client" % "1.1.142"
val frigga = "com.netflix.frigga" % "frigga" % "0.13"
val guava = "com.google.guava" % "guava" % "15.0"
