From a74cc745a3877640d002f24d4b81bdd6d4d35838 Mon Sep 17 00:00:00 2001
From: Grigory Pomadchin
Date: Tue, 27 Mar 2018 22:20:07 +0300
Subject: [PATCH] Fix cache serialization issues

---
 .../geotrellis/spark/io/AttributeCaching.scala | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/spark/src/main/scala/geotrellis/spark/io/AttributeCaching.scala b/spark/src/main/scala/geotrellis/spark/io/AttributeCaching.scala
index 168a475f99..ea0f87ce23 100644
--- a/spark/src/main/scala/geotrellis/spark/io/AttributeCaching.scala
+++ b/spark/src/main/scala/geotrellis/spark/io/AttributeCaching.scala
@@ -25,18 +25,14 @@ import com.typesafe.config.ConfigFactory
 import scala.concurrent.duration._
 
 trait AttributeCaching { self: AttributeStore =>
-  @transient private lazy val (enabled, cache) = {
-    val config = ConfigFactory.load()
-    val expiration = config.getInt("geotrellis.attribute.caching.expirationMinutes")
-    val maxSize = config.getInt("geotrellis.attribute.caching.maxSize")
-    val enabled = config.getBoolean("geotrellis.attribute.caching.enabled")
+  import AttributeCaching._
 
-    enabled -> Scaffeine()
+  @transient private lazy val cache =
+    Scaffeine()
       .recordStats()
       .expireAfterWrite(expiration.minutes)
       .maximumSize(maxSize)
       .build[(LayerId, String), Any]
-  }
 
   def cacheRead[T: JsonFormat](layerId: LayerId, attributeName: String): T = {
     if(enabled)
@@ -65,3 +61,10 @@ trait AttributeCaching { self: AttributeStore =>
     if(enabled) cache.invalidate(id -> attribute)
   }
 }
+
+object AttributeCaching extends Serializable {
+  lazy val config = ConfigFactory.load()
+  lazy val expiration = config.getInt("geotrellis.attribute.caching.expirationMinutes")
+  lazy val maxSize = config.getInt("geotrellis.attribute.caching.maxSize")
+  lazy val enabled = config.getBoolean("geotrellis.attribute.caching.enabled")
+}
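
Note (not part of the patch): the change replaces the destructured lazy tuple (enabled, cache) in the trait with a plain @transient lazy cache and moves the lazily loaded geotrellis.attribute.caching.* settings into a Serializable companion object, so AttributeStore instances captured in Spark closures serialize cleanly and simply rebuild the cache after deserialization. Below is a minimal, self-contained sketch of that pattern; the names (Caching, FileStore, cachedRead, the stand-in settings) are illustrative only and not GeoTrellis or Scaffeine API.

import java.io._

// Sketch: config values live in a Serializable companion object, while the
// trait keeps only a @transient lazy cache that is rebuilt after
// deserialization instead of being shipped with the instance.
trait Caching extends Serializable {
  import Caching._

  // Never serialized; re-created lazily on first use in each JVM.
  @transient private lazy val cache =
    scala.collection.mutable.Map.empty[String, String]

  def cachedRead(key: String)(read: => String): String =
    if (enabled) cache.getOrElseUpdate(key, read) else read
}

object Caching extends Serializable {
  // Stand-ins for the geotrellis.attribute.caching.* settings.
  lazy val enabled: Boolean = true
  lazy val maxSize: Int = 1000
}

class FileStore extends Caching

object CachingSketch {
  def main(args: Array[String]): Unit = {
    val store = new FileStore

    // Round-trip through Java serialization, as Spark task serialization would.
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    oos.writeObject(store)
    oos.close()

    val copy = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
      .readObject()
      .asInstanceOf[Caching]

    // The deserialized copy lazily builds a fresh cache on first access.
    println(copy.cachedRead("layer1/metadata")("read from backend"))
  }
}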