Commit

update scalacache

zingmane committed Oct 29, 2019
1 parent 86f3b33 commit 11f6db9
Showing 3 changed files with 57 additions and 68 deletions.
2 changes: 1 addition & 1 deletion build.gradle

@@ -57,7 +57,7 @@ dependencies {

     compile 'com.jcabi:jcabi-manifests:1.1'

-    compile 'com.github.cb372:scalacache-guava_2.12:0.10.0'
+    compile 'com.github.cb372:scalacache-guava_2.12:0.28.0'

     compile 'org.webjars:swagger-ui:3.17.6'
98 changes: 43 additions & 55 deletions src/main/scala/com/campudus/tableaux/cache/CacheVerticle.scala

@@ -3,14 +3,14 @@ package com.campudus.tableaux.cache
 import java.util.concurrent.TimeUnit

 import com.campudus.tableaux.database.model.TableauxModel.{ColumnId, TableId}
-import com.google.common.cache.CacheBuilder
 import com.typesafe.scalalogging.LazyLogging
 import io.vertx.lang.scala.ScalaVerticle
 import io.vertx.scala.core.eventbus.Message
 import org.vertx.scala.core.json.{Json, JsonObject}
+import com.google.common.cache.CacheBuilder
 import scalacache._
 import scalacache.guava._
-import scalacache.serialization.InMemoryRepr
+import scalacache.modes.scalaFuture._

 import scala.collection.mutable
 import scala.concurrent.Future
@@ -21,26 +21,26 @@ object CacheVerticle {

   /**
     * Default never expire
     */
-  val DEFAULT_EXPIRE_AFTER_ACCESS = -1l
+  val DEFAULT_EXPIRE_AFTER_ACCESS: Long = -1L

   /**
     * Max. 10k cached values per column
     */
-  val DEFAULT_MAXIMUM_SIZE = 10000l
+  val DEFAULT_MAXIMUM_SIZE: ColumnId = 10000L

-  val NOT_FOUND_FAILURE = 404
-  val INVALID_MESSAGE = 400
+  val NOT_FOUND_FAILURE: Int = 404
+  val INVALID_MESSAGE: Int = 400

-  val ADDRESS_SET = "cache.set"
-  val ADDRESS_RETRIEVE = "cache.retrieve"
+  val ADDRESS_SET: String = "cache.set"
+  val ADDRESS_RETRIEVE: String = "cache.retrieve"

-  val ADDRESS_INVALIDATE_CELL = "cache.invalidate.cell"
-  val ADDRESS_INVALIDATE_COLUMN = "cache.invalidate.column"
-  val ADDRESS_INVALIDATE_ROW = "cache.invalidate.row"
-  val ADDRESS_INVALIDATE_TABLE = "cache.invalidate.table"
-  val ADDRESS_INVALIDATE_ALL = "cache.invalidate.all"
+  val ADDRESS_INVALIDATE_CELL: String = "cache.invalidate.cell"
+  val ADDRESS_INVALIDATE_COLUMN: String = "cache.invalidate.column"
+  val ADDRESS_INVALIDATE_ROW: String = "cache.invalidate.row"
+  val ADDRESS_INVALIDATE_TABLE: String = "cache.invalidate.table"
+  val ADDRESS_INVALIDATE_ALL: String = "cache.invalidate.all"

-  val TIMEOUT_AFTER_MILLISECONDS = 400
+  val TIMEOUT_AFTER_MILLISECONDS: Int = 400
 }

@@ -49,7 +49,7 @@ class CacheVerticle extends ScalaVerticle with LazyLogging {

   private lazy val eventBus = vertx.eventBus()

-  private val caches: mutable.Map[(TableId, ColumnId), ScalaCache[InMemoryRepr]] = mutable.Map.empty
+  private val caches: mutable.Map[(TableId, ColumnId), Cache[AnyRef]] = mutable.Map.empty

   override def startFuture(): Future[_] = {
     registerOnEventBus()
@@ -69,7 +69,7 @@
     )
   }

-  private def getCache(tableId: TableId, columnId: ColumnId): ScalaCache[InMemoryRepr] = {
+  private def getCache(tableId: TableId, columnId: ColumnId): Cache[AnyRef] = {

     def createCache() = {
       val builder = CacheBuilder
@@ -89,13 +89,13 @@

       builder.recordStats()

-      builder.build[String, Object]
+      builder.build[String, Entry[AnyRef]]
     }

-    caches.get((tableId, columnId)) match {
+    caches.get(tableId, columnId) match {
       case Some(cache) => cache
       case None =>
-        val cache = ScalaCache(GuavaCache(createCache()))
+        val cache: Cache[AnyRef] = GuavaCache(createCache())
         caches.put((tableId, columnId), cache)
         cache
     }
@@ -110,42 +110,32 @@

     val value = obj.getValue("value")

-    (for {
-      tableId <- Option(obj.getLong("tableId")).map(_.toLong)
-      columnId <- Option(obj.getLong("columnId")).map(_.toLong)
-      rowId <- Option(obj.getLong("rowId")).map(_.toLong)
-    } yield (tableId, columnId, rowId)) match {
+    extractTableColumnRow(obj) match {
       case Some((tableId, columnId, rowId)) =>
-        implicit val scalaCache = getCache(tableId, columnId)
+        implicit val scalaCache: Cache[AnyRef] = getCache(tableId, columnId)
         put(rowId)(value)
-          .map(_ => {
-            val reply = Json.obj(
-              "tableId" -> tableId,
-              "columnId" -> columnId,
-              "rowId" -> rowId
-            )
-
-            message.reply(reply)
-          })
+          .map(_ => replyJson(message, tableId, columnId, rowId))

       case None =>
         logger.error("Message invalid: Fields (tableId, columnId, rowId) should be a Long")
         message.fail(INVALID_MESSAGE, "Message invalid: Fields (tableId, columnId, rowId) should be a Long")
     }
   }

-  private def messageHandlerRetrieve(message: Message[JsonObject]): Unit = {
-    val obj = message.body()
-
-    (for {
+  private def extractTableColumnRow(obj: JsonObject): Option[(ColumnId, ColumnId, ColumnId)] = {
+    for {
       tableId <- Option(obj.getLong("tableId")).map(_.toLong)
       columnId <- Option(obj.getLong("columnId")).map(_.toLong)
       rowId <- Option(obj.getLong("rowId")).map(_.toLong)
-    } yield (tableId, columnId, rowId)) match {
+    } yield (tableId, columnId, rowId)
+  }
+
+  private def messageHandlerRetrieve(message: Message[JsonObject]): Unit = {
+    extractTableColumnRow(message.body()) match {
       case Some((tableId, columnId, rowId)) =>
-        implicit val scalaCache = getCache(tableId, columnId)
+        implicit val scalaCache: Cache[AnyRef] = getCache(tableId, columnId)

-        get[AnyRef, NoSerialization](rowId)
+        get(rowId)
           .map({
             case Some(value) =>
               val reply = Json.obj(
@@ -168,29 +158,25 @@
   }

   private def messageHandlerInvalidateCell(message: Message[JsonObject]): Unit = {
-    val obj = message.body()
-
-    (for {
-      tableId <- Option(obj.getLong("tableId")).map(_.toLong)
-      columnId <- Option(obj.getLong("columnId")).map(_.toLong)
-      rowId <- Option(obj.getLong("rowId")).map(_.toLong)
-    } yield (tableId, columnId, rowId)) match {
+    extractTableColumnRow(message.body()) match {
       case Some((tableId, columnId, rowId)) =>
         // invalidate cell
-        implicit val scalaCache = getCache(tableId, columnId)
+        implicit val scalaCache: Cache[AnyRef] = getCache(tableId, columnId)

         remove(rowId)
-          .map(_ => {
-            val reply = Json.obj("tableId" -> tableId, "columnId" -> columnId, "rowId" -> rowId)
-            message.reply(reply)
-          })
+          .map(_ => replyJson(message, tableId, columnId, rowId))

       case None =>
         logger.error("Message invalid: Fields (tableId, columnId, rowId) should be a Long")
         message.fail(INVALID_MESSAGE, "Message invalid: Fields (tableId, columnId, rowId) should be a Long")
     }
   }

+  private def replyJson(message: Message[JsonObject], tableId: ColumnId, columnId: ColumnId, rowId: ColumnId): Unit = {
+    val reply = Json.obj("tableId" -> tableId, "columnId" -> columnId, "rowId" -> rowId)
+    message.reply(reply)
+  }
+
   private def messageHandlerInvalidateColumn(message: Message[JsonObject]): Unit = {
     val obj = message.body()


Expand All @@ -200,7 +186,7 @@ class CacheVerticle extends ScalaVerticle with LazyLogging {
} yield (tableId, columnId)) match {
case Some((tableId, columnId)) =>
// invalidate column
implicit val scalaCache = getCache(tableId, columnId)
implicit val scalaCache: Cache[AnyRef] = getCache(tableId, columnId)

removeAll()
.map(_ => {
@@ -279,7 +265,9 @@ class CacheVerticle extends ScalaVerticle with LazyLogging {
     Future
       .sequence(caches.map({
         case ((tableId, columnId), cache) =>
-          removeAll()(cache)
+          implicit val implicitCache: Cache[AnyRef] = implicitly(cache)
+
+          removeAll()
             .map(_ => {
              removeCache(tableId, columnId)
            })
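A note on the pattern above: scalacache 0.28 drops the ScalaCache[InMemoryRepr] wrapper of 0.10 and makes the Cache[V] itself the implicit, with the effect type supplied by a mode import (here scalacache.modes.scalaFuture._) and the underlying Guava cache storing scalacache's Entry[V] wrapper. Below is a minimal, self-contained sketch of that 0.28 API, assuming scalacache-guava_2.12:0.28.0 on the classpath; the key, value, and ScalaCache028Sketch object are illustrative placeholders, not code from this repository:

```scala
import com.google.common.cache.CacheBuilder
import scalacache._
import scalacache.guava._
import scalacache.modes.scalaFuture._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

object ScalaCache028Sketch extends App {

  // 0.28 stores scalacache's Entry[V] wrapper in the underlying Guava cache,
  // hence build[String, Entry[AnyRef]] instead of 0.10's build[String, Object].
  val underlying = CacheBuilder
    .newBuilder()
    .maximumSize(10000L)
    .build[String, Entry[AnyRef]]

  // The cache itself is now the implicit, replacing ScalaCache(GuavaCache(...)).
  implicit val cache: Cache[AnyRef] = GuavaCache(underlying)

  // put/get/remove return Futures because of the scalaFuture mode import,
  // which in turn needs an implicit ExecutionContext in scope.
  val roundTrip: Future[Option[AnyRef]] = for {
    _ <- put("row-42")("cached value") // ttl defaults to None: no per-entry expiry
    v <- get("row-42")                 // Future[Option[AnyRef]]
    _ <- remove("row-42")
  } yield v

  println(Await.result(roundTrip, 5.seconds)) // Some(cached value)
}
```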
25 changes: 13 additions & 12 deletions src/main/scala/com/campudus/tableaux/database/model/structure/CachedColumnModel.scala
@@ -11,11 +11,12 @@ import com.campudus.tableaux.database.model.structure.CachedColumnModel._
 import com.campudus.tableaux.database.model.structure.ColumnModel.isColumnGroupMatchingToFormatPattern
 import com.campudus.tableaux.helper.ResultChecker._
 import com.campudus.tableaux.router.auth.permission.RoleModel
-import com.google.common.cache.{CacheBuilder, Cache => GuavaBuiltCache}
+import com.google.common.cache.CacheBuilder
 import com.typesafe.scalalogging.LazyLogging
 import org.vertx.scala.core.json._
-import scalacache.guava.GuavaCache
-import scalacache.{ScalaCache, caching, remove}
+import scalacache._
+import scalacache.guava._
+import scalacache.modes.scalaFuture._

 import scala.collection.JavaConverters._
 import scala.collection.immutable.SortedSet
@@ -26,12 +27,12 @@ object CachedColumnModel {

   /**
     * Default never expire
     */
-  val DEFAULT_EXPIRE_AFTER_ACCESS: Long = -1l
+  val DEFAULT_EXPIRE_AFTER_ACCESS: Long = -1L

   /**
     * Max. 10k cached values per column
     */
-  val DEFAULT_MAXIMUM_SIZE: Long = 10000l
+  val DEFAULT_MAXIMUM_SIZE: Long = 10000L
 }

@@ -42,9 +43,9 @@ class CachedColumnModel(
     roleModel: RoleModel
 ) extends ColumnModel(connection) {

-  implicit val scalaCache = ScalaCache(GuavaCache(createCache()))
+  implicit val scalaCache: Cache[Object] = GuavaCache(createCache())

-  private def createCache(): GuavaBuiltCache[String, Object] = {
+  private def createCache() = {
     val builder = CacheBuilder
       .newBuilder()

@@ -62,7 +63,7 @@

     builder.recordStats()

-    builder.build[String, Object]
+    builder.build[String, Entry[Object]]
   }

   private def removeCache(tableId: TableId, columnIdOpt: Option[ColumnId]): Future[Unit] = {
@@ -106,15 +107,15 @@ class CachedColumnModel(
   }

   override def retrieve(table: Table, columnId: ColumnId): Future[ColumnType[_]] = {
-    caching("retrieve", table.id, columnId) {
+    cachingF[Future, Object]("retrieve", table.id, columnId)(None)(
       super.retrieve(table, columnId)
-    }
+    ).asInstanceOf[Future[ColumnType[_]]]
   }

   override def retrieveAll(table: Table): Future[Seq[ColumnType[_]]] = {
-    caching("retrieveAll", table.id) {
+    cachingF[Future, Object]("retrieveAll", table.id)(None)(
       super.retrieveAll(table)
-    }
+    ).asInstanceOf[Future[Seq[ColumnType[_]]]]
   }

   override def createColumns(table: Table, createColumns: Seq[CreateColumn]): Future[Seq[ColumnType[_]]] = {
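The caching-to-cachingF change above is the other half of the same migration: 0.28's cachingF memoizes an effectful computation, takes the TTL as an explicit argument (None here, meaning no expiry), and is typed on the single value type of the implicit Cache, which is why the call sites cast back to the concrete Future type. A hedged sketch under the same assumptions as the previous example; retrieveFromDb and CachingFSketch are hypothetical stand-ins for super.retrieve and the surrounding model class:

```scala
import com.google.common.cache.CacheBuilder
import scalacache._
import scalacache.guava._
import scalacache.modes.scalaFuture._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object CachingFSketch {

  implicit val cache: Cache[Object] =
    GuavaCache(CacheBuilder.newBuilder().build[String, Entry[Object]])

  // Hypothetical expensive lookup standing in for super.retrieve(table, columnId).
  def retrieveFromDb(tableId: Long, columnId: Long): Future[String] =
    Future.successful(s"column $columnId of table $tableId")

  // The first call runs retrieveFromDb and caches its result under the
  // composite key ("retrieve", tableId, columnId); later calls hit the cache.
  // The cache is Cache[Object], so the result is cast back, as in the diff.
  def retrieve(tableId: Long, columnId: Long): Future[String] =
    cachingF[Future, Object]("retrieve", tableId, columnId)(None)(
      retrieveFromDb(tableId, columnId)
    ).asInstanceOf[Future[String]]
}
```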
