Commit
Work in progress
VinceMacBuche committed Jan 5, 2017
1 parent 59acc3f commit 8bd3b96
Showing 7 changed files with 126 additions and 101 deletions.
4 changes: 2 additions & 2 deletions rudder-core/src/main/resources/reportsSchema.sql
@@ -463,8 +463,8 @@ CREATE TABLE dataSources (

-- data source properties and status are valid json, until we can use postgres 9.2 keep text type (more details in configuration details)

- , properties text
- , status text
+ , properties text NOT NULL CHECK (properties <> '' )
+ , status text NOT NULL CHECK (status <> '' )
, PRIMARY KEY dataSourceId
);

@@ -44,6 +44,11 @@ import scala.concurrent.duration.FiniteDuration
import com.normation.inventory.domain.NodeId
import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
import com.normation.rudder.repository.json.JsonExctractorUtils
import com.normation.rudder.repository.json.JsonExctractorUtils
import scalaz.Monad
import scalaz.Id
import com.normation.rudder.repository.json.backendJson

object DataSource {
val defaultDuration = FiniteDuration(5,"minutes")
@@ -145,24 +150,24 @@ final case class DataSource (
val scope = "all"
}

object DataSourceSerialiser {
trait DataSourceSerialiser[M[_]] extends JsonExctractorUtils[M] {
import net.liftweb.json._
import net.liftweb.json.JsonDSL._
import com.normation.rudder.repository.json.JsonExctractorUtils._
import com.normation.utils.Control.sequence

def serializeDataSource(source : DataSource) : JValue = {
( ( "name" -> source.name.value )
~ ( "id" -> source.id.value )
~ ( "description" -> source.description )
~ ( "type" -> (
~ ( "type" ->
( "name" -> source.sourceType.name )
~ { source.sourceType match {
~ ( "parameters" -> { source.sourceType match {
case HttpDataSourceType(url,headers,method,checkSsl,path,mode,timeOut) =>
( ( "url" -> url )
~ ( "headers" -> headers )
~ ( "path" -> path )
~ ( "checkSsl" -> checkSsl )
~ ( "requestMethod" -> method )
~ ( "requestTimeout" -> timeOut.toMinutes )
~ ( "requestMode" ->
( ( "name" -> mode.name )
@@ -198,23 +203,26 @@ object DataSourceSerialiser {
* components values Ids.
* Never fails, but returned an empty list.
*/
def unserializeDataSource(id : DataSourceId, json : JValue) : Full[DataSource] = {

def toDataSource

def unserializeDataSource(id : DataSourceId, json : JValue) = {
for {
name <- extractJsonString(json, "name", x => Full(DataSourceName(x))).flatMap( Box(_))
description <- extractJsonString(json, "description").flatMap( Box(_))
sourceType <- extractJsonObj(json, "type", extractDataSourceType(_)).flatMap( Box(_))
runParam <- extractJsonObj(json, "runParam", extractDataSourceRunParam(_)).flatMap( Box(_))
timeOut <- extractJsonBigInt(json, "timeout", extractDuration).flatMap( Box(_))
enabled <- extractJsonBoolean(json, "enabled").flatMap( Box(_))
name <- extractJsonString(json, "name", x => Full(DataSourceName(x)))
description <- extractJsonString(json, "description")
sourceType <- extractJsonObj(json, "type", extractDataSourceType(_))
runParam <- extractJsonObj(json, "runParam", extractDataSourceRunParam(_))
timeOut <- extractJsonBigInt(json, "updateTimeOut", extractDuration)
enabled <- extractJsonBoolean(json, "enabled")
} yield {
DataSource(
(
id
, name = name
, sourceType = sourceType
, description = description
, enabled = enabled
, updateTimeOut = timeOut
, runParam = runParam
, name
, sourceType
, description
, enabled
, timeOut
, runParam
)
}
}
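
Note (illustration, not part of the commit): taken together, the serializer above and this extraction code imply a datasource document shaped roughly as below. Only the key names come from the diff; the type name, request-mode name and all field values are made up for the example.

import net.liftweb.json.parse

// Sketch of the JSON layout implied by serializeDataSource / unserializeDataSource.
val exampleDataSourceJson = parse("""
{
  "name"         : "datasource1",
  "id"           : "datasource1",
  "description"  : "fetch node properties from an inventory service",
  "enabled"      : true,
  "updateTimeOut": 5,
  "runParam"     : {
    "onGeneration": true,
    "onNewNode"   : true,
    "schedule"    : { "type": "scheduled", "duration": 5 }
  },
  "type"         : {
    "name"      : "HTTP",
    "parameters": {
      "url"           : "https://inventory.example.com/api/nodes",
      "headers"       : { "X-API-Token": "token" },
      "path"          : "$.nodes",
      "checkSsl"      : true,
      "requestMethod" : "GET",
      "requestTimeout": 1,
      "requestMode"   : { "name": "byNode" }
    }
  }
}
""")
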
@@ -226,67 +234,52 @@ object DataSourceSerialiser {
} }.flatMap(identity)
}

def extractDataSourceRunParam(obj : JObject, base : Option[DataSourceRunParameters]) = {
def extractDataSourceRunParam(obj : JObject) = {

def extractSchedule(obj : JObject, base : DataSourceSchedule) = {
def extractSchedule(obj : JObject) = {
for {
scheduleBase <- extractJsonString(obj, "type", _ match {
case "scheduled" => Full(Scheduled(base.duration))
case "notscheduled" => Full(NoSchedule(base.duration))
case _ => Failure("not a valid value for datasource schedule")
}).map(_.getOrElse(base))
duration <- extractJsonBigInt(obj, "duration", extractDuration)
scheduleBase <- {

val t : Box[M[M[DataSourceSchedule]]] = extractJsonString(obj, "type", _ match {
case "scheduled" => Full(monad.map(duration)(d => Scheduled(d)))
case "notscheduled" => Full(monad.map(duration)(d => NoSchedule(d)))
case _ => Failure("not a valid value for datasource schedule")
})
t.map( monad.join(_))
}
} yield {
duration match {
case None => scheduleBase
case Some(newDuration) =>
scheduleBase match {
case Scheduled(_) => Scheduled(newDuration)
case NoSchedule(_) => NoSchedule(newDuration)
}
}
scheduleBase
}
}

for {
onGeneration <- extractJsonBoolean(obj, "onGeneration")
onNewNode <- extractJsonBoolean(obj, "onNewNode")
schedule <- extractJsonObj(obj, "schedule", extractSchedule(_,base.schedule))
schedule <- extractJsonObj(obj, "schedule", extractSchedule(_))
} yield {
base.copy(
schedule.getOrElse(base.schedule)
, onGeneration.getOrElse(base.onGeneration)
, onNewNode.getOrElse(base.onNewNode)
( monad.join(schedule)
, onGeneration
, onNewNode
)
}
}
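
Note (illustration, not part of the commit): the monad.join calls above come from a layering problem. duration is already wrapped in M because it was produced by an extractor, and building the schedule from it with map wraps the result a second time, giving M[M[DataSourceSchedule]]; join removes the extra layer. A simplified, self-contained sketch with scalaz, using Int and String in place of the project's Duration and DataSourceSchedule:

import scalaz._, Scalaz._

def buildSchedule[M[_]: Monad](duration: M[Int], scheduled: Boolean): M[String] = {
  // Mapping over an M-wrapped duration and producing another M-wrapped value
  // yields M[M[String]]; join flattens it back to M[String].
  val nested: M[M[String]] =
    Monad[M].map(duration)(d => Monad[M].point(if (scheduled) s"Scheduled($d)" else s"NoSchedule($d)"))
  Monad[M].join(nested)
}

// With M = Option: buildSchedule(Some(5), scheduled = true) == Some("Scheduled(5)")
//                  buildSchedule(None, scheduled = true)    == None
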

def extractDataSourceType(obj : JObject, base : DataSourceType) = {
def extractDataSourceType(obj : JObject) = {

obj \ "name" match {
case JString(HttpDataSourceType.name) =>
val httpBase = base match {
case h : HttpDataSourceType => h
}

def extractHttpRequestMode(obj : JObject, base : HttpRequestMode) = {
def extractHttpRequestMode(obj : JObject) = {

obj \ "name" match {
case JString(OneRequestByNode.name) =>
Full(OneRequestByNode)
case JString(OneRequestAllNodes.name) =>
val allBase = base match {
case h : OneRequestAllNodes => h
case _ => OneRequestAllNodes("","")
}
for {
attribute <- extractJsonString(obj, "attribute")
path <- extractJsonString(obj, "path")
} yield {
OneRequestAllNodes(
path.getOrElse(allBase.matchingPath)
, attribute.getOrElse(allBase.nodeAttribute)
)
(path, attribute)
}
case x => Failure(s"Cannot extract request type from: ${x}")
}
@@ -296,27 +289,31 @@ object DataSourceSerialiser {
url <- extractJsonString(obj, "url")
path <- extractJsonString(obj, "path")
method <- extractJsonString(obj, "requestMethod")
sslCheck <- extractJsonBoolean(obj, "sslCheck")
checkSsl <- extractJsonBoolean(obj, "checkSsl")
timeout <- extractJsonBigInt(obj, "requestTimeout", extractDuration)
headers <- obj \ "headers" match {
headers <- extractJsonObj(obj, "headers", {
case JObject(fields) =>
sequence(fields.toSeq) {
field => extractJsonString(field, field.name, value => Full((field.name,value)))
}
} )

/*obj \ "headers" match {
case header@JObject(fields) =>
sequence(fields.toSeq) { field => extractJsonString(header, field.name, value => Full((field.name,value))).map(_.getOrElse((field.name,""))) }.map(fields => Some(fields.toMap))
case JNothing => Full(None)
case _ => Failure("oops")
}
case _ => Failure("oops")*/

requestMode <- extractJsonObj(obj, "requestMode", extractHttpRequestMode(_,httpBase.requestMode))
requestMode <- extractJsonObj(obj, "requestMode", extractHttpRequestMode(_))

} yield {
httpBase.copy(
url.getOrElse(httpBase.url)
, headers.getOrElse(httpBase.headers)
, method.getOrElse(httpBase.httpMethod)
, sslCheck.getOrElse(httpBase.sslCheck)
, path.getOrElse(httpBase.path)
, requestMode.getOrElse(httpBase.requestMode)
, timeout.getOrElse(httpBase.requestTimeOut)
(
url
, headers
, method
, checkSsl
, path
, requestMode
, timeout
)
}

@@ -325,3 +322,5 @@
}

}

object backedDataSourceSerializer extends DataSourceSerialiser[Id.Id] with backendJson
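
Note (illustration, not part of the commit): the point of parameterising DataSourceSerialiser over M[_] (and JsonExctractorUtils over A[_], further down) is that the same extraction code can run in two modes. With M = Option a missing key simply yields None, which suits partial updates coming from the REST API; with M = Id every key must be present, which suits reading a complete record back from the database, and is what backedDataSourceSerializer above selects. A minimal self-contained sketch of that idea, with a hand-rolled Monad standing in for scalaz.Monad and Either standing in for lift's Box:

object ExtractorSketch {

  // Minimal Monad type class, standing in for scalaz.Monad.
  trait Monad[M[_]] {
    def point[A](a: A): M[A]
    def map[A, B](ma: M[A])(f: A => B): M[B]
  }

  type Id[A] = A

  val idMonad: Monad[Id] = new Monad[Id] {
    def point[A](a: A): A = a
    def map[A, B](a: A)(f: A => B): B = f(a)
  }

  val optionMonad: Monad[Option] = new Monad[Option] {
    def point[A](a: A): Option[A] = Some(a)
    def map[A, B](oa: Option[A])(f: A => B): Option[B] = oa.map(f)
  }

  // The outer Either reports malformed values; the inner M decides what a
  // missing key means: None for lenient REST updates, an error for the backend.
  trait JsonExtractor[M[_]] {
    def monad: Monad[M]
    def extractString(json: Map[String, String], key: String): Either[String, M[String]]
  }

  // Backend mode (M = Id): every key has to be there.
  object StrictExtractor extends JsonExtractor[Id] {
    val monad: Monad[Id] = idMonad
    def extractString(json: Map[String, String], key: String): Either[String, String] =
      json.get(key).toRight(s"parameter $key cannot be empty")
  }

  // REST mode (M = Option): a missing key is acceptable and yields None.
  object LenientExtractor extends JsonExtractor[Option] {
    val monad: Monad[Option] = optionMonad
    def extractString(json: Map[String, String], key: String): Either[String, Option[String]] =
      Right(json.get(key))
  }
}

Under this reading, backendJson and RestJson in the extractor file below play the roles of StrictExtractor and LenientExtractor.
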
@@ -181,10 +181,14 @@ object Doobie {
}

implicit val DeserializeDataSource: Composite[RudderDataSource] = {
import com.normation.rudder.datasources.DataSourceSerialiser._
import com.normation.rudder.datasources.backedDataSourceSerializer._
import net.liftweb.json.compactRender
import net.liftweb.json.parse
Composite[(DataSourceId,String)].xmap(
tuple => unserializeDataSource(tuple._2)
tuple => unserializeDataSource(tuple._1,parse(tuple._2)) match {
case Full(s) => RudderDataSource apply s
case eb => throw new RuntimeException("oops")
}
, source => (source.id, compactRender(serializeDataSource(source)))
)
}
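
Note (illustration, not part of the commit): doobie's xmap expects total functions, so a failed unserializeDataSource has to surface as an exception here; the placeholder throw new RuntimeException("oops") marks that spot. A small sketch of a more informative bridge, using lift's Box as in the rest of the code (the helper name is made up):

import net.liftweb.common.{ Box, Empty, Failure, Full }

// Turn a Box into a value or an exception carrying the failure message.
def boxOrThrow[A](parsed: Box[A], context: String): A = parsed match {
  case Full(value)        => value
  case Failure(msg, _, _) => throw new RuntimeException(s"Error when deserializing ${context}: ${msg}")
  case Empty              => throw new RuntimeException(s"Error when deserializing ${context}: no value found")
}
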
@@ -3,17 +3,21 @@ package com.normation.rudder.repository.json
import net.liftweb.json._
import net.liftweb.common._
import com.normation.utils.Control._
import scalaz.Free
import scalaz.Coyoneda
import scalaz.~>
import scalaz.Monad
import scalaz.Id
import scalaz.Applicative
import scalaz.std.option._

object JsonExctractorUtils {
object bliz {

type optMonad[A] = Monad[Option]
}
trait JsonExctractorUtils[A[_]] {

implicit def monad : Monad[A]
/*
* JSON extractors
*/

/*
import scalaz.Monad
import net.liftweb.common.{ Box, Empty, Full }
@@ -22,10 +26,14 @@
def bind[A, B](box: Box[A])(f: A => Box[B]) = box flatMap f
}
implicit object boxOptionMonad extends Monad[BoxOption] {
def point[A](a: => A) = Full(Some(a))
def bind[A, B](box: BoxOption[A])(f: A => BoxOption[B]) = box.flatMap { opt => opt.flatMap f }
}
final def full[A](a: A): Box[A] = Full(a)
final def empty[A]: Box[A] = Empty
def boxedIdentity[T] : T => Box[T] = Full(_)
final class WrapHelper[F[_]] {
def apply[A](a: A)(implicit ev: Applicative[F]): F[A] =
ev.point(a)
@@ -67,22 +75,23 @@
}
}
}
}
}*/

private[this] def extractJson[T, U ] (json:JValue, key:String, convertTo : U => Box[T], validJson : PartialFunction[JValue, U])= {
def boxedIdentity[T] : T => Box[T] = Full(_)
protected[this] def extractJson[T, U ] (json:JValue, key:String, convertTo : U => Box[T], validJson : PartialFunction[JValue, U]) : Box[A[T]] /* {
json \ key match {
case JNothing => Full(None)
case value if validJson.isDefinedAt(value) =>
convertTo(validJson(value)).map(Some(_))
case invalidJson => Failure(s"Not a good value for parameter ${key}: ${compactRender(invalidJson)}")
}
}
}*/

def extractJsonString[T](json:JValue, key:String, convertTo : String => Box[T] = boxedIdentity[String]) = {
extractJson(json, key, convertTo ,{ case JString(value) => value } )

val free : Free[returnType,T] = Free.liftFC(JsonParseType[T,String](json,key,convertTo) )
Free.runFC(free)(jsonInter({ case JString(value) => value }))
//val free : Free[returnType,T] = Free.liftFC(JsonParseType[T,String](json,key,convertTo) )
//Free.runFC(free)(jsonInter({ case JString(value) => value }))
}

def extractJsonBoolean[T](json:JValue, key:String, convertTo : Boolean => Box[T] = boxedIdentity[Boolean] ) = {
@@ -98,7 +107,7 @@
}

def extractJsonObj[T](json : JValue, key : String, jsonValueFun : JObject => Box[T]) = {
extractJson(json, key, jsonValueFun, { case obj : JObject => obj } )
extractJson(json, key, jsonValueFun, { case obj : JObject => obj } ).map(x => monad.map(x)(identity))
}

def extractJsonListString[T] (json: JValue, key: String)( convertTo: List[String] => Box[T] ): Box[Option[T]] = {
@@ -117,4 +126,27 @@
case _ => Failure(s"Not a good value for parameter ${key}")
}
}
}

case object RestJson extends JsonExctractorUtils[Option] {

protected[this] def extractJson[T, U ] (json:JValue, key:String, convertTo : U => Box[T], validJson : PartialFunction[JValue, U]) = {
json \ key match {
case JNothing => Full(None)
case value if validJson.isDefinedAt(value) =>
convertTo(validJson(value)).map(Some(_))
case invalidJson => Failure(s"Not a good value for parameter ${key}: ${compactRender(invalidJson)}")
}
}
}

trait backendJson extends JsonExctractorUtils[Id.Id] {
protected[this] def extractJson[T, U ] (json:JValue, key:String, convertTo : U => Box[T], validJson : PartialFunction[JValue, U]) = {
json \ key match {
case value if validJson.isDefinedAt(value) =>
convertTo(validJson(value))
case JNothing => Failure(s"parameter ${key} cannot be empty")
case invalidJson => Failure(s"Not a good value for parameter ${key}: ${compactRender(invalidJson)}")
}
}
}
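
Note (illustration, not part of the commit): read together, the two concrete interpreters give the same extraction code two different meanings for a missing key. A usage sketch, assuming the Monad instances that this work-in-progress commit still has to wire in, and an illustrative JSON value:

import net.liftweb.json.parse

val json = parse("""{ "name": "datasource1" }""")

// RestJson (A = Option): an absent key is tolerated, e.g. for partial updates.
RestJson.extractJsonString(json, "name")        // Full(Some("datasource1"))
RestJson.extractJsonString(json, "description") // Full(None)

// backendJson (A = Id), via backedDataSourceSerializer: an absent key is an error.
backedDataSourceSerializer.extractJsonString(json, "name")        // Full("datasource1")
backedDataSourceSerializer.extractJsonString(json, "description") // Failure("parameter description cannot be empty")
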
5 changes: 2 additions & 3 deletions rudder-web/src/main/scala/bootstrap/liftweb/AppConfig.scala
@@ -140,7 +140,7 @@ import com.normation.rudder.db.Doobie
import com.normation.rudder.web.rest.settings.SettingsAPI8
import com.normation.rudder.web.rest.sharedFiles.SharedFilesAPI
import com.normation.rudder.web.rest.datasource._
import com.normation.rudder.datasources.MemoryDataSourceRepository
import com.normation.rudder.datasources.PGDataSourceRepository
import com.normation.rudder.datasources.DataSourceRepoImpl
import com.normation.rudder.datasources.HttpQueryDataSourceService

@@ -792,9 +792,8 @@ object RudderConfig extends Loggable {

val settingsApi8 = new SettingsAPI8(restExtractorService, configService, asyncDeploymentAgent, stringUuidGenerator)


val dataSourceRepository = new DataSourceRepoImpl(
new MemoryDataSourceRepository
new PGDataSourceRepository(doobie)
, new HttpQueryDataSourceService(nodeInfoService, roLDAPParameterRepository, woLdapNodeRepository, interpolationCompiler)
, stringUuidGenerator
)