Skip to content

Commit

Permalink
Push forward of random variables with defined density
Browse files Browse the repository at this point in the history
Added classes:
1. DifferentiableMap
2. PushforwardMap
  • Loading branch information
mandar2812 committed Dec 25, 2016
1 parent 53a36c5 commit ef42700
Show file tree
Hide file tree
Showing 11 changed files with 126 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package io.github.mandar2812.dynaml
import breeze.generic.UFunc
import breeze.linalg.{DenseVector, diag, product, scaleAdd, sum}
import breeze.numerics._
import io.github.mandar2812.dynaml.pipes.DataPipe
import org.apache.spark.annotation.Experimental

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package io.github.mandar2812.dynaml.analysis

import io.github.mandar2812.dynaml.pipes.DataPipe

/**
* A [[DataPipe]] which represents a differentiable transformation.
*
* @tparam S The domain of the transformation.
* @tparam D The range/codomain of the transformation.
* @tparam J The type representing the Jacobian of the transformation.
* */
trait DifferentiableMap[S, D, J] extends DataPipe[S, D] {

/**
* Returns the Jacobian of the transformation
* at the point x.
* */
def J(x: S): J
}

object DifferentiableMap {

  /**
    * Construct a [[DifferentiableMap]] from a forward function
    * and a function computing its Jacobian.
    *
    * @param f The forward transformation.
    * @param j Returns the Jacobian of f at a given point.
    * */
  def apply[S, D, J](f: (S) => D, j: (S) => J): DifferentiableMap[S, D, J] = {
    val (forward, jacobian) = (f, j)
    new DifferentiableMap[S, D, J] {
      /**
        * Returns the Jacobian of the transformation
        * at the point x.
        * */
      override def J(x: S): J = jacobian(x)

      override def run(data: S): D = forward(data)
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package io.github.mandar2812.dynaml.analysis

import breeze.numerics.log
import breeze.stats.distributions.ContinuousDistr
import io.github.mandar2812.dynaml.pipes.{DataPipe, Encoder}
import io.github.mandar2812.dynaml.probability.{ContinuousDistrRV, RandomVarWithDistr, RandomVariable}
import spire.algebra.Field

/**
* @author mandar2812 on 22/12/2016.
*
* Push forward map is a function that has a well defined inverse
* as well as Jacobian of the inverse.
*/
/**
  * @author mandar2812 on 22/12/2016.
  *
  * Push forward map is a function that has a well defined inverse
  * as well as Jacobian of the inverse. It can transform a random
  * variable with a known density into a new random variable whose
  * density follows from the change of variables formula.
  *
  * @tparam Source Domain of the forward map.
  * @tparam Destination Range of the forward map.
  * @tparam Jacobian Type of the Jacobian of the inverse map.
  * */
abstract class PushforwardMap[
@specialized(Double) Source,
@specialized(Double) Destination,
@specialized(Double) Jacobian](
implicit detImpl: DataPipe[Jacobian, Double], field: Field[Destination])
extends Encoder[Source, Destination] { self =>
/**
  * Represents the decoding/inverse operation.
  */
override val i: DifferentiableMap[Destination, Source, Jacobian]

/**
  * Push forward a continuous random variable r. By the change of
  * variables formula the density of the transformed variable at a
  * point x is p(i(x)) * |det J_i(x)|, hence the log density adds
  * log|det J_i(x)| to the base log density.
  * */
def ->[R <: ContinuousDistrRV[Source]](r: R)
: RandomVarWithDistr[Destination, ContinuousDistr[Destination]] =
RandomVariable(new ContinuousDistr[Destination] {
override def unnormalizedLogPdf(x: Destination) =
// The change of variables formula requires the absolute value
// of the Jacobian determinant: without math.abs a negative
// determinant would yield log of a negative number (NaN).
r.underlyingDist.unnormalizedLogPdf(i(x)) + log(math.abs(detImpl(i.J(x))))

override def logNormalizer = r.underlyingDist.logNormalizer

// Sample by drawing from the base variable and applying the forward map.
override def draw() = self.run(r.underlyingDist.draw())
})

}

object PushforwardMap {

  /**
    * Create a [[PushforwardMap]] from a forward [[DataPipe]] and a
    * differentiable inverse map.
    *
    * @param forward The forward transformation.
    * @param reverse The inverse transformation along with its Jacobian.
    * */
  def apply[S, D, J](forward: DataPipe[S, D], reverse: DifferentiableMap[D, S, J])(
    implicit detImpl: DataPipe[J, Double], field: Field[D]) = {
    val forwardMap = forward
    new PushforwardMap[S, D, J] {
      /**
        * Represents the decoding/inverse operation.
        */
      override val i: DifferentiableMap[D, S, J] = reverse

      override def run(data: S): D = forwardMap(data)
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,8 @@ class LaplacePosteriorMode[I](l: Likelihood[DenseVector[Double],
LaplacePosteriorMode.run(
nPoints, ParamOutEdges,
this.likelihood, initialP,
this.numIterations, identityPipe[(DenseMatrix[Double], DenseVector[Double])])
this.numIterations,
identityPipe[(DenseMatrix[Double], DenseVector[Double])])
}

object LaplacePosteriorMode {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,15 @@ trait IIDRandomVariable[D, R <: RandomVariable[D]] extends RandomVariable[Stream

object IIDRandomVariable {

def apply[D, R <: RandomVariable[D]](base: R)(n: Int) = new IIDRandomVariable[D, R] {
def apply[D](base: RandomVariable[D])(n: Int) = new IIDRandomVariable[D, RandomVariable[D]] {

val baseRandomVariable = base

val num = n
}

def apply[C, D, R <: RandomVariable[D]](base: DataPipe[C, R])(n: Int) =
DataPipe((c: C) => new IIDRandomVariable[D, R] {
def apply[C, D](base: DataPipe[C, RandomVariable[D]])(n: Int) =
DataPipe((c: C) => new IIDRandomVariable[D, RandomVariable[D]] {

val baseRandomVariable = base(c)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ package io.github.mandar2812.dynaml.probability
*
* @tparam Q The type of observed variable (y)
* @tparam R The type of conditioning variable (f)
* @tparam S Result type of the hessian method.
* @tparam W The type representing a gaussian distribution for f
*
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ class MeasurableFunction[
Domain1, Domain2](baseRV: RandomVariable[Domain1])(func: DataPipe[Domain1, Domain2])
extends RandomVariable[Domain2] {
override val sample: DataPipe[Unit, Domain2] = baseRV.sample > func

def _baseRandomVar = baseRV

//def asProbabilityModel = new ProbabilityModel()
}

object MeasurableFunction {
Expand All @@ -23,6 +27,6 @@ class RealValuedMeasurableFunction[Domain1](baseRV: RandomVariable[Domain1])(fun

object RealValuedMeasurableFunction {

def apply[Domain1](baseRV: RandomVariable[Domain1])(func: DataPipe[Domain1, Double])
: RealValuedMeasurableFunction[Domain1] = new RealValuedMeasurableFunction(baseRV)(func)
def apply[Domain1](baseRV: RandomVariable[Domain1])(func: (Domain1) => Double)
: RealValuedMeasurableFunction[Domain1] = new RealValuedMeasurableFunction(baseRV)(DataPipe(func))
}
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,8 @@ object DataPipe {
}
}



trait ParallelPipe[-Source1, +Result1, -Source2, +Result2]
extends DataPipe[(Source1, Source2), (Result1, Result2)] {

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
package io.github.mandar2812.dynaml.pipes

import scalaxy.streams.optimize

/**
* @author mandar2812 date 23/10/2016.
*
Expand All @@ -15,6 +17,9 @@ trait Encoder[S, D] extends DataPipe[S, D] {
*/
val i: DataPipe[D, S]

def apply[T[S] <: Traversable[S]](data: T[S]):T[D] =
optimize { data.map(run).asInstanceOf[T[D]] }

/**
* Represents the composition of two
* encoders, resulting in a third encoder
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,9 @@ trait ReversibleScaler[S] extends Scaler[S] with Encoder[S, S]{
* */
override val i: Scaler[S]

override def apply[T[S] <: Traversable[S]](data: T[S]):T[S] =
optimize { data.map(run).asInstanceOf[T[S]] }

def *[T](that: ReversibleScaler[T]) = {

val firstInv = this.i
Expand Down
25 changes: 25 additions & 0 deletions scripts/randomvariables.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import io.github.mandar2812.dynaml.DynaMLPipe._
import io.github.mandar2812.dynaml.analysis.{DifferentiableMap, PushforwardMap}
import io.github.mandar2812.dynaml.pipes.DataPipe
import io.github.mandar2812.dynaml.probability.GaussianRV
import spire.implicits._
import com.quantifind.charts.Highcharts._
/**
* @author mandar date 22/12/2016.
*/

// Base random variable: a Gaussian with mean 0.0 and variance 0.25.
val g = GaussianRV(0.0, 0.25)

// The Jacobian is scalar valued here, so its "determinant"
// is just the identity map on Double.
implicit val detImpl = identityPipe[Double]

// Push forward map: x -> exp(x), whose inverse is log(x)
// with derivative (Jacobian) 1/x.
val h: PushforwardMap[Double, Double, Double] = PushforwardMap(
DataPipe((x: Double) => math.exp(x)),
DifferentiableMap(
(x: Double) => math.log(x),
(x: Double) => 1.0/x)
)

// p is the push forward of g under h (a log-normal random variable).
val p = h->g

// Tabulate the density of p at 100 points on [0, 3) and plot it.
val x = Array.tabulate[(Double, Double)](100)(n => (n*0.03, p.underlyingDist.pdf(n*0.03)))
spline(x.toIterable)

0 comments on commit ef42700

Please sign in to comment.