Skip to content

Commit

Permalink
Added class for continuous valued stochastic processes
Browse files Browse the repository at this point in the history
  • Loading branch information
mandar2812 committed Oct 11, 2016
1 parent 4c95c6f commit aec2434
Show file tree
Hide file tree
Showing 3 changed files with 55 additions and 37 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ trait StochasticProcess[T, I, Y, W] extends Model[T, I, Y] {
* @tparam W Implementing class of the posterior distribution
*
* */
abstract class SecondOrderProcess[T, I, Y, K, M, W] extends StochasticProcess[T, I, Y, W] {
trait SecondOrderProcess[T, I, Y, K, M, W] extends StochasticProcess[T, I, Y, W] {

private val logger = Logger.getLogger(this.getClass)

Expand All @@ -72,3 +72,47 @@ abstract class SecondOrderProcess[T, I, Y, K, M, W] extends StochasticProcess[T,


}

/**
  * @author mandar2812 date: 11/10/2016
  *
  * Blueprint for a continuous valued stochastic process. Abstracts away the
  * behavior shared by concrete implementations such as
  * [[io.github.mandar2812.dynaml.models.gp.GPRegression]],
  * [[io.github.mandar2812.dynaml.models.stp.StudentTRegression]] and others.
  *
  */
abstract class ContinuousProcess[T, I, Y, W] extends StochasticProcess[T, I, Y, W] {

  private val logger = Logger.getLogger(this.getClass)

  /**
    * How many standard-deviation intervals away from the mean the
    * error bars lie; defaults to 1.
    */
  private var errorSigma: Int = 1

  // Accessor for the current error-bar width setting.
  def _errorSigma: Int = errorSigma

  // Mutator for the error-bar width setting.
  def errorSigma_(s: Int): Unit = errorSigma = s

  /**
    * Draw three predictions from the posterior predictive distribution:
    * 1) the mean/MAP estimate Y,
    * 2) Y- : the lower error bar (mean - sigma*stdDeviation),
    * 3) Y+ : the upper error bar (mean + sigma*stdDeviation).
    **/
  def predictionWithErrorBars[U <: Seq[I]](testData: U, sigma: Int): Seq[(I, Y, Y, Y)]

  /**
    * Returns a prediction with error bars for a test set of indexes and labels.
    * (Index, Actual Value, Prediction, Lower Bar, Higher Bar)
    * */
  def test(testData: T): Seq[(I, Y, Y, Y, Y)] = {
    logger.info("Generating predictions for test set")
    //Posterior predictive draws (mean plus error bars) at the test indexes.
    val posteriorDraws = this.predictionWithErrorBars(dataAsIndexSeq(testData), errorSigma)
    //Pair each labelled test point with its prediction and error bars.
    dataAsSeq(testData).zip(posteriorDraws).map {
      case ((index, actual), (_, mean, lower, upper)) =>
        (index, actual, mean, lower, upper)
    }
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ package io.github.mandar2812.dynaml.models.gp
import breeze.linalg._
import breeze.numerics.log
import io.github.mandar2812.dynaml.kernels.{DiracKernel, LocalScalarKernel}
import io.github.mandar2812.dynaml.models.SecondOrderProcess
import io.github.mandar2812.dynaml.models.{ContinuousProcess, SecondOrderProcess}
import io.github.mandar2812.dynaml.optimization.GloballyOptWithGrad
import io.github.mandar2812.dynaml.probability.MultGaussianRV
import org.apache.log4j.Logger
Expand All @@ -40,12 +40,11 @@ import org.apache.log4j.Logger
*
*/
abstract class AbstractGPRegressionModel[T, I](
cov: LocalScalarKernel[I],
n: LocalScalarKernel[I],
data: T, num: Int) extends
SecondOrderProcess[T, I, Double, Double, DenseMatrix[Double],
MultGaussianRV]
with GloballyOptWithGrad {
cov: LocalScalarKernel[I], n: LocalScalarKernel[I],
data: T, num: Int)
extends ContinuousProcess[T, I, Double, MultGaussianRV]
with SecondOrderProcess[T, I, Double, Double, DenseMatrix[Double], MultGaussianRV]
with GloballyOptWithGrad {

private val logger = Logger.getLogger(this.getClass)

Expand Down Expand Up @@ -223,18 +222,6 @@ with GloballyOptWithGrad {

override def predict(point: I): Double = predictionWithErrorBars(Seq(point), 1).head._2

/**
* Returns a prediction with error bars for a test set of indexes and labels.
* (Index, Actual Value, Prediction, Lower Bar, Higher Bar)
* */
def test(testData: T): Seq[(I, Double, Double, Double, Double)] = {
logger.info("Generating predictions for test set")
//Calculate the posterior predictive distribution for the test points.
val predictionWithError = this.predictionWithErrorBars(dataAsIndexSeq(testData), 1)
//Collate the test data with the predictions and error bars
dataAsSeq(testData).zip(predictionWithError).map(i => (i._1._1, i._1._2,
i._2._2, i._2._3, i._2._4))
}

/**
* Cache the training kernel and noise matrices
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
package io.github.mandar2812.dynaml.models.stp

import breeze.linalg.{DenseMatrix, DenseVector, cholesky}
import breeze.numerics._
import io.github.mandar2812.dynaml.kernels.LocalScalarKernel
import io.github.mandar2812.dynaml.models.SecondOrderProcess
import io.github.mandar2812.dynaml.optimization.{GloballyOptWithGrad, GloballyOptimizable}
import io.github.mandar2812.dynaml.models.{ContinuousProcess, SecondOrderProcess}
import io.github.mandar2812.dynaml.optimization.GloballyOptimizable
import io.github.mandar2812.dynaml.probability.MultStudentsTRV
import io.github.mandar2812.dynaml.probability.distributions.MultivariateStudentsT
import org.apache.log4j.Logger
Expand All @@ -16,7 +15,8 @@ abstract class AbstractSTPRegressionModel[T, I](
mu: Double, cov: LocalScalarKernel[I],
n: LocalScalarKernel[I],
data: T, num: Int)
extends SecondOrderProcess[T, I, Double, Double, DenseMatrix[Double], MultStudentsTRV]
extends ContinuousProcess[T, I, Double, MultStudentsTRV]
with SecondOrderProcess[T, I, Double, Double, DenseMatrix[Double], MultStudentsTRV]
with GloballyOptimizable {


Expand Down Expand Up @@ -166,19 +166,6 @@ abstract class AbstractSTPRegressionModel[T, I](
AbstractSTPRegressionModel.logLikelihood(current_state("degrees_of_freedom"),trainingLabels, kernelTraining)
}

/**
* Returns a prediction with error bars for a test set of indexes and labels.
* (Index, Actual Value, Prediction, Lower Bar, Higher Bar)
* */
def test(testData: T): Seq[(I, Double, Double, Double, Double)] = {
logger.info("Generating predictions for test set")
//Calculate the posterior predictive distribution for the test points.
val predictionWithError = this.predictionWithErrorBars(dataAsIndexSeq(testData), 1)
//Collate the test data with the predictions and error bars
dataAsSeq(testData).zip(predictionWithError).map(i => (i._1._1, i._1._2,
i._2._2, i._2._3, i._2._4))
}

/**
* Cache the training kernel and noise matrices
* for fast access in future predictions.
Expand Down

0 comments on commit aec2434

Please sign in to comment.