Skip to content

Commit

Permalink
Corrected :*: member of DecomposableCovariance
Browse files Browse the repository at this point in the history
  • Loading branch information
mandar2812 committed Dec 15, 2016
1 parent bbf07cf commit dad68ba
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -581,11 +581,7 @@ object DynaMLPipe {
* Creates an [[Encoder]] which replicates a
* [[DenseVector]] instance n times.
* */
val breezeDVReplicationEncoder = (n: Int) => Encoder((v: DenseVector[Double]) => {
Array.fill(n)(v)
}, (vs: Array[DenseVector[Double]]) => {
vs.head
})
// Specialisation of genericReplicationEncoder for breeze DenseVector[Double]:
// forward pipe replicates the vector n times; reverse pipe presumably takes the
// head of the replicated array (see genericReplicationEncoder — confirm).
val breezeDVReplicationEncoder = (n: Int) => genericReplicationEncoder[DenseVector[Double]](n)

def trainParametricModel[
G, T, Q, R, S, M <: ParameterizedLearner[G, T, Q, R, S]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import io.github.mandar2812.dynaml.DynaMLPipe
import io.github.mandar2812.dynaml.algebra.{PartitionedPSDMatrix, PartitionedVector}
import io.github.mandar2812.dynaml.pipes.{DataPipe, Encoder}

import scala.reflect.ClassTag

/**
* Scalar Kernel defines algebraic behavior for kernels of the form
* K: Index x Index -> Double, i.e. kernel functions whose output
Expand Down Expand Up @@ -127,12 +129,20 @@ class DecomposableCovariance[S](kernels: LocalScalarKernel[S]*)(
coupleAndKern._2.evaluate(u,v)
}))
}

/**
  * Gradient of the decomposed covariance w.r.t. its hyper-parameters.
  *
  * The couple (x, y) is first split into per-component couples by applying
  * the replicated encoding; each constituent kernel then yields its own
  * gradient map and the maps are merged left-to-right with `++`
  * (a later kernel's entry wins on a clashing key).
  */
override def gradient(x: S, y: S): Map[String, Double] = {
  val (decodedX, decodedY) = (encoding * encoding)((x, y))
  val perKernelGradients = decodedX.zip(decodedY).zip(kernels).map {
    case ((u, v), kernel) => kernel.gradient(u, v)
  }
  perKernelGradients.reduceLeft(_ ++ _)
}
}

/**
  * Companion object holding the reduction pipes used to combine the
  * per-component kernel evaluations of a [[DecomposableCovariance]].
  */
object DecomposableCovariance {

  /** Additive composition: reduce component kernel values by summation. */
  val :+: = DataPipe((l: Array[Double]) => l.sum)

  /** Multiplicative composition: reduce component kernel values by product
    * (the block as shown defined `:*:` twice, once incorrectly as a sum;
    * only the corrected product definition is kept). */
  val :*: = DataPipe((l: Array[Double]) => l.product)

}
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ import io.github.mandar2812.dynaml.optimization.GloballyOptWithGrad
import io.github.mandar2812.dynaml.probability.MultGaussianPRV
import org.apache.log4j.Logger

import scala.reflect.ClassTag

/**
* Single-Output Gaussian Process Regression Model
* Performs gp/spline smoothing/regression with
Expand All @@ -46,7 +48,7 @@ import org.apache.log4j.Logger
*/
abstract class AbstractGPRegressionModel[T, I](
cov: LocalScalarKernel[I], n: LocalScalarKernel[I],
data: T, num: Int)
data: T, num: Int)(implicit ev: ClassTag[I])
extends ContinuousProcess[T, I, Double, MultGaussianPRV]
with SecondOrderProcess[T, I, Double, Double, DenseMatrix[Double], MultGaussianPRV]
with GloballyOptWithGrad {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,16 @@ import io.github.mandar2812.dynaml.probability.MultStudentsTPRV
import io.github.mandar2812.dynaml.probability.distributions.{BlockedMultivariateStudentsT, MultivariateStudentsT}
import org.apache.log4j.Logger

import scala.reflect.ClassTag

/**
* @author mandar2812 date 26/08/16.
* Implementation of a Students' T Regression model.
*/
abstract class AbstractSTPRegressionModel[T, I](
mu: Double, cov: LocalScalarKernel[I],
n: LocalScalarKernel[I],
data: T, num: Int)
data: T, num: Int)(implicit ev: ClassTag[I])
extends ContinuousProcess[T, I, Double, MultStudentsTPRV]
with SecondOrderProcess[T, I, Double, Double, DenseMatrix[Double], MultStudentsTPRV]
with GloballyOptimizable {
Expand Down

0 comments on commit dad68ba

Please sign in to comment.