Consolidated DWT code
Created separate classes for:
1. The Haar wavelet filter and its inverse
2. A Haar wavelet filter for vectors with grouped variables
mandar2812 committed Dec 14, 2016
1 parent 265c8b3 commit 0f04003
Showing 2 changed files with 126 additions and 53 deletions.
@@ -23,11 +23,10 @@ import breeze.linalg.{DenseMatrix, DenseVector, diag}
import io.github.mandar2812.dynaml.evaluation.RegressionMetrics
import io.github.mandar2812.dynaml.models.ParameterizedLearner
import io.github.mandar2812.dynaml.models.gp.AbstractGPRegressionModel
import io.github.mandar2812.dynaml.optimization.{
CoupledSimulatedAnnealing, GPMLOptimizer,
GloballyOptWithGrad, GridSearch}
import io.github.mandar2812.dynaml.optimization.{CoupledSimulatedAnnealing, GPMLOptimizer, GloballyOptWithGrad, GridSearch}
import io.github.mandar2812.dynaml.pipes.{DataPipe, ReversibleScaler, Scaler, StreamDataPipe}
import io.github.mandar2812.dynaml.utils.{GaussianScaler, MVGaussianScaler, MinMaxScaler}
import io.github.mandar2812.dynaml.wavelets.{HaarWaveletFilter, InverseHaarWaveletFilter}
import org.apache.log4j.Logger
import org.renjin.script.RenjinScriptEngine
import org.renjin.sexp._
@@ -546,59 +545,13 @@ object DynaMLPipe {
* Constructs a data pipe which performs the discrete Haar wavelet transform
* on a (breeze) vector signal.
* */
val haarWaveletFilter = (order: Int) => new ReversibleScaler[DenseVector[Double]] {

override val i = invHaarWaveletFilter(order)

override def run(signal: DenseVector[Double]) = {
//Check size of signal before constructing DWT matrix
assert(
signal.length == math.pow(2.0, order).toInt,
"Signal: "+signal+"\n is of length "+signal.length+
"\nLength of signal must be : 2^"+order
)

// Now construct DWT matrix
val invSqrtTwo = 1.0/math.sqrt(2.0)

val rowFactors = (0 until order).reverse.map(i => {
(1 to math.pow(2.0, i).toInt).map(k =>
invSqrtTwo/math.sqrt(order-i))})
.reduceLeft((a,b) => a ++ b).reverse

val appRowFactors = Seq(rowFactors.head) ++ rowFactors

val dwtvec = utils.haarMatrix(math.pow(2.0, order).toInt)*signal

dwtvec.mapPairs((row, v) => v*appRowFactors(row))
}
}
val haarWaveletFilter = (order: Int) => HaarWaveletFilter(order)

/**
* Implements the inverse Discrete Haar Wavelet Transform
*
* Constructs a data pipe which performs the inverse discrete Haar wavelet transform
* on a (breeze) vector signal.
* */
val invHaarWaveletFilter = (order: Int) => Scaler((signal: DenseVector[Double]) => {
//Check size of signal before constructing DWT matrix
assert(
signal.length == math.pow(2.0, order).toInt,
"Signal: "+signal+"\n is of length "+signal.length+
"\nLength of signal must be : 2^"+order
)

// Now construct DWT matrix
val invSqrtTwo = 1.0/math.sqrt(2.0)

val rowFactors = (0 until order).reverse.map(i => {
(1 to math.pow(2.0, i).toInt).map(k =>
invSqrtTwo/math.sqrt(order-i))})
.reduceLeft((a,b) => a ++ b).reverse

val appRowFactors = Seq(rowFactors.head) ++ rowFactors
val normalizationMat: DenseMatrix[Double] = diag(DenseVector(appRowFactors.toArray))

utils.haarMatrix(math.pow(2.0, order).toInt).t*(normalizationMat*signal)
})
val invHaarWaveletFilter = (order: Int) => InverseHaarWaveletFilter(order)

def trainParametricModel[
G, T, Q, R, S, M <: ParameterizedLearner[G, T, Q, R, S]
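Both pipes now simply wrap the new wavelet classes. A minimal usage sketch (illustrative, not part of this commit), assuming DynaML and Breeze are on the classpath and that the DynaMLPipe object shown above is importable as io.github.mandar2812.dynaml.DynaMLPipe:

import breeze.linalg.DenseVector
import io.github.mandar2812.dynaml.DynaMLPipe._

// order = 3, so the input signal must have length 2^3 = 8
val signal = DenseVector(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0)

val dwt  = haarWaveletFilter(3)       // forward Haar transform (a ReversibleScaler)
val idwt = invHaarWaveletFilter(3)    // inverse Haar transform (a Scaler)

val coefficients  = dwt(signal)        // Haar wavelet coefficients of the signal
val reconstructed = idwt(coefficients) // should recover signal up to floating-point error

The inverse is also reachable as dwt.i, since HaarWaveletFilter registers InverseHaarWaveletFilter(order) as its inverse (see the new file below).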
@@ -0,0 +1,120 @@
package io.github.mandar2812.dynaml.wavelets

import breeze.linalg.{DenseMatrix, DenseVector, diag}
import io.github.mandar2812.dynaml.pipes.{ReversibleScaler, Scaler}
import io.github.mandar2812.dynaml.utils

/**
* @author mandar2812 date: 14/12/2016.
*/
case class HaarWaveletFilter(order: Int) extends ReversibleScaler[DenseVector[Double]] {

val invSqrtTwo = 1.0/math.sqrt(2.0)

val rowFactors = (0 until order).reverse.map(i => {
(1 to math.pow(2.0, i).toInt).map(k =>
invSqrtTwo/math.sqrt(order-i))})
.reduceLeft((a,b) => a ++ b).reverse

val appRowFactors = Seq(rowFactors.head) ++ rowFactors

lazy val normalizationMat: DenseMatrix[Double] = diag(DenseVector(appRowFactors.toArray))

lazy val transformMat: DenseMatrix[Double] = utils.haarMatrix(math.pow(2.0, order).toInt)

override val i = InverseHaarWaveletFilter(order)

override def run(signal: DenseVector[Double]) = {
//Check size of signal before constructing DWT matrix
assert(
signal.length == math.pow(2.0, order).toInt,
"Signal: "+signal+"\n is of length "+signal.length+
"\nLength of signal must be : 2^"+order
)
normalizationMat*(transformMat*signal)
}
}

case class InverseHaarWaveletFilter(order: Int) extends Scaler[DenseVector[Double]] {

val invSqrtTwo = 1.0/math.sqrt(2.0)

val rowFactors = (0 until order).reverse.map(i => {
(1 to math.pow(2.0, i).toInt).map(k =>
invSqrtTwo/math.sqrt(order-i))})
.reduceLeft((a,b) => a ++ b).reverse

val appRowFactors = Seq(rowFactors.head) ++ rowFactors

lazy val normalizationMat: DenseMatrix[Double] = diag(DenseVector(appRowFactors.toArray))

lazy val transformMat: DenseMatrix[Double] = utils.haarMatrix(math.pow(2.0, order).toInt).t

override def run(signal: DenseVector[Double]): DenseVector[Double] = {
assert(
signal.length == math.pow(2.0, order).toInt,
"Signal: "+signal+"\n is of length "+signal.length+
"\nLength of signal must be : 2^"+order
)

transformMat*(normalizationMat*signal)
}
}


/**
* Computes the Discrete Wavelet Transform when the features are time-shifted
* groups of several quantities. This is often encountered in NARX models,
* where a feature vector may look like (x_1, x_2, ..., y_1, y_2, ...).
*
* The class groups the dimensions into a separate vector for each variable
* and computes the DWT of each group.
*
* @param orders A list containing the time exponent of each variable group;
* the auto-regressive order of a group is 2^(order)
* */
case class GroupedHaarWaveletFilter(orders: Array[Int]) extends ReversibleScaler[DenseVector[Double]] {

val componentFilters: Array[HaarWaveletFilter] = orders.map(HaarWaveletFilter)

val twoExp = (i: Int) => math.pow(2.0, i).toInt

val partitionIndices: Array[(Int, Int)] =
orders.map(twoExp).scanLeft(0)(_+_).sliding(2).map(c => (c.head, c.last)).toArray

assert(partitionIndices.length == orders.length, "Number of partitions must be equal to number of variable groups")

override val i: Scaler[DenseVector[Double]] = InvGroupedHaarWaveletFilter(orders)

override def run(data: DenseVector[Double]): DenseVector[Double] = DenseVector(
partitionIndices.zip(componentFilters).map(limitsAndFilter => {
val ((start, end), filter) = limitsAndFilter
filter(data(start until end)).toArray
}).reduceLeft((a,b) => a ++ b)
)

}

/**
* Inverse of the [[GroupedHaarWaveletFilter]]
*
* */
case class InvGroupedHaarWaveletFilter(orders: Array[Int]) extends Scaler[DenseVector[Double]] {

val componentFilters: Array[InverseHaarWaveletFilter] = orders.map(InverseHaarWaveletFilter)

val twoExp = (i: Int) => math.pow(2.0, i).toInt

val partitionIndices: Array[(Int, Int)] =
orders.map(twoExp).scanLeft(0)(_+_).sliding(2).map(c => (c.head, c.last)).toArray

assert(partitionIndices.length == orders.length, "Number of partitions must be equal to number of variable groups")

override def run(data: DenseVector[Double]): DenseVector[Double] = DenseVector(
partitionIndices.zip(componentFilters).map(limitsAndFilter => {
val ((start, end), filter) = limitsAndFilter
filter(data(start until end)).toArray
}).reduceLeft((a,b) => a ++ b)
)

}
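A usage sketch for the grouped filter (illustrative, not part of this commit), assuming DynaML and Breeze are on the classpath; the round-trip comment assumes utils.haarMatrix builds the standard un-normalized Haar matrix, which the 1/sqrt(2) row factors above imply:

import breeze.linalg.DenseVector
import io.github.mandar2812.dynaml.wavelets.GroupedHaarWaveletFilter

// Two variable groups: x with order 2 (2^2 = 4 lags) and y with order 1 (2^1 = 2 lags),
// so the feature vector has length 4 + 2 = 6.
val features = DenseVector(
  0.5, 1.0, 1.5, 2.0,  // x_1 ... x_4
  3.0, 4.0             // y_1, y_2
)

val groupedDWT = GroupedHaarWaveletFilter(Array(2, 1))

// partitionIndices works out to ((0, 4), (4, 6)); each slice is transformed
// by its own HaarWaveletFilter
val transformed = groupedDWT(features)

// The inverse applies InverseHaarWaveletFilter to each group and should
// recover the original features up to floating-point error
val recovered = groupedDWT.i(transformed)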
