Skip to content

Commit

Permalink
Added slicing of distributed matrices and vectors
Browse files Browse the repository at this point in the history
  • Loading branch information
mandar2812 committed Oct 9, 2016
1 parent d9206f6 commit 2f7e9cf
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ package io.github.mandar2812.dynaml.algebra
import breeze.linalg.NumericOps
import org.apache.spark.rdd.RDD

import scala.collection.immutable.NumericRange

/**
* @author mandar2812 date: 28/09/2016.
*
Expand All @@ -38,5 +40,7 @@ class DualSparkVector(baseDualVector: RDD[(Long, Double)])

/** Transpose: a dual (row) vector transposes back to a column [[SparkVector]] over the same entries. */
override def t: SparkVector = {
  val column = new SparkVector(_baseDualVector)
  column
}

/**
  * Slice this dual (row) vector over a range of element indices.
  *
  * @param r the range of element indices to keep (`r.min` to `r.max`, honoring `r.step`)
  * @return a [[SparkVector]] containing only the entries whose index lies in `r`
  *
  * NOTE(review): slicing a dual (row) vector yields a [[SparkVector]], not a
  * [[DualSparkVector]] — confirm this asymmetry is intentional.
  */
def apply(r: NumericRange[Long]): SparkVector =
  if (r.isEmpty) {
    // r.min / r.max throw on an empty range; an empty slice is an empty vector.
    new SparkVector(_baseDualVector.filter(_ => false))
  } else {
    // filterByRange can prune whole partitions when the RDD is range partitioned.
    val ranged = _baseDualVector.filterByRange(r.min, r.max)
    // filterByRange is inclusive over [min, max] and ignores the range step, so
    // for stepped ranges (e.g. 0L to 10L by 2) keep only indices actually in r.
    new SparkVector(if (r.step == 1L) ranged else ranged.filter(e => r.contains(e._1)))
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ package io.github.mandar2812.dynaml.algebra
import breeze.linalg.NumericOps
import org.apache.spark.rdd.RDD

import scala.collection.immutable.NumericRange

/**
* @author mandar2812 date: 28/09/2016
*
Expand All @@ -41,5 +43,14 @@ class SparkMatrix(baseMatrix: RDD[((Long, Long), Double)]) extends NumericOps[Sp

/**
  * Matrix transpose: swap the row and column index of every stored entry.
  * Values are untouched; only the (row, col) keys are remapped.
  */
def t: SparkMatrix = {
  val transposedEntries = this.baseMatrix.map {
    case ((row, col), value) => ((col, row), value)
  }
  new SparkMatrix(transposedEntries)
}

/**
  * Extract a sub-matrix given a range of row indices and a range of column indices.
  *
  * Correctness note: `filterByRange((r.min, c.min), (r.max, c.max))` alone is
  * wrong for matrix slicing — it uses the lexicographic ordering on (row, col)
  * keys, so for any row strictly between r.min and r.max it keeps entries with
  * ANY column index, not only those in `c`. It is retained here purely as a
  * partition-pruning pre-filter; the exact predicate below does the slicing.
  *
  * @param r range of row indices to keep (honoring `r.step`)
  * @param c range of column indices to keep (honoring `c.step`)
  * @return the sub-matrix of entries (i, j) with i ∈ r and j ∈ c
  */
def apply(r: NumericRange[Long], c: NumericRange[Long]): SparkMatrix =
  if (r.isEmpty || c.isEmpty) {
    // min/max are undefined on an empty range; an empty slice is an empty matrix.
    new SparkMatrix(matrix.filter(_ => false))
  } else new SparkMatrix(
    matrix
      .filterByRange((r.min, c.min), (r.max, c.max))            // coarse, partition-pruning
      .filter(e => r.contains(e._1._1) && c.contains(e._1._2))  // exact row/column slice
  )

}


/**
  * A distributed square matrix: a [[SparkMatrix]] whose row and column counts
  * are checked to be equal at construction time.
  *
  * @param baseMatrix the underlying ((row, col), value) entries
  * @throws IllegalArgumentException if the matrix is not square
  */
class SparkSquareMatrix(baseMatrix: RDD[((Long, Long), Double)]) extends SparkMatrix(baseMatrix) {

  // require, not assert: assertions can be elided by the compiler
  // (-Xdisable-assertions), which would silently drop this invariant check.
  // require always runs and throws IllegalArgumentException for bad arguments.
  require(rows == cols, "For a square matrix, rows must be equal to columns")

}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ package io.github.mandar2812.dynaml.algebra
import breeze.linalg.NumericOps
import org.apache.spark.rdd.RDD

import scala.collection.immutable.NumericRange

/**
* @author mandar2812 date: 28/09/2016.
*
Expand All @@ -38,5 +40,8 @@ class SparkVector(baseVector: RDD[(Long, Double)])

/** Transpose: a column vector transposes to its dual (row) vector over the same entries. */
override def t: DualSparkVector = {
  val dual = new DualSparkVector(baseVector)
  dual
}

/**
  * Slice this vector over a range of element indices.
  *
  * @param r the range of element indices to keep (`r.min` to `r.max`, honoring `r.step`)
  * @return a [[SparkVector]] containing only the entries whose index lies in `r`
  */
def apply(r: NumericRange[Long]): SparkVector =
  if (r.isEmpty) {
    // r.min / r.max throw on an empty range; an empty slice is an empty vector.
    new SparkVector(_baseVector.filter(_ => false))
  } else {
    // filterByRange can prune whole partitions when the RDD is range partitioned.
    val ranged = _baseVector.filterByRange(r.min, r.max)
    // filterByRange is inclusive over [min, max] and ignores the range step, so
    // for stepped ranges (e.g. 0L to 10L by 2) keep only indices actually in r.
    new SparkVector(if (r.step == 1L) ranged else ranged.filter(e => r.contains(e._1)))
  }


}

0 comments on commit 2f7e9cf

Please sign in to comment.