Hide Neural Network’s Topology behind its interface. Make it “abstract”.
aligusnet committed Mar 5, 2017
1 parent 85fe3e3 commit 8fbf796
Showing 8 changed files with 161 additions and 106 deletions.
2 changes: 1 addition & 1 deletion samples/neural_networks/Main.hs
@@ -21,7 +21,7 @@ main = do
let nnt = Sigmoid.makeTopology (LA.cols x) 10 [100, 100]
model = NN.NeuralNetwork nnt
-- Step 3. Initialize theta with random values.
initTheta = Sigmoid.initializeTheta 5191711 nnt
initTheta = NN.initializeTheta 5191711 nnt

lambda = 5 / (fromIntegral $ LA.rows x)

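With initializeTheta now re-exported from the top-level module, the sample no longer reaches into the activation-specific Sigmoid module for weight initialization. A minimal sketch of the resulting setup, assuming the qualified imports NN and Sigmoid used by the sample and the Matrix/Vector aliases from MachineLearning.Types:

import MachineLearning.Types (Matrix, Vector)
import qualified MachineLearning.NeuralNetwork as NN
import qualified MachineLearning.NeuralNetwork.Sigmoid as Sigmoid
import qualified Numeric.LinearAlgebra as LA

-- Builds a sigmoid network with two hidden layers of 100 units each
-- and draws its initial weights from a fixed seed, as in the sample.
setupModel :: Matrix -> (NN.NeuralNetworkModel, Vector)
setupModel x = (model, initTheta)
  where nnt       = Sigmoid.makeTopology (LA.cols x) 10 [100, 100]
        model     = NN.NeuralNetwork nnt
        initTheta = NN.initializeTheta 5191711 nnt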
52 changes: 16 additions & 36 deletions src/MachineLearning/NeuralNetwork.hs
@@ -13,70 +13,50 @@ module MachineLearning.NeuralNetwork
(
Model(..)
, NeuralNetworkModel(..)
, Topology
, MLC.calcAccuracy

-- * Exported for testing purposes only.
, flatten
, unflatten
, T.Topology
, T.initializeTheta
, T.initializeThetaIO
, T.initializeThetaM
)

where

import Data.List (foldl')
import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import MachineLearning.Types (R, Vector, Matrix)
import qualified MachineLearning as ML
import qualified MachineLearning.Classification.Internal as MLC
import MachineLearning.Model (Model(..))
import MachineLearning.NeuralNetwork.Topology (Topology(..), loss, propagateForward, propagateBackward, numberOutputs)
import qualified MachineLearning.NeuralNetwork.Topology as T
import MachineLearning.NeuralNetwork.Regularization (Regularization(L2))


-- | Neural Network Model.
-- Takes neural network topology as a constructor argument.
newtype NeuralNetworkModel = NeuralNetwork Topology
newtype NeuralNetworkModel = NeuralNetwork T.Topology


instance Model NeuralNetworkModel where
hypothesis (NeuralNetwork topology) x theta = predictions'
where thetaList = unflatten topology theta
where thetaList = T.unflatten topology theta
predictions = LA.toRows $ calcScores topology x thetaList
predictions' = LA.vector $ map (fromIntegral . LA.maxIndex) predictions

cost (NeuralNetwork topology) lambda x y theta =
let ys = LA.fromColumns $ MLC.processOutputOneVsAll (numberOutputs topology) y
thetaList = unflatten topology theta
let ys = LA.fromColumns $ MLC.processOutputOneVsAll (T.numberOutputs topology) y
thetaList = T.unflatten topology theta
scores = calcScores topology x thetaList
in loss topology (L2 lambda) scores thetaList ys
in T.loss topology (L2 lambda) scores thetaList ys

gradient (NeuralNetwork topology) lambda x y theta =
let ys = LA.fromColumns $ MLC.processOutputOneVsAll (numberOutputs topology) y
thetaList = unflatten topology theta
(scores, cacheList) = propagateForward topology x thetaList
grad = flatten $ propagateBackward topology (L2 lambda) scores cacheList ys
let ys = LA.fromColumns $ MLC.processOutputOneVsAll (T.numberOutputs topology) y
thetaList = T.unflatten topology theta
(scores, cacheList) = T.propagateForward topology x thetaList
grad = T.flatten $ T.propagateBackward topology (L2 lambda) scores cacheList ys
in grad


-- | Score function. Takes a topology, X and theta list.
calcScores :: Topology -> Matrix -> [(Matrix, Matrix)] -> Matrix
calcScores topology x thetaList = fst $ propagateForward topology x thetaList


-- | Flatten list of matrices into vector.
flatten :: [(Matrix, Matrix)] -> Vector
flatten ms = V.concat $ map LA.flatten $ listOfTuplesToList ms


-- | Unflatten vector into list of matrices for given neural network topology.
unflatten :: Topology -> Vector -> [(Matrix, Matrix)]
unflatten (Topology sizes _ _) v =
let offsets = reverse $ foldl' (\os (r, c) -> (r+r*c + head os):os) [0] (init sizes)
ms = zipWith (\o (r, c) -> (LA.reshape r (slice o r), LA.reshape c (slice (o+r) (r*c)))) offsets sizes
slice o n = V.slice o n v
in ms


listOfTuplesToList [] = []
listOfTuplesToList ((a, b):xs) = a : b : listOfTuplesToList xs
calcScores :: T.Topology -> Matrix -> [(Matrix, Matrix)] -> Matrix
calcScores topology x thetaList = fst $ T.propagateForward topology x thetaList
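Because Topology is now abstract in this module, callers interact with the network only through the Model instance rather than touching flatten/unflatten directly. A hedged sketch of a single cost-and-gradient evaluation, assuming the Model class methods keep the shapes used in the instance above (with R, Matrix and Vector from MachineLearning.Types, and y a vector of class labels):

-- One objective/gradient evaluation against the opaque topology;
-- this is the interface an optimizer would call into.
evalStep :: NeuralNetworkModel -> R -> Matrix -> Vector -> Vector -> (R, Vector)
evalStep model lambda x y theta =
  (cost model lambda x y theta, gradient model lambda x y theta)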
6 changes: 5 additions & 1 deletion src/MachineLearning/NeuralNetwork/Layer.hs
@@ -1,3 +1,4 @@
{-# LANGUAGE RankNTypes #-}
{-|
Module: MachineLearning.NeuralNetwork.Layer
Description: Neural Network's Layer
@@ -25,6 +26,7 @@ import MachineLearning.Types (R, Matrix)
import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import Numeric.LinearAlgebra ((<>))
import qualified Control.Monad.Random as RndM


data Cache = Cache {
@@ -36,10 +38,12 @@ data Cache = Cache {


data Layer = Layer {
lForward :: Matrix -> Matrix -> Matrix -> Matrix
lUnits :: Int
, lForward :: Matrix -> Matrix -> Matrix -> Matrix
, lBackward :: Matrix -> Cache -> (Matrix, Matrix, Matrix)
, lActivation :: Matrix -> Matrix
, lActivationGradient :: Matrix -> Matrix -> Matrix
, lInitializeThetaM :: forall g. RndM.RandomGen g => (Int, Int) -> RndM.Rand g (Matrix, Matrix)
}


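The new lInitializeThetaM field is what motivates the RankNTypes pragma added above: the forall g. sits inside a record field, giving the field a rank-2 type, so every Layer carries an initializer that works with any random generator the caller picks. A minimal sketch of a function with that shape (zeroInit is hypothetical, shown only to illustrate the field's type; imports as in this module):

-- Hypothetical initializer returning a zero bias row and zero
-- weights; any function of this shape can be stored in the
-- rank-2 lInitializeThetaM field.
zeroInit :: RndM.RandomGen g => (Int, Int) -> RndM.Rand g (Matrix, Matrix)
zeroInit (r, c) = return (LA.konst 0 (1, r), LA.konst 0 (r, c))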
73 changes: 20 additions & 53 deletions src/MachineLearning/NeuralNetwork/Sigmoid.hs
@@ -12,87 +12,54 @@ Sigmoid
module MachineLearning.NeuralNetwork.Sigmoid
(
makeTopology
, initializeTheta
, initializeThetaIO
, initializeThetaM

)

where


import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import System.Random (RandomGen)
import qualified Control.Monad.Random as RndM
import MachineLearning.Types (R, Vector, Matrix)
import qualified MachineLearning.LogisticModel as LM
import MachineLearning.Random
import MachineLearning.NeuralNetwork.Topology (Topology(..))
import MachineLearning.Random (getRandomRMatrixM)
import qualified MachineLearning.NeuralNetwork.Topology as T
import MachineLearning.NeuralNetwork.Layer (Layer(..), affineForward, affineBackward)


-- | Creates topology. Takes the number of inputs, the number of outputs and a list of hidden layer sizes.
makeTopology :: Int -> Int -> [Int] -> Topology
makeTopology nInputs nOutputs hlUnits = Topology sizes layers loss
where hiddenLayers = take (length hlUnits) $ repeat mkAffineSigmoidLayer
outputLayer = mkSigmoidOutputLayer
layers = hiddenLayers ++ [outputLayer]
layerSizes = nInputs : (hlUnits ++ [nOutputs])
sizes = getThetaSizes layerSizes
makeTopology :: Int -> Int -> [Int] -> T.Topology
makeTopology nInputs nOutputs hlUnits = T.makeTopology nInputs hiddenLayers outputLayer loss
where hiddenLayers = map mkAffineSigmoidLayer hlUnits
outputLayer = mkSigmoidOutputLayer nOutputs


mkAffineSigmoidLayer = Layer {
lForward = affineForward
mkAffineSigmoidLayer nUnits = Layer {
lUnits = nUnits
, lForward = affineForward
, lActivation = LM.sigmoid
, lBackward = affineBackward
, lActivationGradient = \z da -> da * LM.sigmoidGradient z
, lInitializeThetaM = initializeThetaM
}


mkSigmoidOutputLayer = Layer {
lForward = affineForward
mkSigmoidOutputLayer nUnits = Layer {
lUnits = nUnits
, lForward = affineForward
, lActivation = LM.sigmoid
, lBackward = affineBackward
, lActivationGradient = \scores y -> scores - y
, lInitializeThetaM = initializeThetaM
}


-- | Create and initialize weights vector with random values
-- for given neural network topology.
-- Takes a seed to initialize generator of random numbers as a first parameter.
initializeTheta :: Int -> Topology -> Vector
initializeTheta seed topology = RndM.evalRand (initializeThetaM topology) gen
where gen = RndM.mkStdGen seed


-- | Create and initialize weights vector with random values
-- for given neural network topology inside IO Monad.
initializeThetaIO :: Topology -> IO Vector
initializeThetaIO = RndM.evalRandIO . initializeThetaM


-- | Create and initialize weights vector with random values
-- for given neural network topology inside RandomMonad.
initializeThetaM :: RandomGen g => Topology -> RndM.Rand g Vector
initializeThetaM topology = V.concat <$> initializeThetaListM topology


-- | Create and initialize list of weights matrices with random values
-- for given neural network topology.
initializeThetaListM :: RandomGen g => Topology -> RndM.Rand g [Vector]
initializeThetaListM (Topology sizes _ _) = concat <$> mapM initTheta sizes
where initTheta (r, c) = do
let b :: Vector
b = LA.konst 0 r
eps = calcEps r c
sequence [return b, getRandomRVectorM (r*c) (-eps, eps)]
calcEps r c = (sqrt 6) / (sqrt . fromIntegral $ r + c)


-- | Returns dimensions of weight matrices for given neural network topology
getThetaSizes :: [Int] -> [(Int, Int)]
getThetaSizes nn = zipWith (\r c -> (r, c)) (tail nn) nn
initializeThetaM :: RndM.RandomGen g => (Int, Int) -> RndM.Rand g (Matrix, Matrix)
initializeThetaM (r, c) = do
let b = LA.konst 0 (1, r)
eps = (sqrt 6) / (sqrt . fromIntegral $ r + c)
w <- getRandomRMatrixM r c (-eps, eps)
return (b, w)


-- Sigmoid Loss function
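The per-layer initializer now returns a zero bias row alongside a weight matrix drawn uniformly from (-eps, eps), where eps = sqrt 6 / sqrt (r + c) is the familiar Glorot/Xavier-style bound. A hedged sketch of drawing one layer's pair from a fixed seed — initializeThetaM is module-internal after this commit, so this assumes code living in the same module; for r = 25, c = 400 the bound works out to about 0.119:

import qualified Control.Monad.Random as RndM

-- Materialize one (bias, weights) pair for a 25x400 layer: the bias
-- is a 1x25 zero row, the weights are 25x400 uniform samples.
sampleLayerTheta :: (Matrix, Matrix)
sampleLayerTheta = RndM.evalRand (initializeThetaM (25, 400)) (RndM.mkStdGen 42)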
90 changes: 87 additions & 3 deletions src/MachineLearning/NeuralNetwork/Topology.hs
@@ -11,20 +11,34 @@ Neural Network's Topology

module MachineLearning.NeuralNetwork.Topology
(
Topology(..)
Topology
, LossFunc(..)
, makeTopology
, loss
, propagateForward
, propagateBackward
, numberOutputs
, initializeTheta
, initializeThetaIO
, initializeThetaM
, flatten
, unflatten
)

where

import Control.Monad (zipWithM)
import Data.List (foldl')
import MachineLearning.Types (R, Matrix)
import qualified Control.Monad.Random as RndM
import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import MachineLearning.Types (R, Vector, Matrix)
import MachineLearning.NeuralNetwork.Layer (Layer(..), Cache(..))
import MachineLearning.NeuralNetwork.Regularization (Regularization, forwardReg, backwardReg)


-- | Loss function's type.
-- Takes x, weights and y.
type LossFunc = Matrix -> [(Matrix, Matrix)] -> Matrix -> R


@@ -33,25 +47,42 @@ type LossFunc = Matrix -> [(Matrix, Matrix)] -> Matrix -> R
data Topology = Topology [(Int, Int)] [Layer] LossFunc


-- | Makes a Neural Network's Topology.
-- Takes the number of inputs, a list of hidden layers, the output layer and a loss function.
makeTopology :: Int -> [Layer] -> Layer -> LossFunc -> Topology
makeTopology nInputs hiddenLayers outputLayer lossFunc =
let layers = hiddenLayers ++ [outputLayer]
layerSizes = nInputs : (map lUnits layers)
sizes = getThetaSizes layerSizes
in Topology sizes layers lossFunc


-- | Calculates loss for the given topology.
-- Takes topology, regularization, x, weights, y.
loss :: Topology -> Regularization -> Matrix -> [(Matrix, Matrix)] -> Matrix -> R
loss (Topology _ _ lf) reg x weights y =
let lossValue = lf x weights y
regValue = forwardReg reg weights
in lossValue + regValue


-- | Implementation of the forward propagation algorithm.
propagateForward :: Topology -> Matrix -> [(Matrix, Matrix)] -> (Matrix, [Cache])
propagateForward (Topology _ layers _) x thetaList = foldl' f (x, []) $ zip thetaList layers
where f (a, cs) (theta, hl) =
let (a', cache) = forwardPass hl a theta
in (a', cache:cs)


-- | Makes one forward step for the given layer.
forwardPass :: Layer -> Matrix -> (Matrix, Matrix) -> (Matrix, Cache)
forwardPass layer a (b, w) = (a', Cache z a b w)
where z = lForward layer a b w
a' = lActivation layer z


-- | Implements backward propagation algorithm.
-- | Implementation of the backward propagation algorithm.
propagateBackward :: Topology -> Regularization -> Matrix -> [Cache] -> Matrix -> [(Matrix, Matrix)]
propagateBackward (Topology _ layers _) reg scores (cache:cacheList) y = gradientList
where cache' = Cache scores (cacheX cache) (cacheB cache) (cacheW cache)
cacheList' = cache':cacheList
@@ -61,6 +92,8 @@ propagateBackward (Topology _ layers _) reg scores (cache:cacheList) y = gradien
in (da', (db, dw):grads)


-- | Makes one backward step for the given layer.
backwardPass :: Layer -> Regularization -> Matrix -> Cache -> (Matrix, Matrix, Matrix)
backwardPass layer reg da cache = (da', db, dw')
where delta = lActivationGradient layer (cacheZ cache) da
(da', db, dw) = lBackward layer delta cache
@@ -70,3 +103,54 @@ backwardPass layer reg da cache = (da', db, dw')
-- | Returns number of outputs of the given topology.
numberOutputs :: Topology -> Int
numberOutputs (Topology nnt _ _) = fst $ last nnt


-- | Returns the dimensions of the weight matrices for the given neural network topology.
getThetaSizes :: [Int] -> [(Int, Int)]
getThetaSizes nn = zipWith (\r c -> (r, c)) (tail nn) nn


-- | Creates and initializes the weights vector with random values
-- for the given neural network topology.
-- Takes a seed for the random number generator as its first parameter.
initializeTheta :: Int -> Topology -> Vector
initializeTheta seed topology = RndM.evalRand (initializeThetaM topology) gen
where gen = RndM.mkStdGen seed


-- | Creates and initializes the weights vector with random values
-- for the given neural network topology inside the IO monad.
initializeThetaIO :: Topology -> IO Vector
initializeThetaIO = RndM.evalRandIO . initializeThetaM


-- | Creates and initializes the weights vector with random values
-- for the given neural network topology inside the Rand monad.
initializeThetaM :: RndM.RandomGen g => Topology -> RndM.Rand g Vector
initializeThetaM topology = flatten <$> initializeThetaListM topology


-- | Creates and initializes a list of weight matrices with random values
-- for the given neural network topology.
initializeThetaListM :: RndM.RandomGen g => Topology -> RndM.Rand g [(Matrix, Matrix)]
initializeThetaListM (Topology sizes layers _) = zipWithM lInitializeThetaM layers sizes


-- | Flattens a list of matrices into a vector.
flatten :: [(Matrix, Matrix)] -> Vector
flatten ms = V.concat $ map LA.flatten $ listOfTuplesToList ms


-- | Unflattens a vector into a list of matrices for the given neural network topology.
unflatten :: Topology -> Vector -> [(Matrix, Matrix)]
unflatten (Topology sizes _ _) v =
let offsets = reverse $ foldl' (\os (r, c) -> (r+r*c + head os):os) [0] (init sizes)
ms = zipWith (\o (r, c) -> (LA.reshape r (slice o r), LA.reshape c (slice (o+r) (r*c)))) offsets sizes
slice o n = V.slice o n v
in ms


-- | Converts a list of tuples into a flat list.
listOfTuplesToList :: [(a, a)] -> [a]
listOfTuplesToList [] = []
listOfTuplesToList ((a, b):xs) = a : b : listOfTuplesToList xs
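getThetaSizes pairs consecutive layer sizes, so layer sizes [400, 25, 10] yield [(25, 400), (10, 25)]; each theta then occupies a bias of length r plus an r*c weight block, which is exactly the r + r*c stride used in unflatten's offsets. That makes flatten and unflatten inverses for any theta list matching the topology — the invariant an optimizer relies on when it works over a single flat vector. A hedged property-style sketch of that round trip, assuming thetas were produced for this topology (e.g. by initializeThetaListM):

-- Round trip: unflattening a flattened theta list recovers it
-- exactly, since both directions are pure reshapes of the same storage.
roundTrip :: Topology -> [(Matrix, Matrix)] -> Bool
roundTrip topology thetas = unflatten topology (flatten thetas) == thetas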
