Commit
NN: Added ReLu Activation, Softmax Loss and TopologyMaker
Showing 8 changed files with 184 additions and 49 deletions.
MachineLearning/NeuralNetwork/ReluActivation.hs (new file):

{-|
Module: MachineLearning.NeuralNetwork.ReluActivation
Description: ReLu Activation
Copyright: (c) Alexander Ignatyev, 2017
License: BSD-3
Stability: experimental
Portability: POSIX

ReLu Activation.
-}

module MachineLearning.NeuralNetwork.ReluActivation
(
  relu
  , gradient
)

where

import qualified Numeric.LinearAlgebra as LA
import MachineLearning.Types (Matrix)

-- | ReLU forward pass: zeroes the negative elements of the matrix.
relu :: Matrix -> Matrix
relu x = x * (LA.step x)

-- | ReLU backward pass: passes the upstream gradient dx through
-- only where the input was positive (== dx[x<0] = 0).
gradient :: Matrix -> Matrix -> Matrix
gradient x dx = dx * (LA.step x)
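In hmatrix, `LA.step` maps each positive element to 1 and everything else to 0, so multiplying by it implements the ReLU mask with no explicit indexing. A minimal standalone sketch of the same definition, assuming only the hmatrix package (the repository's `Matrix` is an alias for hmatrix's matrix of doubles):

import Numeric.LinearAlgebra ((><), Matrix, step)

-- Same definition as in the commit, written against hmatrix's concrete type.
relu :: Matrix Double -> Matrix Double
relu x = x * step x

main :: IO ()
main = do
  let x = (2 >< 2) [-1.5, 2.0, 0.0, 3.5]
  print (relu x)  -- negatives become 0; positives pass through unchanged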
MachineLearning/NeuralNetwork/SoftmaxLoss.hs (new file):

{-|
Module: MachineLearning.NeuralNetwork.SoftmaxLoss
Description: Softmax Loss.
Copyright: (c) Alexander Ignatyev, 2017
License: BSD-3
Stability: experimental
Portability: POSIX

Softmax Loss.
-}

module MachineLearning.NeuralNetwork.SoftmaxLoss
(
  scores
  , gradient
  , loss
)

where

import qualified Data.Vector.Storable as V
import qualified Numeric.LinearAlgebra as LA
import MachineLearning.Types (R, Matrix)
import MachineLearning.Utils (sumByRows, reduceByRows)

-- | Shifts every row by its maximum for numerical stability;
-- softmax is invariant under the shift, but exp no longer overflows.
scores :: Matrix -> Matrix
scores x = x - reduceByRows V.maximum x

-- | Gradient of the softmax cross-entropy loss w.r.t. the scores: probs - y.
gradient :: Matrix -> Matrix -> Matrix
gradient scores y =
  let sum_probs = sumByRows $ exp scores
      probs = (exp scores) / sum_probs
  in probs - y

-- | Softmax Loss function, averaged over the m samples.
-- thetaList is accepted for interface compatibility but unused here.
loss :: Matrix -> [(Matrix, Matrix)] -> Matrix -> R
loss scores thetaList y = (LA.sumElements $ (log sum_probs) - t) / m
  where m = fromIntegral $ LA.rows scores
        sum_probs = sumByRows $ exp scores
        t = sumByRows $ scores * y
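Two details in this file are worth spelling out. `scores` subtracts each row's maximum before anything is exponentiated: the shift cancels in the softmax ratio, but it keeps `exp` from overflowing on large scores. And `gradient` relies on the standard identity that the derivative of softmax cross-entropy with respect to the raw scores is simply probs - y. A self-contained sketch of the stability trick on a single row, using plain hmatrix vectors instead of the repository's `sumByRows`/`reduceByRows` helpers:

import qualified Numeric.LinearAlgebra as LA
import Numeric.LinearAlgebra (Vector, vector)

-- Numerically stable softmax for one row of scores. Shifting by the
-- maximum cancels in exp(x - c) / sum(exp(x - c)), so the result equals
-- the unshifted softmax without the risk of overflow.
softmaxRow :: Vector Double -> Vector Double
softmaxRow v = e / LA.scalar (LA.sumElements e)
  where e = exp (v - LA.scalar (LA.maxElement v))

main :: IO ()
main = print $ softmaxRow (vector [1000, 1001, 1002])
-- exp 1002 alone would overflow a Double; the shifted version prints
-- approximately [0.090, 0.245, 0.665]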
MachineLearning/NeuralNetwork/TopologyMaker.hs (new file):

{-|
Module: MachineLearning.NeuralNetwork.TopologyMaker
Description: Topology Maker
Copyright: (c) Alexander Ignatyev, 2017
License: BSD-3
Stability: experimental
Portability: POSIX

Topology Maker.
-}

module MachineLearning.NeuralNetwork.TopologyMaker
(
  Activation(..)
  , Loss(..)
  , makeTopology
)

where

import qualified MachineLearning.NeuralNetwork.Topology as T
import MachineLearning.NeuralNetwork.Layer (Layer(..), affineForward, affineBackward)
import MachineLearning.NeuralNetwork.WeightInitialization (nguyen)
import qualified MachineLearning.NeuralNetwork.ReluActivation as Relu
import qualified MachineLearning.NeuralNetwork.SoftmaxLoss as Softmax
import qualified MachineLearning.NeuralNetwork.Sigmoid as Sigmoid

-- | Hidden-layer activation function.
data Activation = ASigmoid | ARelu

-- | Output-layer loss, which also fixes the output activation.
data Loss = LSigmoid | LSoftmax

-- | Creates topology. Takes activation and loss selectors, number of inputs,
-- number of outputs and list of hidden layer sizes.
makeTopology :: Activation -> Loss -> Int -> Int -> [Int] -> T.Topology
makeTopology a l nInputs nOutputs hlUnits = T.makeTopology nInputs hiddenLayers outputLayer (loss l)
  where hiddenLayers = map (mkAffineLayer a) hlUnits
        outputLayer = mkOutputLayer l nOutputs

-- | Affine hidden layer with the selected activation.
mkAffineLayer a nUnits = Layer {
  lUnits = nUnits
  , lForward = affineForward
  , lActivation = hiddenActivation a
  , lBackward = affineBackward
  , lActivationGradient = hiddenGradient a
  , lInitializeThetaM = nguyen
  }

-- | Affine output layer with the activation matching the selected loss.
mkOutputLayer l nUnits = Layer {
  lUnits = nUnits
  , lForward = affineForward
  , lActivation = outputActivation l
  , lBackward = affineBackward
  , lActivationGradient = outputGradient l
  , lInitializeThetaM = nguyen
  }

hiddenActivation ASigmoid = Sigmoid.sigmoid
hiddenActivation ARelu = Relu.relu

hiddenGradient ASigmoid = Sigmoid.gradient
hiddenGradient ARelu = Relu.gradient

outputActivation LSigmoid = Sigmoid.sigmoid
outputActivation LSoftmax = Softmax.scores

outputGradient LSigmoid = Sigmoid.outputGradient
outputGradient LSoftmax = Softmax.gradient

loss LSigmoid = Sigmoid.loss
loss LSoftmax = Softmax.loss
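A hypothetical use of the new module (the layer sizes here are illustrative, not taken from the commit): a ReLU network with two hidden layers and a softmax output for a 10-class classifier over 784-dimensional inputs:

import MachineLearning.NeuralNetwork.TopologyMaker (Activation(..), Loss(..), makeTopology)

-- 784 inputs -> 300 ReLU units -> 100 ReLU units -> 10-way softmax.
topology = makeTopology ARelu LSoftmax 784 10 [300, 100]

Dispatching on the small `Activation` and `Loss` sum types keeps all four activation/loss combinations behind a single constructor call instead of exposing the `Layer` record to callers.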