Added NFData instances for networks and layers #105

Open
wants to merge 1 commit into master
3 changes: 2 additions & 1 deletion src/Grenade/Core/Layer.hs
@@ -48,6 +48,7 @@ import Data.Kind (Type)

import Grenade.Core.Shape
import Grenade.Core.LearningParameters
import Control.DeepSeq (NFData)

-- | Class for updating a layer. All layers implement this, as it
-- describes how to create and update the layer.
@@ -73,7 +74,7 @@ class UpdateLayer x where
-- need to implement it for all shapes, only ones which are
-- appropriate.
--
class UpdateLayer x => Layer x (i :: Shape) (o :: Shape) where
class (UpdateLayer x, NFData x) => Layer x (i :: Shape) (o :: Shape) where
-- | The Wengert tape for this layer. Includes all that is required
-- to generate the back propagated gradients efficiently. As a
-- default, `S i` is fine.
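
Note: with `NFData x` now a superclass of `Layer`, any code that is generic over layers can force them to normal form. A minimal sketch of the kind of usage this enables (`forcedUpdate` is a hypothetical helper, not part of this diff):

import Control.DeepSeq (NFData, force)
import Grenade.Core

-- Hypothetical helper: run a layer update and force the result so
-- that no thunks survive into the next training iteration.
forcedUpdate :: (UpdateLayer x, NFData x)
             => LearningParameters -> x -> Gradient x -> x
forcedUpdate lp layer grad = force (runUpdate lp layer grad)
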
9 changes: 8 additions & 1 deletion src/Grenade/Core/Network.hs
@@ -44,6 +44,7 @@ import Data.Kind (Type)
import Grenade.Core.Layer
import Grenade.Core.LearningParameters
import Grenade.Core.Shape
import Control.DeepSeq
Collaborator comment:

For packages outside this repo, I really like to make all imports explicit or qualified.

I think you want:

import           Control.DeepSeq (NFData (..), deepseq)


-- | Type of a network.
--
@@ -68,6 +69,12 @@ instance Show (Network '[] '[i]) where
instance (Show x, Show (Network xs rs)) => Show (Network (x ': xs) (i ': rs)) where
show (x :~> xs) = show x ++ "\n~>\n" ++ show xs

instance NFData (Network '[] '[i]) where
rnf NNil = ()

instance NFData (Network xs rs) => NFData (Network (x ': xs) (i ': rs)) where
rnf (x :~> xs) = x `deepseq` xs `deepseq` ()

-- | Gradient of a network.
--
-- Parameterised on the layers of the network.
@@ -191,7 +198,7 @@ instance CreatableNetwork sublayers subshapes => UpdateLayer (Network sublayers
-- | Ultimate composition.
--
-- This allows a complete network to be treated as a layer in a larger network.
instance (CreatableNetwork sublayers subshapes, i ~ (Head subshapes), o ~ (Last subshapes)) => Layer (Network sublayers subshapes) i o where
instance (CreatableNetwork sublayers subshapes, i ~ (Head subshapes), o ~ (Last subshapes), NFData (Network sublayers subshapes)) => Layer (Network sublayers subshapes) i o where
type Tape (Network sublayers subshapes) i o = Tapes sublayers subshapes
runForwards = runNetwork
runBackwards = runGradient
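
Note: the two `Network` instances above recurse through the layer list with `deepseq`, so an entire network can be forced in one call. A hedged usage sketch (`strictStep` is illustrative, not from this PR):

import Control.DeepSeq (NFData, force)
import Grenade.Core

-- Illustrative only: fully evaluate a network between training
-- iterations so pending layer updates cannot pile up as thunks.
strictStep :: NFData (Network layers shapes)
           => Network layers shapes -> Network layers shapes
strictStep = force
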
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Concat.hs
@@ -35,6 +35,7 @@ import Data.Kind (Type)
import Grenade.Core

import Numeric.LinearAlgebra.Static ( row, (===), splitRows, unrow, (#), split, R )
import Control.DeepSeq

-- | A Concatenating Layer.
--
@@ -52,6 +53,9 @@ import Numeric.LinearAlgebra.Static ( row, (===), splitRows, unrow, (#
data Concat :: Shape -> Type -> Shape -> Type -> Type where
Concat :: x -> y -> Concat m x n y

instance (NFData x, NFData y) => NFData (Concat m x n y) where
rnf (Concat x y) = rnf x `deepseq` rnf y `deepseq` ()

instance (Show x, Show y) => Show (Concat m x n y) where
show (Concat x y) = "Concat\n" ++ show x ++ "\n" ++ show y

4 changes: 4 additions & 0 deletions src/Grenade/Layers/Convolution.hs
@@ -46,6 +46,7 @@ import Numeric.LinearAlgebra.Static hiding ((|||), build, toRows)
import Grenade.Core
import Grenade.Layers.Internal.Convolution
import Grenade.Layers.Internal.Update
import Control.DeepSeq

-- | A convolution layer for a neural network.
-- This uses the im2col convolution trick popularised by Caffe, which essentially turns the
@@ -113,6 +114,9 @@ instance Show (Convolution c f k k' s s') where
px = (fmap . fmap . fmap) render ms
in unlines $ foldl1 (zipWith (\a' b' -> a' ++ " | " ++ b')) $ px

instance NFData (Convolution c f k k' s s') where
rnf (Convolution a b) = rnf a `deepseq` rnf b `deepseq` ()

randomConvolution :: ( MonadRandom m
, KnownNat channels
, KnownNat filters
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Crop.hs
@@ -37,6 +37,7 @@ import Grenade.Layers.Internal.Pad

import Numeric.LinearAlgebra (konst, subMatrix, diagBlock)
import Numeric.LinearAlgebra.Static (extract, create)
import Control.DeepSeq

-- | A cropping layer for a neural network.
data Crop :: Nat
@@ -45,6 +46,9 @@ data Crop :: Nat
-> Nat -> Type where
Crop :: Crop cropLeft cropTop cropRight cropBottom

instance NFData (Crop cropLeft cropTop cropRight cropBottom) where
rnf Crop = ()

instance Show (Crop cropLeft cropTop cropRight cropBottom) where
show Crop = "Crop"

4 changes: 4 additions & 0 deletions src/Grenade/Layers/Deconvolution.hs
@@ -50,6 +50,7 @@ import Numeric.LinearAlgebra.Static hiding ((|||), build, toRows)
import Grenade.Core
import Grenade.Layers.Internal.Convolution
import Grenade.Layers.Internal.Update
import Control.DeepSeq

-- | A Deconvolution layer for a neural network.
-- This uses the im2col convolution trick popularised by Caffe.
@@ -94,6 +95,9 @@ data Deconvolution' :: Nat -- Number of channels, for the first layer this could
=> !(L kernelFlattened channels) -- The kernel filter gradient
-> Deconvolution' channels filters kernelRows kernelColumns strideRows strideColumns

instance NFData (Deconvolution c f k k' s s') where
rnf (Deconvolution a b) = rnf a `deepseq` rnf b `deepseq` ()

instance Show (Deconvolution c f k k' s s') where
show (Deconvolution a _) = renderConv a
where
6 changes: 5 additions & 1 deletion src/Grenade/Layers/Dropout.hs
@@ -11,6 +11,7 @@ import Control.Monad.Random hiding (fromList)

import GHC.TypeLits
import Grenade.Core
import Control.DeepSeq

-- A Dropout layer helps to reduce overfitting.
-- The idea here is that the vector is a shape of 1s and 0s, which we multiply the input by.
@@ -20,7 +21,10 @@ import Grenade.Core
data Dropout = Dropout {
dropoutRate :: Double
, dropoutSeed :: Int
} deriving Show
} deriving (Show)
Collaborator comment:

Why? Please revert this change.


instance NFData Dropout where
rnf (Dropout r s) = rnf r `deepseq` rnf s `deepseq` ()

instance UpdateLayer Dropout where
type Gradient Dropout = ()
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Elu.hs
@@ -19,13 +19,17 @@ import GHC.TypeLits
import Grenade.Core

import qualified Numeric.LinearAlgebra.Static as LAS
import Control.DeepSeq

-- | An exponential linear unit.
-- A layer which can act between any shape of the same dimension, acting as a
-- diode on every neuron individually.
data Elu = Elu
deriving Show

instance NFData Elu where
rnf Elu = ()

instance UpdateLayer Elu where
type Gradient Elu = ()
runUpdate _ _ _ = Elu
7 changes: 7 additions & 0 deletions src/Grenade/Layers/FullyConnected.hs
@@ -21,6 +21,7 @@ import Numeric.LinearAlgebra.Static
import Grenade.Core

import Grenade.Layers.Internal.Update
import Control.DeepSeq

-- | A basic fully connected (or inner product) neural network layer.
data FullyConnected i o = FullyConnected
@@ -31,6 +32,12 @@ data FullyConnected' i o = FullyConnected'
!(R o) -- Bias
!(L o i) -- Activations

instance NFData (FullyConnected' i o) where
rnf (FullyConnected' a b) = a `deepseq` b `deepseq` ()

instance NFData (FullyConnected i o) where
rnf (FullyConnected a b) = a `deepseq` b `deepseq` ()

instance Show (FullyConnected i o) where
show FullyConnected {} = "FullyConnected"

4 changes: 4 additions & 0 deletions src/Grenade/Layers/Logit.hs
@@ -19,6 +19,7 @@ import Data.Serialize
import Data.Singletons

import Grenade.Core
import Control.DeepSeq

-- | A Logit layer.
--
@@ -28,6 +29,9 @@ import Grenade.Core
data Logit = Logit
deriving Show

instance NFData Logit where
rnf Logit = ()

instance UpdateLayer Logit where
type Gradient Logit = ()
runUpdate _ _ _ = Logit
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Merge.hs
@@ -29,6 +29,7 @@ import Data.Kind (Type)
#endif

import Grenade.Core
import Control.DeepSeq

-- | A Merging layer.
--
@@ -37,6 +38,9 @@ import Grenade.Core
data Merge :: Type -> Type -> Type where
Merge :: x -> y -> Merge x y

instance (NFData x, NFData y) => NFData (Merge x y) where
rnf (Merge x y) = rnf x `deepseq` rnf y `deepseq` ()

instance (Show x, Show y) => Show (Merge x y) where
show (Merge x y) = "Merge\n" ++ show x ++ "\n" ++ show y

4 changes: 4 additions & 0 deletions src/Grenade/Layers/Pad.hs
@@ -37,6 +37,7 @@ import Grenade.Layers.Internal.Pad

import Numeric.LinearAlgebra (konst, subMatrix, diagBlock)
import Numeric.LinearAlgebra.Static (extract, create)
import Control.DeepSeq

-- | A padding layer for a neural network.
--
@@ -47,6 +48,9 @@ data Pad :: Nat
-> Nat -> Type where
Pad :: Pad padLeft padTop padRight padBottom

instance NFData (Pad padLeft padTop padRight padBottom) where
rnf Pad = ()

instance Show (Pad padLeft padTop padRight padBottom) where
show Pad = "Pad"

4 changes: 4 additions & 0 deletions src/Grenade/Layers/Pooling.hs
@@ -37,6 +37,7 @@ import Grenade.Core
import Grenade.Layers.Internal.Pooling

import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRows)
import Control.DeepSeq

-- | A pooling layer for a neural network.
--
@@ -49,6 +50,9 @@ import Numeric.LinearAlgebra.Static as LAS hiding ((|||), build, toRow
data Pooling :: Nat -> Nat -> Nat -> Nat -> Type where
Pooling :: Pooling kernelRows kernelColumns strideRows strideColumns

instance NFData (Pooling k k' s s') where
rnf Pooling = ()

instance Show (Pooling k k' s s') where
show Pooling = "Pooling"

4 changes: 4 additions & 0 deletions src/Grenade/Layers/Relu.hs
@@ -19,13 +19,17 @@ import GHC.TypeLits
import Grenade.Core

import qualified Numeric.LinearAlgebra.Static as LAS
import Control.DeepSeq

-- | A rectifying linear unit.
-- A layer which can act between any shape of the same dimension, acting as a
-- diode on every neuron individually.
data Relu = Relu
deriving Show

instance NFData Relu where
rnf Relu = ()

instance UpdateLayer Relu where
type Gradient Relu = ()
runUpdate _ _ _ = Relu
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Reshape.hs
@@ -24,6 +24,7 @@ import Numeric.LinearAlgebra.Static
import Numeric.LinearAlgebra.Data as LA ( flatten )

import Grenade.Core
import Control.DeepSeq

-- | Reshape Layer
--
@@ -36,6 +37,9 @@ import Grenade.Core
data Reshape = Reshape
deriving Show

instance NFData Reshape where
rnf Reshape = ()

instance UpdateLayer Reshape where
type Gradient Reshape = ()
runUpdate _ _ _ = Reshape
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Sinusoid.hs
@@ -18,12 +18,16 @@ import Data.Serialize
import Data.Singletons

import Grenade.Core
import Control.DeepSeq

-- | A Sinusoid layer.
-- A layer which can act between any shape of the same dimension, performing a sin function.
data Sinusoid = Sinusoid
deriving Show

instance NFData Sinusoid where
rnf Sinusoid = ()

instance UpdateLayer Sinusoid where
type Gradient Sinusoid = ()
runUpdate _ _ _ = Sinusoid
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Softmax.hs
@@ -22,6 +22,7 @@ import GHC.TypeLits
import Grenade.Core

import Numeric.LinearAlgebra.Static as LAS
import Control.DeepSeq

-- | A Softmax layer
--
@@ -33,6 +34,9 @@ import Numeric.LinearAlgebra.Static as LAS
data Softmax = Softmax
deriving Show

instance NFData Softmax where
rnf Softmax = ()

instance UpdateLayer Softmax where
type Gradient Softmax = ()
runUpdate _ _ _ = Softmax
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Tanh.hs
@@ -18,12 +18,16 @@ import Data.Serialize
import Data.Singletons

import Grenade.Core
import Control.DeepSeq

-- | A Tanh layer.
-- A layer which can act between any shape of the same dimension, performing a tanh function.
data Tanh = Tanh
deriving Show

instance NFData Tanh where
rnf Tanh = ()

instance UpdateLayer Tanh where
type Gradient Tanh = ()
runUpdate _ _ _ = Tanh
4 changes: 4 additions & 0 deletions src/Grenade/Layers/Trivial.hs
@@ -17,6 +17,7 @@ module Grenade.Layers.Trivial (
import Data.Serialize

import Grenade.Core
import Control.DeepSeq

-- | A Trivial layer.
--
@@ -25,6 +26,9 @@ import Grenade.Core
data Trivial = Trivial
deriving Show

instance NFData Trivial where
rnf Trivial = ()

instance Serialize Trivial where
put _ = return ()
get = return Trivial
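
Note: for the field-free layers (Trivial, Relu, Tanh, and friends) the hand-written instances above could also be derived generically. A sketch, assuming the codebase is willing to enable DeriveGeneric and DeriveAnyClass (deepseq >= 1.4 supplies a Generic-based default for rnf):

{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
import GHC.Generics (Generic)
import Control.DeepSeq (NFData)

data Trivial = Trivial
  deriving (Show, Generic, NFData)
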
3 changes: 2 additions & 1 deletion src/Grenade/Recurrent/Core/Layer.hs
@@ -14,6 +14,7 @@ import Data.Kind (Type)
#endif

import Grenade.Core
import Control.DeepSeq (NFData)

-- | Class for a recurrent layer.
-- It's quite similar to a normal layer but for the input and output
@@ -22,7 +23,7 @@ class UpdateLayer x => RecurrentUpdateLayer x where
-- | Shape of data that is passed between each subsequent run of the layer
type RecurrentShape x :: Type

class (RecurrentUpdateLayer x, Num (RecurrentShape x)) => RecurrentLayer x (i :: Shape) (o :: Shape) where
class (RecurrentUpdateLayer x, Num (RecurrentShape x), NFData x) => RecurrentLayer x (i :: Shape) (o :: Shape) where
-- | Wengert Tape
type RecTape x i o :: Type
-- | Used in training and scoring. Take the input from the previous