Correct haddock in AI/HNN/Recurrent/Network.hs #3

Merged
merged 1 commit into alpmestan:master

2 participants

@seppeljordan

This should fix Issue #2.

@alpmestan
Owner

Sweet, thanks! To be honest, this module was @gatlin's experimental sandbox and hasn't been touched in a very long time, so if you're using it, you're warned =)

Hoping to rewrite hnn one of these days; hopefully I'll have to do that for work at some point. Anyway, merging this, thanks!

@alpmestan merged commit e5db84c into alpmestan:master

1 check passed

continuous-integration/travis-ci/pr: The Travis CI build passed
Commits on May 18, 2015
  1. @seppeljordan
Showing 1 changed file with 15 additions and 15 deletions.
  1. +15 −15 AI/HNN/Recurrent/Network.hs
AI/HNN/Recurrent/Network.hs
@@ -70,11 +70,11 @@ data Network a = Network
-- | Creates a network with an adjacency matrix of your choosing, specified as
-- an unboxed vector. You also must supply a vector of threshold values.
createNetwork :: (Variate a, Fractional a, Storable a) =>
- Int -> -- ^ number of total neurons (input and otherwise)
- Int -> -- ^ number of inputs
- [a] -> -- ^ flat weight matrix
- [a] -> -- ^ threshold (bias) values for each neuron
- IO (Network a) -- ^ a new network
+ Int -- ^ number of total neurons (input and otherwise)
+ -> Int -- ^ number of inputs
+ -> [a] -- ^ flat weight matrix
+ -> [a] -- ^ threshold (bias) values for each neuron
+ -> IO (Network a) -- ^ a new network
createNetwork n m matrix thresh = return $!
    Network ( (n><n) matrix ) n m (n |> thresh)
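
For reference, a minimal usage sketch of createNetwork based on the signature above; the element type (Double), the network size, and the weight and threshold values are illustrative assumptions, not values from the package:

import AI.HNN.Recurrent.Network

-- Hypothetical fully connected net: 3 neurons in total, 1 of them an input.
-- The flat weight matrix needs n*n = 9 entries, the threshold list n = 3.
exampleNet :: IO (Network Double)
exampleNet = createNetwork 3 1 (replicate 9 0.5) [0.1, 0.1, 0.1]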
@@ -83,11 +83,11 @@ createNetwork n m matrix thresh = return $!
-- precisely one time step. This is used by `evalNet` which is probably a
-- more convenient interface for client applications.
computeStep :: (Variate a, Num a, F.Storable a, Product a) =>
- Network a -> -- ^ Network to evaluate input
- Vector a -> -- ^ vector of pre-existing state
- (a -> a) -> -- ^ activation function
- Vector a -> -- ^ list of inputs
- Vector a -- ^ new state vector
+ Network a -- ^ Network to evaluate input
+ -> Vector a -- ^ vector of pre-existing state
+ -> (a -> a) -- ^ activation function
+ -> Vector a -- ^ list of inputs
+ -> Vector a -- ^ new state vector
computeStep (Network{..}) state activation input =
    mapVector activation $! zipVectorWith (-) (weights <> prefixed) thresh
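
As the body above shows, a single step multiplies the weight matrix by the current state (with the inputs spliced in via the prefixed vector built in lines this hunk does not show), subtracts each neuron's threshold, and maps the activation over the result. A hypothetical one-step call for the 3-neuron net from the earlier sketch, assuming computeStep is exported and that Vector and fromList come from hmatrix, as the mapVector / zipVectorWith / (<>) calls suggest:

import Numeric.LinearAlgebra (Vector, fromList)

-- One step: the state has one entry per neuron, the input one entry per input neuron.
stepOnce :: Network Double -> Vector Double
stepOnce net = computeStep net (fromList [0, 0, 0]) sigmoid (fromList [1])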
@@ -99,10 +99,10 @@ computeStep (Network{..}) state activation input =
-- | Iterates over a list of input vectors in sequence and computes one time
-- step for each.
evalNet :: (Num a, Variate a, Fractional a, Product a) =>
- Network a -> -- ^ Network to evaluate inputs
- [[a]] -> -- ^ list of input lists
- (a -> a) -> -- ^ activation function
- IO (Vector a) -- ^ output state vector
+ Network a -- ^ Network to evaluate inputs
+ -> [[a]] -- ^ list of input lists
+ -> (a -> a) -- ^ activation function
+ -> IO (Vector a) -- ^ output state vector
evalNet n@(Network{..}) inputs activation = do
    s <- foldM (\x -> computeStepM n x activation) state inputsV
@@ -118,4 +118,4 @@ evalNet n@(Network{..}) inputs activation = do
-- | It's a simple, differentiable sigmoid function.
sigmoid :: Floating a => a -> a
sigmoid !x = 1 / (1 + exp (-x))
-{-# INLINE sigmoid #-}
+{-# INLINE sigmoid #-}
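
Putting the pieces together, a hedged end-to-end sketch reusing the imports and made-up values from the earlier snippets; printing the result relies on hmatrix's Show instance for Vector:

main :: IO ()
main = do
  net <- createNetwork 3 1 (replicate 9 0.5) [0.1, 0.1, 0.1] :: IO (Network Double)
  -- two time steps, one input value per step
  out <- evalNet net [[1], [0]] sigmoid
  print out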