Add Bayesian linear regression and type synonyms for readability

commit de7928c3031a0be0df360608a224fe800b96433f by @batterseapower (1 parent: 2d46b76)
Algorithms/MachineLearning/BasisFunctions.hs
@@ -1,12 +1,13 @@
-- | Basis functions of various kinds, useful for e.g. use with the LinearRegression module
module Algorithms.MachineLearning.BasisFunctions where
+import Algorithms.MachineLearning.Framework
import Algorithms.MachineLearning.Utilities
-- | /Unnormalized/ 1D Gaussian, suitable for use as a basis function.
-gaussianBasis :: Double   -- ^ Mean of the Gaussian
-              -> Double   -- ^ Standard deviation of the Gaussian
-              -> Double   -- ^ Point on X axis to sample
+gaussianBasis :: Mean     -- ^ Mean of the Gaussian
+              -> Variance -- ^ Variance of the Gaussian
+              -> Double   -- ^ Point on X axis to sample
               -> Double
-gaussianBasis mean stdev x = exp (negate $ (square (x - mean)) / (2 * (square stdev)))
+gaussianBasis mean variance x = exp (negate $ (square (x - mean)) / (2 * variance))
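Since the second argument is now a variance rather than a standard deviation, callers must square the value they previously passed. A minimal sketch of the new call (the concrete numbers are illustrative assumptions, not part of the commit):

-- Illustrative only: mean 0 and variance 0.04 reproduce the old call that
-- passed a standard deviation of 0.2, since 0.2^2 = 0.04.
exampleGaussian :: Double -> Double
exampleGaussian = gaussianBasis 0 0.04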
Algorithms/MachineLearning/Framework.hs
@@ -14,6 +14,15 @@ import Numeric.LinearAlgebra
type Target = Double
type Weight = Double
+-- | Commonly called the "average" of a set of data.
+type Mean = Double
+
+-- | Variance is the mean squared deviation from the mean.
+type Variance = Double
+
+-- | Precision is the inverse of variance.
+type Precision = Double
+
-- | A positive constant indicating how strongly regularization should be applied. A good
-- choice for this parameter might be your belief about the variance of the inherent noise
-- in the samples (1/beta) divided by your belief about the variance of the weights that
@@ -22,6 +31,7 @@ type Weight = Double
-- See also equation 3.55 and 3.28 in Bishop.
type RegularizationCoefficient = Double
+
--
-- Injections to and from vectors
--
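The Mean, Variance and Precision synonyms all unfold to Double, so they document intent rather than enforce it. As a minimal sketch of how they fit together (these helpers are assumptions, not part of the commit), using the fact noted above that precision is the inverse of variance:

-- Hypothetical helpers, not in the commit: convert between the Variance and
-- Precision synonyms defined above.
varianceToPrecision :: Variance -> Precision
varianceToPrecision = recip

precisionToVariance :: Precision -> Variance
precisionToVariance = recip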
Algorithms/MachineLearning/LinearRegression.hs
@@ -2,7 +2,8 @@
-- | Linear regression models, as discussed in chapter 3 of Bishop.
module Algorithms.MachineLearning.LinearRegression (
- LinearModel, regressLinearModel, regressRegularizedLinearModel
+ LinearModel,
+ regressLinearModel, regressRegularizedLinearModel, bayesianLinearRegression
) where
import Algorithms.MachineLearning.Framework
@@ -56,4 +57,19 @@ regressLinearModelCore find_pinv basis_fns ds
  = LinearModel { lm_basis_fns = basis_fns, lm_weights = weights }
  where
    designMatrix = applyMatrix (map (. fromVector) basis_fns) (ds_inputs ds) -- One row per sample, one column per basis function
-   weights = find_pinv designMatrix <> (ds_targets ds)
+   weights = find_pinv designMatrix <> (ds_targets ds)
+
+
+-- | Bayesian linear regression, using a Gaussian prior for the weights centred at the origin. The precision of the
+-- weight prior is controlled by the parameter alpha, and our belief about the inherent noise in the data is controlled
+-- by the precision parameter beta.
+--
+-- Bayesian linear regression with this prior is entirely equivalent to calling 'regressRegularizedLinearModel' with
+-- lambda = alpha / beta.
+--
+-- Equation 3.55 in Bishop.
+bayesianLinearRegression :: (Vectorable input)
+                         => Precision -- ^ Precision of Gaussian weight prior
+                         -> Precision -- ^ Precision of noise on samples
+                         -> [input -> Target] -> DataSet input -> LinearModel input
+bayesianLinearRegression alpha beta = regressRegularizedLinearModel (alpha / beta)
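A minimal usage sketch (the fitBoth wrapper and the alpha/beta values are illustrative assumptions, not part of the commit): by the equivalence stated in the comment above, the two fits below produce the same weights, since lambda = alpha / beta.

-- Hypothetical usage, assuming the Framework and LinearRegression modules are
-- imported: fit the same data via the Bayesian entry point and via explicit
-- regularization; the models coincide because
-- bayesianLinearRegression alpha beta = regressRegularizedLinearModel (alpha / beta).
fitBoth :: Vectorable input => [input -> Target] -> DataSet input -> (LinearModel input, LinearModel input)
fitBoth basis_fns ds = ( bayesianLinearRegression alpha beta basis_fns ds
                       , regressRegularizedLinearModel (alpha / beta) basis_fns ds )
  where
    alpha = 5  -- precision of the Gaussian weight prior
    beta  = 25 -- precision of the noise on the samples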
Algorithms/MachineLearning/Tests/Driver.hs
@@ -12,7 +12,7 @@ import System.Cmd
basisFunctions :: [Double -> Double]
-basisFunctions = const 1 : map (\mean -> gaussianBasis (rationalToDouble mean) 0.2) [-1,-0.9..1]
+basisFunctions = const 1 : map (\mean -> gaussianBasis (rationalToDouble mean) 0.04) [-1,-0.9..1]
sumOfSquaresError :: [(Double, Double)] -> Double
sumOfSquaresError targetsAndPredictions = sum $ map (abs . uncurry (-)) targetsAndPredictions
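The 0.2 to 0.04 change follows from the new gaussianBasis signature: the second argument is now a variance, and 0.04 = 0.2^2, so the basis functions themselves are unchanged. With that in place, the new Bayesian entry point can also be exercised from the driver; a hypothetical sketch follows (the fitDriverModel wrapper and the alpha/beta values are assumptions, not part of the commit, and it presumes a Vectorable instance for Double, as the driver's use of 1D data suggests).

-- Hypothetical, not in the commit: fit the driver's basis functions with the
-- new Bayesian entry point over an assumed DataSet of 1D samples.
fitDriverModel :: DataSet Double -> LinearModel Double
fitDriverModel ds = bayesianLinearRegression alpha beta basisFunctions ds
  where
    alpha = 1  -- assumed precision of the Gaussian weight prior
    beta  = 25 -- assumed precision of the noise on the samples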
