-- Main.hs — 56 lines (47 loc), 2.13 KB
-- (Header reconstructed from web-extraction residue: GitHub UI text and
-- gutter line numbers 1-56 were captured as file content and have been
-- converted into this comment so the file parses as Haskell.)
module Main where
import qualified Numeric.LinearAlgebra as LA
import qualified MachineLearning as ML
import qualified MachineLearning.Regression as MLR
-- | Linear-regression sample application.
--
-- Loads a dataset, normalizes its features, fits a linear model with three
-- different methods (Normal Equation, Gradient Descent, BFGS), predicts the
-- target for one new example, and prints the learned parameters, the
-- predictions, and the optimization paths of the iterative solvers.
main :: IO ()
main = do
  -- Step 1. Data loading: last column is the target y, the rest are features x.
  m <- LA.loadMatrix "linear_regression/data.txt"
  let (x, y) = ML.splitToXY m
      -- Step 2. Feature normalization: compute (mu, sigma) on the training
      -- features, scale them, then prepend the bias (all-ones) column.
      muSigma = ML.meanStddev x
      xNorm = ML.featureNormalization muSigma x
      x1 = ML.addBiasDimension xNorm
      -- Step 3. Learning using 3 methods:
      --   NE:   Normal Equation (exact closed-form solution)
      --   GD:   Gradient Descent (basic iterative method)
      --   BFGS: BFGS (most advanced iterative method)
      -- All solvers start from the zero vector; minimize takes
      -- (method, cost model, tolerance, max iterations, regularization).
      zeroTheta = LA.konst 0 (LA.cols x1)
      thetaNE = MLR.normalEquation x1 y
      (thetaGD, optPathGD) = MLR.minimize (MLR.GradientDescent 0.01) MLR.LeastSquares 0.0001 5000 MLR.RegNone x1 y zeroTheta
      (thetaBFGS, optPathBFGS) = MLR.minimize (MLR.BFGS2 0.1 0.1) MLR.LeastSquares 0.0001 1500 MLR.RegNone x1 y zeroTheta
      -- Step 4. Prediction for one new example [1650, 3] (presumably
      -- area and number of rooms — confirm against the dataset).  It must
      -- be normalized with the *training* statistics before adding bias.
      xPredict = LA.matrix 2 [1650, 3]
      xPredict1 = ML.addBiasDimension $ ML.featureNormalization muSigma xPredict
      yPredictNE = MLR.hypothesis MLR.LeastSquares xPredict1 thetaNE
      yPredictGD = MLR.hypothesis MLR.LeastSquares xPredict1 thetaGD
      yPredictBFGS = MLR.hypothesis MLR.LeastSquares xPredict1 thetaBFGS
  -- Step 5. Printing results.
  putStrLn "\n=== Linear Regression Sample Application ===\n"
  putStrLn "Feature normalization, mu:"
  LA.disp 4 $ fst muSigma
  putStrLn "Feature normalization, sigma:"
  LA.disp 4 $ snd muSigma
  -- Show raw and normalized features side by side (truncated display).
  putStrLn "Normalized features:"
  LA.dispShort 10 10 4 $ x LA.||| xNorm
  putStrLn ""
  putStrLn $ "Theta (Normal Equation): " ++ show thetaNE
  putStrLn $ "Theta (Gradient Descent): " ++ show thetaGD
  putStrLn $ "Theta (BFGS): " ++ show thetaBFGS
  putStrLn ""
  putStrLn $ "Prediction (Normal Equation): " ++ show yPredictNE
  putStrLn $ "Prediction (Gradient Descent): " ++ show yPredictGD
  putStrLn $ "Prediction (BFGS): " ++ show yPredictBFGS
  putStrLn ""
  -- The iterative solvers also return their optimization path
  -- (cost/parameters per iteration), printed here for inspection.
  putStrLn "Optimization Path (Gradient Descent):"
  LA.dispShort 20 10 4 optPathGD
  putStrLn "Optimization Path (BFGS2):"
  LA.disp 3 optPathBFGS