
Commit

fix redundant call to Optimizer Cost Function
daelsepara committed May 10, 2019
1 parent 782c02c commit 536b2dc
Showing 1 changed file with 5 additions and 6 deletions.
DeepLearnUI/Optimize.cs (11 changes: 5 additions & 6 deletions)
@@ -198,8 +198,10 @@ public bool Step(Func<double[], FuncOutput> F, double[] X)
             Add(X, s, z1);

             // evaluate cost - and gradient function with new params
-            var f2 = F(X).Error;
-            var df2 = F(X).Gradient;
+            var eval = F(X);
+
+            var f2 = eval.Error;
+            var df2 = eval.Gradient;

             Evaluations++;

@@ -227,8 +229,6 @@ public bool Step(Func<double[], FuncOutput> F, double[] X)
             var success = false;
             var limit = -1.0;

-            FuncOutput eval;
-
             while (true)
             {
                 while (((f2 > f1 + z1 * RHO * d1) || (d2 > -SIG * d1)) && (M > 0))
@@ -379,8 +379,7 @@ public bool Step(Func<double[], FuncOutput> F, double[] X)
            }

            // if line searched succeeded
-           if (success)
-           {
+           if (success) {
                f1 = f2;

                // Polack-Ribiere direction
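
For context, here is a minimal, self-contained sketch of the pattern the commit removes. The FuncOutput class and the quadratic cost below are assumptions made for illustration only, not the project's actual types: reading F(X).Error and F(X).Gradient separately invokes the cost function twice per step, while caching the result of a single F(X) call, as the commit does, evaluates it once and also keeps the Evaluations counter (incremented once per step) in line with the real number of calls.

using System;

// Hypothetical stand-in for the library's FuncOutput: a cost value plus its gradient.
public class FuncOutput
{
    public double Error;
    public double[] Gradient;

    public FuncOutput(double error, double[] gradient)
    {
        Error = error;
        Gradient = gradient;
    }
}

public static class RedundantCallDemo
{
    // Assumed quadratic cost for illustration: f(x) = sum(x_i^2), gradient = 2x.
    static FuncOutput Cost(double[] x)
    {
        Console.WriteLine("cost function evaluated");

        var error = 0.0;
        var gradient = new double[x.Length];

        for (var i = 0; i < x.Length; i++)
        {
            error += x[i] * x[i];
            gradient[i] = 2.0 * x[i];
        }

        return new FuncOutput(error, gradient);
    }

    public static void Main()
    {
        Func<double[], FuncOutput> F = Cost;
        var X = new[] { 1.0, 2.0, 3.0 };

        // Before the fix: two separate calls, so the cost is evaluated twice.
        var f2Before = F(X).Error;
        var df2Before = F(X).Gradient;

        // After the fix: one call, and the cached result supplies both values.
        var eval = F(X);
        var f2 = eval.Error;
        var df2 = eval.Gradient;

        Console.WriteLine($"before: f2 = {f2Before}, gradient length = {df2Before.Length}");
        Console.WriteLine($"after:  f2 = {f2}, gradient length = {df2.Length}");
    }
}

Compiled and run, the sketch prints "cost function evaluated" three times (twice for the old pattern, once for the new one); in the optimizer's line-search loop each avoided evaluation presumably saves a full forward and backward pass of the network.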
