Skip to content

Commit

Permalink
Added repeatability test #12
Browse files Browse the repository at this point in the history
See discussion #4
  • Loading branch information
jrapin committed Dec 24, 2018
2 parents f44142f + 235713f commit ae1d2a4
Show file tree
Hide file tree
Showing 2 changed files with 110 additions and 1 deletion.
68 changes: 68 additions & 0 deletions nevergrad/optimization/recorded_recommendations.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
,v0,v1,v2,v3
AlmostRotationInvariantDE,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
AlmostRotationInvariantDEAndBigPop,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
BPRotationInvariantDE,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
CauchyLHSSearch,-0.5279718770281393,1.341890246028,2.679071600542,3.596354526201
CauchyOnePlusOne,0.0,0.0,0.0,0.0
CauchyRandomSearch,-0.6941119287677578,-0.142549783657,-0.49073588424,-0.042644743308
CauchyScrHammersleySearch,-1.0,-1.0,-0.57735026919,0.324919696233
Cobyla,0.0,-0.345105717581,-0.1327329683,1.929130778103
DE,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
DiscreteOnePlusOne,0.7531428339492002,0.0,0.0,1.095956118009
DoubleFastGAOptimisticDiscreteOnePlusOne,0.0,0.0,0.0,0.0
DoubleFastGAOptimisticNoisyDiscreteOnePlusOne,0.0,0.0,0.0,0.0
FastGAOptimisticDiscreteOnePlusOne,0.7531428339492002,0.0,0.0,1.095956118009
FastGAOptimisticNoisyDiscreteOnePlusOne,0.7531428339492002,0.0,0.0,0.0
HaltonSearch,-0.31863936396437514,-0.764709673786,-0.70630256284,1.067570523878
HaltonSearchPlusMiddlePoint,0.0,0.0,0.0,0.0
HammersleySearch,0.21042839424792484,-1.150349380376,-0.139710298882,0.841621233573
HammersleySearchPlusMiddlePoint,0.21042839424792484,-1.150349380376,-0.139710298882,0.841621233573
LHSSearch,-0.39784189283479066,0.827925915041,1.20700341911,1.363717406132
LargeHaltonSearch,-67.44897501960817,43.072729929546,-25.33471031358,-56.594882193286
LargeHaltonSearchPlusMiddlePoint,0.0,0.0,0.0,0.0
LargeHammersleySearch,-67.44897501960817,-67.448975019608,43.072729929546,-25.33471031358
LargeHammersleySearchPlusMiddlePoint,0.0,0.0,0.0,0.0
LargeScrHaltonSearch,-67.44897501960817,-43.072729929546,25.33471031358,18.00123697927
LargeScrHaltonSearchPlusMiddlePoint,0.0,0.0,0.0,0.0
LargeScrHammersleySearch,-67.44897501960817,-67.448975019608,-43.072729929546,25.33471031358
LargeScrHammersleySearchPlusMiddlePoint,0.0,0.0,0.0,0.0
LargerScaleRandomSearchPlusMiddlePoint,0.0,0.0,0.0,0.0
LhsDE,0.699168996852192,-1.304922266757,-0.243759377149,1.748611947581
MiniDE,0.2926790424601103,0.069893219225,1.368907139306,0.558373976257
MiniLhsDE,0.349584498426096,-0.652461133378,-0.121879688574,0.87430597379
MiniQrDE,-0.4513673958219324,-0.159319681982,-0.610320174424,0.875343035626
NaiveTBPSA,0.0023801779694220026,-0.055814100046,-0.374630625777,1.333204035469
NelderMead,0.0,0.0,0.0,0.00025
NoisyBandit,0.7531428339492002,-1.534721340208,0.005127078132,-0.120227670156
NoisyDE,0.7955731541909096,0.254899299463,1.07158305166,0.648724166151
NoisyDiscreteOnePlusOne,0.7531428339492002,0.0,0.0,0.0
OnePlusOne,1.008204915087347,-0.909978549865,-1.025147209013,1.204646007403
OnePointDE,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
OptimisticNoisyDiscreteOnePlusOne,0.7531428339492002,0.0,0.0,0.0
PSO,-0.4961406669729999,0.443927903841,-0.072178875157,0.900858791338
PortfolioDiscreteOnePlusOne,0.0,0.216924599528,-0.400792463816,1.480550470665
PortfolioOptimisticNoisyDiscreteOnePlusOne,0.0,0.216924599528,-0.400792463816,1.480550470665
Powell,1.0,0.0,0.0,0.0
QrDE,-0.9027347916438648,-0.318639363964,-1.220640348847,1.750686071252
RandomScaleRandomSearch,0.060636445096626614,-0.054728819065,-0.061655405092,0.072450997209
RandomScaleRandomSearchPlusMiddlePoint,0.060636445096626614,-0.054728819065,-0.061655405092,0.072450997209
RandomSearch,1.012515476977173,-0.913869146706,-1.029530207374,1.209796449632
RandomSearchPlusMiddlePoint,1.012515476977173,-0.913869146706,-1.029530207374,1.209796449632
RecombiningOptimisticNoisyDiscreteOnePlusOne,0.7531428339492002,0.0,0.0,0.0
RecombiningPortfolioOptimisticNoisyDiscreteOnePlusOne,0.0,0.216924599528,-0.400792463816,1.480550470665
RescaleScrHammersleySearch,-0.841621233572912,-0.841621233573,-0.565948821933,0.125661346855
RotationInvariantDE,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
SQP,0.0,0.0,0.0,0.0
ScrHaltonSearch,-0.31863936396437514,-1.220640348847,1.750686071252,0.565948821933
ScrHaltonSearchPlusMiddlePoint,-1.150349380376008,1.220640348847,-0.841621233573,1.067570523878
ScrHammersleySearch,1.3829941271006378,-0.318639363964,-1.220640348847,1.750686071252
ScrHammersleySearchPlusMiddlePoint,-1.3829941271006378,0.0,0.430727299295,0.841621233573
SmallHaltonSearchPlusMiddlePoint,0.003186393639643752,0.007647096738,-0.017506860713,0.005659488219
SmallHammersleySearchPlusMiddlePoint,0.0021042839424792485,-0.011503493804,-0.001397102989,0.008416212336
SmallScaleRandomSearchPlusMiddlePoint,0.01012515476977173,-0.009138691467,-0.010295302074,0.012097964496
SmallScrHaltonSearchPlusMiddlePoint,-0.01150349380376008,0.012206403488,-0.008416212336,0.010675705239
SmallScrHammersleySearchPlusMiddlePoint,-0.013829941271006379,0.0,0.004307272993,0.008416212336
StupidRandom,-1.1543602352166882,-2.213333479397,-1.681756510395,-1.788094251062
TBPSA,0.0,0.0,0.0,0.0
TwoPointsDE,0.5018723037790331,0.138846177156,0.640761113342,0.527332665111
Zero,0.0,0.0,0.0,0.0
43 changes: 42 additions & 1 deletion nevergrad/optimization/test_optimizerlib.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,15 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import random
import warnings
from pathlib import Path
from unittest import SkipTest
from unittest import TestCase
from typing import Type
import genty
import numpy as np
import pandas as pd
from ..common.typetools import ArrayLike
from . import base
from .recaster import FinishedUnderlyingOptimizerWarning
Expand All @@ -16,7 +20,10 @@


def fitness(x: "ArrayLike") -> float:
    """Simple quadratic fitness function which can be used with dimension up to 4.

    The optimum is the truncation of [0.5, -0.8, 0, 4] to the input's length;
    the returned value is the squared Euclidean distance to that optimum
    (hence 0.0 exactly at the optimum).

    Parameters
    ----------
    x: ArrayLike
        candidate point, of dimension 1 to 4

    Returns
    -------
    float
        sum of squared per-coordinate deviations from the optimum
    """
    # NOTE: the diff scrape carried both the old 2-d-only body and the new one;
    # the stale leading `return` made everything below it unreachable. Only the
    # generalized (up-to-4-d) implementation is kept.
    # Keep only as many optimum coordinates as the input has dimensions.
    x0 = [0.5, -0.8, 0, 4][:len(x)]
    return float(np.sum((np.array(x, copy=False) - x0)**2))


def check_optimizer(optimizer_cls: Type[base.Optimizer], budget: int = 300, verify_value: bool = True) -> None:
Expand All @@ -43,11 +50,45 @@ def check_optimizer(optimizer_cls: Type[base.Optimizer], budget: int = 300, veri
@genty.genty
class OptimizerTests(TestCase):
    """Parametrized tests over every registered optimizer (BO excluded).

    Recommendations recorded in ``recorded_recommendations.csv`` are loaded
    before the tests and compared against freshly computed ones to guarantee
    repeatability; missing entries are recorded and the file is rewritten in
    ``tearDownClass``.
    """

    # in-memory table of recorded recommendations, one row per optimizer name,
    # columns v0..v3 (overwritten from the CSV file in setUpClass if it exists)
    recommendations = pd.DataFrame(columns=[f"v{k}" for k in range(4)])
    # CSV file sitting next to this module, holding the reference values
    _RECOM_FILE = Path(__file__).parent / "recorded_recommendations.csv"

    @classmethod
    def setUpClass(cls) -> None:
        """Load previously recorded recommendations from the CSV file, if any."""
        # load recorded recommendations
        if cls._RECOM_FILE.exists():
            # index_col=0: the optimizer name column becomes the DataFrame index
            cls.recommendations = pd.read_csv(cls._RECOM_FILE, index_col=0)

    @classmethod
    def tearDownClass(cls) -> None:
        """Rewrite the CSV with sorted rows, dropping optimizers no longer registered."""
        # sort and remove unused names
        # then update recommendation file
        names = sorted(x for x in cls.recommendations.index if x in registry)
        recom = cls.recommendations.loc[names, :]
        # NOTE(review): iloc[:, 1:] skips the first data column, so only v1..v3
        # are rounded to 12 decimals while v0 keeps full precision — the recorded
        # CSV confirms this is the actual behavior, but confirm it is intended.
        recom.iloc[:, 1:] = np.round(recom.iloc[:, 1:], 12)
        recom.to_csv(cls._RECOM_FILE)

    @genty.genty_dataset(**{name: (name, optimizer,) for name, optimizer in registry.items() if "BO" not in name}) # type: ignore
    def test_optimizers(self, name: str, optimizer_cls: Type[base.Optimizer]) -> None:
        """Run each optimizer through check_optimizer, verifying the found value
        only for optimizers expected to converge (not one-shot/slow/discrete)."""
        verify = not optimizer_cls.one_shot and name not in SLOW and "Discrete" not in name
        check_optimizer(optimizer_cls, budget=300, verify_value=verify)

    @genty.genty_dataset(**{name: (name, optimizer,) for name, optimizer in registry.items() if "BO" not in name})
    def test_optimizers_recommendation(self, name: str, optimizer_cls: Type[base.Optimizer]) -> None:
        """Check that each optimizer's recommendation after a tiny seeded run
        matches the recorded reference value (repeatability guarantee).

        Raises
        ------
        ValueError
            if no reference exists yet for this optimizer (the fresh value is
            recorded and the developer is asked to rerun locally).
        """
        if "CMA" in name:
            raise SkipTest("Not playing nicely with the tests") # thread problem?
        # seed all relevant generators so the run is fully deterministic
        np.random.seed(12)
        if optimizer_cls.recast:
            random.seed(12)  # may depend on non numpy generator
        optim = optimizer_cls(dimension=4, budget=6, num_workers=1)
        output = optim.optimize(fitness)
        if name not in self.recommendations.index:
            # first time we see this optimizer: record and fail loudly so the
            # new reference gets committed after a local rerun
            self.recommendations.loc[name, :] = tuple(output)
            raise ValueError(f'Recorded the value for optimizer "{name}", please rerun this test locally.')
        # decimal=10 is looser than the 12-decimal rounding used when saving
        np.testing.assert_array_almost_equal(output, self.recommendations.loc[name, :], decimal=10,
                                             err_msg="Something has changed, if this is normal, delete "
                                             f"{self._RECOM_FILE} and rerun to update the values")


def test_pso_to_real() -> None:
output = optimizerlib.PSO.to_real([.3, .5, .9])
Expand Down

0 comments on commit ae1d2a4

Please sign in to comment.