
Commit

move Parameter and child classes to their own file to prepare for pipeline (#63)
EthanJamesLew committed Mar 8, 2023
1 parent 84a2064 commit 72c935c
Showing 5 changed files with 144 additions and 112 deletions.
4 changes: 3 additions & 1 deletion autokoopman/autokoopman.py
@@ -14,10 +14,12 @@
from autokoopman.core.tuner import (
    HyperparameterTuner,
    HyperparameterMap,
    TrajectoryScoring,
)
from autokoopman.core.hyperparameter import (
    ParameterSpace,
    ContinuousParameter,
    DiscreteParameter,
    TrajectoryScoring,
)
from autokoopman.estimator.koopman import KoopmanDiscEstimator
from autokoopman.tuner.gridsearch import GridSearchTuner
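For orientation, here is a minimal sketch (not part of this commit) of how downstream code imports these classes after the move; the module paths come from the diff above, while the parameter names and bounds are invented for illustration:

# Illustrative only: module paths follow this diff; names and bounds are made up.
from autokoopman.core.tuner import HyperparameterTuner, HyperparameterMap, TrajectoryScoring  # tuning machinery stays here
from autokoopman.core.hyperparameter import (  # parameter classes now live here
    ParameterSpace,
    ContinuousParameter,
    DiscreteParameter,
)

space = ParameterSpace(
    "koopman-tuning",
    [
        ContinuousParameter("rank_reg", 1e-3, 1.0, distribution="loguniform"),
        DiscreteParameter("obs_dim", 10, 100, step=10),
    ],
)
sample = space.random()   # one randomly drawn value per coordinate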
132 changes: 132 additions & 0 deletions autokoopman/core/hyperparameter.py
@@ -0,0 +1,132 @@
"""Mini-Language to Express Hyperparameter Sets
@TODO: look into backends to make this more robust
"""
import abc
from typing import Sequence, Callable
import random

import numpy as np

from autokoopman.core.format import _clip_list


class Parameter:
    """hyperparameter is a set that you can
    * name
    * sample randomly
    * check membership
    @param name: parameter identifier
    """

    def __init__(self, name):
        self._name = name

    @abc.abstractmethod
    def random(self):
        """get an element from the parameter at random"""
        pass

    @abc.abstractmethod
    def is_member(self, item) -> bool:
        ...

    def __contains__(self, item) -> bool:
        return self.is_member(item)

    @property
    def name(self):
        return self._name

    def __repr__(self):
        return f"<{self.__class__.__name__} Name: {self.name}>"


class FiniteParameter(Parameter):
    """a finite set of things"""

    def __init__(self, name: str, elements: Sequence):
        super(FiniteParameter, self).__init__(name)
        self.elements = tuple(elements)

    def is_member(self, item) -> bool:
        return item in self.elements

    def random(self):
        return random.choice(self.elements)
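A usage sketch (not part of the file above) showing the Parameter interface that FiniteParameter implements: naming, random sampling, and membership via `in`. The parameter name and elements are invented:

# Illustrative usage; the name and elements are made up for this example.
from autokoopman.core.hyperparameter import FiniteParameter

obs = FiniteParameter("observables", ["rff", "poly", "id"])
print(obs.name)        # "observables"
print(obs.random())    # one of "rff", "poly", "id", chosen uniformly at random
print("poly" in obs)   # True  -- __contains__ delegates to is_member
print("dmd" in obs)    # False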


class ContinuousParameter(Parameter):
    """a continuous, closed interval"""

    @staticmethod
    def loguniform(low=0.1, high=1, size=None):
        return np.exp(np.random.uniform(np.log(low), np.log(high), size))

    @staticmethod
    def uniform(low=0, high=1, size=None):
        return np.random.uniform(low, high, size)

    def __init__(self, name: str, domain_lower, domain_upper, distribution="uniform"):
        super(ContinuousParameter, self).__init__(name)
        assert domain_upper >= domain_lower
        self._interval = (domain_lower, domain_upper)
        self.distribution = distribution

    def is_member(self, item) -> bool:
        return item >= self._interval[0] and item <= self._interval[1]

    def random(self):
        if isinstance(self.distribution, Callable):
            return self.distribution()
        elif hasattr(self, self.distribution):
            return getattr(self, self.distribution)(
                self._interval[0], self._interval[1]
            )
        else:
            raise ValueError(f"cannot find distribution {self.distribution}")
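A usage sketch (again, not part of the file): the distribution argument can name one of the static methods above ("uniform" or "loguniform", resolved with getattr) or be any zero-argument callable. The names and bounds below are invented:

# Illustrative usage; names and bounds are made up.
import numpy as np
from autokoopman.core.hyperparameter import ContinuousParameter

gamma = ContinuousParameter("gamma", 1e-4, 1e2, distribution="loguniform")
x = gamma.random()     # resolved via getattr -> ContinuousParameter.loguniform(1e-4, 1e2)
assert x in gamma      # is_member checks the closed interval

# A callable is invoked with no arguments, so it must encode its own bounds;
# draws that land outside the interval simply fail is_member.
bias = ContinuousParameter("bias", -1.0, 1.0, distribution=lambda: float(np.random.normal(0.0, 0.3)))
y = bias.random()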


class DiscreteParameter(FiniteParameter):
    """a range object"""

    def __init__(self, name: str, domain_lower: int, domain_upper: int, step=1):
        super(DiscreteParameter, self).__init__(
            name, range(domain_lower, domain_upper, step)
        )
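Since DiscreteParameter wraps a Python range, the upper bound is exclusive and step sets the granularity. A short sketch (not part of the file) with made-up values:

# Illustrative usage; the name and bounds are made up.
from autokoopman.core.hyperparameter import DiscreteParameter

rank = DiscreteParameter("rank", 2, 20, step=2)
print(rank.elements)   # (2, 4, ..., 18) -- the upper bound is exclusive, per range()
print(rank.random())   # one of the even values above
print(20 in rank)      # False: 20 is excluded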


class ParameterSpace(Parameter):
    """an interval hull"""

    def __init__(self, name: str, coords: Sequence[Parameter]):
        super(ParameterSpace, self).__init__(name)
        self._coords = coords
        self._cdict = {c.name: c for c in self._coords}

    def is_member(self, item) -> bool:
        return all([itemi in coordi for itemi, coordi in zip(item, self._coords)])

    def random(self):
        return [coordi.random() for coordi in self._coords]

    def __getitem__(self, item):
        assert (
            item in self._cdict
        ), f"coordinate {item} was not found in space (values are {list(self._cdict.keys())})"
        return self._cdict[item]

    def __iter__(self):
        for c in self._coords:
            yield c

    @property
    def dimension(self):
        return len(self._coords)

    def __repr__(self):
        return (
            f"<{self.__class__.__name__} Name: {self.name} Dimensions: {self.dimension} "
            f"Coordinates: {_clip_list([s.name+f': {s.__class__.__name__}' for s in self._coords])}>"
        )
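To round out the new module, a hedged sketch (not part of this commit) that combines the classes above into a ParameterSpace; the coordinate names and bounds are invented:

# Illustrative only: coordinate names and bounds are invented for this sketch.
from autokoopman.core.hyperparameter import (
    ParameterSpace,
    ContinuousParameter,
    DiscreteParameter,
    FiniteParameter,
)

space = ParameterSpace(
    "edmd-tuning",
    [
        ContinuousParameter("gamma", 1e-3, 1e1, distribution="loguniform"),
        DiscreteParameter("n_obs", 10, 110, step=10),
        FiniteParameter("observables", ["rff", "poly"]),
    ],
)

point = space.random()    # e.g. [0.42, 60, "poly"] -- one draw per coordinate, in order
print(point in space)     # True: membership is checked coordinate-wise via zip
print(space.dimension)    # 3
print(space["gamma"])     # coordinate lookup by name via __getitem__
for coord in space:       # iteration yields the coordinates in declaration order
    print(coord)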
111 changes: 2 additions & 109 deletions autokoopman/core/tuner.py
@@ -1,5 +1,4 @@
import abc
import random
from typing import Sequence, Callable, TypedDict, Any
import numpy as np

@@ -9,118 +8,12 @@
    UniformTimeTrajectory,
    UniformTimeTrajectoriesData,
)
from autokoopman.core.format import _clip_list
from autokoopman.core.hyperparameter import ParameterSpace
from sklearn.model_selection import KFold


class Parameter:
    def __init__(self, name):
        self._name = name

    @abc.abstractmethod
    def random(self):
        pass

    @abc.abstractmethod
    def is_member(self, item) -> bool:
        ...

    def __contains__(self, item) -> bool:
        return self.is_member(item)

    @property
    def name(self):
        return self._name

    def __repr__(self):
        return f"<{self.__class__.__name__} Name: {self.name}>"


class FiniteParameter(Parameter):
    def __init__(self, name: str, elements: Sequence):
        super(FiniteParameter, self).__init__(name)
        self.elements = tuple(elements)

    def is_member(self, item) -> bool:
        return item in self.elements

    def random(self):
        return random.choice(self.elements)


class ContinuousParameter(Parameter):
    @staticmethod
    def loguniform(low=0.1, high=1, size=None):
        return np.exp(np.random.uniform(np.log(low), np.log(high), size))

    @staticmethod
    def uniform(low=0, high=1, size=None):
        return np.random.uniform(low, high, size)

    def __init__(self, name: str, domain_lower, domain_upper, distribution="uniform"):
        super(ContinuousParameter, self).__init__(name)
        assert domain_upper >= domain_lower
        self._interval = (domain_lower, domain_upper)
        self.distribution = distribution

    def is_member(self, item) -> bool:
        return item >= self._interval[0] and item <= self._interval[1]

    def random(self):
        if isinstance(self.distribution, Callable):
            return self.distribution()
        elif hasattr(self, self.distribution):
            return getattr(self, self.distribution)(
                self._interval[0], self._interval[1]
            )
        else:
            raise ValueError(f"cannot find distribution {self.distribution}")


class DiscreteParameter(FiniteParameter):
    def __init__(self, name: str, domain_lower: int, domain_upper: int, step=1):
        super(DiscreteParameter, self).__init__(
            name, range(domain_lower, domain_upper, step)
        )


class ParameterSpace(Parameter):
    def __init__(self, name: str, coords: Sequence[Parameter]):
        super(ParameterSpace, self).__init__(name)
        self._coords = coords
        self._cdict = {c.name: c for c in self._coords}

    def is_member(self, item) -> bool:
        return all([itemi in coordi for itemi, coordi in zip(item, self._coords)])

    def random(self):
        return [coordi.random() for coordi in self._coords]

    def __getitem__(self, item):
        assert (
            item in self._cdict
        ), f"coordinate {item} was not found in space (values are {list(self._cdict.keys())})"
        return self._cdict[item]

    def __iter__(self):
        for c in self._coords:
            yield c

    @property
    def dimension(self):
        return len(self._coords)

    def __repr__(self):
        return (
            f"<{self.__class__.__name__} Name: {self.name} Dimensions: {self.dimension} "
            f"Coordinates: {_clip_list([s.name+f': {s.__class__.__name__}' for s in self._coords])}>"
        )


class HyperparameterMap:
    """
    define and associate a hyperparameter space with a moddel
    """
    """a map to associate a hyperparameter space with a model"""

    def __init__(self, parameter_space: ParameterSpace):
        self.parameter_space = parameter_space
2 changes: 2 additions & 0 deletions autokoopman/tuner/bayesianopt.py
@@ -5,6 +5,8 @@
    TuneResults,
    TrajectoryScoring,
    HyperparameterMap,
)
from autokoopman.core.hyperparameter import (
    ParameterSpace,
    FiniteParameter,
    DiscreteParameter,
7 changes: 5 additions & 2 deletions autokoopman/tuner/gridsearch.py
@@ -5,9 +5,12 @@
    TuneResults,
    TrajectoryScoring,
    HyperparameterMap,
)
from autokoopman.core.hyperparameter import (
    ParameterSpace,
)
import autokoopman.core.tuner as atuner
import autokoopman.core.hyperparameter as ahyp
import itertools
from typing import Callable

@@ -17,7 +20,7 @@ class GridSearchTuner(atuner.HyperparameterTuner):
    def make_grid(space: ParameterSpace, n_samps):
        parameters = []
        for coord in space:
            if isinstance(coord, atuner.ContinuousParameter):
            if isinstance(coord, ahyp.ContinuousParameter):
                if coord.distribution == "loguniform":
                    elems = np.logspace(
                        np.log10(coord._interval[0]),
@@ -29,7 +32,7 @@ def make_grid(space: ParameterSpace, n_samps):
                    parameters.append(
                        np.linspace(coord._interval[0], coord._interval[1], num=n_samps)
                    )
            elif isinstance(coord, atuner.FiniteParameter):
            elif isinstance(coord, ahyp.FiniteParameter):
                parameters.append(list(coord.elements))
        return parameters
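A hedged sketch of how the grid above could be enumerated (not shown in this diff): make_grid returns one list of candidate values per coordinate, log- or linearly spaced for continuous coordinates and the full element list for finite ones, and the itertools import above suggests the tuner walks their Cartesian product. Calling make_grid directly assumes it is a static method:

# Sketch only: the space is invented, direct GridSearchTuner.make_grid(...) access
# assumes a static method, and Cartesian-product enumeration is an assumption.
import itertools
import autokoopman.core.hyperparameter as ahyp
from autokoopman.tuner.gridsearch import GridSearchTuner

space = ahyp.ParameterSpace(
    "demo",
    [
        ahyp.ContinuousParameter("gamma", 1e-3, 1.0, distribution="loguniform"),
        ahyp.DiscreteParameter("rank", 2, 10, step=2),
    ],
)

grid = GridSearchTuner.make_grid(space, n_samps=4)   # [4 log-spaced gammas, [2, 4, 6, 8]]
for candidate in itertools.product(*grid):           # 4 x 4 = 16 hyperparameter points
    assert candidate in space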

Expand Down
