Merge remote-tracking branch 'upstream/main'
aramoto99 committed Jan 9, 2024
2 parents 4c7a3b9 + f75e9fa commit 361d042
Showing 8 changed files with 113 additions and 23 deletions.
8 changes: 4 additions & 4 deletions aiaccel/abci/batch.py
@@ -56,7 +56,7 @@ def create_abci_batch_file(
commands.append("2>")
commands.append("$error_file_path")

set_retult = _generate_command_line(
set_result = _generate_command_line(
command="aiaccel-set-result",
args=[
"--file=$output_file_path",
@@ -71,7 +71,7 @@ def create_abci_batch_file(
],
)

set_retult_no_error = _generate_command_line(
set_result_no_error = _generate_command_line(
command="aiaccel-set-result",
args=[
"--file=$output_file_path",
@@ -97,9 +97,9 @@ def create_abci_batch_file(
"error=`cat $error_file_path`",
'end_time=`date "+%Y-%m-%d %H:%M:%S"`',
'if [ -n "$error" ]; then',
"\t" + set_retult,
"\t" + set_result,
"else",
"\t" + set_retult_no_error,
"\t" + set_result_no_error,
"fi",
]

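
Note: the fix above is a rename (`set_retult` → `set_result`); both variables hold complete `aiaccel-set-result` command lines that the generated batch script chooses between depending on whether `$error` is non-empty. As a rough, hypothetical sketch of how such a helper could join a command with its arguments (the real `_generate_command_line` body is not part of this diff and may differ):

```python
from __future__ import annotations


def _generate_command_line(command: str, args: list[str]) -> str:
    # Hypothetical reimplementation for illustration only; the actual helper
    # in aiaccel/abci/batch.py is not shown in this diff.
    return " ".join([command, *args])


# The renamed variables then hold full command lines. Further arguments are
# collapsed in the diff view above, so only the visible one is used here.
set_result = _generate_command_line(
    command="aiaccel-set-result",
    args=["--file=$output_file_path"],
)
print(set_result)  # aiaccel-set-result --file=$output_file_path
```
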
10 changes: 2 additions & 8 deletions aiaccel/converted_parameter.py
@@ -39,10 +39,7 @@ def __init__(self, param: Parameter, convert_log: bool = True):
super().__init__(param, convert_log)
self.lower = _convert_float(self, param.lower)
self.upper = _convert_float(self, param.upper)
if isinstance(param.initial, Iterable): # For Nelder-Mead
self.initial = [_convert_float(self, value) for value in param.initial]
else: # For others
self.initial = _convert_float(self, param.initial) if param.initial is not None else None
self.initial = param.initial

def sample(self, rng: RandomState, initial: bool = False) -> dict[str, str | float]:
if initial and self.initial is not None:
@@ -58,10 +55,7 @@ def __init__(self, param: Parameter, convert_log: bool = True, convert_int: bool
self.convert_int = convert_int
self.lower = _convert_int(self, param.lower)
self.upper = _convert_int(self, param.upper)
if isinstance(param.initial, Iterable): # For Nelder-Mead
self.initial = [_convert_int(self, value) for value in param.initial]
else: # For others
self.initial = _convert_int(self, param.initial) if param.initial is not None else None
self.initial = param.initial

def sample(self, rng: RandomState, initial: bool = False) -> dict[str, str | float]:
if initial and self.initial is not None:
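
Note: with this change only the bounds pass through `_convert_float` / `_convert_int`; the initial value is stored exactly as configured (the updated unit tests further below check the same behavior). A minimal sketch of the post-commit behavior using a hypothetical stand-in class, assuming a log-scaled float parameter:

```python
from __future__ import annotations

import numpy as np


class LogConvertedFloatSketch:
    # Hypothetical stand-in, not the real ConvertedFloatParameter.
    def __init__(self, lower: float, upper: float, initial: float | None) -> None:
        self.lower = float(np.log(lower))  # bounds are mapped into log space
        self.upper = float(np.log(upper))
        self.initial = initial             # initial is kept as given, no np.log


p = LogConvertedFloatSketch(lower=1.0, upper=10.0, initial=2.0)
assert p.initial == 2.0
assert np.isclose(p.lower, 0.0) and np.isclose(p.upper, np.log(10.0))
```
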
2 changes: 2 additions & 0 deletions aiaccel/manager/job/model/local_model.py
@@ -36,6 +36,8 @@ def conditions_job_finished(self, obj: "Job") -> bool:
return True
if obj.th_oh.get_returncode() is None or self.is_firsttime_called:
return False
elif obj.th_oh.is_alive():
return False
else:
self.write_results_to_database(obj)
self.is_firsttime_called = True
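
Note: the new `elif` keeps the job in a not-finished state while the output-handler thread (`obj.th_oh`) is still running, even after a return code is available. A minimal sketch of the same guard with plain `threading` (names here are illustrative, not the manager's real API):

```python
from __future__ import annotations

import threading
import time


def job_finished(output_handler: threading.Thread, returncode: int | None) -> bool:
    # Illustrative guard mirroring the logic added in local_model.py.
    if returncode is None:
        return False   # the process has not reported a return code yet
    if output_handler.is_alive():
        return False   # results are still being collected, do not finalize
    return True        # safe to write results to the database


t = threading.Thread(target=time.sleep, args=(0.2,))
t.start()
print(job_finished(t, returncode=0))  # False while the handler thread runs
t.join()
print(job_finished(t, returncode=0))  # True once it has exited
```
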
11 changes: 6 additions & 5 deletions aiaccel/optimizer/nelder_mead_optimizer.py
@@ -49,14 +49,15 @@ def __init__(self, config: DictConfig) -> None:
def convert_ndarray_to_parameter(self, ndarray: np.ndarray[Any, Any]) -> list[dict[str, float | int | str]]:
"""Convert a list of numpy.ndarray to a list of parameters."""
new_params = copy.deepcopy(self.base_params)
for name, value, b in zip(self.param_names, ndarray, self.bdrys):
for name, value in zip(self.param_names, ndarray):
for new_param in new_params:
if new_param["parameter_name"] == name:
new_param["value"] = value
if b[0] <= value <= b[1]:
new_param["out_of_boundary"] = False
else:
new_param["out_of_boundary"] = True
for value, b, new_param in zip(ndarray, self.bdrys, new_params):
if b[0] <= value <= b[1]:
new_param["out_of_boundary"] = False
else:
new_param["out_of_boundary"] = True
return new_params

def new_finished(self) -> list[int]:
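
Note: the rewrite splits the old single loop into two passes: values are first assigned to parameters by name, then boundary violations are flagged positionally against `self.bdrys`. A compact sketch of that two-pass logic over plain dictionaries (the concrete values below are made up for illustration):

```python
import copy

param_names = ["x1", "x2"]
bdrys = [(0.0, 1.0), (-5.0, 5.0)]
base_params = [{"parameter_name": "x1"}, {"parameter_name": "x2"}]
ndarray = [0.3, 7.2]

new_params = copy.deepcopy(base_params)

# Pass 1: write each value into the parameter dict with the matching name.
for name, value in zip(param_names, ndarray):
    for new_param in new_params:
        if new_param["parameter_name"] == name:
            new_param["value"] = value

# Pass 2: flag values that fall outside their configured bounds.
for value, (low, high), new_param in zip(ndarray, bdrys, new_params):
    new_param["out_of_boundary"] = not (low <= value <= high)

print(new_params)
# [{'parameter_name': 'x1', 'value': 0.3, 'out_of_boundary': False},
#  {'parameter_name': 'x2', 'value': 7.2, 'out_of_boundary': True}]
```
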
35 changes: 34 additions & 1 deletion aiaccel/parameter.py
@@ -1,6 +1,7 @@
from __future__ import annotations

from typing import Any
from collections.abc import Iterable
from typing import Any, List

from numpy.random import RandomState
from omegaconf.base import UnionNode
@@ -24,6 +25,14 @@ def is_ordinal(data_type: str) -> bool:
return data_type.lower() == "ordinal"


def is_within_range(initial_value: int | float, lower: int | float, upper: int | float) -> bool:
return lower <= initial_value <= upper


def is_in_category(initial_value: Any, category_list: List[Any]) -> bool:
return initial_value in category_list


class AbstractParameter:
"""
A parameter class.
@@ -91,6 +100,15 @@ def unwrap(self, value: Any) -> Any:


class IntParameter(AbstractParameter):
def __init__(self, parameter: dict[str, Any]) -> None:
super().__init__(parameter)
if isinstance(self.initial, Iterable): # For Nelder-Mead
for value in self.initial:
if not is_within_range(value, self.lower, self.upper):
assert False, "initial is out of range"
elif self.initial is not None and not is_within_range(self.initial, self.lower, self.upper):
assert False, "initial is out of range"

def sample(self, rng: RandomState, initial: bool = False) -> dict[str, Any]:
if initial and self.initial is not None:
value = self.initial
@@ -100,6 +118,15 @@ def sample(self, rng: RandomState, initial: bool = False) -> dict[str, Any]:


class FloatParameter(AbstractParameter):
def __init__(self, parameter: dict[str, Any]) -> None:
super().__init__(parameter)
if isinstance(self.initial, Iterable): # For Nelder-Mead
for value in self.initial:
if not is_within_range(value, self.lower, self.upper):
assert False, "initial is out of range"
elif self.initial is not None and not is_within_range(self.initial, self.lower, self.upper):
assert False, "initial is out of range"

def sample(self, rng: RandomState, initial: bool = False) -> dict[str, Any]:
if initial and self.initial is not None:
value = self.initial
@@ -114,6 +141,9 @@ def __init__(self, parameter: dict[str, Any]) -> None:
if self.choices is not None:
self.choices = [self.unwrap(v) for v in self.choices]

if self.initial is not None and not is_in_category(self.initial, self.choices):
assert False, "initial is not included in choices"

def sample(self, rng: RandomState, initial: bool = False) -> dict[str, Any]:
if initial and self.initial is not None:
value = self.initial
@@ -128,6 +158,9 @@ def __init__(self, parameter: dict[str, Any]) -> None:
if self.sequence is not None:
self.sequence = [self.unwrap(v) for v in self.sequence]

if self.initial is not None and not is_in_category(self.initial, self.sequence):
assert False, "initial is not included in sequence"

def sample(self, rng: RandomState, initial: bool = False) -> dict[str, Any]:
if initial and self.initial is not None:
value = self.initial
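
Note: the two new helpers are plain predicates; each parameter constructor now calls them and asserts when the configured initial value could never be sampled from the defined range, choices, or sequence. A short usage sketch of the helpers as added in this diff:

```python
from __future__ import annotations

from typing import Any, List


def is_within_range(initial_value: int | float, lower: int | float, upper: int | float) -> bool:
    return lower <= initial_value <= upper


def is_in_category(initial_value: Any, category_list: List[Any]) -> bool:
    return initial_value in category_list


# Values inside the range / category pass silently; the constructors assert otherwise.
assert is_within_range(5, 0, 10)
assert not is_within_range(-1, 0, 10)           # would trigger "initial is out of range"
assert is_in_category("red", ["red", "green", "blue"])
assert not is_in_category("yellow", ["red", "green", "blue"])  # "initial is not included in choices"
```
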
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -10,7 +10,7 @@ readme = "README.md"
requires-python = ">=3.8.1,<3.12"
license = {text = "MIT"}
authors = [
{name = "aistairc", email = "aistairc@example.com"}
{name = "AIST", email = "onishi-masaki@aist.go.jp"}
]
classifiers = [
"License :: OSI Approved :: MIT License",
6 changes: 3 additions & 3 deletions tests/unit/test_converted_parameter.py
@@ -123,7 +123,7 @@ def test_init(self, monkeypatch: pytest.MonkeyPatch) -> None:
assert param.convert_log is True
assert param.lower == np.log(self.float_param.lower)
assert param.upper == np.log(self.float_param.upper)
assert param.initial == np.log(self.float_param.initial)
assert param.initial == self.float_param.initial

with monkeypatch.context() as m:
m.setattr(self.float_param, "initial", [1.0, 1.5, 2.0])
@@ -165,14 +165,14 @@ def test_init(self, monkeypatch: pytest.MonkeyPatch) -> None:
param = ConvertedIntParameter(self.int_param)
assert param.lower == float(np.log(self.int_param.lower))
assert param.upper == float(np.log(self.int_param.upper))
assert param.initial == float(np.log(self.int_param.initial))
assert param.initial == float(self.int_param.initial)

with monkeypatch.context() as m:
m.setattr(self.int_param, "log", True)
param = ConvertedIntParameter(self.int_param, convert_int=False)
assert param.lower == int(np.log(self.int_param.lower))
assert param.upper == int(np.log(self.int_param.upper))
assert param.initial == int(np.log(self.int_param.initial))
assert param.initial == int(self.int_param.initial)

with monkeypatch.context() as m:
m.setattr(self.int_param, "initial", [1, 2])
62 changes: 61 additions & 1 deletion tests/unit/test_parameter.py
@@ -59,4 +59,64 @@ def test_sample(self):
hp = HyperParameterConfiguration(json_string)
assert False
except TypeError:
assert True
assert True

def test_initial(self):
# If initial is out of range, raises AssertionError
json_string = [
{
'name': 'a',
'type': 'uniform_int',
'lower': 0,
'upper': 10,
'initial': -1
}
]
try:
HyperParameterConfiguration(json_string)
assert False
except AssertionError:
assert True

json_string = [
{
'name': 'b',
'type': 'uniform_float',
'lower': 0.,
'upper': 10.,
'initial': -0.1
}
]
try:
HyperParameterConfiguration(json_string)
assert False
except AssertionError:
assert True

json_string = [
{
'name': 'c',
'type': 'categorical',
'choices': ['red', 'green', 'blue'],
'initial': 'yellow'
}
]
try:
HyperParameterConfiguration(json_string)
assert False
except AssertionError:
assert True

json_string = [
{
'name': 'd',
'type': 'ordinal',
'sequence': ['10', '20', '30'],
'initial': '40'
}
]
try:
HyperParameterConfiguration(json_string)
assert False
except AssertionError:
assert True
