diff --git a/.gitignore b/.gitignore
index 4d30e9cb..5bc0c596 100644
--- a/.gitignore
+++ b/.gitignore
@@ -40,6 +40,7 @@ pip-delete-this-directory.txt
# Dev
**/_dev_scripts
+*.code-workspace
# Unit test / coverage reports
htmlcov/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b5f8ecf6..e37355ae 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -63,7 +63,7 @@ Solving an issue with a pull request is one of the most difficult ways to contri
## Open an issue
-You can contribute to this project by opening an issue. This could be a question, a bug report, a feature request or other types. In any case you should do a search beforehand to confirm, that a similar issue has not already been opened.
+You can contribute to this project by opening an issue. This could be a question, a bug report, a feature request or other types. In any case you should do a search beforehand to confirm that a similar issue has not already been opened.
@@ -88,7 +88,7 @@ This could be a feature that could be very useful for your work, an interesting
-
+
---
diff --git a/Makefile b/Makefile
index 746de3f3..03fb97ea 100644
--- a/Makefile
+++ b/Makefile
@@ -94,4 +94,82 @@ install-editable:
reinstall: uninstall install
-reinstall-editable: uninstall install-editable
\ No newline at end of file
+reinstall-editable: uninstall install-editable
+
+# === Linting and Formatting Commands ===
+
+# Run ruff linter to check for code issues
+lint:
+ ruff check .
+
+# Run ruff linter with auto-fix for fixable issues
+lint-fix:
+ ruff check --fix .
+
+# Format code using ruff formatter
+format:
+ ruff format .
+
+# Check formatting without making changes
+format-check:
+ ruff format --check .
+
+# Sort imports using ruff (isort functionality)
+isort:
+ ruff check --select I --fix .
+
+# Check import sorting without making changes
+isort-check:
+ ruff check --select I .
+
+# Run all code quality checks (lint + format check + import check)
+check: lint format-check isort-check
+
+# Fix all auto-fixable issues (lint + format + imports)
+fix: lint-fix format isort
+
+# === Notebook-specific Commands ===
+
+# Run ruff on Jupyter notebooks
+lint-notebooks:
+ ruff check --include="*.ipynb" .
+
+# Fix ruff issues in Jupyter notebooks
+lint-notebooks-fix:
+ ruff check --include="*.ipynb" --fix .
+
+# Format Jupyter notebooks with black via nbqa
+format-notebooks:
+ pre-commit run nbqa-black --all-files
+
+# Run all notebook checks and fixes
+notebooks-fix: lint-notebooks-fix format-notebooks
+
+# === Pre-commit Commands ===
+
+# Install pre-commit hooks
+pre-commit-install:
+ pre-commit install
+
+# Run pre-commit on all files
+pre-commit-all:
+ pre-commit run --all-files
+
+# Run pre-commit on staged files only
+pre-commit:
+ pre-commit run
+
+# Update pre-commit hooks to latest versions
+pre-commit-update:
+ pre-commit autoupdate
+
+# === Combined Quality Commands ===
+
+# Run comprehensive code quality checks
+quality-check: check lint-notebooks
+
+# Fix all code quality issues
+quality-fix: fix notebooks-fix
+
+# Full quality workflow: install hooks, fix issues, run final check
+quality: pre-commit-install quality-fix quality-check
diff --git a/README.md b/README.md
index 2c0f7c68..f43bfddf 100644
--- a/README.md
+++ b/README.md
@@ -187,7 +187,7 @@ Hyperactive features a collection of optimization algorithms that can be used fo
-The examples above are not necessarily done with realistic datasets or training procedures.
+The examples above do not necessarily use realistic datasets or training procedures.
The purpose is fast execution of the solution proposal and giving the user ideas for interesting use cases.
@@ -195,7 +195,7 @@ The purpose is fast execution of the solution proposal and giving the user ideas
## Sideprojects and Tools
-The following packages are designed to support Hyperactive and expand its use cases.
+The following packages are designed to support Hyperactive and expand its use cases.
| Package | Description |
|-------------------------------------------------------------------------------|--------------------------------------------------------------------------------------|
@@ -287,9 +287,9 @@ hyper.run()
- multiprocessing uses pickle
- joblib uses cloudpickle
- pathos uses dill
-
-
-- n_processes = "auto",
+
+
+- n_processes = "auto",
- Possible parameter types: (str, int)
- The maximum number of processes that are allowed to run simultaneously. If n_processes is of int-type, only n_processes jobs will run simultaneously instead of all at once. So if n_processes=10 and n_jobs_total=35, then the schedule would look like this: 10 - 10 - 10 - 5. This saves computational resources if there is a large number of n_jobs. If "auto", then n_processes is the sum of all n_jobs (from .add_search(...)).
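
  For illustration, a minimal sketch of the schedule described above (the objective function and search space are placeholders):
  ```python
  from hyperactive import Hyperactive


  def objective_function(opt):
      return -opt["x1"] * opt["x1"]


  search_space = {"x1": list(range(-100, 101))}

  # 35 jobs in total, but at most 10 processes at once:
  # the schedule is 10 - 10 - 10 - 5
  hyper = Hyperactive(n_processes=10)
  hyper.add_search(objective_function, search_space, n_iter=50, n_jobs=35)
  hyper.run()
  ```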
@@ -318,7 +318,7 @@ hyper.run()
- optimizer = "default"
- Possible parameter types: ("default", initialized optimizer object)
- Instance of optimization class that can be imported from Hyperactive. "default" corresponds to the random search optimizer. The imported optimization classes from hyperactive are different from gfo. They only accept optimizer-specific-parameters. The following classes can be imported and used:
-
+
- HillClimbingOptimizer
- StochasticHillClimbingOptimizer
- RepulsingHillClimbingOptimizer
@@ -341,16 +341,16 @@ hyper.run()
- DirectAlgorithm
- TreeStructuredParzenEstimators
- ForestOptimizer
-
+
- Example:
```python
...
-
+
opt_hco = HillClimbingOptimizer(epsilon=0.08)
hyper = Hyperactive()
hyper.add_search(..., optimizer=opt_hco)
hyper.run()
-
+
...
```
@@ -373,10 +373,10 @@ hyper.run()
- warm_start
- List of parameter dictionaries that marks additional start points for the optimization run.
-
+
Example:
```python
- ...
+ ...
search_space = {
"x1": list(range(10, 150, 5)),
"x2": list(range(2, 12)),
@@ -399,10 +399,10 @@ hyper.run()
- pass_through = {}
- Possible parameter types: (dict)
- The pass_through accepts a dictionary that contains information that will be passed to the objective-function argument. This information will not change during the optimization run, unless the user changes it (within the objective-function).
-
+
Example:
```python
- ...
+ ...
def objective_function(para):
para.pass_through["stuff1"] # <--- this variable is 1
para.pass_through["stuff2"] # <--- this variable is 2
@@ -428,7 +428,7 @@ hyper.run()
- callbacks = {}
- Possible parameter types: (dict)
- - The callbacks enables you to pass functions to hyperactive that are called every iteration during the optimization run. The function has access to the same argument as the objective-function. You can decide if the functions are called before or after the objective-function is evaluated via the keys of the callbacks-dictionary. The values of the dictionary are lists of the callback-functions. The following example should show they way to use callbacks:
+ - The callbacks-parameter enables you to pass functions to hyperactive that are called every iteration during the optimization run. The functions have access to the same argument as the objective-function. You can decide if the functions are called before or after the objective-function is evaluated via the keys of the callbacks-dictionary. The values of the dictionary are lists of the callback-functions. The following example should show the way to use callbacks:
Example:
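  A minimal sketch of this pattern; the keys `"before"` and `"after"` and the argument name `access` are illustrative:
  ```python
  ...


  def callback_after(access):
      # called every iteration, after the objective-function was evaluated
      print("current parameter:", access["x1"])


  hyper = Hyperactive()
  hyper.add_search(
      objective_function,
      search_space,
      n_iter=100,
      callbacks={"after": [callback_after]},
  )
  hyper.run()
  ```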
@@ -460,13 +460,13 @@ hyper.run()
- catch = {}
- Possible parameter types: (dict)
- - The catch parameter provides a way to handle exceptions that occur during the evaluation of the objective-function or the callbacks. It is a dictionary that accepts the exception class as a key and the score that is returned instead as the value. This way you can handle multiple types of exceptions and return different scores for each.
+ - The catch parameter provides a way to handle exceptions that occur during the evaluation of the objective-function or the callbacks. It is a dictionary that accepts the exception class as a key and the score that is returned instead as the value. This way you can handle multiple types of exceptions and return different scores for each.
In the case of an exception it often makes sense to return `np.nan` as a score. You can see an example of this in the following code-snippet:
Example:
```python
...
-
+
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -543,8 +543,8 @@ hyper.run()
-
-
+
+
@@ -568,12 +568,12 @@ hyper.run()
Objective Function
Each iteration consists of two steps:
- - The optimization step: decides what position in the search space (parameter set) to evaluate next
+ - The optimization step: decides what position in the search space (parameter set) to evaluate next
- The evaluation step: calls the objective function, which returns the score for the given position in the search space
-
+
The objective function has one argument that is often called "para", "params", "opt" or "access".
This argument is your access to the parameter set that the optimizer has selected in the
-corresponding iteration.
+corresponding iteration.
```python
def objective_function(opt):
@@ -628,7 +628,7 @@ A special feature of Hyperactive is shown in the next example. You can put not j
def func1():
# do stuff
return stuff
-
+
def func2():
# do stuff
@@ -647,7 +647,7 @@ If you want to put other types of variables (like numpy arrays, pandas dataframe
```python
def array1():
return np.array([1, 2, 3])
-
+
def array2():
return np.array([3, 2, 1])
@@ -660,7 +660,7 @@ search_space = {
}
```
-The functions contain the numpy arrays and returns them. This way you can use them inside the objective function.
+The functions contain the numpy arrays and return them. This way you can use them inside the objective function.
@@ -670,25 +670,25 @@ The functions contain the numpy arrays and returns them. This way you can use th
Optimizer Classes
Each of the following optimizer classes can be initialized and passed to the "add_search"-method via the "optimizer"-argument. During this initialization the optimizer class accepts **only optimizer-specific-parameters** (no random_state, initialize, ... ):
-
+
```python
optimizer = HillClimbingOptimizer(epsilon=0.1, distribution="laplace", n_neighbours=4)
```
-
+
for the default parameters you can just write:
-
+
```python
optimizer = HillClimbingOptimizer()
```
-
+
and pass it to Hyperactive:
-
+
```python
hyper = Hyperactive()
hyper.add_search(model, search_space, optimizer=optimizer, n_iter=100)
hyper.run()
```
-
+
So the optimizer-classes are **different** from Gradient-Free-Optimizers. A more detailed explanation of the optimization-algorithms and the optimizer-specific-parameters can be found in the [Optimization Tutorial](https://github.com/SimonBlanke/optimization-tutorial).
- HillClimbingOptimizer
@@ -730,11 +730,11 @@ Each of the following optimizer classes can be initialized and passed to the "ad
example:
```python
{
- 'x1': 0.2,
+ 'x1': 0.2,
'x2': 0.3,
}
```
-
+
@@ -754,8 +754,8 @@ Each of the following optimizer classes can be initialized and passed to the "ad
- objective_function
- (callable)
-- returns: Pandas dataframe
-- The dataframe contains score and parameter information of the given objective_function found in the optimization run. If the parameter `times` is set to True the evaluation- and iteration- times are added to the dataframe.
+- returns: Pandas dataframe
+- The dataframe contains score and parameter information of the given objective_function found in the optimization run. If the parameter `times` is set to True the evaluation- and iteration- times are added to the dataframe.
example:
@@ -879,7 +879,7 @@ Each of the following optimizer classes can be initialized and passed to the "ad
v3.3.0 :heavy_check_mark:
- - [x] Early stopping
+ - [x] Early stopping
- [x] Shared memory dictionary between processes with the same objective function
@@ -943,7 +943,7 @@ Each of the following optimizer classes can be initialized and passed to the "ad
- [x] add early stopping feature to custom optimization strategies
- [x] display additional outputs from objective-function in results in command-line
- [x] add type hints to hyperactive-api
-
+
@@ -951,7 +951,7 @@ Each of the following optimizer classes can be initialized and passed to the "ad
v4.6.0 :heavy_check_mark:
- [x] add support for constrained optimization
-
+
@@ -992,7 +992,7 @@ Each of the following optimizer classes can be initialized and passed to the "ad
- [ ] new optimization algorithms from [Gradient-Free-Optimizers](https://github.com/SimonBlanke/Gradient-Free-Optimizers) will always be added to Hyperactive
- [ ] add "prune_search_space"-method to custom optimization strategy class
-
+
@@ -1007,37 +1007,37 @@ Each of the following optimizer classes can be initialized and passed to the "ad
Read this before opening a bug-issue
-
+
- Are you sure the bug is located in Hyperactive?
- The error might be located in the optimization-backend.
+ The error might be located in the optimization-backend.
Look at the error message from the command line. If one of the last messages looks like this:
- File "/.../gradient_free_optimizers/...", line ...
- Then you should post the bug report in:
+ Then you should post the bug report in:
- https://github.com/SimonBlanke/Gradient-Free-Optimizers
Otherwise you can post the bug report in Hyperactive
-
+
- Do you have the correct Hyperactive version?
-
+
With every major version update (e.g. v2.2 -> v3.0), the API of Hyperactive changes.
Check which version of Hyperactive you have. If your major version is older you have two options:
-
+
Recommended: You could just update your Hyperactive version with:
```bash
pip install hyperactive --upgrade
```
This way you can use all the new documentation and examples from the current repository.
-
+
Or you could continue using the old version and use an old repository branch as documentation.
You can do that by selecting the corresponding branch (top right of the repository; the default is "master" or "main").
So if your major version is older (e.g. v2.1.0) you can select the 2.x.x branch to get the old repository for that version.
-
+
- Provide example code for error reproduction
To understand and fix the issue I need example code that reproduces the error.
I must be able to just copy the code into a py-file and execute it to reproduce the error.
-
+
@@ -1051,7 +1051,7 @@ This is expected of the current implementation of smb-optimizers. For all Sequen
search_space_size = 1
for value_ in search_space.values():
search_space_size *= len(value_)
-
+
print("search_space_size", search_space_size)
```
Reduce the search space size to resolve this error.
@@ -1097,17 +1097,17 @@ warnings.warn = warn
Warning: Not enough initial positions for population size
-
+
This warning occurs because Hyperactive needs more initial positions to choose from to generate a population for the optimization algorithm:
The number of initial positions is determined by the `initialize`-parameter in the `add_search`-method.
```python
# This is how it looks per default
initialize = {"grid": 4, "random": 2, "vertices": 4}
-
+
# You could set it to this for a maximum population of 20
initialize = {"grid": 4, "random": 12, "vertices": 4}
```
-
+
diff --git a/examples/hyperactive_intro.ipynb b/examples/hyperactive_intro.ipynb
index 7cf2c2f2..7892daa6 100644
--- a/examples/hyperactive_intro.ipynb
+++ b/examples/hyperactive_intro.ipynb
@@ -8,6 +8,16 @@
"## hyperactive - unified interfaces for optimizers and experiments"
]
},
+ {
+ "cell_type": "code",
+ "id": "ojjewxbtma",
+ "source": [
+ "\"\"\"Hyperactive tutorial demonstrating unified interfaces for optimizers.\"\"\""
+ ],
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },
{
"cell_type": "markdown",
"id": "830f7eb7",
@@ -187,16 +197,17 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": null,
"id": "57110e86",
"metadata": {},
"outputs": [],
"source": [
- "from hyperactive.experiment.integrations import SklearnCvExperiment\n",
"from sklearn.datasets import load_iris\n",
- "from sklearn.svm import SVC\n",
"from sklearn.metrics import accuracy_score\n",
"from sklearn.model_selection import KFold\n",
+ "from sklearn.svm import SVC\n",
+ "\n",
+ "from hyperactive.experiment.integrations import SklearnCvExperiment\n",
"\n",
"X, y = load_iris(return_X_y=True)\n",
"\n",
@@ -368,16 +379,17 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": null,
"id": "5e2328c9",
"metadata": {},
"outputs": [],
"source": [
- "from hyperactive.experiment.integrations import SklearnCvExperiment\n",
"from sklearn.datasets import load_iris\n",
- "from sklearn.svm import SVC\n",
"from sklearn.metrics import accuracy_score\n",
"from sklearn.model_selection import KFold\n",
+ "from sklearn.svm import SVC\n",
+ "\n",
+ "from hyperactive.experiment.integrations import SklearnCvExperiment\n",
"\n",
"X, y = load_iris(return_X_y=True)\n",
"\n",
@@ -468,16 +480,17 @@
},
{
"cell_type": "code",
- "execution_count": 14,
+ "execution_count": null,
"id": "f9a4d922",
"metadata": {},
"outputs": [],
"source": [
- "from hyperactive.experiment.integrations import SklearnCvExperiment\n",
"from sklearn.datasets import load_iris\n",
- "from sklearn.svm import SVC\n",
"from sklearn.metrics import accuracy_score\n",
"from sklearn.model_selection import KFold\n",
+ "from sklearn.svm import SVC\n",
+ "\n",
+ "from hyperactive.experiment.integrations import SklearnCvExperiment\n",
"\n",
"X, y = load_iris(return_X_y=True)\n",
"\n",
@@ -495,33 +508,16 @@
"execution_count": null,
"id": "9a13b4f3",
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- " \r"
- ]
- },
- {
- "data": {
- "text/plain": [
- "{'C': np.float64(0.1), 'gamma': np.float64(1.0)}"
- ]
- },
- "execution_count": 15,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"from hyperactive.opt import HillClimbing\n",
"\n",
"hillclimbing_config = {\n",
" \"search_space\": {\n",
- " \"C\": np.array([0.01, 0.1, 1, 10]),\n",
- " \"gamma\": np.array([0.0001, 0.01, 0.1, 1, 10]),\n",
+ " \"C\": np.array([0.01, 0.1, 1, 10]),\n",
+ " \"gamma\": np.array([0.0001, 0.01, 0.1, 1, 10]),\n",
" },\n",
" \"n_iter\": 100,\n",
"}\n",
@@ -535,33 +531,16 @@
"execution_count": null,
"id": "5aa7ca80",
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- " \r"
- ]
- },
- {
- "data": {
- "text/plain": [
- "{'C': np.float64(1.0), 'gamma': np.float64(0.1)}"
- ]
- },
- "execution_count": 16,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
"import numpy as np\n",
+ "\n",
"from hyperactive.opt import HillClimbing\n",
"\n",
"hill_climbing_config = {\n",
" \"search_space\": {\n",
- " \"C\": np.array([0.01, 0.1, 1, 10]),\n",
- " \"gamma\": np.array([0.0001, 0.01, 0.1, 1, 10]),\n",
+ " \"C\": np.array([0.01, 0.1, 1, 10]),\n",
+ " \"gamma\": np.array([0.0001, 0.01, 0.1, 1, 10]),\n",
" },\n",
" \"n_iter\": 100,\n",
"}\n",
@@ -608,7 +587,10 @@
"outputs": [],
"source": [
"# 1. defining the tuned estimator\n",
+ "from sklearn.datasets import load_iris\n",
+ "from sklearn.model_selection import train_test_split\n",
"from sklearn.svm import SVC\n",
+ "\n",
"from hyperactive.integrations.sklearn import OptCV\n",
"from hyperactive.opt import GridSearchSk as GridSearch\n",
"\n",
@@ -616,9 +598,6 @@
"tuned_svc = OptCV(SVC(), optimizer=GridSearch(param_grid))\n",
"\n",
"# 2. fitting the tuned estimator = tuning the hyperparameters\n",
- "from sklearn.datasets import load_iris\n",
- "from sklearn.model_selection import train_test_split\n",
- "\n",
"X, y = load_iris(return_X_y=True)\n",
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)\n",
"\n",
@@ -1087,26 +1066,21 @@
"execution_count": null,
"id": "f606284b",
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- " \r"
- ]
- }
- ],
+ "outputs": [],
"source": [
"# 1. defining the tuned estimator\n",
+ "from sklearn.datasets import load_iris\n",
+ "from sklearn.model_selection import train_test_split\n",
"from sklearn.svm import SVC\n",
+ "\n",
"from hyperactive.integrations.sklearn import OptCV\n",
"from hyperactive.opt import HillClimbing\n",
"\n",
"# picking the optimizer is the only part that changes!\n",
"hill_climbing_config = {\n",
" \"search_space\": {\n",
- " \"C\": np.array([0.01, 0.1, 1, 10]),\n",
- " \"gamma\": np.array([0.0001, 0.01, 0.1, 1, 10]),\n",
+ " \"C\": np.array([0.01, 0.1, 1, 10]),\n",
+ " \"gamma\": np.array([0.0001, 0.01, 0.1, 1, 10]),\n",
" },\n",
" \"n_iter\": 100,\n",
"}\n",
@@ -1115,9 +1089,6 @@
"tuned_svc = OptCV(SVC(), optimizer=hill_climbing)\n",
"\n",
"# 2. fitting the tuned estimator = tuning the hyperparameters\n",
- "from sklearn.datasets import load_iris\n",
- "from sklearn.model_selection import train_test_split\n",
- "\n",
"X, y = load_iris(return_X_y=True)\n",
"X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)\n",
"\n",
diff --git a/examples/opt_strat_search_space_pruning.py b/examples/opt_strat_search_space_pruning.py
new file mode 100644
index 00000000..313d3559
--- /dev/null
+++ b/examples/opt_strat_search_space_pruning.py
@@ -0,0 +1,38 @@
+import numpy as np
+
+from hyperactive import Hyperactive
+
+
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
+from hyperactive.optimizers import (
+ HillClimbingOptimizer,
+ RandomSearchOptimizer,
+ BayesianOptimizer,
+)
+
+
+opt_strat = CustomOptimizationStrategy()
+opt_strat.add_optimizer(RandomSearchOptimizer(), duration=0.5)
+opt_strat.prune_search_space()
+opt_strat.add_optimizer(HillClimbingOptimizer(), duration=0.5)
+
+
+def objective_function(opt):
+ score = -opt["x1"] * opt["x1"]
+ return score, {"additional stuff": 1}
+
+
+search_space = {"x1": list(np.arange(-100, 101, 1))}
+n_iter = 100
+optimizer = opt_strat
+
+hyper = Hyperactive()
+hyper.add_search(
+ objective_function,
+ search_space,
+ n_iter=n_iter,
+ n_jobs=1,
+ optimizer=optimizer,
+ # random_state=1,
+)
+hyper.run()
diff --git a/examples/optimization_applications/GMM_Hyperactive_Example.ipynb b/examples/optimization_applications/GMM_Hyperactive_Example.ipynb
index e4a9c792..2690fac3 100644
--- a/examples/optimization_applications/GMM_Hyperactive_Example.ipynb
+++ b/examples/optimization_applications/GMM_Hyperactive_Example.ipynb
@@ -2,19 +2,27 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 5,
+ "id": "rqk3u3ki08k",
+ "source": "\"\"\"Gaussian Mixture Model optimization example using Hyperactive.\"\"\"",
+ "metadata": {},
+ "execution_count": null,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
"id": "54e01852",
"metadata": {},
"outputs": [],
"source": [
- "import numpy as np\n",
- "import pandas as pd\n",
+ "import warnings\n",
+ "\n",
"import matplotlib.pyplot as plt\n",
- "from pandas import DataFrame\n",
+ "import pandas as pd\n",
"from sklearn import datasets\n",
"from sklearn.mixture import GaussianMixture\n",
- "import warnings\n",
- "warnings.filterwarnings(\"ignore\") "
+ "\n",
+ "warnings.filterwarnings(\"ignore\")"
]
},
{
@@ -60,32 +68,41 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": null,
"id": "b38d7bd7",
"metadata": {},
"outputs": [],
"source": [
"def model(opt):\n",
- " gmm = GaussianMixture(n_components = opt[\"n_components\"])\n",
+ " \"\"\"Gaussian Mixture Model clustering with visualization.\n",
+ "\n",
+ " Args:\n",
+ " opt: Dictionary with optimization parameters including n_components.\n",
+ "\n",
+ " Returns\n",
+ " -------\n",
+ " Lower bound of log-likelihood value.\n",
+ " \"\"\"\n",
+ " gmm = GaussianMixture(n_components=opt[\"n_components\"])\n",
" gmm.fit(d)\n",
" # Assign a label to each sample\n",
" labels = gmm.predict(d)\n",
- " d['labels']= labels\n",
- " d0 = d[d['labels']== 0]\n",
- " d1 = d[d['labels']== 1]\n",
- " d2 = d[d['labels']== 2]\n",
- " \n",
+ " d[\"labels\"] = labels\n",
+ " d0 = d[d[\"labels\"] == 0]\n",
+ " d1 = d[d[\"labels\"] == 1]\n",
+ " d2 = d[d[\"labels\"] == 2]\n",
+ "\n",
" # plot three clusters in same plot\n",
- " plt.scatter(d0[0], d0[1], c ='r')\n",
- " plt.scatter(d1[0], d1[1], c ='yellow')\n",
- " plt.scatter(d2[0], d2[1], c ='g')\n",
- " \n",
+ " plt.scatter(d0[0], d0[1], c=\"r\")\n",
+ " plt.scatter(d1[0], d1[1], c=\"yellow\")\n",
+ " plt.scatter(d2[0], d2[1], c=\"g\")\n",
+ "\n",
" # print the converged log-likelihood value\n",
" return gmm.lower_bound_.mean()\n",
- " \n",
+ "\n",
" # print the number of iterations needed\n",
" # for the log-likelihood value to converge\n",
- " return gmm.n_iter_.mean() \n"
+ " return gmm.n_iter_.mean()"
]
},
{
diff --git a/examples/optimization_applications/ensemble_learning_example.py b/examples/optimization_applications/ensemble_learning_example.py
index 0a93e4ce..428929ca 100644
--- a/examples/optimization_applications/ensemble_learning_example.py
+++ b/examples/optimization_applications/ensemble_learning_example.py
@@ -1,6 +1,6 @@
"""
-This example shows how you can search for the best models in each layer in a
-stacking ensemble.
+This example shows how you can search for the best models in each layer of a
+stacking ensemble.
We want to create a stacking ensemble with 3 layers:
- a top layer with one model
diff --git a/examples/optimization_applications/feature_selection.py b/examples/optimization_applications/feature_selection.py
index f19f7c5a..02fa79d4 100644
--- a/examples/optimization_applications/feature_selection.py
+++ b/examples/optimization_applications/feature_selection.py
@@ -1,8 +1,8 @@
"""
-This example shows how to select the best features for a model
-and dataset.
+This example shows how to select the best features for a model
+and dataset.
-The boston dataset has 13 features, therefore we have 13 search space
+The Boston dataset has 13 features; therefore we have 13 search space
dimensions for the feature selection.
The function "get_feature_indices" returns the list of features that
diff --git a/examples/optimization_applications/feature_transformation.py b/examples/optimization_applications/feature_transformation.py
index 6a5e894a..ff589399 100644
--- a/examples/optimization_applications/feature_transformation.py
+++ b/examples/optimization_applications/feature_transformation.py
@@ -1,7 +1,7 @@
"""
-This example shows how you can search for useful feature
+This example shows how you can search for useful feature
transformations for your dataset. This example is very similar to
-"feature_selection". It adds the possibility to change the features
+"feature_selection". It adds the possibility to change the features
with the numpy functions in the search space.
"""
diff --git a/examples/optimization_applications/memory.py b/examples/optimization_applications/memory.py
index 0fbef93b..c95d6560 100644
--- a/examples/optimization_applications/memory.py
+++ b/examples/optimization_applications/memory.py
@@ -1,13 +1,13 @@
"""
-Hyperactive saves all positions it explores in a memory dictionary. If it encounters
-this positions again Hyperactive will just read the score from the memory dictionary
+Hyperactive saves all positions it explores in a memory dictionary. If it encounters
+these positions again Hyperactive will just read the score from the memory dictionary
instead of reevaluating the objective function. If there is a machine-/deep-learning
model within the objective function this memory saves you a lot of computation
time, because it is much faster to just look up the score in a dictionary instead
of retraining an entire machine learning model.
-You can also pass the search data to the "memory_warm_start"-parameter of the next
-optimization run. This way the next optimization run has the memory of the
+You can also pass the search data to the "memory_warm_start"-parameter of the next
+optimization run. This way the next optimization run has the memory of the
previous run, which (again) saves you a lot of computation time.
"""
import time
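
A minimal sketch of the warm-start pattern described in this docstring, assuming `model` and `search_space` as defined in this example file:
```python
hyper0 = Hyperactive()
hyper0.add_search(model, search_space, n_iter=300)
hyper0.run()

search_data0 = hyper0.search_data(model)

# the second run starts with the memory of the first one
hyper1 = Hyperactive()
hyper1.add_search(model, search_space, n_iter=300, memory_warm_start=search_data0)
hyper1.run()
```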
diff --git a/examples/optimization_applications/multiple_scores.py b/examples/optimization_applications/multiple_scores.py
index 29632ad3..4407ef1c 100644
--- a/examples/optimization_applications/multiple_scores.py
+++ b/examples/optimization_applications/multiple_scores.py
@@ -8,7 +8,7 @@
X, y = data.data, data.target
"""
-Hyperactive cannot handle multi objective optimization.
+Hyperactive cannot handle multi-objective optimization.
But we can achieve something similar with a workaround.
The following example searches for the highest cv-score and the lowest training time.
This is possible by creating an objective/score from those two variables.
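
A minimal sketch of such a combined objective; the weighting of cv-score against training time is a free choice, not the exact code of this example:
```python
import time

from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier


def model(opt):
    dtc = DecisionTreeClassifier(max_depth=opt["max_depth"])
    start = time.time()
    scores = cross_val_score(dtc, X, y, cv=3)
    train_time = time.time() - start
    # a higher cv-score and a lower training time both increase the objective
    return scores.mean() / train_time
```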
diff --git a/examples/optimization_applications/pretrained_nas.py b/examples/optimization_applications/pretrained_nas.py
index 49942b37..da8e4452 100644
--- a/examples/optimization_applications/pretrained_nas.py
+++ b/examples/optimization_applications/pretrained_nas.py
@@ -1,17 +1,17 @@
"""
This script describes how to save time during the optimization by
using a pretrained model. It is similar to the transfer learning example,
-but here you do the training and model creation of the pretrained model
+but here you do the training and model creation of the pretrained model
yourself.
-The problem is that most of the optimization time is "waisted" by
+The problem is that most of the optimization time is "wasted" by
training the model. The time to find a new position to explore by
-Hyperactive is very small compared to the training time of
+Hyperactive is very small compared to the training time of
neural networks. This means, that we can do more optimization
-if we keep the training time as little as possible.
+if we keep the training time as short as possible.
The idea of pretrained neural architecture search is to pretrain a complete model one time.
-In the next step we remove the layers that should be optimized
+In the next step we remove the layers that should be optimized
and make the remaining layers not-trainable.
This results in a partial, pretrained, not-trainable model that will be
@@ -20,7 +20,7 @@
You can now add layers to the partial model in the objective function
and add the parameters or layers that will be optimized by Hyperactive.
-With each iteration of the optimization run we are only training
+With each iteration of the optimization run we are only training
the added layers of the model. This saves a lot of training time.
"""
diff --git a/examples/optimization_applications/search_space_example.py b/examples/optimization_applications/search_space_example.py
index 197638c3..3575392b 100644
--- a/examples/optimization_applications/search_space_example.py
+++ b/examples/optimization_applications/search_space_example.py
@@ -1,11 +1,11 @@
"""
-Hyperactive is very versatile, because it can handle not just numerical or
+Hyperactive is very versatile, because it can handle not just numerical or
string variables in the search space, but also functions. If you want to
search for the best list, numpy array, dataframe or class you can put them into a
function that returns them as shown in the example below.
This enables many possibilities for more complex optimization applications.
-Neural architecture search, feature engineering, ensemble optimization and many other applications are
+Neural architecture search, feature engineering, ensemble optimization and many other applications are
only possible or much easier if you can put functions in the search space.
"""
@@ -14,17 +14,17 @@
def function_0():
# do stuff in function0
- return
+ return
def function_1():
# do stuff in function1
- return
+ return
def function_2():
# do stuff in function2
- return
+ return
def list1():
diff --git a/examples/tested_and_supported_packages/mlxtend_example.py b/examples/tested_and_supported_packages/mlxtend_example.py
index 88e208c4..4f6091a5 100644
--- a/examples/tested_and_supported_packages/mlxtend_example.py
+++ b/examples/tested_and_supported_packages/mlxtend_example.py
@@ -41,4 +41,3 @@ def model(opt):
hyper = Hyperactive()
hyper.add_search(model, search_space, n_iter=25)
hyper.run()
-
diff --git a/examples/tested_and_supported_packages/multiprocessing_example.py b/examples/tested_and_supported_packages/multiprocessing_example.py
index 74b4b8b0..0294ea8b 100644
--- a/examples/tested_and_supported_packages/multiprocessing_example.py
+++ b/examples/tested_and_supported_packages/multiprocessing_example.py
@@ -3,7 +3,7 @@
in parallel. This can be done via multiprocessing, joblib or a custom wrapper-function.
The processes won't communicate with each other.
-You can add as many searches to the optimization run (.add_search(...)) and
+You can add as many searches as you like to the optimization run (.add_search(...)) and
run each of those searches n-times (n_jobs).
In the example below we are performing 4 searches in parallel:
diff --git a/examples/tested_and_supported_packages/read_hdf5.py b/examples/tested_and_supported_packages/read_hdf5.py
new file mode 100644
index 00000000..ad0ce8c4
--- /dev/null
+++ b/examples/tested_and_supported_packages/read_hdf5.py
@@ -0,0 +1,24 @@
+import h5py
+
+filename = "my_model_weights_noTraining.h5"
+
+with h5py.File(filename, "r") as f:
+ # Print all root level object names (aka keys)
+ # these can be group or dataset names
+ print("Keys: %s" % f.keys())
+ # get first object name/key; may or may NOT be a group
+ a_group_key = list(f.keys())[0]
+
+ # get the object type for a_group_key: usually group or dataset
+ print("\n type a_group_key \n", type(f[a_group_key]), "\n")
+
+ # If a_group_key is a group name,
+ # this gets the object names in the group and returns as a list
+ data = list(f[a_group_key])
+
+ # If a_group_key is a dataset name,
+ # this gets the dataset values and returns as a list
+ data = list(f[a_group_key])
+ print("\n data \n", data, "\n")
+ # preferred methods to get dataset values:
+ ds_obj = f[a_group_key] # returns as a h5py dataset object
+ ds_arr = f[a_group_key][()] # returns as a numpy array
diff --git a/examples/v5_API_example/_optimizer_example.py b/examples/v5_API_example/_optimizer_example.py
new file mode 100644
index 00000000..5fcc200c
--- /dev/null
+++ b/examples/v5_API_example/_optimizer_example.py
@@ -0,0 +1,37 @@
+import numpy as np
+from sklearn.datasets import load_diabetes
+from sklearn.tree import DecisionTreeRegressor
+
+
+from hyperactive.search_config import SearchConfig
+from hyperactive.optimization.gradient_free_optimizers import (
+ HillClimbingOptimizer,
+ RandomRestartHillClimbingOptimizer,
+ RandomSearchOptimizer,
+)
+from hyperactive.optimization.talos import TalosOptimizer
+
+from experiments.sklearn import SklearnExperiment
+from experiments.test_function import AckleyFunction
+
+
+data = load_diabetes()
+X, y = data.data, data.target
+
+
+search_config1 = SearchConfig(
+ max_depth=list(np.arange(2, 15, 1)),
+ min_samples_split=list(np.arange(2, 25, 2)),
+)
+
+
+TalosOptimizer()
+
+experiment1 = SklearnExperiment()
+experiment1.setup(DecisionTreeRegressor, X, y, cv=4)
+
+
+optimizer = HillClimbingOptimizer()
+optimizer.add_search(experiment1, search_config1, n_iter=100)
+hyper = optimizer
+hyper.run(max_time=5)
diff --git a/examples/v5_API_example/test.py b/examples/v5_API_example/test.py
new file mode 100644
index 00000000..17a6fe6c
--- /dev/null
+++ b/examples/v5_API_example/test.py
@@ -0,0 +1,35 @@
+finetuned_opt = OptPipe([("first", FooOpt()), ("second", BarOpt(params))], more_params)
+
+column_transformer = BetterColumnTransformer(
+ [
+ {"name": "num", "transformer": StandardScaler(), "columns": ["age", "income"]},
+ {"name": "cat", "transformer": OneHotEncoder(), "columns": ["gender", "city"]},
+ ]
+)
+
+
+class MyPipeline(Pipeline):
+ def transform(self, data):
+ numeric = self.columns(["age", "income"]).apply(StandardScaler())
+ categorical = self.columns(["gender", "city"]).apply(OneHotEncoder())
+ combined = self.concat(numeric, categorical)
+ return combined.then(SVC())
+
+
+finetuned_opt = OptPipe(more_params)
+finetuned_opt.add_step(RandomOpt(), fraction=0.3)
+finetuned_opt.add_step(fraction=0.4)
+finetuned_opt.add_step(fraction=0.3)
+
+finetuned_opt
+
+
+class OptPipe:
+ def __init__(self, a, b, c):
+ self.a = a
+ self.b = b
+ self.c = c
+
+ def set_params(self):
+ # the only way to change a, b and c
+ pass
diff --git a/extension_templates/experiments.py b/extension_templates/experiments.py
index d3a5496a..e9ecda17 100644
--- a/extension_templates/experiments.py
+++ b/extension_templates/experiments.py
@@ -175,7 +175,6 @@ def _evaluate(self, params):
metadata = {"some": "metadata"} # can be any dict
return value, metadata
-
# todo: implement this for testing purposes!
# required to run local automated unit and integration testing of estimator
# method should return default parameters, so that a test instance can be created
diff --git a/scripts/__init__.py b/scripts/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/scripts/_generator.py b/scripts/_generator.py
deleted file mode 100644
index dc8c329d..00000000
--- a/scripts/_generator.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import os
-from pathlib import Path
-
-# List of algorithm names and corresponding class names
-algo_info = [
- ("downhill_simplex", "DownhillSimplexOptimizer"),
- ("simulated_annealing", "SimulatedAnnealingOptimizer"),
- ("direct_algorithm", "DirectAlgorithm"),
- ("lipschitz_optimization", "LipschitzOptimizer"),
- ("pattern_search", "PatternSearch"),
- ("random_restart_hill_climbing", "RandomRestartHillClimbingOptimizer"),
- ("random_search", "RandomSearchOptimizer"),
- ("powells_method", "PowellsMethod"),
- ("differential_evolution", "DifferentialEvolutionOptimizer"),
- ("evolution_strategy", "EvolutionStrategyOptimizer"),
- ("genetic_algorithm", "GeneticAlgorithmOptimizer"),
- ("parallel_tempering", "ParallelTemperingOptimizer"),
- ("particle_swarm_optimization", "ParticleSwarmOptimizer"),
- ("spiral_optimization", "SpiralOptimization"),
- ("bayesian_optimization", "BayesianOptimizer"),
- ("forest_optimizer", "ForestOptimizer"),
- ("tree_structured_parzen_estimators", "TreeStructuredParzenEstimators"),
-]
-
-BASE_DIR = Path("generated_opt_algos")
-
-
-# Template for the Python class file
-def create_class_file_content(class_name: str) -> str:
- return f'''from hyperactive.opt._adapters._gfo import _BaseGFOadapter
-
-
-class {class_name}(_BaseGFOadapter):
-
- def _get_gfo_class(self):
- """Get the GFO class to use.
-
- Returns
- -------
- class
- The GFO class to use. One of the concrete GFO classes
- """
- from gradient_free_optimizers import {class_name}
-
- return {class_name}
-'''
-
-
-# Main generation loop
-for name, class_name in algo_info:
- algo_folder = BASE_DIR / name
- algo_folder.mkdir(parents=True, exist_ok=True)
-
- init_file = algo_folder / "__init__.py"
- class_file = algo_folder / f"_{name}.py"
-
- # Create __init__.py (empty)
- init_file.touch(exist_ok=True)
-
- # Write the optimizer class file
- class_file.write_text(create_class_file_content(class_name))
-
-print(f"Generated {len(algo_info)} folders in {BASE_DIR.resolve()}")
diff --git a/src/hyperactive/__init__.py b/src/hyperactive/__init__.py
index bc82b676..5106a46c 100644
--- a/src/hyperactive/__init__.py
+++ b/src/hyperactive/__init__.py
@@ -1,6 +1,13 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Hyperactive - A hyperparameter optimization and meta-learning library.
+
+This package provides tools for hyperparameter optimization using various
+optimization algorithms including random search, grid search, Bayesian
+optimization, and many others.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
import importlib.metadata
@@ -10,7 +17,6 @@
from .hyperactive import Hyperactive
-
__all__ = [
"Hyperactive",
]
diff --git a/src/hyperactive/base/tests/test_endtoend.py b/src/hyperactive/base/tests/test_endtoend.py
index 4c5a1933..173988d2 100644
--- a/src/hyperactive/base/tests/test_endtoend.py
+++ b/src/hyperactive/base/tests/test_endtoend.py
@@ -8,11 +8,12 @@
def test_endtoend_hillclimbing():
"""Test end-to-end usage of HillClimbing optimizer with an experiment."""
# 1. define the experiment
- from hyperactive.experiment.integrations import SklearnCvExperiment
from sklearn.datasets import load_iris
- from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from sklearn.model_selection import KFold
+ from sklearn.svm import SVC
+
+ from hyperactive.experiment.integrations import SklearnCvExperiment
X, y = load_iris(return_X_y=True)
@@ -26,12 +27,13 @@ def test_endtoend_hillclimbing():
# 2. set up the HillClimbing optimizer
import numpy as np
+
from hyperactive.opt import HillClimbing
hillclimbing_config = {
"search_space": {
- "C": np.array([0.01, 0.1, 1, 10]),
- "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
+ "C": np.array([0.01, 0.1, 1, 10]),
+ "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]),
},
"n_iter": 100,
}
diff --git a/src/hyperactive/distribution.py b/src/hyperactive/distribution.py
index eea37214..38b4c393 100644
--- a/src/hyperactive/distribution.py
+++ b/src/hyperactive/distribution.py
@@ -1,8 +1,15 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Distribution module for parallel processing in hyperparameter optimization.
+
+This module provides various methods for distributing optimization processes
+across multiple cores or threads, including multiprocessing, pathos, and joblib.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
from sys import platform
+
from tqdm import tqdm
if platform.startswith("linux"):
@@ -14,32 +21,29 @@
def single_process(process_func, process_infos):
+ """Execute processes sequentially in a single thread."""
return [process_func(*info) for info in process_infos]
def multiprocessing_wrapper(process_func, process_infos, n_processes):
+ """Execute processes using multiprocessing library."""
import multiprocessing as mp
- with mp.Pool(
- n_processes, initializer=initializer, initargs=initargs
- ) as pool:
+ with mp.Pool(n_processes, initializer=initializer, initargs=initargs) as pool:
return pool.map(process_func, process_infos)
def pathos_wrapper(process_func, search_processes_paras, n_processes):
+ """Execute processes using pathos multiprocessing library."""
import pathos.multiprocessing as pmp
- with pmp.Pool(
- n_processes, initializer=initializer, initargs=initargs
- ) as pool:
+ with pmp.Pool(n_processes, initializer=initializer, initargs=initargs) as pool:
return pool.map(process_func, search_processes_paras)
def joblib_wrapper(process_func, search_processes_paras, n_processes):
+ """Execute processes using joblib parallel processing."""
from joblib import Parallel, delayed
- jobs = [
- delayed(process_func)(*info_dict)
- for info_dict in search_processes_paras
- ]
+ jobs = [delayed(process_func)(*info_dict) for info_dict in search_processes_paras]
return Parallel(n_jobs=n_processes)(jobs)
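
For illustration, a minimal usage sketch of these wrappers; the process function and infos are placeholders:
```python
def process_func(nth_process, n_iter):
    # stand-in for one optimization process
    return {"process": nth_process, "n_iter": n_iter}


process_infos = [(0, 100), (1, 100), (2, 100)]

# run sequentially; the same infos can be handed to joblib_wrapper(...)
results = single_process(process_func, process_infos)
```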
diff --git a/src/hyperactive/experiment/integrations/__init__.py b/src/hyperactive/experiment/integrations/__init__.py
index 2d043871..6e14af6a 100644
--- a/src/hyperactive/experiment/integrations/__init__.py
+++ b/src/hyperactive/experiment/integrations/__init__.py
@@ -1,7 +1,6 @@
"""Integrations with packages for tuning."""
# copyright: hyperactive developers, MIT License (see LICENSE file)
-
from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment
__all__ = ["SklearnCvExperiment"]
diff --git a/src/hyperactive/experiment/integrations/sklearn_cv.py b/src/hyperactive/experiment/integrations/sklearn_cv.py
index fa32f7fe..77485e90 100644
--- a/src/hyperactive/experiment/integrations/sklearn_cv.py
+++ b/src/hyperactive/experiment/integrations/sklearn_cv.py
@@ -8,6 +8,7 @@
from hyperactive.base import BaseExperiment
+
class SklearnCvExperiment(BaseExperiment):
"""Experiment adapter for sklearn cross-validation experiments.
@@ -195,9 +196,9 @@ def get_test_params(cls, parameter_set="default"):
`create_test_instance` uses the first (or only) dictionary in `params`
"""
from sklearn.datasets import load_diabetes, load_iris
- from sklearn.svm import SVC, SVR
from sklearn.metrics import accuracy_score, mean_absolute_error
from sklearn.model_selection import KFold
+ from sklearn.svm import SVC, SVR
X, y = load_iris(return_X_y=True)
params_classif = {
@@ -282,7 +283,6 @@ def _guess_sign_of_sklmetric(scorer):
"roc_auc_score": True,
"top_k_accuracy_score": True,
"zero_one_loss": False,
-
# Regression
"d2_absolute_error_score": True,
"d2_pinball_score": True,
diff --git a/src/hyperactive/experiment/toy/__init__.py b/src/hyperactive/experiment/toy/__init__.py
index 33553866..0835e62c 100644
--- a/src/hyperactive/experiment/toy/__init__.py
+++ b/src/hyperactive/experiment/toy/__init__.py
@@ -1,6 +1,5 @@
"""Toy experiments."""
-
from hyperactive.experiment.toy._ackley import Ackley
from hyperactive.experiment.toy._parabola import Parabola
from hyperactive.experiment.toy._sphere import Sphere
diff --git a/src/hyperactive/experiment/toy/_ackley.py b/src/hyperactive/experiment/toy/_ackley.py
index 6eb67733..deb246f2 100644
--- a/src/hyperactive/experiment/toy/_ackley.py
+++ b/src/hyperactive/experiment/toy/_ackley.py
@@ -7,6 +7,8 @@
class Ackley(BaseExperiment):
+ """Ackley class."""
+
r"""Ackley function, common benchmark for optimization algorithms.
The Ackley function is a non-convex function used to test optimization algorithms.
diff --git a/src/hyperactive/experiment/toy/_parabola.py b/src/hyperactive/experiment/toy/_parabola.py
index d9672f96..0880dcf3 100644
--- a/src/hyperactive/experiment/toy/_parabola.py
+++ b/src/hyperactive/experiment/toy/_parabola.py
@@ -5,6 +5,8 @@
class Parabola(BaseExperiment):
+ """Parabola class."""
+
r"""2D parabola, common benchmark for optimization algorithms.
Parabola parameterized by the formula:
diff --git a/src/hyperactive/experiment/toy/_sphere.py b/src/hyperactive/experiment/toy/_sphere.py
index 09e9e5c6..9f415e1a 100644
--- a/src/hyperactive/experiment/toy/_sphere.py
+++ b/src/hyperactive/experiment/toy/_sphere.py
@@ -7,6 +7,8 @@
class Sphere(BaseExperiment):
+ """Sphere class."""
+
r"""Simple Sphere function, common benchmark for optimization algorithms.
Sphere function parameterized by the formula:
@@ -80,7 +82,7 @@ def _evaluate(self, params):
Additional metadata about the search.
"""
params_vec = np.array([params[f"x{i}"] for i in range(self.n_dim)])
- return np.sum(params_vec ** 2) + self.const, {}
+ return np.sum(params_vec**2) + self.const, {}
@classmethod
def get_test_params(cls, parameter_set="default"):
diff --git a/src/hyperactive/hyperactive.py b/src/hyperactive/hyperactive.py
index 860a65f1..ea472b8e 100644
--- a/src/hyperactive/hyperactive.py
+++ b/src/hyperactive/hyperactive.py
@@ -1,19 +1,24 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Main Hyperactive module providing the primary optimization interface.
+This module contains the Hyperactive class, which is the main entry point
+for hyperparameter optimization. It provides methods to add optimization
+searches, run them, and retrieve results.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
import copy
import multiprocessing as mp
-import pandas as pd
+from typing import Union
-from typing import Union, List, Dict, Type
+import pandas as pd
from .optimizers import RandomSearchOptimizer
-from .run_search import run_search
-
-from .results import Results
from .print_results import PrintResults
+from .results import Results
+from .run_search import run_search
from .search_space import SearchSpace
@@ -21,12 +26,17 @@ class Hyperactive:
"""
Initialize the Hyperactive class to manage optimization processes.
- Parameters:
- - verbosity: List of verbosity levels (default: ["progress_bar", "print_results", "print_times"])
- - distribution: String indicating the distribution method (default: "multiprocessing")
- - n_processes: Number of processes to run in parallel or "auto" to determine automatically (default: "auto")
-
- Methods:
+ Parameters
+ ----------
+ - verbosity: List of verbosity levels
+ (default: ["progress_bar", "print_results", "print_times"])
+ - distribution: String indicating the distribution method
+ (default: "multiprocessing")
+ - n_processes: Number of processes to run in parallel or "auto"
+ to determine automatically (default: "auto")
+
+ Methods
+ -------
- add_search: Add a new optimization search process with specified parameters
- run: Execute the optimization searches
- best_para: Get the best parameters for a specific search
@@ -61,20 +71,16 @@ def _create_shared_memory(self):
_bundle_opt_processes.setdefault(name, []).append(opt_pros)
for opt_pros_l in _bundle_opt_processes.values():
- # Check if the lengths of the search spaces of all optimizers in the list are the same.
- if (
- len(set(len(opt_pros.s_space()) for opt_pros in opt_pros_l))
- == 1
- ):
+ # Check if the lengths of the search spaces of all optimizers
+ # in the list are the same.
+ if len({len(opt_pros.s_space()) for opt_pros in opt_pros_l}) == 1:
manager = mp.Manager() # get new manager.dict
shared_memory = manager.dict()
for opt_pros in opt_pros_l:
opt_pros.memory = shared_memory
else:
for opt_pros in opt_pros_l:
- opt_pros.memory = opt_pros_l[
- 0
- ].memory # get same manager.dict
+ opt_pros.memory = opt_pros_l[0].memory # get same manager.dict
@staticmethod
def _default_opt(optimizer):
@@ -85,17 +91,19 @@ def _default_opt(optimizer):
@staticmethod
def _default_search_id(search_id, objective_function):
+ """Set default search ID based on objective function name if not provided."""
if not search_id:
search_id = objective_function.__name__
return search_id
@staticmethod
def check_list(search_space):
+ """Validate that search space values are lists."""
for key in search_space.keys():
search_dim = search_space[key]
- error_msg = "Value in '{}' of search space dictionary must be of type list".format(
- key
+ error_msg = (
+ f"Value in '{key}' of search space dictionary must be of type list"
)
if not isinstance(search_dim, list):
print("Warning", error_msg)
@@ -104,18 +112,18 @@ def check_list(search_space):
def add_search(
self,
objective_function: callable,
- search_space: Dict[str, list],
+ search_space: dict[str, list],
n_iter: int,
search_id=None,
- optimizer: Union[str, Type[RandomSearchOptimizer]] = "default",
+ optimizer: Union[str, type[RandomSearchOptimizer]] = "default",
n_jobs: int = 1,
- initialize: Dict[str, int] = {"grid": 4, "random": 2, "vertices": 4},
- constraints: List[callable] = None,
- pass_through: Dict = None,
- callbacks: Dict[str, callable] = None,
- catch: Dict = None,
+ initialize: dict[str, int] = {"grid": 4, "random": 2, "vertices": 4},
+ constraints: list[callable] = None,
+ pass_through: dict = None,
+ callbacks: dict[str, callable] = None,
+ catch: dict = None,
max_score: float = None,
- early_stopping: Dict = None,
+ early_stopping: dict = None,
random_state: int = None,
memory: Union[str, bool] = "share",
memory_warm_start: pd.DataFrame = None,
@@ -123,16 +131,19 @@ def add_search(
"""
Add a new optimization search process with specified parameters.
- Parameters:
+ Parameters
+ ----------
- objective_function: The objective function to optimize.
- search_space: Dictionary defining the search space for optimization.
- n_iter: Number of iterations for the optimization process.
- search_id: Identifier for the search process (default: None).
- optimizer: The optimizer to use for the search process (default: "default").
- n_jobs: Number of parallel jobs to run (default: 1).
- - initialize: Dictionary specifying initialization parameters (default: {"grid": 4, "random": 2, "vertices": 4}).
+ - initialize: Dictionary specifying initialization parameters
+ (default: {"grid": 4, "random": 2, "vertices": 4}).
- constraints: List of constraint functions (default: None).
- - pass_through: Dictionary of additional parameters to pass through (default: None).
+ - pass_through: Dictionary of additional parameters to pass through
+ (default: None).
- callbacks: Dictionary of callback functions (default: None).
- catch: Dictionary of exceptions to catch during optimization (default: None).
- max_score: Maximum score to achieve (default: None).
@@ -141,7 +152,6 @@ def add_search(
- memory: Option to share memory between processes (default: "share").
- memory_warm_start: DataFrame containing warm start memory (default: None).
"""
-
self.check_list(search_space)
constraints = constraints or []
@@ -193,9 +203,9 @@ def run(self, max_time: float = None):
Run the optimization process with an optional maximum time limit.
Args:
- max_time (float, optional): Maximum time limit for the optimization process. Defaults to None.
+ max_time (float, optional): Maximum time limit for the optimization
+ process. Defaults to None.
"""
-
self._create_shared_memory()
for opt in self.opt_pros.values():
@@ -213,37 +223,44 @@ def best_para(self, id_):
"""
Retrieve the best parameters for a specific ID from the results.
- Parameters:
+ Parameters
+ ----------
- id_ (int): The ID of the parameters to retrieve.
- Returns:
- - Union[Dict[str, Union[int, float]], None]: The best parameters for the specified ID if found, otherwise None.
+ Returns
+ -------
+ - Union[dict[str, Union[int, float]], None]: The best parameters for the
+ specified ID if found, otherwise None.
- Raises:
+ Raises
+ ------
- ValueError: If the objective function name is not recognized.
"""
-
return self.results_.best_para(id_)
def best_score(self, id_):
"""
Return the best score for a specific ID from the results.
- Parameters:
+ Parameters
+ ----------
- id_ (int): The ID for which the best score is requested.
"""
-
return self.results_.best_score(id_)
def search_data(self, id_, times=False):
- """
- Retrieve search data for a specific ID from the results. Optionally exclude evaluation and iteration times if 'times' is set to False.
+ """Retrieve search data for a specific ID from the results.
- Parameters:
+ Optionally exclude evaluation and iteration times if 'times' is set to False.
+
+ Parameters
+ ----------
- id_ (int): The ID of the search data to retrieve.
- - times (bool, optional): Whether to exclude evaluation and iteration times. Defaults to False.
+ - times (bool, optional): Whether to exclude evaluation and iteration times.
+ Defaults to False.
- Returns:
+ Returns
+ -------
- pd.DataFrame: The search data for the specified ID.
columns are
@@ -256,10 +273,9 @@ def search_data(self, id_, times=False):
index is RangeIndex
"""
-
search_data_ = self.results_.search_data(id_)
- if times == False:
+ if not times:
search_data_.drop(
labels=["eval_times", "iter_times"],
axis=1,
diff --git a/src/hyperactive/integrations/__init__.py b/src/hyperactive/integrations/__init__.py
index dd5f708c..ac47e672 100644
--- a/src/hyperactive/integrations/__init__.py
+++ b/src/hyperactive/integrations/__init__.py
@@ -1,4 +1,7 @@
-# copyright: hyperactive developers, MIT License (see LICENSE file)
+"""Integrations package for third-party library compatibility.
+
+copyright: hyperactive developers, MIT License (see LICENSE file)
+"""
from hyperactive.integrations.sklearn import HyperactiveSearchCV, OptCV
diff --git a/src/hyperactive/integrations/sklearn/__init__.py b/src/hyperactive/integrations/sklearn/__init__.py
index e10bf5fe..461bccc6 100644
--- a/src/hyperactive/integrations/sklearn/__init__.py
+++ b/src/hyperactive/integrations/sklearn/__init__.py
@@ -1,8 +1,10 @@
-# copyright: hyperactive developers, MIT License (see LICENSE file)
+"""Scikit-learn integration package for Hyperactive.
+copyright: hyperactive developers, MIT License (see LICENSE file)
+"""
-from hyperactive.integrations.sklearn.hyperactive_search_cv import HyperactiveSearchCV
-from hyperactive.integrations.sklearn.opt_cv import OptCV
+from .hyperactive_search_cv import HyperactiveSearchCV
+from .opt_cv import OptCV
__all__ = [
"HyperactiveSearchCV",
diff --git a/src/hyperactive/integrations/sklearn/_compat.py b/src/hyperactive/integrations/sklearn/_compat.py
index 528af594..8ac9ae86 100644
--- a/src/hyperactive/integrations/sklearn/_compat.py
+++ b/src/hyperactive/integrations/sklearn/_compat.py
@@ -1,7 +1,6 @@
-"""
-Internal helpers that bridge behavioural differences between
-scikit-learn versions. Import *private* scikit-learn symbols **only**
-here and nowhere else.
+"""Internal helpers that bridge behavioural differences between scikit-learn versions.
+
+Import *private* scikit-learn symbols **only** here and nowhere else.
Copyright: Hyperactive contributors
License: MIT
@@ -10,7 +9,7 @@
from __future__ import annotations
import warnings
-from typing import Dict, Any
+from typing import Any
import sklearn
from packaging import version
@@ -63,12 +62,13 @@ def _safe_refit(estimator, X, y, fit_params):
# Still exists → re-export
from sklearn.utils.deprecation import _deprecate_Xt_in_inverse_transform
else:
- # Removed in 1.7 → provide drop-in replacement
- def _deprecate_Xt_in_inverse_transform( # noqa: N802 keep sklearn’s name
+ # Removed in 1.7 - provide drop-in replacement
+ def _deprecate_Xt_in_inverse_transform( # noqa: N802 keep sklearn's name
X: Any | None,
Xt: Any | None,
):
- """
+ """Handle deprecation of Xt parameter in inverse_transform.
+
scikit-learn ≤1.6 accepted both the old `Xt` parameter and the new
`X` parameter for `inverse_transform`. When only `Xt` is given we
return `Xt` and raise a deprecation warning (same behaviour that
@@ -92,9 +92,9 @@ def _deprecate_Xt_in_inverse_transform( # noqa: N802 keep sklearn’s name
def _check_method_params( # type: ignore[override] # noqa: N802
X,
- params: Dict[str, Any],
+ params: dict[str, Any],
):
- # passthrough – rely on estimator & indexable for validation
+ # passthrough - rely on estimator & indexable for validation
return params
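
The shim above, as its docstring describes, prefers `X` and falls back to `Xt` with a warning; a minimal behavioural sketch of the documented semantics (an illustration, not the verbatim implementation):

```python
# Behavioural sketch of the Xt deprecation shim described above.
import warnings


def deprecate_xt_sketch(X=None, Xt=None):
    if X is not None and Xt is not None:
        raise TypeError("Cannot pass both X and Xt.")
    if X is None and Xt is None:
        raise TypeError("Either X or Xt must be passed.")
    if Xt is not None:
        warnings.warn("Xt was renamed to X and will be removed.", FutureWarning)
        return Xt
    return X
```
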
diff --git a/src/hyperactive/integrations/sklearn/best_estimator.py b/src/hyperactive/integrations/sklearn/best_estimator.py
index 11d61e7b..d7241f4e 100644
--- a/src/hyperactive/integrations/sklearn/best_estimator.py
+++ b/src/hyperactive/integrations/sklearn/best_estimator.py
@@ -1,58 +1,68 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Best estimator utilities for scikit-learn integration.
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
from sklearn.utils.metaestimators import available_if
-from sklearn.exceptions import NotFittedError
from sklearn.utils.validation import check_is_fitted
-from .utils import _estimator_has
from ._compat import _deprecate_Xt_in_inverse_transform
+from .utils import _estimator_has
# NOTE Implementations of following methods from:
# https://github.com/scikit-learn/scikit-learn/blob/main/sklearn/model_selection/_search.py
# Tag: 1.5.1
class BestEstimator:
+ """BestEstimator class."""
@available_if(_estimator_has("score_samples"))
def score_samples(self, X):
+ """Score Samples function."""
check_is_fitted(self)
return self.best_estimator_.score_samples(X)
@available_if(_estimator_has("predict"))
def predict(self, X):
+ """Predict function."""
check_is_fitted(self)
return self.best_estimator_.predict(X)
@available_if(_estimator_has("predict_proba"))
def predict_proba(self, X):
+ """Predict Proba function."""
check_is_fitted(self)
return self.best_estimator_.predict_proba(X)
@available_if(_estimator_has("predict_log_proba"))
def predict_log_proba(self, X):
+ """Predict Log Proba function."""
check_is_fitted(self)
return self.best_estimator_.predict_log_proba(X)
@available_if(_estimator_has("decision_function"))
def decision_function(self, X):
+ """Decision Function function."""
check_is_fitted(self)
return self.best_estimator_.decision_function(X)
@available_if(_estimator_has("transform"))
def transform(self, X):
+ """Transform function."""
check_is_fitted(self)
return self.best_estimator_.transform(X)
@available_if(_estimator_has("inverse_transform"))
def inverse_transform(self, X=None, Xt=None):
+ """Inverse Transform function."""
X = _deprecate_Xt_in_inverse_transform(X, Xt)
check_is_fitted(self)
return self.best_estimator_.inverse_transform(X)
@property
def classes_(self):
+ """Classes function."""
_estimator_has("classes_")(self)
return self.best_estimator_.classes_
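
The delegation pattern used throughout `BestEstimator` reduces to a small sketch; the class below is hypothetical and only demonstrates how `available_if` gates a method on the wrapped estimator:

```python
# Hypothetical illustration of the available_if delegation used above.
from sklearn.utils.metaestimators import available_if


class _Delegator:
    def __init__(self, best_estimator_):
        self.best_estimator_ = best_estimator_

    @available_if(lambda self: hasattr(self.best_estimator_, "predict"))
    def predict(self, X):
        # Only exposed when the wrapped estimator implements predict.
        return self.best_estimator_.predict(X)
```
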
diff --git a/src/hyperactive/integrations/sklearn/checks.py b/src/hyperactive/integrations/sklearn/checks.py
index 2d107a0a..c752ee71 100644
--- a/src/hyperactive/integrations/sklearn/checks.py
+++ b/src/hyperactive/integrations/sklearn/checks.py
@@ -1,8 +1,16 @@
+"""Validation checks for scikit-learn integration."""
+
+
class Checks:
+ """Checks class."""
+
_fit_successful = False
def verify_fit(function):
+ """Verify Fit function."""
+
def wrapper(self, X, y):
+ """Wrap function call."""
out = function(self, X, y)
self._fit_successful = True
return out
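
A possible refinement of `verify_fit` (not part of this diff) would preserve the wrapped method's metadata with `functools.wraps`:

```python
# Illustrative variant of verify_fit; functools.wraps is an assumed addition.
import functools


def verify_fit(function):
    @functools.wraps(function)
    def wrapper(self, X, y):
        out = function(self, X, y)
        self._fit_successful = True  # record that fit ran to completion
        return out

    return wrapper
```
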
diff --git a/src/hyperactive/integrations/sklearn/hyperactive_search_cv.py b/src/hyperactive/integrations/sklearn/hyperactive_search_cv.py
index cf7fdd61..903d2442 100644
--- a/src/hyperactive/integrations/sklearn/hyperactive_search_cv.py
+++ b/src/hyperactive/integrations/sklearn/hyperactive_search_cv.py
@@ -1,36 +1,38 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Hyperactive cross-validation search for scikit-learn integration.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
from collections.abc import Callable
-from typing import Union, Dict, Type
+from typing import Union
from sklearn.base import BaseEstimator, clone
-from sklearn.metrics import check_scoring
-
-
from sklearn.base import BaseEstimator as SklearnBaseEstimator
+from sklearn.metrics import check_scoring
from hyperactive import Hyperactive
+from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment
+from ...optimizers import RandomSearchOptimizer
+from ._compat import _check_method_params, _safe_refit, _safe_validate_X_y
from .best_estimator import BestEstimator as _BestEstimator_
from .checks import Checks
-from ...optimizers import RandomSearchOptimizer
-from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment
-
-from ._compat import _check_method_params, _safe_validate_X_y, _safe_refit
class HyperactiveSearchCV(BaseEstimator, _BestEstimator_, Checks):
- """
- HyperactiveSearchCV class for hyperparameter tuning using cross-validation with sklearn estimators.
+ """HyperactiveSearchCV class for hyperparameter tuning with sklearn.
- Parameters:
+ This class provides a hyperparameter tuning interface compatible with sklearn.
+
+ Parameters
+ ----------
- estimator: SklearnBaseEstimator
The estimator to be tuned.
- - params_config: Dict[str, list]
+ - params_config: dict[str, list]
Dictionary containing the hyperparameter search space.
- - optimizer: Union[str, Type[RandomSearchOptimizer]], optional
+ - optimizer: Union[str, type[RandomSearchOptimizer]], optional
The optimizer to be used for hyperparameter search, default is "default".
- n_iter: int, optional
Number of parameter settings that are sampled, default is 100.
@@ -45,7 +47,8 @@ class HyperactiveSearchCV(BaseEstimator, _BestEstimator_, Checks):
- cv: int | "BaseCrossValidator" | Iterable | None, optional
Determines the cross-validation splitting strategy.
-    Methods:
+
+    Methods
+    -------
- fit(X, y, **fit_params)
Fit the estimator and tune hyperparameters.
- score(X, y, **params)
@@ -57,8 +60,8 @@ class HyperactiveSearchCV(BaseEstimator, _BestEstimator_, Checks):
def __init__(
self,
estimator: "SklearnBaseEstimator",
- params_config: Dict[str, list],
- optimizer: Union[str, Type[RandomSearchOptimizer]] = "default",
+ params_config: dict[str, list],
+ optimizer: Union[str, type[RandomSearchOptimizer]] = "default",
n_iter: int = 100,
*,
scoring: Union[Callable, str, None] = None,
@@ -95,7 +98,8 @@ def fit(self, X, y, **fit_params):
"""
Fit the estimator using the provided training data.
-    Parameters:
+
+    Parameters
+    ----------
- X: array-like or sparse matrix, shape (n_samples, n_features)
The training input samples.
- y: array-like, shape (n_samples,) or (n_samples, n_outputs)
@@ -103,11 +107,11 @@ def fit(self, X, y, **fit_params):
- **fit_params: dict of string -> object
Additional fit parameters.
-    Returns:
+
+    Returns
+    -------
- self: object
Returns the instance itself.
"""
-
X, y = self._check_data(X, y)
fit_params = _check_method_params(X, params=fit_params)
@@ -145,7 +149,8 @@ def score(self, X, y=None, **params):
"""
Calculate the score of the best estimator on the input data.
-    Parameters:
+
+    Parameters
+    ----------
- X: array-like or sparse matrix of shape (n_samples, n_features)
The input samples.
- y: array-like of shape (n_samples,), default=None
@@ -153,13 +158,14 @@ def score(self, X, y=None, **params):
- **params: dict
Additional parameters to be passed to the scoring function.
-    Returns:
+
+    Returns
+    -------
- float
The score of the best estimator on the input data.
"""
-
return self.scorer_(self.best_estimator_, X, y, **params)
@property
def fit_successful(self):
+ """Fit Successful function."""
self._fit_successful
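
A minimal usage sketch of `HyperactiveSearchCV` as documented above; the estimator and search values are illustrative, and the optimizer is left at its `"default"`:

```python
# Usage sketch; estimator and parameter values are illustrative only.
from sklearn.datasets import load_iris
from sklearn.svm import SVC

from hyperactive.integrations import HyperactiveSearchCV

X, y = load_iris(return_X_y=True)
params_config = {"C": [0.1, 1.0, 10.0], "gamma": [0.01, 0.1, 1.0]}

search = HyperactiveSearchCV(SVC(), params_config, n_iter=20, cv=3)
search.fit(X, y)
print(search.score(X, y))
```
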
diff --git a/src/hyperactive/integrations/sklearn/opt_cv.py b/src/hyperactive/integrations/sklearn/opt_cv.py
index 3e535885..dad50306 100644
--- a/src/hyperactive/integrations/sklearn/opt_cv.py
+++ b/src/hyperactive/integrations/sklearn/opt_cv.py
@@ -1,4 +1,4 @@
-# copyright: hyperactive developers, MIT License (see LICENSE file)
+"""opt_cv module for Hyperactive optimization."""
from collections.abc import Callable
from typing import Union
@@ -11,7 +11,7 @@
)
from hyperactive.integrations.sklearn.checks import Checks
-from ._compat import _check_method_params, _safe_validate_X_y, _safe_refit
+from ._compat import _check_method_params, _safe_refit, _safe_validate_X_y
class OptCV(BaseEstimator, _BestEstimator_, Checks):
@@ -112,7 +112,6 @@ def fit(self, X, y, **fit_params):
self : object
Fitted Estimator.
"""
-
X, y = self._check_data(X, y)
fit_params = _check_method_params(X, params=fit_params)
@@ -167,4 +166,5 @@ def score(self, X, y=None, **params):
@property
def fit_successful(self):
+ """Fit Successful function."""
self._fit_successful
diff --git a/src/hyperactive/integrations/sklearn/utils.py b/src/hyperactive/integrations/sklearn/utils.py
index 6a25cb19..0c56ebb0 100644
--- a/src/hyperactive/integrations/sklearn/utils.py
+++ b/src/hyperactive/integrations/sklearn/utils.py
@@ -1,14 +1,9 @@
-# Author: Simon Blanke
+"""utils module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
-from sklearn.utils.validation import (
- indexable,
- _check_method_params,
- check_is_fitted,
-)
-
# NOTE Implementations of following methods from:
# https://github.com/scikit-learn/scikit-learn/blob/main/sklearn/model_selection/_search.py
# Tag: 1.5.1
@@ -26,6 +21,7 @@ def _check_refit(search_cv, attr):
def _estimator_has(attr):
def check(self):
+ """Check function."""
_check_refit(self, attr)
if hasattr(self, "best_estimator_"):
# raise an AttributeError if `attr` does not exist
diff --git a/src/hyperactive/integrations/sktime/__init__.py b/src/hyperactive/integrations/sktime/__init__.py
index 09bdbd71..3fae604a 100644
--- a/src/hyperactive/integrations/sktime/__init__.py
+++ b/src/hyperactive/integrations/sktime/__init__.py
@@ -1,6 +1,8 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Sktime integration package for Hyperactive.
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
-from .main import HyperactiveSearchCV
+from .main import HyperactiveSearchCV as HyperactiveSearchCV
diff --git a/src/hyperactive/integrations/sktime/main.py b/src/hyperactive/integrations/sktime/main.py
index f0d7c5db..991f16de 100644
--- a/src/hyperactive/integrations/sktime/main.py
+++ b/src/hyperactive/integrations/sktime/main.py
@@ -1,8 +1,11 @@
-# Author: Simon Blanke
+"""main module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
class HyperactiveSearchCV:
+ """HyperactiveSearchCV class."""
+
def __init__(self) -> None:
pass
diff --git a/src/hyperactive/opt/__init__.py b/src/hyperactive/opt/__init__.py
index bee93c9d..588b8c8d 100644
--- a/src/hyperactive/opt/__init__.py
+++ b/src/hyperactive/opt/__init__.py
@@ -4,31 +4,31 @@
from hyperactive.opt.gridsearch import GridSearchSk
from hyperactive.opt.random_search import RandomSearchSk
+
from .gfo import (
- HillClimbing,
- StochasticHillClimbing,
- RepulsingHillClimbing,
- SimulatedAnnealing,
+ BayesianOptimizer,
+ DifferentialEvolution,
+ DirectAlgorithm,
DownhillSimplexOptimizer,
- RandomSearch,
+ EvolutionStrategy,
+ ForestOptimizer,
+ GeneticAlgorithm,
GridSearch,
- RandomRestartHillClimbing,
- PowellsMethod,
- PatternSearch,
+ HillClimbing,
LipschitzOptimizer,
- DirectAlgorithm,
ParallelTempering,
ParticleSwarmOptimizer,
+ PatternSearch,
+ PowellsMethod,
+ RandomRestartHillClimbing,
+ RandomSearch,
+ RepulsingHillClimbing,
+ SimulatedAnnealing,
SpiralOptimization,
- GeneticAlgorithm,
- EvolutionStrategy,
- DifferentialEvolution,
- BayesianOptimizer,
+ StochasticHillClimbing,
TreeStructuredParzenEstimators,
- ForestOptimizer,
)
-
__all__ = [
"GridSearchSk",
"RandomSearchSk",
diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py
index c2bae3d9..758a8809 100644
--- a/src/hyperactive/opt/_adapters/_gfo.py
+++ b/src/hyperactive/opt/_adapters/_gfo.py
@@ -2,9 +2,10 @@
# copyright: hyperactive developers, MIT License (see LICENSE file)
-from hyperactive.base import BaseOptimizer
from skbase.utils.stdout_mute import StdoutMute
+from hyperactive.base import BaseOptimizer
+
__all__ = ["_BaseGFOadapter"]
@@ -25,7 +26,6 @@ class _BaseGFOadapter(BaseOptimizer):
}
def __init__(self):
-
super().__init__()
if self.initialize is None:
@@ -107,18 +107,20 @@ def coerce_to_numpy(arr):
if not isinstance(arr, np.ndarray):
return np.array(arr)
return arr
-
+
coerced_search_space = {k: coerce_to_numpy(v) for k, v in search_space.items()}
return coerced_search_space
def _run(self, experiment, **search_config):
"""Run the optimization search process.
+
Parameters
----------
experiment : BaseExperiment
The experiment to optimize parameters for.
search_config : dict with str keys
identical to return of ``get_search_config``.
+
Returns
-------
dict with str keys
@@ -174,6 +176,7 @@ def get_test_params(cls, parameter_set="default"):
`create_test_instance` uses the first (or only) dictionary in `params`
"""
import numpy as np
+
from hyperactive.experiment.integrations import SklearnCvExperiment
sklearn_exp = SklearnCvExperiment.create_test_instance()
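
The search-space coercion in `_BaseGFOadapter` can be exercised standalone; this mirrors the `coerce_to_numpy` helper from the hunk above:

```python
# Standalone mirror of the search-space coercion shown above.
import numpy as np


def coerce_to_numpy(arr):
    if not isinstance(arr, np.ndarray):
        return np.array(arr)
    return arr


search_space = {"x": [1, 2, 3], "y": np.array([0.1, 0.2])}
coerced = {k: coerce_to_numpy(v) for k, v in search_space.items()}
assert all(isinstance(v, np.ndarray) for v in coerced.values())
```
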
diff --git a/src/hyperactive/opt/gfo/__init__.py b/src/hyperactive/opt/gfo/__init__.py
index f1c430b7..c40f2bf2 100644
--- a/src/hyperactive/opt/gfo/__init__.py
+++ b/src/hyperactive/opt/gfo/__init__.py
@@ -2,28 +2,27 @@
# copyright: hyperactive developers, MIT License (see LICENSE file)
-from ._hillclimbing import HillClimbing
-from ._stochastic_hillclimbing import StochasticHillClimbing
-from ._repulsing_hillclimbing import RepulsingHillClimbing
-from ._simulated_annealing import SimulatedAnnealing
+from ._bayesian_optimization import BayesianOptimizer
+from ._differential_evolution import DifferentialEvolution
+from ._direct_algorithm import DirectAlgorithm
from ._downhill_simplex import DownhillSimplexOptimizer
-from ._random_search import RandomSearch
+from ._evolution_strategy import EvolutionStrategy
+from ._forest_optimizer import ForestOptimizer
+from ._genetic_algorithm import GeneticAlgorithm
from ._grid_search import GridSearch
-from ._random_restart_hill_climbing import RandomRestartHillClimbing
-from ._powells_method import PowellsMethod
-from ._pattern_search import PatternSearch
+from ._hillclimbing import HillClimbing
from ._lipschitz_optimization import LipschitzOptimizer
-from ._direct_algorithm import DirectAlgorithm
from ._parallel_tempering import ParallelTempering
from ._particle_swarm_optimization import ParticleSwarmOptimizer
+from ._pattern_search import PatternSearch
+from ._powells_method import PowellsMethod
+from ._random_restart_hill_climbing import RandomRestartHillClimbing
+from ._random_search import RandomSearch
+from ._repulsing_hillclimbing import RepulsingHillClimbing
+from ._simulated_annealing import SimulatedAnnealing
from ._spiral_optimization import SpiralOptimization
-from ._genetic_algorithm import GeneticAlgorithm
-from ._evolution_strategy import EvolutionStrategy
-from ._differential_evolution import DifferentialEvolution
-from ._bayesian_optimization import BayesianOptimizer
+from ._stochastic_hillclimbing import StochasticHillClimbing
from ._tree_structured_parzen_estimators import TreeStructuredParzenEstimators
-from ._forest_optimizer import ForestOptimizer
-
__all__ = [
"HillClimbing",
diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py
index 0d4f6b13..a687e336 100644
--- a/src/hyperactive/opt/gfo/_bayesian_optimization.py
+++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py
@@ -139,8 +139,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py
index 287b4fc6..a9a99687 100644
--- a/src/hyperactive/opt/gfo/_differential_evolution.py
+++ b/src/hyperactive/opt/gfo/_differential_evolution.py
@@ -128,8 +128,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py
index 9abf95de..cade954b 100644
--- a/src/hyperactive/opt/gfo/_direct_algorithm.py
+++ b/src/hyperactive/opt/gfo/_direct_algorithm.py
@@ -132,8 +132,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py
index 145a38ef..2d86cdb3 100644
--- a/src/hyperactive/opt/gfo/_downhill_simplex.py
+++ b/src/hyperactive/opt/gfo/_downhill_simplex.py
@@ -132,8 +132,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py
index 2df50c8c..6a9de8fb 100644
--- a/src/hyperactive/opt/gfo/_evolution_strategy.py
+++ b/src/hyperactive/opt/gfo/_evolution_strategy.py
@@ -138,8 +138,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py
index 5e971643..3af528d5 100644
--- a/src/hyperactive/opt/gfo/_forest_optimizer.py
+++ b/src/hyperactive/opt/gfo/_forest_optimizer.py
@@ -144,8 +144,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py
index aaf281dd..fa0bf92c 100644
--- a/src/hyperactive/opt/gfo/_genetic_algorithm.py
+++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py
@@ -141,8 +141,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py
index 191c869c..e7480df1 100644
--- a/src/hyperactive/opt/gfo/_grid_search.py
+++ b/src/hyperactive/opt/gfo/_grid_search.py
@@ -124,8 +124,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py
index d5ddfe56..11cc41e0 100644
--- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py
+++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py
@@ -132,8 +132,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py
index 9b8208ae..1ee57a86 100644
--- a/src/hyperactive/opt/gfo/_parallel_tempering.py
+++ b/src/hyperactive/opt/gfo/_parallel_tempering.py
@@ -127,8 +127,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py
index a24ac2f0..01944510 100644
--- a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py
+++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py
@@ -26,9 +26,11 @@ class ParticleSwarmOptimizer(_BaseGFOadapter):
inertia : float
The inertia of the swarm.
cognitive_weight : float
- A factor of the movement towards the personal best position of the individual optimizers in the population.
+ A factor of the movement towards the personal best position of the
+ individual optimizers in the population.
social_weight : float
- A factor of the movement towards the personal best position of the individual optimizers in the population.
+        A factor of the movement towards the global best position of the
+        individual optimizers in the population.
temp_weight : float
The temperature weight of the swarm.
n_iter : int, default=100
@@ -136,8 +138,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py
index ac8d6d27..e8b04578 100644
--- a/src/hyperactive/opt/gfo/_pattern_search.py
+++ b/src/hyperactive/opt/gfo/_pattern_search.py
@@ -24,7 +24,8 @@ class PatternSearch(_BaseGFOadapter):
n_positions : int
Number of positions that the pattern consists of.
pattern_size : float
- The initial size of the patterns in percentage of the size of the search space in the corresponding dimension.
+        The initial size of the patterns as a percentage of the size of the
+        search space in the corresponding dimension.
reduction : float
The factor that reduces the size of the pattern if no better position is found.
n_iter : int, default=100
@@ -128,8 +129,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py
index 00db4a54..d8ae3047 100644
--- a/src/hyperactive/opt/gfo/_powells_method.py
+++ b/src/hyperactive/opt/gfo/_powells_method.py
@@ -74,7 +74,7 @@ class PowellsMethod(_BaseGFOadapter):
"""
_tags = {
- "info:name": "Powell’s Method",
+ "info:name": "Powell's Method",
"info:local_vs_global": "local",
"info:explore_vs_exploit": "exploit",
"info:compute": "low",
@@ -125,8 +125,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py
index 8de5bafe..1ea898cb 100644
--- a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py
+++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py
@@ -126,8 +126,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py
index 06a25887..76aec077 100644
--- a/src/hyperactive/opt/gfo/_random_search.py
+++ b/src/hyperactive/opt/gfo/_random_search.py
@@ -113,8 +113,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py
index e76ba5d7..6560f098 100644
--- a/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py
+++ b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py
@@ -138,8 +138,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py
index 6a169238..d1c31dbb 100644
--- a/src/hyperactive/opt/gfo/_simulated_annealing.py
+++ b/src/hyperactive/opt/gfo/_simulated_annealing.py
@@ -137,8 +137,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py
index e7fc457c..c3b6ad2e 100644
--- a/src/hyperactive/opt/gfo/_spiral_optimization.py
+++ b/src/hyperactive/opt/gfo/_spiral_optimization.py
@@ -25,8 +25,11 @@ class SpiralOptimization(_BaseGFOadapter):
population : int
The number of particles in the swarm.
decay_rate : float
- This parameter is a factor, that influences the radius of the particles during their spiral movement.
- Lower values accelerates the convergence of the particles to the best known position, while values above 1 eventually lead to a movement where the particles spiral away from each other.
+        A factor that influences the radius of the particles during their
+        spiral movement.
+        Lower values accelerate the convergence of the particles to the best
+        known position, while values above 1 eventually lead to a movement in
+        which the particles spiral away from each other.
n_iter : int, default=100
The number of iterations to run the optimizer.
verbose : bool, default=False
@@ -126,8 +129,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py
index 7b8f7e40..73e9f100 100644
--- a/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py
+++ b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py
@@ -138,8 +138,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py
index 765307b2..7631f70c 100644
--- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py
+++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py
@@ -137,8 +137,6 @@ def get_test_params(cls, parameter_set="default"):
dict with str keys
The test parameters dictionary.
"""
- import numpy as np
-
params = super().get_test_params()
experiment = params[0]["experiment"]
more_params = {
diff --git a/src/hyperactive/opt/gridsearch/_sk.py b/src/hyperactive/opt/gridsearch/_sk.py
index 84e97625..d72b97aa 100644
--- a/src/hyperactive/opt/gridsearch/_sk.py
+++ b/src/hyperactive/opt/gridsearch/_sk.py
@@ -4,7 +4,6 @@
from collections.abc import Sequence
import numpy as np
-
from sklearn.model_selection import ParameterGrid
from hyperactive.base import BaseOptimizer
@@ -170,5 +169,5 @@ def get_test_params(cls, parameter_set="default"):
"experiment": ackley_exp,
"param_grid": param_grid,
}
-
+
return [params_sklearn, params_ackley]
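
`ParameterGrid`, imported at the top of this file, expands a dict of lists into the full cartesian product; a quick illustration with made-up values:

```python
# ParameterGrid expands a dict of lists into all combinations.
from sklearn.model_selection import ParameterGrid

grid = list(ParameterGrid({"C": [1, 10], "kernel": ["linear", "rbf"]}))
assert len(grid) == 4  # 2 x 2 combinations
```
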
diff --git a/src/hyperactive/opt/random_search.py b/src/hyperactive/opt/random_search.py
index 9f07ab3e..33840920 100644
--- a/src/hyperactive/opt/random_search.py
+++ b/src/hyperactive/opt/random_search.py
@@ -5,7 +5,6 @@
from collections.abc import Sequence
import numpy as np
-
from sklearn.model_selection import ParameterSampler
from hyperactive.base import BaseOptimizer
@@ -68,7 +67,7 @@ def _check_param_distributions(self, param_distributions):
for p in param_distributions:
for name, v in p.items():
if self._is_distribution(v):
- # Assume scipy frozen distribution – nothing to check
+ # Assume scipy frozen distribution - nothing to check
continue
if isinstance(v, np.ndarray) and v.ndim > 1:
@@ -83,7 +82,8 @@ def _check_param_distributions(self, param_distributions):
if len(v) == 0:
raise ValueError(
- f"Parameter values for ({name}) need to be a non-empty sequence."
+ f"Parameter values for ({name}) need to be a "
+ "non-empty sequence."
)
def _run(
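
The validation above passes scipy frozen distributions straight through because `ParameterSampler` can draw from them directly; an illustrative draw with assumed values:

```python
# Illustrative ParameterSampler draw; the distribution is an example only.
from scipy.stats import uniform
from sklearn.model_selection import ParameterSampler

sampler = ParameterSampler({"C": uniform(0, 1)}, n_iter=3, random_state=0)
assert len(list(sampler)) == 3
```
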
diff --git a/src/hyperactive/optimizers/__init__.py b/src/hyperactive/optimizers/__init__.py
index c476012e..c1a087b7 100644
--- a/src/hyperactive/optimizers/__init__.py
+++ b/src/hyperactive/optimizers/__init__.py
@@ -1,55 +1,8 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Optimizers package for Hyperactive.
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
-from .optimizers import (
- HillClimbingOptimizer,
- StochasticHillClimbingOptimizer,
- RepulsingHillClimbingOptimizer,
- SimulatedAnnealingOptimizer,
- DownhillSimplexOptimizer,
- RandomSearchOptimizer,
- GridSearchOptimizer,
- RandomRestartHillClimbingOptimizer,
- RandomAnnealingOptimizer,
- PowellsMethod,
- PatternSearch,
- ParallelTemperingOptimizer,
- ParticleSwarmOptimizer,
- SpiralOptimization,
- GeneticAlgorithmOptimizer,
- EvolutionStrategyOptimizer,
- DifferentialEvolutionOptimizer,
- BayesianOptimizer,
- LipschitzOptimizer,
- DirectAlgorithm,
- TreeStructuredParzenEstimators,
- ForestOptimizer,
-)
-
-
-__all__ = [
- "HillClimbingOptimizer",
- "StochasticHillClimbingOptimizer",
- "RepulsingHillClimbingOptimizer",
- "SimulatedAnnealingOptimizer",
- "DownhillSimplexOptimizer",
- "RandomSearchOptimizer",
- "GridSearchOptimizer",
- "RandomRestartHillClimbingOptimizer",
- "RandomAnnealingOptimizer",
- "PowellsMethod",
- "PatternSearch",
- "ParallelTemperingOptimizer",
- "ParticleSwarmOptimizer",
- "SpiralOptimization",
- "GeneticAlgorithmOptimizer",
- "EvolutionStrategyOptimizer",
- "DifferentialEvolutionOptimizer",
- "BayesianOptimizer",
- "LipschitzOptimizer",
- "DirectAlgorithm",
- "TreeStructuredParzenEstimators",
- "ForestOptimizer",
-]
+from .optimizers import * # noqa: F403
diff --git a/src/hyperactive/optimizers/constraint.py b/src/hyperactive/optimizers/constraint.py
index f6d28f93..bec8e764 100644
--- a/src/hyperactive/optimizers/constraint.py
+++ b/src/hyperactive/optimizers/constraint.py
@@ -1,9 +1,11 @@
-# Author: Simon Blanke
+"""constraint module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
def gfo2hyper(search_space, para):
+ """Gfo2Hyper function."""
values_dict = {}
for key, values in search_space.items():
pos_ = int(para[key])
@@ -13,10 +15,13 @@ def gfo2hyper(search_space, para):
class Constraint:
+ """Constraint class."""
+
def __init__(self, constraint, search_space):
self.constraint = constraint
self.search_space = search_space
def __call__(self, para):
+ """Call constraint on parameters."""
para = gfo2hyper(self.search_space, para)
return self.constraint(para)
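
`gfo2hyper` maps optimizer positions (integer indices) back to search-space values before the user constraint runs; a worked example with assumed values:

```python
# Worked example of the position-to-value mapping performed by gfo2hyper.
search_space = {"x": [0.0, 0.5, 1.0]}
para = {"x": 2}  # position index reported by the GFO backend

values = {key: vals[int(para[key])] for key, vals in search_space.items()}
assert values == {"x": 1.0}
```
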
diff --git a/src/hyperactive/optimizers/dictionary.py b/src/hyperactive/optimizers/dictionary.py
index ca30e652..f31d0b2b 100644
--- a/src/hyperactive/optimizers/dictionary.py
+++ b/src/hyperactive/optimizers/dictionary.py
@@ -1,17 +1,23 @@
-# Author: Simon Blanke
+"""dictionary module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
class DictClass:
+ """DictClass class."""
+
def __init__(self):
self.para_dict = {}
def __getitem__(self, key):
+ """Get item from parameter dictionary."""
return self.para_dict[key]
def keys(self):
+ """Keys function."""
return self.para_dict.keys()
def values(self):
+ """Values function."""
return self.para_dict.values()
diff --git a/src/hyperactive/optimizers/hyper_gradient_conv.py b/src/hyperactive/optimizers/hyper_gradient_conv.py
index d06d9aa8..b7161d93 100644
--- a/src/hyperactive/optimizers/hyper_gradient_conv.py
+++ b/src/hyperactive/optimizers/hyper_gradient_conv.py
@@ -1,4 +1,5 @@
-# Author: Simon Blanke
+"""hyper_gradient_conv module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
@@ -7,28 +8,34 @@
class HyperGradientConv:
+ """HyperGradientConv class."""
+
def __init__(self, s_space):
self.s_space = s_space
def value2position(self, value: list) -> list:
+ """Convert values to positions."""
return [
np.abs(v - np.array(space_dim)).argmin()
for v, space_dim in zip(value, self.s_space.values_l)
]
def value2para(self, value: list) -> dict:
+ """Convert values to parameters."""
return {key: p for key, p in zip(self.s_space.dim_keys, value)}
def para2value(self, para: dict) -> list:
+ """Convert parameters to values."""
return [para[para_name] for para_name in self.s_space.dim_keys]
def position2value(self, position):
+ """Position2Value function."""
return [
- space_dim[pos]
- for pos, space_dim in zip(position, self.s_space.values_l)
+ space_dim[pos] for pos, space_dim in zip(position, self.s_space.values_l)
]
def para_func2str(self, para):
+ """Para Func2Str function."""
return {
dim_key: (
para[dim_key].__name__
@@ -39,12 +46,14 @@ def para_func2str(self, para):
}
def value_func2str(self, value):
+ """Value Func2Str function."""
try:
return value.__name__
- except:
+ except AttributeError:
return value
def conv_para(self, para_hyper):
+ """Conv Para function."""
para_gfo = {}
for para in self.s_space.dim_keys:
value_hyper = para_hyper[para]
@@ -58,30 +67,30 @@ def conv_para(self, para_hyper):
if value_hyper in space_dim:
value_gfo = space_dim.index(value_hyper)
else:
- raise ValueError(
- f"'{value_hyper}' was not found in '{para}'"
- )
+ raise ValueError(f"'{value_hyper}' was not found in '{para}'")
para_gfo[para] = value_gfo
return para_gfo
def conv_initialize(self, initialize):
+ """Conv Initialize function."""
if "warm_start" in initialize:
warm_start_l = initialize["warm_start"]
- warm_start_gfo = [
- self.conv_para(warm_start) for warm_start in warm_start_l
- ]
+ warm_start_gfo = [self.conv_para(warm_start) for warm_start in warm_start_l]
initialize["warm_start"] = warm_start_gfo
return initialize
def get_list_positions(self, list1_values, search_dim):
+ """Get List Positions function."""
return [search_dim.index(value2) for value2 in list1_values]
def values2positions(self, values, search_dim):
+ """Values2Positions function."""
return np.array(search_dim).searchsorted(values)
def positions2results(self, positions):
+ """Positions2Results function."""
results_dict = {}
for para_name in self.s_space.dim_keys:
@@ -98,6 +107,7 @@ def positions2results(self, positions):
return results
def conv_memory_warm_start(self, results):
+ """Conv Memory Warm Start function."""
if results is None:
return results
@@ -113,13 +123,9 @@ def conv_memory_warm_start(self, results):
self.value_func2str(value) for value in result_dim_values
]
- list1_positions = self.get_list_positions(
- result_dim_values, search_dim
- )
+ list1_positions = self.get_list_positions(result_dim_values, search_dim)
else:
- list1_positions = self.values2positions(
- result_dim_values, search_dim
- )
+ list1_positions = self.values2positions(result_dim_values, search_dim)
df_positions_dict[dim_key] = list1_positions
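
`value2position` snaps each value to the nearest grid point of its dimension; a one-dimensional sketch with an assumed `values_l` layout:

```python
# One-dimensional sketch of value2position's nearest-grid-point mapping.
import numpy as np

values_l = [np.array([0.0, 1.0, 2.0])]  # assumed per-dimension value grid
value = [1.2]

positions = [np.abs(v - space_dim).argmin() for v, space_dim in zip(value, values_l)]
assert positions == [1]  # 1.2 is closest to grid point 1.0
```
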
diff --git a/src/hyperactive/optimizers/hyper_optimizer.py b/src/hyperactive/optimizers/hyper_optimizer.py
index c7710023..156b0010 100644
--- a/src/hyperactive/optimizers/hyper_optimizer.py
+++ b/src/hyperactive/optimizers/hyper_optimizer.py
@@ -1,16 +1,17 @@
-# Author: Simon Blanke
+"""hyper_optimizer module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
-import numpy as np
-
-from .objective_function import ObjectiveFunction
+from .constraint import Constraint
from .hyper_gradient_conv import HyperGradientConv
+from .objective_function import ObjectiveFunction
from .optimizer_attributes import OptimizerAttributes
-from .constraint import Constraint
class HyperOptimizer(OptimizerAttributes):
+ """HyperOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__()
self.opt_params = opt_params
@@ -32,6 +33,7 @@ def setup_search(
memory_warm_start,
verbosity,
):
+ """Set up search parameters."""
self.objective_function = objective_function
self.s_space = s_space
self.n_iter = n_iter
@@ -54,6 +56,7 @@ def setup_search(
self.verbosity = []
def convert_results2hyper(self):
+ """Convert Results2Hyper function."""
self.eval_times = sum(self.gfo_optimizer.eval_times)
self.iter_times = sum(self.gfo_optimizer.iter_times)
@@ -86,15 +89,12 @@ def _setup_process(self, nth_process):
# conv warm start for smbo from values into positions
if "warm_start_smbo" in self.opt_params:
- self.opt_params["warm_start_smbo"] = (
- self.hg_conv.conv_memory_warm_start(
- self.opt_params["warm_start_smbo"]
- )
+ self.opt_params["warm_start_smbo"] = self.hg_conv.conv_memory_warm_start(
+ self.opt_params["warm_start_smbo"]
)
gfo_constraints = [
- Constraint(constraint, self.s_space)
- for constraint in self.constraints
+ Constraint(constraint, self.s_space) for constraint in self.constraints
]
self.gfo_optimizer = self.optimizer_class(
@@ -109,6 +109,7 @@ def _setup_process(self, nth_process):
self.conv = self.gfo_optimizer.conv
def search(self, nth_process, p_bar):
+ """Search function."""
self._setup_process(nth_process)
gfo_wrapper_model = ObjectiveFunction(
@@ -120,9 +121,7 @@ def search(self, nth_process, p_bar):
)
gfo_wrapper_model.pass_through = self.pass_through
- memory_warm_start = self.hg_conv.conv_memory_warm_start(
- self.memory_warm_start
- )
+ memory_warm_start = self.hg_conv.conv_memory_warm_start(self.memory_warm_start)
gfo_objective_function = gfo_wrapper_model(self.s_space())
@@ -156,9 +155,7 @@ def search(self, nth_process, p_bar):
p_bar.set_postfix(
best_score=str(gfo_wrapper_model.optimizer.score_best),
best_pos=str(gfo_wrapper_model.optimizer.pos_best),
- best_iter=str(
- gfo_wrapper_model.optimizer.p_bar._best_since_iter
- ),
+ best_iter=str(gfo_wrapper_model.optimizer.p_bar._best_since_iter),
)
p_bar.update(1)
diff --git a/src/hyperactive/optimizers/objective_function.py b/src/hyperactive/optimizers/objective_function.py
index 7e8c80f2..2b4e38d6 100644
--- a/src/hyperactive/optimizers/objective_function.py
+++ b/src/hyperactive/optimizers/objective_function.py
@@ -1,12 +1,13 @@
-# Author: Simon Blanke
+"""objective_function module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
-
from .dictionary import DictClass
def gfo2hyper(search_space, para):
+ """Gfo2Hyper function."""
values_dict = {}
for _, key in enumerate(search_space.keys()):
pos_ = int(para[key])
@@ -16,6 +17,8 @@ def gfo2hyper(search_space, para):
class ObjectiveFunction(DictClass):
+ """ObjectiveFunction class."""
+
def __init__(self, objective_function, optimizer, callbacks, catch, nth_process):
super().__init__()
@@ -28,10 +31,13 @@ def __init__(self, objective_function, optimizer, callbacks, catch, nth_process)
self.nth_iter = 0
def run_callbacks(self, type_):
+ """Run Callbacks function."""
if self.callbacks and type_ in self.callbacks:
[callback(self) for callback in self.callbacks[type_]]
def __call__(self, search_space):
+ """Make object callable with search space."""
+
# wrapper for GFOs
def _model(para):
self.nth_iter = len(self.optimizer.pos_l)
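
`run_callbacks` dispatches on a type key; a toy sketch of the mechanism, where the `"after"` key and the callback signature are assumed for illustration:

```python
# Toy sketch of the type-keyed callback dispatch shown above.
class _Obj:
    nth_iter = 1


callbacks = {"after": [lambda obj: print("iteration", obj.nth_iter)]}

type_ = "after"
if callbacks and type_ in callbacks:
    for callback in callbacks[type_]:
        callback(_Obj())
```
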
diff --git a/src/hyperactive/optimizers/optimizer_attributes.py b/src/hyperactive/optimizers/optimizer_attributes.py
index 4ef67b7b..a72a20ce 100644
--- a/src/hyperactive/optimizers/optimizer_attributes.py
+++ b/src/hyperactive/optimizers/optimizer_attributes.py
@@ -1,9 +1,12 @@
-# Author: Simon Blanke
+"""optimizer_attributes module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
class OptimizerAttributes:
+ """OptimizerAttributes class."""
+
def __init__(self):
self.best_para = None
self.best_score = None
diff --git a/src/hyperactive/optimizers/optimizers.py b/src/hyperactive/optimizers/optimizers.py
index 61a30a51..34dce684 100644
--- a/src/hyperactive/optimizers/optimizers.py
+++ b/src/hyperactive/optimizers/optimizers.py
@@ -1,170 +1,260 @@
-# Author: Simon Blanke
+"""optimizers module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
-
-from .hyper_optimizer import HyperOptimizer
-
from gradient_free_optimizers import (
- HillClimbingOptimizer as _HillClimbingOptimizer,
- StochasticHillClimbingOptimizer as _StochasticHillClimbingOptimizer,
- RepulsingHillClimbingOptimizer as _RepulsingHillClimbingOptimizer,
- SimulatedAnnealingOptimizer as _SimulatedAnnealingOptimizer,
+ BayesianOptimizer as _BayesianOptimizer,
+)
+from gradient_free_optimizers import (
+ DifferentialEvolutionOptimizer as _DifferentialEvolutionOptimizer,
+)
+from gradient_free_optimizers import (
+ DirectAlgorithm as _DirectAlgorithm_,
+)
+from gradient_free_optimizers import (
DownhillSimplexOptimizer as _DownhillSimplexOptimizer,
- RandomSearchOptimizer as _RandomSearchOptimizer,
+)
+from gradient_free_optimizers import (
+ EnsembleOptimizer as _EnsembleOptimizer,
+)
+from gradient_free_optimizers import (
+ EvolutionStrategyOptimizer as _EvolutionStrategyOptimizer,
+)
+from gradient_free_optimizers import (
+ ForestOptimizer as _ForestOptimizer,
+)
+from gradient_free_optimizers import (
+ GeneticAlgorithmOptimizer as _GeneticAlgorithmOptimizer,
+)
+from gradient_free_optimizers import (
GridSearchOptimizer as _GridSearchOptimizer,
- RandomRestartHillClimbingOptimizer as _RandomRestartHillClimbingOptimizer,
- RandomAnnealingOptimizer as _RandomAnnealingOptimizer,
- PowellsMethod as _PowellsMethod,
- PatternSearch as _PatternSearch,
+)
+from gradient_free_optimizers import (
+ HillClimbingOptimizer as _HillClimbingOptimizer,
+)
+from gradient_free_optimizers import (
+ LipschitzOptimizer as _LipschitzOptimizer_,
+)
+from gradient_free_optimizers import (
ParallelTemperingOptimizer as _ParallelTemperingOptimizer,
+)
+from gradient_free_optimizers import (
ParticleSwarmOptimizer as _ParticleSwarmOptimizer,
+)
+from gradient_free_optimizers import (
+ PatternSearch as _PatternSearch,
+)
+from gradient_free_optimizers import (
+ PowellsMethod as _PowellsMethod,
+)
+from gradient_free_optimizers import (
+ RandomAnnealingOptimizer as _RandomAnnealingOptimizer,
+)
+from gradient_free_optimizers import (
+ RandomRestartHillClimbingOptimizer as _RandomRestartHillClimbingOptimizer,
+)
+from gradient_free_optimizers import (
+ RandomSearchOptimizer as _RandomSearchOptimizer,
+)
+from gradient_free_optimizers import (
+ RepulsingHillClimbingOptimizer as _RepulsingHillClimbingOptimizer,
+)
+from gradient_free_optimizers import (
+ SimulatedAnnealingOptimizer as _SimulatedAnnealingOptimizer,
+)
+from gradient_free_optimizers import (
SpiralOptimization as _SpiralOptimization_,
- GeneticAlgorithmOptimizer as _GeneticAlgorithmOptimizer,
- EvolutionStrategyOptimizer as _EvolutionStrategyOptimizer,
- DifferentialEvolutionOptimizer as _DifferentialEvolutionOptimizer,
- BayesianOptimizer as _BayesianOptimizer,
- LipschitzOptimizer as _LipschitzOptimizer_,
- DirectAlgorithm as _DirectAlgorithm_,
+)
+from gradient_free_optimizers import (
+ StochasticHillClimbingOptimizer as _StochasticHillClimbingOptimizer,
+)
+from gradient_free_optimizers import (
TreeStructuredParzenEstimators as _TreeStructuredParzenEstimators,
- ForestOptimizer as _ForestOptimizer,
- EnsembleOptimizer as _EnsembleOptimizer,
)
+from .hyper_optimizer import HyperOptimizer
+
class HillClimbingOptimizer(HyperOptimizer):
+ """HillClimbingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _HillClimbingOptimizer
class StochasticHillClimbingOptimizer(HyperOptimizer):
+ """StochasticHillClimbingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _StochasticHillClimbingOptimizer
class RepulsingHillClimbingOptimizer(HyperOptimizer):
+ """RepulsingHillClimbingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _RepulsingHillClimbingOptimizer
class SimulatedAnnealingOptimizer(HyperOptimizer):
+ """SimulatedAnnealingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _SimulatedAnnealingOptimizer
class DownhillSimplexOptimizer(HyperOptimizer):
+ """DownhillSimplexOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _DownhillSimplexOptimizer
class RandomSearchOptimizer(HyperOptimizer):
+ """RandomSearchOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _RandomSearchOptimizer
class GridSearchOptimizer(HyperOptimizer):
+ """GridSearchOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _GridSearchOptimizer
class RandomRestartHillClimbingOptimizer(HyperOptimizer):
+ """RandomRestartHillClimbingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _RandomRestartHillClimbingOptimizer
class RandomAnnealingOptimizer(HyperOptimizer):
+ """RandomAnnealingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _RandomAnnealingOptimizer
class PowellsMethod(HyperOptimizer):
+ """PowellsMethod class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _PowellsMethod
class PatternSearch(HyperOptimizer):
+ """PatternSearch class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _PatternSearch
class ParallelTemperingOptimizer(HyperOptimizer):
+ """ParallelTemperingOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _ParallelTemperingOptimizer
class ParticleSwarmOptimizer(HyperOptimizer):
+ """ParticleSwarmOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _ParticleSwarmOptimizer
class SpiralOptimization(HyperOptimizer):
+ """SpiralOptimization class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _SpiralOptimization_
class GeneticAlgorithmOptimizer(HyperOptimizer):
+ """GeneticAlgorithmOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _GeneticAlgorithmOptimizer
class EvolutionStrategyOptimizer(HyperOptimizer):
+ """EvolutionStrategyOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _EvolutionStrategyOptimizer
class DifferentialEvolutionOptimizer(HyperOptimizer):
+ """DifferentialEvolutionOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _DifferentialEvolutionOptimizer
class BayesianOptimizer(HyperOptimizer):
+ """BayesianOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _BayesianOptimizer
class LipschitzOptimizer(HyperOptimizer):
+ """LipschitzOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _LipschitzOptimizer_
class DirectAlgorithm(HyperOptimizer):
+ """DirectAlgorithm class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _DirectAlgorithm_
class TreeStructuredParzenEstimators(HyperOptimizer):
+ """TreeStructuredParzenEstimators class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _TreeStructuredParzenEstimators
class ForestOptimizer(HyperOptimizer):
+ """ForestOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _ForestOptimizer
class EnsembleOptimizer(HyperOptimizer):
+ """EnsembleOptimizer class."""
+
def __init__(self, **opt_params):
super().__init__(**opt_params)
self.optimizer_class = _EnsembleOptimizer
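
Each wrapper above only records its backend class and forwards keyword arguments; a hypothetical construction (`epsilon` is an illustrative backend option, not taken from this diff):

```python
# Hypothetical construction of a wrapper optimizer; epsilon is illustrative.
from hyperactive.optimizers import HillClimbingOptimizer

opt = HillClimbingOptimizer(epsilon=0.05)
assert opt.opt_params == {"epsilon": 0.05}  # stored by HyperOptimizer.__init__
```
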
diff --git a/src/hyperactive/optimizers/strategies/__init__.py b/src/hyperactive/optimizers/strategies/__init__.py
index 805bb629..3cf615ce 100644
--- a/src/hyperactive/optimizers/strategies/__init__.py
+++ b/src/hyperactive/optimizers/strategies/__init__.py
@@ -1,11 +1,14 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Optimization strategies package for Hyperactive.
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
from .custom_optimization_strategy import CustomOptimizationStrategy
-
+from .optimization_strategy import BaseOptimizationStrategy
__all__ = [
"CustomOptimizationStrategy",
+ "BaseOptimizationStrategy",
]
diff --git a/src/hyperactive/optimizers/strategies/custom_optimization_strategy.py b/src/hyperactive/optimizers/strategies/custom_optimization_strategy.py
index ccf7c5f1..785cde15 100644
--- a/src/hyperactive/optimizers/strategies/custom_optimization_strategy.py
+++ b/src/hyperactive/optimizers/strategies/custom_optimization_strategy.py
@@ -1,4 +1,5 @@
-# Author: Simon Blanke
+"""custom_optimization_strategy module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
@@ -6,6 +7,8 @@
class CustomOptimizationStrategy(BaseOptimizationStrategy):
+ """CustomOptimizationStrategy class."""
+
def __init__(self):
super().__init__()
@@ -13,6 +16,7 @@ def __init__(self):
self.duration_sum = 0
def add_optimizer(self, optimizer, duration=1, early_stopping=None):
+ """Add Optimizer function."""
self.duration_sum += duration
optimizer_setup = {
"optimizer": optimizer,
diff --git a/src/hyperactive/optimizers/strategies/optimization_strategy.py b/src/hyperactive/optimizers/strategies/optimization_strategy.py
index 0be4d00e..4dfc7370 100644
--- a/src/hyperactive/optimizers/strategies/optimization_strategy.py
+++ b/src/hyperactive/optimizers/strategies/optimization_strategy.py
@@ -1,12 +1,14 @@
-# Author: Simon Blanke
+"""optimization_strategy module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
-
from .optimizer_attributes import OptimizerAttributes
class BaseOptimizationStrategy(OptimizerAttributes):
+ """BaseOptimizationStrategy class."""
+
def __init__(self):
super().__init__()
@@ -27,6 +29,7 @@ def setup_search(
memory_warm_start,
verbosity,
):
+ """Set up search parameters."""
self.objective_function = objective_function
self.s_space = s_space
self.n_iter = n_iter
@@ -52,16 +55,19 @@ def setup_search(
@property
def max_time(self):
+ """Max Time function."""
return self._max_time
@max_time.setter
def max_time(self, value):
+ """Max Time function."""
self._max_time = value
for optimizer_setup in self.optimizer_setup_l:
optimizer_setup["optimizer"].max_time = value
def search(self, nth_process, p_bar):
+ """Search function."""
for optimizer_setup in self.optimizer_setup_l:
hyper_opt = optimizer_setup["optimizer"]
duration = optimizer_setup["duration"]
diff --git a/src/hyperactive/optimizers/strategies/optimizer_attributes.py b/src/hyperactive/optimizers/strategies/optimizer_attributes.py
index 894f0c13..d23ef375 100644
--- a/src/hyperactive/optimizers/strategies/optimizer_attributes.py
+++ b/src/hyperactive/optimizers/strategies/optimizer_attributes.py
@@ -1,4 +1,5 @@
-# Author: Simon Blanke
+"""optimizer_attributes module for Hyperactive optimization."""
+
# Email: simon.blanke@yahoo.com
# License: MIT License
@@ -6,6 +7,8 @@
class OptimizerAttributes:
+ """OptimizerAttributes class."""
+
def __init__(self):
self.best_para = None
self.best_score = None
diff --git a/src/hyperactive/print_results.py b/src/hyperactive/print_results.py
index e5263e84..a7251f69 100644
--- a/src/hyperactive/print_results.py
+++ b/src/hyperactive/print_results.py
@@ -1,14 +1,20 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Results printing utilities for hyperparameter optimization.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
import logging
+
import numpy as np
indent = " "
class PrintResults:
+ """PrintResults class."""
+
def __init__(self, opt_pros, verbosity):
self.opt_pros = opt_pros
self.verbosity = verbosity
@@ -23,7 +29,7 @@ def _print_times(self, eval_time, iter_time, n_iter):
eval_time,
"sec",
indent,
- "[{} %]".format(round(eval_time / iter_time * 100, 2)),
+ f"[{round(eval_time / iter_time * 100, 2)} %]",
)
print(
indent,
@@ -31,7 +37,7 @@ def _print_times(self, eval_time, iter_time, n_iter):
opt_time,
"sec",
indent,
- "[{} %]".format(round(opt_time / iter_time * 100, 2)),
+ f"[{round(opt_time / iter_time * 100, 2)} %]",
)
if iterPerSec >= 1:
print(
@@ -40,7 +46,7 @@ def _print_times(self, eval_time, iter_time, n_iter):
iter_time,
"sec",
indent,
- "[{} iter/sec]".format(round(iterPerSec, 2)),
+ f"[{round(iterPerSec, 2)} iter/sec]",
)
else:
secPerIter = iter_time / n_iter
@@ -50,11 +56,12 @@ def _print_times(self, eval_time, iter_time, n_iter):
iter_time,
"sec",
indent,
- "[{} sec/iter]".format(round(secPerIter, 2)),
+ f"[{round(secPerIter, 2)} sec/iter]",
)
print(" ")
def align_para_names(self, para_names):
+ """Align Para Names function."""
str_lengths = [len(str_) for str_ in para_names]
max_length = max(str_lengths)
@@ -74,7 +81,7 @@ def _print_results(
best_additional_results,
random_seed,
):
- print("\nResults: '{}'".format(objective_function.__name__), " ")
+ print(f"\nResults: '{objective_function.__name__}'", " ")
if best_para is None:
print(indent, "Best score:", best_score, " ")
print(indent, "Best parameter set:", best_para, " ")
@@ -93,8 +100,8 @@ def _print_results(
print(
indent,
indent,
- "'{}'".format(best_additional_result),
- "{}:".format(added_spaces),
+ f"'{best_additional_result}'",
+ f"{added_spaces}:",
best_additional_results[best_additional_result],
" ",
)
@@ -109,8 +116,8 @@ def _print_results(
print(
indent,
indent,
- "'{}'".format(para_key),
- "{}:".format(added_spaces),
+ f"'{para_key}'",
+ f"{added_spaces}:",
best_para[para_key],
" ",
)
@@ -122,6 +129,7 @@ def _print_results(
print(" ")
def print_process(self, results, nth_process):
+ """Print Process function."""
verbosity = self.verbosity
objective_function = self.opt_pros[nth_process].objective_function
search_space = self.opt_pros[nth_process].s_space.search_space
diff --git a/src/hyperactive/process.py b/src/hyperactive/process.py
index f008dc80..0e550d1d 100644
--- a/src/hyperactive/process.py
+++ b/src/hyperactive/process.py
@@ -1,7 +1,9 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Process handling for hyperparameter optimization.
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
from tqdm import tqdm
diff --git a/src/hyperactive/results.py b/src/hyperactive/results.py
index 9b014e2b..f793354e 100644
--- a/src/hyperactive/results.py
+++ b/src/hyperactive/results.py
@@ -1,13 +1,17 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Results handling for hyperparameter optimization.
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
import numpy as np
import pandas as pd
class Results:
+ """Results class."""
+
def __init__(self, results_list, opt_pros):
self.results_list = results_list
self.opt_pros = opt_pros
@@ -62,6 +66,7 @@ def _get_result(self, id_, result_name):
return search_data
def best_para(self, id_):
+ """Best Para function."""
best_para_ = self._get_result(id_, "best_para")
if best_para_ is not None:
@@ -70,6 +75,7 @@ def best_para(self, id_):
raise ValueError("objective function name not recognized")
def best_score(self, id_):
+ """Best Score function."""
best_score_ = self._get_result(id_, "best_score")
if best_score_ != -np.inf:
@@ -78,10 +84,9 @@ def best_score(self, id_):
raise ValueError("objective function name not recognized")
def search_data(self, id_):
+ """Search Data function."""
search_data = self._get_result(id_, "search_data")
- params = self.objFunc2results[id_]["params"]
-
if search_data is not None:
return search_data
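The hunk above also drops an unused lookup of self.objFunc2results. The remaining accessors share one convention: a sentinel value means "no result recorded". A minimal sketch of that convention (the data layout here is illustrative):

    import numpy as np

    results = {"run-1": {"best_score": 0.97}}

    def best_score(id_):
        # -np.inf marks "no score recorded for this id".
        score = results.get(id_, {}).get("best_score", -np.inf)
        if score != -np.inf:
            return score
        raise ValueError("objective function name not recognized")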
diff --git a/src/hyperactive/run_search.py b/src/hyperactive/run_search.py
index d50eb0ef..b6a21b11 100644
--- a/src/hyperactive/run_search.py
+++ b/src/hyperactive/run_search.py
@@ -1,18 +1,22 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Search execution utilities for hyperparameter optimization.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
from .distribution import (
- single_process,
joblib_wrapper,
multiprocessing_wrapper,
pathos_wrapper,
+ single_process,
)
from .process import _process_
def proxy(args):
+    """Unpack the argument tuple and forward it to the process function."""
return _process_(*args)
@@ -38,6 +41,7 @@ def _get_distribution(distribution):
def run_search(opt_pros, distribution, n_processes):
+    """Run all search processes with the selected distribution backend."""
process_infos = list(opt_pros.items())
if n_processes == "auto":
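proxy exists because pool-style map APIs hand each worker a single object; unpacking inside a module-level function keeps the call target picklable. A minimal sketch of the pattern with a stand-in worker:

    from multiprocessing import Pool

    def _work(nth_process, name):
        return nth_process, name

    def proxy(args):
        # map() passes one object per task, so unpack the tuple here.
        return _work(*args)

    if __name__ == "__main__":
        with Pool(2) as pool:
            print(pool.map(proxy, [(0, "opt-a"), (1, "opt-b")]))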
diff --git a/src/hyperactive/search_space.py b/src/hyperactive/search_space.py
index cb90e092..3364267d 100644
--- a/src/hyperactive/search_space.py
+++ b/src/hyperactive/search_space.py
@@ -1,28 +1,39 @@
-# Author: Simon Blanke
-# Email: simon.blanke@yahoo.com
-# License: MIT License
+"""Search space utilities for hyperparameter optimization.
+
+Author: Simon Blanke
+Email: simon.blanke@yahoo.com
+License: MIT License
+"""
import numpy as np
class DictClass:
+    """Dictionary-like wrapper around a search space."""
+
def __init__(self, search_space):
self.search_space = search_space
def __getitem__(self, key):
+ """Get item from search space."""
return self.search_space[key]
def keys(self):
+        """Return the keys of the search space."""
return self.search_space.keys()
def values(self):
+        """Return the values of the search space."""
return self.search_space.values()
def items(self):
+        """Return the items of the search space."""
return self.search_space.items()
class SearchSpace(DictClass):
+    """Validated search space with dimension type information."""
+
def __init__(self, search_space):
super().__init__(search_space)
self.search_space = search_space
@@ -42,16 +53,18 @@ def __init__(self, search_space):
self.func2str = self._create_num_str_ss()
def __call__(self):
+        """Return the search space dictionary."""
return self.search_space
def dim_types(self):
+        """Classify each dimension as containing numbers or objects."""
data_types = {}
for dim_key in self.dim_keys:
dim_values = np.array(list(self.search_space[dim_key]))
try:
np.subtract(dim_values, dim_values)
np.array(dim_values).searchsorted(dim_values)
- except:
+ except (TypeError, ValueError):
_type_ = "object"
else:
_type_ = "number"
@@ -71,39 +84,43 @@ def _create_num_str_ss(self):
for value in dim_values:
try:
func_name = value.__name__
- except:
+ except AttributeError:
func_name = value
func2str[dim_key].append(func_name)
return func2str
def check_list(self):
+        """Check that every search space dimension is a list."""
for dim_key in self.dim_keys:
search_dim = self.search_space[dim_key]
- err_msg = "\n Value in '{}' of search space dictionary must be of type list \n".format(
- dim_key
+ err_msg = (
+ f"\n Value in '{dim_key}' of search space dictionary must be of "
+ "type list \n"
)
if not isinstance(search_dim, list):
raise ValueError(err_msg)
@staticmethod
def is_function(value):
+        """Return True if the value is a function-like object."""
try:
value.__name__
- except:
+ except AttributeError:
return False
else:
return True
@staticmethod
def is_number(value):
+        """Return True if the value behaves like a number."""
try:
float(value)
value * 0.1
value - 0.1
value / 0.1
- except:
+ except (TypeError, ValueError, ZeroDivisionError):
return False
else:
return True
@@ -115,23 +132,24 @@ def _string_or_object(self, dim_key, dim_values):
is_number = self.is_number(dim_value)
if not is_str and not is_func and not is_number:
- msg = "\n The value '{}' of type '{}' in the search space dimension '{}' must be number, string or function \n".format(
- dim_value, type(dim_value), dim_key
+ msg = (
+ f"\n The value '{dim_value}' of type '{type(dim_value)}' in the "
+ f"search space dimension '{dim_key}' must be number, string or "
+ "function \n"
)
raise ValueError(msg)
def check_non_num_values(self):
+        """Validate non-numeric values and dimension shapes."""
for dim_key in self.dim_keys:
dim_values = np.array(list(self.search_space[dim_key]))
try:
np.subtract(dim_values, dim_values)
np.array(dim_values).searchsorted(dim_values)
- except:
+ except (TypeError, ValueError):
self._string_or_object(dim_key, dim_values)
else:
if dim_values.ndim != 1:
- msg = "Array-like object in '{}' must be one dimensional".format(
- dim_key
- )
+ msg = f"Array-like object in '{dim_key}' must be one dimensional"
raise ValueError(msg)
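Narrowing the bare except: clauses matters beyond style: a bare clause also swallows KeyboardInterrupt and SystemExit. The duck-typing probe behind is_number, with the narrowed clause, as a standalone sketch:

    def is_number(value):
        # Probe the numeric protocol; catch only what a non-number raises.
        try:
            float(value)
            value * 0.1
            value - 0.1
            value / 0.1
        except (TypeError, ValueError, ZeroDivisionError):
            return False
        return True

    print(is_number(3))       # True
    print(is_number("text"))  # False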
diff --git a/src/hyperactive/tests/test_all_objects.py b/src/hyperactive/tests/test_all_objects.py
index 3635b3bb..dda0890e 100644
--- a/src/hyperactive/tests/test_all_objects.py
+++ b/src/hyperactive/tests/test_all_objects.py
@@ -1,7 +1,6 @@
"""Automated tests based on the skbase test suite template."""
from inspect import isclass
-import shutil
from skbase.testing import BaseFixtureGenerator as _BaseFixtureGenerator
from skbase.testing import QuickTester as _QuickTester
@@ -282,11 +281,12 @@ def test_gfo_integration(self, object_instance):
optimizer = object_instance
# 1. define the experiment
- from hyperactive.experiment.integrations import SklearnCvExperiment
from sklearn.datasets import load_iris
- from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from sklearn.model_selection import KFold
+ from sklearn.svm import SVC
+
+ from hyperactive.experiment.integrations import SklearnCvExperiment
X, y = load_iris(return_X_y=True)
diff --git a/tests/__init__.py b/tests/__init__.py
index e69de29b..3d0064d7 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -0,0 +1 @@
+"""Test package for Hyperactive library functionality."""
diff --git a/tests/_local_test_optimization_strategies/_parametrize.py b/tests/_local_test_optimization_strategies/_parametrize.py
index 9e422b75..0ce5d050 100644
--- a/tests/_local_test_optimization_strategies/_parametrize.py
+++ b/tests/_local_test_optimization_strategies/_parametrize.py
@@ -1,27 +1,26 @@
from hyperactive.optimizers import (
- HillClimbingOptimizer,
- StochasticHillClimbingOptimizer,
- RepulsingHillClimbingOptimizer,
- SimulatedAnnealingOptimizer,
+ BayesianOptimizer,
+ DirectAlgorithm,
DownhillSimplexOptimizer,
- RandomSearchOptimizer,
+ EvolutionStrategyOptimizer,
+ ForestOptimizer,
GridSearchOptimizer,
- RandomRestartHillClimbingOptimizer,
- RandomAnnealingOptimizer,
- PowellsMethod,
- PatternSearch,
+ HillClimbingOptimizer,
+ LipschitzOptimizer,
ParallelTemperingOptimizer,
ParticleSwarmOptimizer,
+ PatternSearch,
+ PowellsMethod,
+ RandomAnnealingOptimizer,
+ RandomRestartHillClimbingOptimizer,
+ RandomSearchOptimizer,
+ RepulsingHillClimbingOptimizer,
+ SimulatedAnnealingOptimizer,
SpiralOptimization,
- EvolutionStrategyOptimizer,
- BayesianOptimizer,
- LipschitzOptimizer,
- DirectAlgorithm,
+ StochasticHillClimbingOptimizer,
TreeStructuredParzenEstimators,
- ForestOptimizer,
)
-
optimizers = (
"Optimizer",
[
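The optimizers tuple is deliberately shaped as (argnames, argvalues) so the test modules can splat it straight into pytest, as in @pytest.mark.parametrize(*optimizers). A minimal sketch with stand-in classes:

    import pytest

    optimizers = (
        "Optimizer",
        [int, float],  # stand-ins for the optimizer classes above
    )

    @pytest.mark.parametrize(*optimizers)
    def test_optimizer_is_callable(Optimizer):
        assert callable(Optimizer)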
diff --git a/tests/_local_test_optimization_strategies/_test_memory_warm_start.py b/tests/_local_test_optimization_strategies/_test_memory_warm_start.py
index 4f885e7d..0ed07622 100644
--- a/tests/_local_test_optimization_strategies/_test_memory_warm_start.py
+++ b/tests/_local_test_optimization_strategies/_test_memory_warm_start.py
@@ -1,11 +1,11 @@
import time
-import pytest
-import numpy as np
+import numpy as np
+import pytest
from hyperactive import Hyperactive
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import GridSearchOptimizer
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from ._parametrize import optimizers_non_smbo
@@ -46,7 +46,7 @@ def test_memory_Warm_start_0():
d_time = time.time() - c_time
search_data = hyper.search_data(objective_function)
-
+
optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]
@@ -107,7 +107,6 @@ def test_memory_Warm_start_1():
assert d_time > 0.95
-
@pytest.mark.parametrize(*optimizers_non_smbo)
def test_memory_Warm_start_2(Optimizer_non_smbo):
optimizer1 = GridSearchOptimizer()
@@ -152,4 +151,3 @@ def test_memory_Warm_start_2(Optimizer_non_smbo):
print("\n d_time", d_time)
assert d_time < 0.9
-
\ No newline at end of file
diff --git a/tests/_local_test_optimization_strategies/_test_memory_warm_start_smbo.py b/tests/_local_test_optimization_strategies/_test_memory_warm_start_smbo.py
index cf9c93c1..6d8dde6a 100644
--- a/tests/_local_test_optimization_strategies/_test_memory_warm_start_smbo.py
+++ b/tests/_local_test_optimization_strategies/_test_memory_warm_start_smbo.py
@@ -1,11 +1,11 @@
import time
-import pytest
-import numpy as np
+import numpy as np
+import pytest
from hyperactive import Hyperactive
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import GridSearchOptimizer
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from ._parametrize import optimizers_smbo
diff --git a/tests/_local_test_optimization_strategies/_test_strategy_combinations.py b/tests/_local_test_optimization_strategies/_test_strategy_combinations.py
index 40e788e6..628abb1e 100644
--- a/tests/_local_test_optimization_strategies/_test_strategy_combinations.py
+++ b/tests/_local_test_optimization_strategies/_test_strategy_combinations.py
@@ -1,6 +1,5 @@
-import pytest
import numpy as np
-
+import pytest
from hyperactive import Hyperactive
from hyperactive.optimizers.strategies import CustomOptimizationStrategy
diff --git a/tests/_local_test_optimization_strategies/_test_strategy_multiprocessing.py b/tests/_local_test_optimization_strategies/_test_strategy_multiprocessing.py
index 60d34fc0..dd378874 100644
--- a/tests/_local_test_optimization_strategies/_test_strategy_multiprocessing.py
+++ b/tests/_local_test_optimization_strategies/_test_strategy_multiprocessing.py
@@ -1,14 +1,6 @@
-import pytest
import numpy as np
-from hyperactive import Hyperactive
-from hyperactive.optimizers import RandomSearchOptimizer
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
-
-from ._parametrize import optimizers, optimizers_strat
-
-
def objective_function(opt):
score = -(opt["x1"] * opt["x1"] + opt["x2"] * opt["x2"])
return score
diff --git a/tests/_local_test_timings/_search_space_list.py b/tests/_local_test_timings/_search_space_list.py
index 3d0549ce..e83f9d90 100644
--- a/tests/_local_test_timings/_search_space_list.py
+++ b/tests/_local_test_timings/_search_space_list.py
@@ -9,7 +9,7 @@ def search_space_setup(size=1000):
pad_full = list(range(0, size))
pad_cat = list(range(int(size / 3)))
- pad_10 = list(range(int(size ** 0.1)))
+ pad_10 = list(range(int(size**0.1)))
search_space_0 = {
"x1": pad_full,
@@ -75,29 +75,6 @@ def func3():
"func1": [func1, func2, func3],
}
- class class1:
- pass
-
- class class2:
- pass
-
- class class3:
- pass
-
- def wr_func_1():
- return class1
-
- def wr_func_2():
- return class2
-
- def wr_func_3():
- return class3
-
- search_space_6 = {
- "x1": pad_cat,
- "class_1": [wr_func_1, wr_func_2, wr_func_3],
- }
-
class class1:
def __init__(self):
pass
@@ -119,6 +96,11 @@ def wr_func_2():
def wr_func_3():
return class3()
+ search_space_6 = {
+ "x1": pad_cat,
+ "class_1": [wr_func_1, wr_func_2, wr_func_3],
+ }
+
search_space_7 = {
"x1": pad_cat,
"class_obj_1": [wr_func_1, wr_func_2, wr_func_3],
diff --git a/tests/_local_test_timings/_test_memory.py b/tests/_local_test_timings/_test_memory.py
index 8b0bde28..d3ac4e8b 100644
--- a/tests/_local_test_timings/_test_memory.py
+++ b/tests/_local_test_timings/_test_memory.py
@@ -1,11 +1,11 @@
import time
+
import numpy as np
import pandas as pd
from sklearn.datasets import load_breast_cancer
+from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier
-from sklearn.ensemble import GradientBoostingClassifier
-
from hyperactive import Hyperactive
@@ -50,9 +50,6 @@ def objective_function(opt):
def test_memory_timeSave_1():
- data = load_breast_cancer()
- X, y = data.data, data.target
-
def objective_function(opt):
time.sleep(0.001)
return 1
diff --git a/tests/_local_test_timings/_test_memory_warm_start.py b/tests/_local_test_timings/_test_memory_warm_start.py
index fdd9c7c7..505458e8 100644
--- a/tests/_local_test_timings/_test_memory_warm_start.py
+++ b/tests/_local_test_timings/_test_memory_warm_start.py
@@ -1,7 +1,8 @@
import time
-import pytest
+
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
@@ -10,7 +11,7 @@
dim_full = list(range(0, size))
dim_cat = list(range(round(size / 3)))
-dim_10 = list(range(round(size ** 0.1)))
+dim_10 = list(range(round(size**0.1)))
search_space_0 = {
"x1": dim_full,
diff --git a/tests/_local_test_timings/_test_memory_warm_start_n_jobs.py b/tests/_local_test_timings/_test_memory_warm_start_n_jobs.py
index f5ca6f83..a75a6031 100644
--- a/tests/_local_test_timings/_test_memory_warm_start_n_jobs.py
+++ b/tests/_local_test_timings/_test_memory_warm_start_n_jobs.py
@@ -1,7 +1,8 @@
import time
-import pytest
+
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
@@ -10,7 +11,7 @@
dim_full = list(range(0, size))
dim_cat = list(range(round(size / 3)))
-dim_10 = list(range(round(size ** 0.1)))
+dim_10 = list(range(round(size**0.1)))
search_space_0 = {
"x1": dim_full,
@@ -282,7 +283,7 @@ def test_memory_warm_start_1(search_space):
@pytest.mark.parametrize("search_space", search_space_list)
-def test_memory_warm_start_1(search_space):
+def test_memory_warm_start_n_jobs(search_space):
n_iter = 1500
c_time = time.perf_counter()
diff --git a/tests/_local_test_timings/_test_shared_memory.py b/tests/_local_test_timings/_test_shared_memory.py
index 525c0f40..7cba7012 100644
--- a/tests/_local_test_timings/_test_shared_memory.py
+++ b/tests/_local_test_timings/_test_shared_memory.py
@@ -1,6 +1,4 @@
import time
-import numpy as np
-import pandas as pd
from hyperactive import Hyperactive
diff --git a/tests/_local_test_timings/_test_warm_start.py b/tests/_local_test_timings/_test_warm_start.py
index 0a602302..18856358 100644
--- a/tests/_local_test_timings/_test_warm_start.py
+++ b/tests/_local_test_timings/_test_warm_start.py
@@ -1,7 +1,4 @@
-import time
import pytest
-import numpy as np
-import pandas as pd
from hyperactive import Hyperactive
@@ -31,6 +28,3 @@ def test_warm_start_0(search_space):
initialize={"warm_start": [search_data0]},
)
hyper1.run()
-
-
-
diff --git a/tests/_local_test_timings/_test_warm_start_n_jobs.py b/tests/_local_test_timings/_test_warm_start_n_jobs.py
index 85ecb5d8..7baef210 100644
--- a/tests/_local_test_timings/_test_warm_start_n_jobs.py
+++ b/tests/_local_test_timings/_test_warm_start_n_jobs.py
@@ -1,7 +1,6 @@
-import time
-import pytest
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
@@ -10,7 +9,7 @@
dim_full = list(range(0, size))
dim_cat = list(range(round(size / 3)))
-dim_10 = list(range(round(size ** 0.1)))
+dim_10 = list(range(round(size**0.1)))
search_space_0 = {
"x1": dim_full,
diff --git a/tests/_test_examples.py b/tests/_test_examples.py
index 8447e98e..cd891e76 100644
--- a/tests/_test_examples.py
+++ b/tests/_test_examples.py
@@ -1,11 +1,12 @@
-import os, sys, glob
+import glob
+import os
import subprocess
from subprocess import DEVNULL, STDOUT
here = os.path.dirname(os.path.abspath(__file__))
-files0 = glob.glob(here+"/../examples/*/*.py")
-files1 = glob.glob(here+"/../examples/*.py")
+files0 = glob.glob(here + "/../examples/*/*.py")
+files1 = glob.glob(here + "/../examples/*.py")
files = files0 + files1
@@ -16,7 +17,7 @@
try:
print("\033[0;33;40m Testing", file_name, end="...\r")
- subprocess.check_call(["python", file_path], stdout=DEVNULL, stderr=STDOUT)
+ subprocess.check_call(["python", file_path], stdout=DEVNULL, stderr=STDOUT) # noqa: S603, S607
except subprocess.CalledProcessError:
print("\033[0;31;40m Error in", file_name)
else:
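Ruff's S603/S607 rules flag subprocess calls with unvalidated input and partial executable paths. The noqa is reasonable for a test runner, though invoking sys.executable sidesteps S607 entirely, since it is an absolute path; a minimal sketch:

    import subprocess
    import sys
    from subprocess import DEVNULL, STDOUT

    # sys.executable is absolute, so the partial-path warning does not apply.
    subprocess.check_call(
        [sys.executable, "-c", "print('ok')"], stdout=DEVNULL, stderr=STDOUT
    )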
diff --git a/tests/integrations/__init__.py b/tests/integrations/__init__.py
index e69de29b..0edd64e1 100644
--- a/tests/integrations/__init__.py
+++ b/tests/integrations/__init__.py
@@ -0,0 +1 @@
+"""Test package for integration tests with external libraries."""
diff --git a/tests/integrations/sklearn/__init__.py b/tests/integrations/sklearn/__init__.py
index e69de29b..294fa69c 100644
--- a/tests/integrations/sklearn/__init__.py
+++ b/tests/integrations/sklearn/__init__.py
@@ -0,0 +1 @@
+"""Test package for sklearn integration tests."""
diff --git a/tests/integrations/sklearn/test_parametrize_with_checks.py b/tests/integrations/sklearn/test_parametrize_with_checks.py
index 68eca80d..6ec62d76 100644
--- a/tests/integrations/sklearn/test_parametrize_with_checks.py
+++ b/tests/integrations/sklearn/test_parametrize_with_checks.py
@@ -1,12 +1,13 @@
+"""Test module for sklearn parametrize_with_checks integration."""
+
from sklearn import svm
+from sklearn.model_selection import KFold
+from sklearn.utils.estimator_checks import parametrize_with_checks
from hyperactive.integrations import HyperactiveSearchCV, OptCV
from hyperactive.opt import GridSearchSk as GridSearch
from hyperactive.optimizers import RandomSearchOptimizer
-from sklearn.model_selection import KFold
-from sklearn.utils.estimator_checks import parametrize_with_checks
-
svc = svm.SVC()
parameters = {"kernel": ["linear", "rbf"], "C": [1, 10]}
opt = RandomSearchOptimizer()
@@ -20,4 +21,5 @@
@parametrize_with_checks(ESTIMATORS)
def test_estimators(estimator, check):
+ """Test estimators with sklearn estimator checks."""
check(estimator)
diff --git a/tests/integrations/sklearn/test_sklearn_api.py b/tests/integrations/sklearn/test_sklearn_api.py
index fc4095ec..e8f959c1 100644
--- a/tests/integrations/sklearn/test_sklearn_api.py
+++ b/tests/integrations/sklearn/test_sklearn_api.py
@@ -1,18 +1,17 @@
-import pytest
-import numpy as np
+"""Test module for sklearn API integration."""
+
-from sklearn import svm, datasets
-from sklearn.naive_bayes import GaussianNB
+import numpy as np
+import pytest
+from sklearn import datasets, svm
from sklearn.decomposition import PCA
-from sklearn.datasets import make_blobs
from sklearn.exceptions import NotFittedError
-
+from sklearn.naive_bayes import GaussianNB
from sklearn.utils.validation import check_is_fitted
from hyperactive.integrations import HyperactiveSearchCV
from hyperactive.optimizers import RandomSearchOptimizer
-
iris = datasets.load_iris()
X, y = iris.data, iris.target
@@ -34,6 +32,7 @@
def test_fit():
+ """Test fitting the HyperactiveSearchCV estimator."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -41,6 +40,7 @@ def test_fit():
def test_not_fitted():
+ """Test behavior when estimator is not fitted."""
search = HyperactiveSearchCV(svc, svc_params, opt)
assert not search.fit_successful
@@ -51,6 +51,7 @@ def test_not_fitted():
def test_false_params():
+ """Test error handling with invalid parameters."""
search = HyperactiveSearchCV(svc, nb_params, opt)
with pytest.raises(ValueError):
search.fit(X, y)
@@ -59,6 +60,7 @@ def test_false_params():
def test_score():
+ """Test scoring functionality of the fitted estimator."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
score = search.score(X, y)
@@ -67,6 +69,7 @@ def test_score():
def test_classes_():
+ """Test access to fitted classes."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -74,6 +77,7 @@ def test_classes_():
def test_score_samples():
+ """Test score_samples method raises AttributeError."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -82,6 +86,7 @@ def test_score_samples():
def test_predict():
+ """Test prediction functionality."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
result = search.predict(X)
@@ -90,6 +95,7 @@ def test_predict():
def test_predict_proba():
+ """Test predict_proba method behavior."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -104,6 +110,7 @@ def test_predict_proba():
def test_predict_log_proba():
+ """Test predict_log_proba method behavior."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -118,6 +125,7 @@ def test_predict_log_proba():
def test_decision_function():
+ """Test decision_function method."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
result = search.decision_function(X)
@@ -126,6 +134,7 @@ def test_decision_function():
def test_transform():
+ """Test transform method behavior."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -140,6 +149,7 @@ def test_transform():
def test_inverse_transform():
+ """Test inverse_transform method behavior."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -154,6 +164,7 @@ def test_inverse_transform():
def test_best_params_and_score():
+ """Test access to best parameters and score."""
search = HyperactiveSearchCV(svc, svc_params, opt)
search.fit(X, y)
@@ -165,6 +176,7 @@ def test_best_params_and_score():
def test_search_data():
+ """Test access to search data after optimization."""
n_iter = 50
search = HyperactiveSearchCV(svc, svc_params, opt, n_iter=n_iter)
search.fit(X, y)
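Taken together, these tests pin down a GridSearchCV-like surface for HyperactiveSearchCV. A condensed usage sketch built only from the calls exercised above (the best_params_ attribute is assumed from the sklearn convention the test name points at):

    from sklearn import datasets, svm

    from hyperactive.integrations import HyperactiveSearchCV
    from hyperactive.optimizers import RandomSearchOptimizer

    iris = datasets.load_iris()
    X, y = iris.data, iris.target

    svc_params = {"kernel": ["linear", "rbf"], "C": [1, 10]}
    search = HyperactiveSearchCV(svm.SVC(), svc_params, RandomSearchOptimizer())
    search.fit(X, y)

    print(search.best_params_, search.score(X, y))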
diff --git a/tests/test_callbacks.py b/tests/test_callbacks.py
index afe7bf93..b66f7249 100644
--- a/tests/test_callbacks.py
+++ b/tests/test_callbacks.py
@@ -1,17 +1,17 @@
-import copy
-import pytest
+"""Test module for callback functionality."""
+
import numpy as np
-import pandas as pd
from hyperactive import Hyperactive
-
search_space = {
"x1": list(np.arange(-100, 100, 1)),
}
def test_callback_0():
+ """Test callbacks executed before objective function."""
+
def callback_1(access):
access.stuff1 = 1
@@ -35,6 +35,8 @@ def objective_function(access):
def test_callback_1():
+ """Test callbacks executed before and after objective function."""
+
def callback_1(access):
access.stuff1 = 1
@@ -57,6 +59,8 @@ def objective_function(access):
def test_callback_2():
+ """Test callbacks with pass_through parameter."""
+
def callback_1(access):
access.pass_through["stuff1"] = 1
@@ -77,6 +81,8 @@ def objective_function(access):
def test_callback_3():
+ """Test callbacks executed after objective function with pass_through."""
+
def callback_1(access):
access.pass_through["stuff1"] = 1
diff --git a/tests/test_catch.py b/tests/test_catch.py
index 8297630d..377b8bd0 100644
--- a/tests/test_catch.py
+++ b/tests/test_catch.py
@@ -1,20 +1,21 @@
-import copy
-import pytest
+"""Test module for exception catching functionality."""
+
import math
+
import numpy as np
-import pandas as pd
from hyperactive import Hyperactive
-
search_space = {
"x1": list(np.arange(-100, 100, 1)),
}
def test_catch_1():
+ """Test catching TypeError exceptions in objective function."""
+
def objective_function(access):
- a = 1 + "str"
+ 1 + "str" # Intentional TypeError for testing
return 0
@@ -29,6 +30,8 @@ def objective_function(access):
def test_catch_2():
+ """Test catching ValueError exceptions in objective function."""
+
def objective_function(access):
math.sqrt(-10)
@@ -45,8 +48,10 @@ def objective_function(access):
def test_catch_3():
+ """Test catching ZeroDivisionError exceptions in objective function."""
+
def objective_function(access):
- x = 1 / 0
+ 1 / 0 # Intentional ZeroDivisionError for testing
return 0
@@ -61,10 +66,12 @@ def objective_function(access):
def test_catch_all_0():
+ """Test catching multiple exception types returning NaN values."""
+
def objective_function(access):
- a = 1 + "str"
- math.sqrt(-10)
- x = 1 / 0
+ 1 + "str" # Intentional TypeError for testing
+ math.sqrt(-10) # Intentional ValueError for testing
+ 1 / 0 # Intentional ZeroDivisionError for testing
return 0
@@ -87,10 +94,12 @@ def objective_function(access):
def test_catch_all_1():
+ """Test catching multiple exception types returning tuple values."""
+
def objective_function(access):
- a = 1 + "str"
- math.sqrt(-10)
- x = 1 / 0
+ 1 + "str" # Intentional TypeError for testing
+ math.sqrt(-10) # Intentional ValueError for testing
+ 1 / 0 # Intentional ZeroDivisionError for testing
return 0, {"error": False}
@@ -113,4 +122,4 @@ def objective_function(access):
error_ = hyper.search_data(objective_function)["error"].values[0]
assert math.isnan(nan_)
- assert error_ == True
+ assert error_
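These tests exercise a catch-and-substitute behavior: a raised exception type maps to a fallback score such as NaN. A generic sketch of that pattern in plain Python (the helper and mapping are illustrative, not Hyperactive's API; exact exception types are assumed):

    import math

    def evaluate(objective, para, catch):
        # Substitute the fallback score when a listed exception is raised.
        try:
            return objective(para)
        except tuple(catch) as err:
            return catch[type(err)]

    score = evaluate(lambda p: 1 / 0, {}, {ZeroDivisionError: math.nan})
    print(math.isnan(score))  # True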
diff --git a/tests/test_constr_opt.py b/tests/test_constr_opt.py
index f26fce2f..64c4ea38 100644
--- a/tests/test_constr_opt.py
+++ b/tests/test_constr_opt.py
@@ -1,9 +1,13 @@
+"""Test module for constraint optimization functionality."""
+
import numpy as np
from hyperactive import Hyperactive
def test_constr_opt_0():
+ """Test constraint optimization with single constraint."""
+
def objective_function(para):
score = -para["x1"] * para["x1"]
return score
@@ -37,6 +41,8 @@ def constraint_1(para):
def test_constr_opt_1():
+ """Test constraint optimization with single constraint on 2D space."""
+
def objective_function(para):
score = -(para["x1"] * para["x1"] + para["x2"] * para["x2"])
return score
@@ -69,6 +75,7 @@ def constraint_1(para):
def test_constr_opt_2():
+ """Test constraint optimization with multiple constraints."""
n_iter = 50
def objective_function(para):
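The constraint functions in these tests take the parameter dict and return a bool marking a position as feasible. A minimal sketch of one such constraint; passing it via a constraints keyword on add_search is an assumption, since only the functions themselves appear in the hunks:

    import numpy as np

    from hyperactive import Hyperactive

    def objective_function(para):
        return -para["x1"] * para["x1"]

    def constraint_1(para):
        # Feasible positions return True; infeasible ones are skipped.
        return para["x1"] > -5

    search_space = {"x1": list(np.arange(-10, 10, 0.1))}

    hyper = Hyperactive()
    hyper.add_search(
        objective_function, search_space, n_iter=50, constraints=[constraint_1]
    )
    hyper.run()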
diff --git a/tests/test_distribution.py b/tests/test_distribution.py
index c0a3d93b..69e54163 100644
--- a/tests/test_distribution.py
+++ b/tests/test_distribution.py
@@ -1,5 +1,9 @@
+"""Test module for distribution functionality."""
+
+import sys
+
import numpy as np
-import sys, pytest
+import pytest
from tqdm import tqdm
from hyperactive import Hyperactive
@@ -9,6 +13,7 @@
def objective_function(opt):
+    """Evaluate a simple quadratic objective for testing."""
score = -opt["x1"] * opt["x1"]
return score
@@ -19,6 +24,7 @@ def objective_function(opt):
def test_n_jobs_0():
+ """Test basic n_jobs functionality with 2 parallel jobs."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
@@ -27,6 +33,7 @@ def test_n_jobs_0():
def test_n_jobs_1():
+ """Test n_jobs functionality with 4 parallel jobs."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=4)
hyper.run()
@@ -35,6 +42,7 @@ def test_n_jobs_1():
def test_n_jobs_2():
+ """Test n_jobs functionality with 8 parallel jobs."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=8)
hyper.run()
@@ -43,12 +51,14 @@ def test_n_jobs_2():
def test_n_jobs_3():
+ """Test default n_jobs behavior (single job)."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15)
hyper.run()
def test_n_jobs_5():
+ """Test multiple searches with n_jobs=2 each."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
@@ -59,6 +69,7 @@ def test_n_jobs_5():
def test_n_jobs_6():
+ """Test four searches with n_jobs=2 each."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
@@ -71,18 +82,21 @@ def test_n_jobs_6():
def test_n_jobs_7():
+ """Test n_jobs=-1 (use all available cores)."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=-1)
hyper.run()
def test_multiprocessing_0():
+ """Test multiprocessing distribution backend."""
hyper = Hyperactive(distribution="multiprocessing")
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
def test_multiprocessing_1():
+ """Test multiprocessing with custom initializer configuration."""
hyper = Hyperactive(
distribution={
"multiprocessing": {
@@ -96,12 +110,14 @@ def test_multiprocessing_1():
def test_joblib_0():
+ """Test joblib distribution backend."""
hyper = Hyperactive(distribution="joblib")
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
def test_joblib_1():
+ """Test custom joblib wrapper function."""
from joblib import Parallel, delayed
def joblib_wrapper(process_func, search_processes_paras, n_jobs, **kwargs):
@@ -121,12 +137,14 @@ def joblib_wrapper(process_func, search_processes_paras, n_jobs, **kwargs):
def test_pathos_0():
+ """Test pathos distribution backend."""
hyper = Hyperactive(distribution="pathos")
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
def test_n_processes_0():
+ """Test n_processes=1 with n_jobs=2."""
hyper = Hyperactive(n_processes=1)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
@@ -135,6 +153,7 @@ def test_n_processes_0():
def test_n_processes_1():
+ """Test n_processes=2 with n_jobs=2."""
hyper = Hyperactive(n_processes=2)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
@@ -143,6 +162,7 @@ def test_n_processes_1():
def test_n_processes_2():
+ """Test n_processes=4 with n_jobs=2."""
hyper = Hyperactive(n_processes=4)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper.run()
@@ -151,6 +171,7 @@ def test_n_processes_2():
def test_n_processes_3():
+ """Test n_processes=4 with n_jobs=3."""
hyper = Hyperactive(n_processes=4)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=3)
hyper.run()
@@ -159,6 +180,7 @@ def test_n_processes_3():
def test_n_processes_4():
+ """Test n_processes=1 with n_jobs=4."""
hyper = Hyperactive(n_processes=1)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=4)
hyper.run()
@@ -167,6 +189,7 @@ def test_n_processes_4():
def test_n_processes_5():
+ """Test n_processes=1 with multiple searches having n_jobs=4."""
hyper = Hyperactive(n_processes=1)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=4)
hyper.add_search(objective_function, search_space, n_iter=15, n_jobs=4)
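test_joblib_1 injects a custom wrapper with the signature shown above. A plausible body under that signature, using the imported Parallel and delayed (unpacking each entry as keyword arguments is an assumption):

    from joblib import Parallel, delayed

    def joblib_wrapper(process_func, search_processes_paras, n_jobs, **kwargs):
        # Fan each process's parameter dict out to a joblib worker.
        jobs = [delayed(process_func)(**para) for para in search_processes_paras]
        return Parallel(n_jobs=n_jobs)(jobs)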
diff --git a/tests/test_early_stop.py b/tests/test_early_stop.py
index 68fbb6cb..157a2988 100644
--- a/tests/test_early_stop.py
+++ b/tests/test_early_stop.py
@@ -1,18 +1,12 @@
-import time
-import pytest
+"""Test module for early stopping functionality."""
+
import numpy as np
-from sklearn.datasets import load_breast_cancer
-from sklearn.model_selection import cross_val_score
-from sklearn.tree import DecisionTreeClassifier
from hyperactive import Hyperactive
-from hyperactive.optimizers import (
- RandomSearchOptimizer,
- HillClimbingOptimizer,
-)
def objective_function(para):
+    """Evaluate a simple quadratic objective for testing."""
score = -para["x1"] * para["x1"]
return score
@@ -23,6 +17,7 @@ def objective_function(para):
def test_early_stop_0():
+ """Test early stopping with both absolute and relative tolerance."""
early_stopping = {
"n_iter_no_change": 5,
"tol_abs": 0.1,
@@ -41,6 +36,7 @@ def test_early_stop_0():
def test_early_stop_1():
+ """Test early stopping with relative tolerance only."""
early_stopping = {
"n_iter_no_change": 5,
"tol_abs": None,
@@ -59,6 +55,7 @@ def test_early_stop_1():
def test_early_stop_2():
+ """Test early stopping with absolute tolerance only."""
early_stopping = {
"n_iter_no_change": 5,
"tol_abs": 0.1,
@@ -77,6 +74,8 @@ def test_early_stop_2():
def test_early_stop_3():
+ """Test early stopping without tolerance and verify iteration count."""
+
def objective_function(para):
score = -para["x1"] * para["x1"]
return score
@@ -110,6 +109,8 @@ def objective_function(para):
def test_early_stop_4():
+ """Test early stopping with absolute tolerance where no early stop occurs."""
+
def objective_function(para):
return para["x1"]
@@ -171,6 +172,8 @@ def objective_function(para):
def test_early_stop_5():
+ """Test early stopping with absolute tolerance where early stop occurs."""
+
def objective_function(para):
return para["x1"]
@@ -224,6 +227,8 @@ def objective_function(para):
def test_early_stop_6():
+ """Test early stopping with relative tolerance where no early stop occurs."""
+
def objective_function(para):
return para["x1"]
@@ -285,6 +290,8 @@ def objective_function(para):
def test_early_stop_7():
+ """Test early stopping with relative tolerance where early stop occurs."""
+
def objective_function(para):
return para["x1"]
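The early_stopping dicts in these tests combine a patience window (n_iter_no_change) with absolute and relative improvement thresholds; either tolerance can be disabled with None. A minimal usage sketch, assuming add_search accepts the mapping the tests build:

    import numpy as np

    from hyperactive import Hyperactive

    def objective_function(para):
        return -para["x1"] * para["x1"]

    search_space = {"x1": list(np.arange(-100, 101, 1))}

    early_stopping = {
        "n_iter_no_change": 5,  # stop after 5 iterations without improvement
        "tol_abs": 0.1,         # improvement must exceed 0.1 absolutely
        "tol_rel": None,        # no relative criterion
    }

    hyper = Hyperactive()
    hyper.add_search(
        objective_function, search_space, n_iter=1000, early_stopping=early_stopping
    )
    hyper.run()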
diff --git a/tests/test_empty_output/__init__.py b/tests/test_empty_output/__init__.py
index e69de29b..215da704 100644
--- a/tests/test_empty_output/__init__.py
+++ b/tests/test_empty_output/__init__.py
@@ -0,0 +1 @@
+"""Test package for empty output functionality tests."""
diff --git a/tests/test_empty_output/non_verbose.py b/tests/test_empty_output/non_verbose.py
index ce8e56f7..b0c6634b 100644
--- a/tests/test_empty_output/non_verbose.py
+++ b/tests/test_empty_output/non_verbose.py
@@ -1,8 +1,12 @@
+"""Test module for non-verbose output functionality."""
+
import numpy as np
+
from hyperactive import Hyperactive
def ackley_function(para):
+ """Ackley optimization function for testing."""
x, y = para["x"], para["y"]
loss = (
diff --git a/tests/test_empty_output/test_empty_output.py b/tests/test_empty_output/test_empty_output.py
index 16788e8b..65bae03a 100644
--- a/tests/test_empty_output/test_empty_output.py
+++ b/tests/test_empty_output/test_empty_output.py
@@ -1,5 +1,11 @@
-import os, sys, subprocess, pytest
+"""Test module for empty output functionality."""
+
+import os
+import subprocess
+import sys
+
+import pytest
if sys.platform.startswith("win"):
pytest.skip("skip these tests for windows", allow_module_level=True)
@@ -13,7 +18,7 @@
def _run_subprocess(script):
output = []
- process = subprocess.Popen(
+ process = subprocess.Popen( # noqa: S603
[sys.executable, "-u", script],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
@@ -33,6 +38,7 @@ def _run_subprocess(script):
def test_empty_output():
+ """Test that verbose and non-verbose modes produce expected output."""
stdout_verb, stderr_verb = _run_subprocess(verbose_file)
stdout_non_verb, stderr_non_verb = _run_subprocess(non_verbose_file)
diff --git a/tests/test_empty_output/verbose.py b/tests/test_empty_output/verbose.py
index 0d567873..a74fb749 100644
--- a/tests/test_empty_output/verbose.py
+++ b/tests/test_empty_output/verbose.py
@@ -1,9 +1,14 @@
+"""Test module for verbose output functionality."""
+
import sys
+
import numpy as np
+
from hyperactive import Hyperactive
def ackley_function(para):
+ """Ackley optimization function for testing."""
x, y = para["x"], para["y"]
loss = (
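Both verbose.py and non_verbose.py cut off right after loss = (. For reference, a standard two-dimensional Ackley implementation that fits the signature above; returning the negated loss assumes the score is maximized:

    import numpy as np

    def ackley_function(para):
        # Global minimum of the Ackley function is 0 at (0, 0).
        x, y = para["x"], para["y"]
        loss = (
            -20 * np.exp(-0.2 * np.sqrt(0.5 * (x**2 + y**2)))
            - np.exp(0.5 * (np.cos(2 * np.pi * x) + np.cos(2 * np.pi * y)))
            + np.e
            + 20
        )
        return -loss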
diff --git a/tests/test_hyper_gradient_trafo.py b/tests/test_hyper_gradient_trafo.py
index 370a6e9b..5d9d2672 100644
--- a/tests/test_hyper_gradient_trafo.py
+++ b/tests/test_hyper_gradient_trafo.py
@@ -1,12 +1,16 @@
+"""Test module for hyper gradient transformation functionality."""
+
import time
-import pytest
+
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
def objective_function_0(opt):
+    """Evaluate a simple quadratic objective for testing."""
score = -opt["x1"] * opt["x1"]
return score
@@ -45,6 +49,7 @@ def objective_function_0(opt):
@pytest.mark.parametrize("search_space", search_space_para_0)
def test_trafo_0(search_space):
+ """Test search space transformations with various ranges."""
hyper = Hyperactive()
hyper.add_search(objective_function_0, search_space, n_iter=25)
hyper.run()
@@ -66,6 +71,7 @@ def test_trafo_0(search_space):
def objective_function_1(opt):
+ """Decision tree objective function for testing with sklearn."""
dtc = DecisionTreeClassifier(min_samples_split=opt["min_samples_split"])
scores = cross_val_score(dtc, X, y, cv=10)
time.sleep(0.1)
diff --git a/tests/test_initializers.py b/tests/test_initializers.py
index 6ec351ed..2498961f 100644
--- a/tests/test_initializers.py
+++ b/tests/test_initializers.py
@@ -1,8 +1,12 @@
+"""Test module for initializer functionality."""
+
import numpy as np
+
from hyperactive import Hyperactive
def objective_function(opt):
+    """Evaluate a simple quadratic objective for testing."""
score = -opt["x1"] * opt["x1"]
return score
@@ -13,6 +17,7 @@ def objective_function(opt):
def test_initialize_warm_start_0():
+ """Test warm start initialization with optimal point."""
init = {
"x1": 0,
}
@@ -32,6 +37,7 @@ def test_initialize_warm_start_0():
def test_initialize_warm_start_1():
+ """Test warm start initialization with boundary point."""
search_space = {
"x1": list(np.arange(-10, 10, 1)),
}
@@ -54,6 +60,7 @@ def test_initialize_warm_start_1():
def test_initialize_vertices():
+ """Test vertices initialization strategy."""
initialize = {"vertices": 2}
hyper = Hyperactive()
@@ -69,6 +76,7 @@ def test_initialize_vertices():
def test_initialize_grid_0():
+ """Test grid initialization with optimal center point."""
search_space = {
"x1": list(np.arange(-1, 2, 1)),
}
@@ -87,6 +95,7 @@ def test_initialize_grid_0():
def test_initialize_grid_1():
+ """Test grid initialization with off-center optimal point."""
search_space = {
"x1": list(np.arange(-2, 3, 1)),
}
@@ -106,6 +115,7 @@ def test_initialize_grid_1():
def test_initialize_all_0():
+ """Test combination of all initialization strategies."""
search_space = {
"x1": list(np.arange(-2, 3, 1)),
}
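The strategies tested above can be combined in one initialize dict: explicit warm-start points, evenly spaced grid points, search-space vertices, and random samples. A sketch of such a combination (the random key is assumed from Hyperactive's documented defaults):

    import numpy as np

    from hyperactive import Hyperactive

    def objective_function(opt):
        return -opt["x1"] * opt["x1"]

    search_space = {"x1": list(np.arange(-2, 3, 1))}

    initialize = {
        "warm_start": [{"x1": 0}],  # explicit starting points
        "grid": 2,
        "vertices": 2,
        "random": 2,
    }

    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=20, initialize=initialize)
    hyper.run()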
diff --git a/tests/test_issues/__init__.py b/tests/test_issues/__init__.py
index e69de29b..6ef6a259 100644
--- a/tests/test_issues/__init__.py
+++ b/tests/test_issues/__init__.py
@@ -0,0 +1 @@
+"""Test package for issue reproduction tests."""
diff --git a/tests/test_issues/test_issue_25.py b/tests/test_issues/test_issue_25.py
index 40529d7c..4a898d95 100644
--- a/tests/test_issues/test_issue_25.py
+++ b/tests/test_issues/test_issue_25.py
@@ -1,3 +1,5 @@
+"""Test module for issue #25 reproduction."""
+
import numpy as np
import pandas as pd
@@ -5,6 +7,7 @@
def test_issue_25():
+ """Test issue 25 - memory warm start with CSV file persistence."""
# set a path to save the dataframe
path = "./search_data.csv"
search_space = {
@@ -30,14 +33,10 @@ def objective_function(para):
# append parameter dictionary to pandas dataframe
search_data = pd.read_csv(path, na_values="nan")
- search_data_new = pd.DataFrame(
- parameter_dict, columns=para_names, index=[0]
- )
+ search_data_new = pd.DataFrame(parameter_dict, columns=para_names, index=[0])
# search_data = search_data.append(search_data_new)
- search_data = pd.concat(
- [search_data, search_data_new], ignore_index=True
- )
+ search_data = pd.concat([search_data, search_data_new], ignore_index=True)
search_data.to_csv(path, index=False, na_rep="nan")
@@ -49,7 +48,7 @@ def objective_function(para):
search_data_0 = pd.read_csv(path, na_values="nan")
"""
- the second run should be much faster than before,
+ the second run should be much faster than before,
because Hyperactive already knows most parameters/scores
"""
hyper1 = Hyperactive()
diff --git a/tests/test_issues/test_issue_29.py b/tests/test_issues/test_issue_29.py
index 444c68ee..8a55db1f 100644
--- a/tests/test_issues/test_issue_29.py
+++ b/tests/test_issues/test_issue_29.py
@@ -1,11 +1,14 @@
+"""Test module for issue #29 reproduction."""
+
from sklearn.datasets import load_diabetes
-from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import cross_val_score
+from sklearn.tree import DecisionTreeRegressor
from hyperactive import Hyperactive
def test_issue_29():
+ """Test issue 29 - accessing optimizer attributes during optimization."""
data = load_diabetes()
X, y = data.data, data.target
diff --git a/tests/test_issues/test_issue_34.py b/tests/test_issues/test_issue_34.py
index 66abb062..6dcc6de4 100644
--- a/tests/test_issues/test_issue_34.py
+++ b/tests/test_issues/test_issue_34.py
@@ -1,4 +1,7 @@
+"""Test module for issue #34 reproduction."""
+
import numpy as np
+
from hyperactive import Hyperactive
""" --- test search spaces with mixed int/float types --- """
@@ -6,6 +9,8 @@
def test_mixed_type_search_space_0():
+ """Test search space with integer type validation."""
+
def objective_function(para):
assert isinstance(para["x1"], int)
@@ -21,6 +26,8 @@ def objective_function(para):
def test_mixed_type_search_space_1():
+ """Test search space with float type validation."""
+
def objective_function(para):
assert isinstance(para["x2"], float)
@@ -36,6 +43,8 @@ def objective_function(para):
def test_mixed_type_search_space_2():
+ """Test search space with mixed integer and float type validation."""
+
def objective_function(para):
assert isinstance(para["x1"], int)
assert isinstance(para["x2"], float)
@@ -53,6 +62,8 @@ def objective_function(para):
def test_mixed_type_search_space_3():
+ """Test search space with mixed integer, float, and string type validation."""
+
def objective_function(para):
assert isinstance(para["x1"], int)
assert isinstance(para["x2"], float)
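Issue #34 is about value types surviving the trip through the search space. Whether a dimension yields ints or floats is decided by how its list is built; a minimal sketch:

    import numpy as np

    search_space = {
        "x1": list(range(2, 30)),               # Python ints
        "x2": list(np.arange(0.3, 0.5, 0.01)),  # floats
        "x3": ["a", "b"],                       # strings
    }

    assert isinstance(search_space["x1"][0], int)
    assert isinstance(search_space["x2"][0], float)
    assert isinstance(search_space["x3"][0], str)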
diff --git a/tests/test_max_score.py b/tests/test_max_score.py
index 9d5a4c8d..15f5f31d 100644
--- a/tests/test_max_score.py
+++ b/tests/test_max_score.py
@@ -1,17 +1,17 @@
+"""Test module for max score functionality."""
+
import time
+
import numpy as np
-from sklearn.datasets import load_breast_cancer
-from sklearn.model_selection import cross_val_score
-from sklearn.tree import DecisionTreeClassifier
from hyperactive import Hyperactive
from hyperactive.optimizers import (
- RandomSearchOptimizer,
HillClimbingOptimizer,
)
def objective_function(para):
+    """Evaluate a simple quadratic objective for testing."""
score = -para["x1"] * para["x1"]
return score
@@ -22,6 +22,8 @@ def objective_function(para):
def test_max_score_0():
+ """Test max_score termination with hill climbing optimizer."""
+
def objective_function(para):
score = -para["x1"] * para["x1"]
return score
@@ -57,6 +59,8 @@ def objective_function(para):
def test_max_score_1():
+ """Test max_score termination with time constraint."""
+
def objective_function(para):
score = -para["x1"] * para["x1"]
time.sleep(0.01)
diff --git a/tests/test_max_time.py b/tests/test_max_time.py
index fe66e0e6..154af6ec 100644
--- a/tests/test_max_time.py
+++ b/tests/test_max_time.py
@@ -1,9 +1,14 @@
+"""Test module for max time functionality."""
+
import time
+
import numpy as np
+
from hyperactive import Hyperactive
def objective_function(para):
+ """Objective function for max time testing."""
score = -para["x1"] * para["x1"]
return score
@@ -14,6 +19,7 @@ def objective_function(para):
def test_max_time_0():
+ """Test max time constraint with short duration."""
c_time1 = time.perf_counter()
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=1000000)
@@ -24,6 +30,7 @@ def test_max_time_0():
def test_max_time_1():
+ """Test max time constraint with longer duration."""
c_time1 = time.perf_counter()
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=1000000)
diff --git a/tests/test_obj_func_arg.py b/tests/test_obj_func_arg.py
index 81ac34b8..7cf08625 100644
--- a/tests/test_obj_func_arg.py
+++ b/tests/test_obj_func_arg.py
@@ -1,6 +1,8 @@
+"""Test module for objective function argument functionality."""
+
import numpy as np
-from hyperactive import Hyperactive
+from hyperactive import Hyperactive
search_space = {
"x1": list(np.arange(0, 100, 1)),
@@ -8,8 +10,10 @@
def test_argument_0():
-    def objective_function(para):
+    """Test objective function arguments with pass_through parameter."""
+
+    def objective_function(para):
print("\npara.nth_iter", para.nth_iter)
print("nth_iter_local", para.pass_through["nth_iter_local"])
diff --git a/tests/test_optimization_strategies/__init__.py b/tests/test_optimization_strategies/__init__.py
index e69de29b..e78ef282 100644
--- a/tests/test_optimization_strategies/__init__.py
+++ b/tests/test_optimization_strategies/__init__.py
@@ -0,0 +1 @@
+"""Test package for optimization strategy tests."""
diff --git a/tests/test_optimization_strategies/_parametrize.py b/tests/test_optimization_strategies/_parametrize.py
index 9e422b75..0ce5d050 100644
--- a/tests/test_optimization_strategies/_parametrize.py
+++ b/tests/test_optimization_strategies/_parametrize.py
@@ -1,27 +1,26 @@
from hyperactive.optimizers import (
- HillClimbingOptimizer,
- StochasticHillClimbingOptimizer,
- RepulsingHillClimbingOptimizer,
- SimulatedAnnealingOptimizer,
+ BayesianOptimizer,
+ DirectAlgorithm,
DownhillSimplexOptimizer,
- RandomSearchOptimizer,
+ EvolutionStrategyOptimizer,
+ ForestOptimizer,
GridSearchOptimizer,
- RandomRestartHillClimbingOptimizer,
- RandomAnnealingOptimizer,
- PowellsMethod,
- PatternSearch,
+ HillClimbingOptimizer,
+ LipschitzOptimizer,
ParallelTemperingOptimizer,
ParticleSwarmOptimizer,
+ PatternSearch,
+ PowellsMethod,
+ RandomAnnealingOptimizer,
+ RandomRestartHillClimbingOptimizer,
+ RandomSearchOptimizer,
+ RepulsingHillClimbingOptimizer,
+ SimulatedAnnealingOptimizer,
SpiralOptimization,
- EvolutionStrategyOptimizer,
- BayesianOptimizer,
- LipschitzOptimizer,
- DirectAlgorithm,
+ StochasticHillClimbingOptimizer,
TreeStructuredParzenEstimators,
- ForestOptimizer,
)
-
optimizers = (
"Optimizer",
[
diff --git a/tests/test_optimization_strategies/test_constr_opt.py b/tests/test_optimization_strategies/test_constr_opt.py
index 46988e0b..ec34e92d 100644
--- a/tests/test_optimization_strategies/test_constr_opt.py
+++ b/tests/test_optimization_strategies/test_constr_opt.py
@@ -1,11 +1,15 @@
+"""Test module for constraint optimization strategy."""
+
import numpy as np
from hyperactive import Hyperactive
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import HillClimbingOptimizer, RandomSearchOptimizer
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
def test_constr_opt_0():
+ """Test constrained optimization with single constraint."""
+
def objective_function(para):
score = -para["x1"] * para["x1"]
return score
@@ -47,6 +51,8 @@ def constraint_1(para):
def test_constr_opt_1():
+ """Test constrained optimization with multi-dimensional search space."""
+
def objective_function(para):
score = -(para["x1"] * para["x1"] + para["x2"] * para["x2"])
return score
@@ -87,6 +93,7 @@ def constraint_1(para):
def test_constr_opt_2():
+ """Test constrained optimization with multiple constraints."""
n_iter = 50
def objective_function(para):
diff --git a/tests/test_optimization_strategies/test_early_stopping.py b/tests/test_optimization_strategies/test_early_stopping.py
index c7d0b636..38a5559d 100644
--- a/tests/test_optimization_strategies/test_early_stopping.py
+++ b/tests/test_optimization_strategies/test_early_stopping.py
@@ -1,14 +1,15 @@
-import pytest
-import numpy as np
+"""Test module for early stopping optimization strategy."""
+
+import numpy as np
+import pytest
from hyperactive import Hyperactive
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import RandomSearchOptimizer
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from ._parametrize import optimizers
-
n_iter_no_change_parametr = (
"n_iter_no_change",
[
@@ -22,6 +22,8 @@
@pytest.mark.parametrize(*n_iter_no_change_parametr)
@pytest.mark.parametrize(*optimizers)
def test_strategy_early_stopping_0(Optimizer, n_iter_no_change):
+ """Test early stopping strategy with different optimizers and iteration limits."""
+
def objective_function(para):
score = -para["x1"] * para["x1"]
return score
diff --git a/tests/test_optimization_strategies/test_search_space_pruning.py b/tests/test_optimization_strategies/test_search_space_pruning.py
index b0ba35a7..339ac6e4 100644
--- a/tests/test_optimization_strategies/test_search_space_pruning.py
+++ b/tests/test_optimization_strategies/test_search_space_pruning.py
@@ -1,17 +1,21 @@
+"""Test module for search space pruning optimization strategy."""
+
import time
-import pytest
-import numpy as np
+import numpy as np
+import pytest
from hyperactive import Hyperactive
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import GridSearchOptimizer
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from ._parametrize import optimizers_smbo
@pytest.mark.parametrize(*optimizers_smbo)
def test_memory_Warm_start_smbo_0(Optimizer_smbo):
+ """Test memory warm start with SMBO optimizers and custom optimization strategy."""
+
def objective_function(opt):
time.sleep(0.01)
score = -(opt["x1"] * opt["x1"])
diff --git a/tests/test_optimizers/__init__.py b/tests/test_optimizers/__init__.py
index e69de29b..f623cb62 100644
--- a/tests/test_optimizers/__init__.py
+++ b/tests/test_optimizers/__init__.py
@@ -0,0 +1 @@
+"""Test package for optimizer tests."""
diff --git a/tests/test_optimizers/_parametrize.py b/tests/test_optimizers/_parametrize.py
index f82de783..ea8d7a9e 100644
--- a/tests/test_optimizers/_parametrize.py
+++ b/tests/test_optimizers/_parametrize.py
@@ -1,29 +1,28 @@
from hyperactive.optimizers import (
- HillClimbingOptimizer,
- StochasticHillClimbingOptimizer,
- RepulsingHillClimbingOptimizer,
- SimulatedAnnealingOptimizer,
+ BayesianOptimizer,
+ DifferentialEvolutionOptimizer,
+ DirectAlgorithm,
DownhillSimplexOptimizer,
- RandomSearchOptimizer,
+ EvolutionStrategyOptimizer,
+ ForestOptimizer,
+ GeneticAlgorithmOptimizer,
GridSearchOptimizer,
- RandomRestartHillClimbingOptimizer,
- RandomAnnealingOptimizer,
- PowellsMethod,
- PatternSearch,
+ HillClimbingOptimizer,
+ LipschitzOptimizer,
ParallelTemperingOptimizer,
ParticleSwarmOptimizer,
+ PatternSearch,
+ PowellsMethod,
+ RandomAnnealingOptimizer,
+ RandomRestartHillClimbingOptimizer,
+ RandomSearchOptimizer,
+ RepulsingHillClimbingOptimizer,
+ SimulatedAnnealingOptimizer,
SpiralOptimization,
- GeneticAlgorithmOptimizer,
- EvolutionStrategyOptimizer,
- DifferentialEvolutionOptimizer,
- BayesianOptimizer,
- LipschitzOptimizer,
- DirectAlgorithm,
+ StochasticHillClimbingOptimizer,
TreeStructuredParzenEstimators,
- ForestOptimizer,
)
-
optimizers = (
"Optimizer",
[
diff --git a/tests/test_optimizers/test_best_results.py b/tests/test_optimizers/test_best_results.py
index e252c9df..7972641e 100644
--- a/tests/test_optimizers/test_best_results.py
+++ b/tests/test_optimizers/test_best_results.py
@@ -1,22 +1,28 @@
-import pytest
-import numpy as np
+"""Test module for best results optimizer functionality."""
+
+import numpy as np
+import pytest
from hyperactive import Hyperactive
+
from ._parametrize import optimizers
def objective_function(opt):
+    """Evaluate the standard quadratic objective."""
score = -opt["x1"] * opt["x1"]
return score
def objective_function_m5(opt):
+ """Quadratic objective function shifted by -5."""
score = -(opt["x1"] - 5) * (opt["x1"] - 5)
return score
def objective_function_p5(opt):
+ """Quadratic objective function shifted by +5."""
score = -(opt["x1"] + 5) * (opt["x1"] + 5)
return score
@@ -62,6 +67,7 @@ def objective_function_p5(opt):
@pytest.mark.parametrize(*search_space_para)
@pytest.mark.parametrize(*optimizers)
def test_best_results_0(Optimizer, search_space, objective):
+ """Test best score consistency with best parameters."""
search_space = search_space
objective_function = objective
@@ -87,6 +93,7 @@ def test_best_results_0(Optimizer, search_space, objective):
@pytest.mark.parametrize(*search_space_para)
@pytest.mark.parametrize(*optimizers)
def test_best_results_1(Optimizer, search_space, objective):
+ """Test best parameters are present in search data."""
search_space = search_space
objective_function = objective
diff --git a/tests/test_optimizers/test_gfo_wrapper.py b/tests/test_optimizers/test_gfo_wrapper.py
index 408ce3b5..8a54d75d 100644
--- a/tests/test_optimizers/test_gfo_wrapper.py
+++ b/tests/test_optimizers/test_gfo_wrapper.py
@@ -1,22 +1,29 @@
-import pytest
-import numpy as np
+"""Test module for GFO wrapper optimizer functionality."""
+
+import numpy as np
+import pytest
from tqdm import tqdm
-from ._parametrize import optimizers
+
from hyperactive.search_space import SearchSpace
+from ._parametrize import optimizers
+
def objective_function(opt):
+    """Evaluate the standard quadratic objective."""
score = -opt["x1"] * opt["x1"]
return score
def objective_function_m5(opt):
+ """Quadratic objective function shifted by -5."""
score = -(opt["x1"] - 5) * (opt["x1"] - 5)
return score
def objective_function_p5(opt):
+ """Quadratic objective function shifted by +5."""
score = -(opt["x1"] + 5) * (opt["x1"] + 5)
return score
@@ -62,6 +68,7 @@ def objective_function_p5(opt):
@pytest.mark.parametrize(*search_space_para)
@pytest.mark.parametrize(*optimizers)
def test_gfo_opt_wrapper_0(Optimizer, search_space, objective):
+ """Test GFO optimizer wrapper functionality with various configurations."""
search_space = search_space
objective_function = objective
diff --git a/tests/test_optimizers/test_memory.py b/tests/test_optimizers/test_memory.py
index d9a91216..5ee443ce 100644
--- a/tests/test_optimizers/test_memory.py
+++ b/tests/test_optimizers/test_memory.py
@@ -1,12 +1,16 @@
-import pytest
-import numpy as np
+"""Test module for optimizer memory functionality."""
+
+import numpy as np
+import pytest
from hyperactive import Hyperactive
+
from ._parametrize import optimizers
def objective_function(opt):
+    """Evaluate a simple quadratic objective for memory testing."""
score = -opt["x1"] * opt["x1"]
return score
@@ -16,6 +19,7 @@ def objective_function(opt):
@pytest.mark.parametrize(*optimizers)
def test_memory_0(Optimizer):
+ """Test memory functionality with multiple search runs."""
optimizer = Optimizer()
n_iter = 30
diff --git a/tests/test_optimizers/test_optimization_strategies.py b/tests/test_optimizers/test_optimization_strategies.py
index ac83d7cc..7b4baab6 100644
--- a/tests/test_optimizers/test_optimization_strategies.py
+++ b/tests/test_optimizers/test_optimization_strategies.py
@@ -1,15 +1,18 @@
-import pytest
-import numpy as np
+"""Test module for combining optimizers with optimization strategies."""
+
+import numpy as np
+import pytest
from hyperactive import Hyperactive
-from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import HillClimbingOptimizer
+from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from ._parametrize import optimizers
def objective_function(opt):
+ """Two-dimensional quadratic objective function."""
score = -(opt["x1"] * opt["x1"] + opt["x2"] * opt["x2"])
return score
@@ -22,6 +24,7 @@ def objective_function(opt):
@pytest.mark.parametrize(*optimizers)
def test_strategy_combinations_0(Optimizer):
+ """Test custom optimization strategy with multiple optimizers."""
optimizer1 = Optimizer()
optimizer2 = HillClimbingOptimizer()
diff --git a/tests/test_pass_through.py b/tests/test_pass_through.py
index 2fa0ac96..7e2ba2af 100644
--- a/tests/test_pass_through.py
+++ b/tests/test_pass_through.py
@@ -1,11 +1,12 @@
+"""Test module for pass through functionality."""
+
import copy
-import pytest
+
import numpy as np
-import pandas as pd
+import pytest
from hyperactive import Hyperactive
-
search_space = {
"x1": list(np.arange(0, 100, 1)),
}
@@ -20,6 +21,7 @@ def _test_func_1():
def objective_function_0(opt):
+ """Test objective function for integer pass_through values."""
if opt.pass_through["stuff"] != 1:
print("\n pass_through:", opt.pass_through["stuff"])
assert False
@@ -29,6 +31,7 @@ def objective_function_0(opt):
def objective_function_1(opt):
+ """Test objective function for float pass_through values."""
if opt.pass_through["stuff"] != 0.001:
print("\n pass_through:", opt.pass_through["stuff"])
assert False
@@ -38,6 +41,7 @@ def objective_function_1(opt):
def objective_function_2(opt):
+ """Test objective function for list pass_through values."""
if opt.pass_through["stuff"] != [1, 2, 3]:
print("\n pass_through:", opt.pass_through["stuff"])
assert False
@@ -47,6 +51,7 @@ def objective_function_2(opt):
def objective_function_3(opt):
+ """Test objective function for function pass_through values."""
if opt.pass_through["stuff"] != _test_func:
print("\n pass_through:", opt.pass_through["stuff"])
assert False
@@ -79,6 +84,7 @@ def objective_function_3(opt):
@pytest.mark.parametrize(*pass_through_setups)
def test_pass_through_0(pass_through_setup):
+ """Test basic pass_through functionality with different data types."""
objective_function = pass_through_setup[0]
pass_through = pass_through_setup[1]
@@ -93,6 +99,7 @@ def test_pass_through_0(pass_through_setup):
def objective_function_0(opt):
+ """Test objective function for modifying integer pass_through values."""
if opt.nth_iter > 1:
assert opt.pass_through["stuff"] == 2
opt.pass_through["stuff"] = 2
@@ -102,6 +109,7 @@ def objective_function_0(opt):
def objective_function_1(opt):
+ """Test objective function for modifying float pass_through values."""
if opt.nth_iter > 1:
assert opt.pass_through["stuff"] == 0.002
opt.pass_through["stuff"] = 0.002
@@ -111,6 +119,7 @@ def objective_function_1(opt):
def objective_function_2(opt):
+ """Test objective function for modifying list pass_through values."""
if opt.nth_iter > 1:
assert 4 in opt.pass_through["stuff"]
opt.pass_through["stuff"].append(4)
@@ -120,6 +129,7 @@ def objective_function_2(opt):
def objective_function_3(opt):
+ """Test objective function for modifying function pass_through values."""
if opt.nth_iter > 1:
assert opt.pass_through["stuff"] == _test_func_1
opt.pass_through["stuff"] = _test_func_1
@@ -146,6 +156,7 @@ def objective_function_3(opt):
@pytest.mark.parametrize(*pass_through_setups)
def test_pass_through_1(pass_through_setup):
+ """Test pass_through modification during optimization."""
objective_function = pass_through_setup[0]
pass_through = pass_through_setup[1]
@@ -163,6 +174,7 @@ def test_pass_through_1(pass_through_setup):
@pytest.mark.parametrize(*pass_through_setups)
def test_pass_through_2(pass_through_setup):
+ """Test pass_through modification with 2 parallel jobs."""
objective_function = pass_through_setup[0]
pass_through = pass_through_setup[1]
@@ -181,6 +193,7 @@ def test_pass_through_2(pass_through_setup):
@pytest.mark.parametrize(*pass_through_setups)
def test_pass_through_3(pass_through_setup):
+ """Test pass_through modification with 4 parallel jobs."""
objective_function = pass_through_setup[0]
pass_through = pass_through_setup[1]
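
The pass_through tests above rely on two accessors visible in the hunks, opt.pass_through and opt.nth_iter. A minimal sketch of the feature as these tests exercise it (add_search's pass_through argument comes from the test setup, which the hunks do not show):

    import numpy as np

    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(0, 100, 1))}


    def objective_function(opt):
        """Read and mutate the user-supplied pass_through dict."""
        if opt.nth_iter > 1:
            # Mutations made in earlier iterations are visible here.
            assert opt.pass_through["stuff"] == 2
        opt.pass_through["stuff"] = 2
        return -opt["x1"] ** 2


    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=15,
        pass_through={"stuff": 1},
    )
    hyper.run()
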
diff --git a/tests/test_random_state.py b/tests/test_random_state.py
index 210b6c89..987f7ee0 100644
--- a/tests/test_random_state.py
+++ b/tests/test_random_state.py
@@ -1,8 +1,12 @@
+"""Test module for random state functionality."""
+
import numpy as np
+
from hyperactive import Hyperactive
def objective_function(opt):
+ """Two-dimensional quadratic objective function for random state testing."""
score = -(opt["x1"] * opt["x1"] + opt["x2"] * opt["x2"])
return score
@@ -17,6 +21,7 @@ def objective_function(opt):
def test_random_state_n_jobs_0():
+ """Test random state behavior with n_jobs=2."""
n_jobs = 2
hyper = Hyperactive()
@@ -43,6 +48,7 @@ def test_random_state_n_jobs_0():
def test_random_state_n_jobs_1():
+ """Test random state behavior with n_jobs=3."""
n_jobs = 3
hyper = Hyperactive()
@@ -66,6 +72,7 @@ def test_random_state_n_jobs_1():
def test_random_state_n_jobs_2():
+ """Test random state behavior with n_jobs=4."""
n_jobs = 4
hyper = Hyperactive()
@@ -89,6 +96,7 @@ def test_random_state_n_jobs_2():
def test_random_state_0():
+ """Test reproducibility with same random state."""
hyper0 = Hyperactive()
hyper0.add_search(
objective_function,
@@ -116,6 +124,7 @@ def test_random_state_0():
def test_random_state_1():
+ """Test reproducibility with same random state (different value)."""
hyper0 = Hyperactive()
hyper0.add_search(
objective_function,
@@ -143,6 +152,7 @@ def test_random_state_1():
def test_random_state_2():
+ """Test different results with different random states."""
hyper0 = Hyperactive()
hyper0.add_search(
objective_function,
@@ -170,6 +180,7 @@ def test_random_state_2():
def test_no_random_state_0():
+ """Test non-reproducibility without fixed random state."""
hyper0 = Hyperactive()
hyper0.add_search(
objective_function,
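
The random-state tests boil down to one contract: equal seeds reproduce a run, while absent or unequal seeds generally do not. A compact sketch, assuming add_search's random_state parameter as used in the tests:

    import numpy as np

    from hyperactive import Hyperactive

    search_space = {
        "x1": list(np.arange(-100, 100, 1)),
        "x2": list(np.arange(-100, 100, 1)),
    }


    def objective_function(opt):
        """Two-dimensional quadratic objective function."""
        return -(opt["x1"] ** 2 + opt["x2"] ** 2)


    def run_search(random_state):
        """Run one seeded search and return its best score."""
        hyper = Hyperactive()
        hyper.add_search(
            objective_function, search_space, n_iter=20, random_state=random_state
        )
        hyper.run()
        return hyper.best_score(objective_function)


    # Identical seeds reproduce the result exactly.
    assert run_search(1) == run_search(1)
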
diff --git a/tests/test_results.py b/tests/test_results.py
index 6e90d513..ac387169 100644
--- a/tests/test_results.py
+++ b/tests/test_results.py
@@ -1,11 +1,14 @@
-import pytest
+"""Test module for results functionality."""
+
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
def objective_function(opt):
+ """Return simple quadratic objective function for results testing."""
score = -opt["x1"] * opt["x1"]
return score
@@ -16,6 +19,7 @@ def objective_function(opt):
def test_attributes_results_0():
+ """Test search data returns pandas DataFrame."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=100)
hyper.run()
@@ -24,6 +28,7 @@ def test_attributes_results_0():
def test_attributes_results_1():
+ """Test search data contains search space columns."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=100)
hyper.run()
@@ -32,6 +37,7 @@ def test_attributes_results_1():
def test_attributes_results_2():
+ """Test search data contains x1 column."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=100)
hyper.run()
@@ -40,6 +46,7 @@ def test_attributes_results_2():
def test_attributes_results_3():
+ """Test search data contains score column."""
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=100)
hyper.run()
@@ -48,6 +55,7 @@ def test_attributes_results_3():
def test_attributes_results_4():
+ """Test warm start initialization with specific value."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -61,6 +69,7 @@ def test_attributes_results_4():
def test_attributes_results_5():
+ """Test warm start initialization with different value."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -79,6 +88,8 @@ def test_attributes_results_5():
def test_attributes_results_6():
+ """Test memory disabled allows duplicate search space points."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -105,6 +116,8 @@ def objective_function(opt):
def test_attributes_results_7():
+ """Test search data without times parameter excludes timing columns."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -122,11 +135,13 @@ def objective_function(opt):
hyper.run()
search_data = hyper.search_data(objective_function)
- with pytest.raises(Exception) as e_info:
+ with pytest.raises(Exception):
search_data["eval_times"]
def test_attributes_results_8():
+ """Test search data without times parameter excludes iteration timing."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -144,11 +159,13 @@ def objective_function(opt):
hyper.run()
search_data = hyper.search_data(objective_function)
- with pytest.raises(Exception) as e_info:
+ with pytest.raises(Exception):
search_data["iter_times"]
def test_attributes_results_9():
+ """Test search data with times parameter includes timing columns."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
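
The timing-column tests above imply that search_data only carries eval_times/iter_times when timing is requested. A sketch of that contract; the times=True keyword is inferred from the test names rather than shown in the hunks:

    import numpy as np

    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(-10, 10, 1))}


    def objective_function(opt):
        """Simple quadratic objective function."""
        return -opt["x1"] ** 2


    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=50)
    hyper.run()

    # One row per iteration: parameter columns, the score, and (optionally) timings.
    search_data = hyper.search_data(objective_function, times=True)
    assert {"x1", "score", "eval_times", "iter_times"} <= set(search_data.columns)
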
diff --git a/tests/test_results_methods.py b/tests/test_results_methods.py
index 9ba52539..d810c3e0 100644
--- a/tests/test_results_methods.py
+++ b/tests/test_results_methods.py
@@ -1,17 +1,22 @@
-import pytest
+"""Test module for results methods functionality."""
+
import numbers
+
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
def objective_function(opt):
+ """Primary objective function for results testing."""
score = -opt["x1"] * opt["x1"]
return score
def objective_function1(opt):
+ """Secondary objective function for results testing."""
score = -opt["x1"] * opt["x1"]
return score
@@ -22,6 +27,7 @@ def objective_function1(opt):
def test_attributes_best_score_objective_function_0():
+ """Test best score returns numeric value."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -34,6 +40,7 @@ def test_attributes_best_score_objective_function_0():
def test_attributes_best_score_objective_function_1():
+ """Test best score with multiple objective functions."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -52,6 +59,7 @@ def test_attributes_best_score_objective_function_1():
"""
def test_attributes_best_score_search_id_0():
+ # Test best score with search ID.
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -65,6 +73,7 @@ def test_attributes_best_score_search_id_0():
def test_attributes_best_score_search_id_1():
+ # Test best score with multiple search IDs.
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -85,6 +94,7 @@ def test_attributes_best_score_search_id_1():
def test_attributes_best_para_objective_function_0():
+ """Test best parameters returns dictionary."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -97,6 +107,7 @@ def test_attributes_best_para_objective_function_0():
def test_attributes_best_para_objective_function_1():
+ """Test best parameters with multiple objective functions."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -115,6 +126,7 @@ def test_attributes_best_para_objective_function_1():
"""
def test_attributes_best_para_search_id_0():
+ # Test best parameters with search ID.
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -128,6 +140,7 @@ def test_attributes_best_para_search_id_0():
def test_attributes_best_para_search_id_1():
+ # Test best parameters with multiple search IDs.
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -148,6 +161,7 @@ def test_attributes_best_para_search_id_1():
def test_attributes_results_objective_function_0():
+ """Test search results returns DataFrame."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -160,6 +174,7 @@ def test_attributes_results_objective_function_0():
def test_attributes_results_objective_function_1():
+ """Test search results with multiple objective functions."""
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -178,6 +193,7 @@ def test_attributes_results_objective_function_1():
"""
def test_attributes_results_search_id_0():
+ # Test search results with search ID.
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -191,6 +207,7 @@ def test_attributes_results_search_id_0():
def test_attributes_results_search_id_1():
+ # Test search results with multiple search IDs.
hyper = Hyperactive()
hyper.add_search(
objective_function,
@@ -211,6 +228,7 @@ def test_attributes_results_search_id_1():
def test_attributes_result_errors_0():
+ """Test error handling with no search runs."""
with pytest.raises(ValueError):
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15)
@@ -220,6 +238,7 @@ def test_attributes_result_errors_0():
def test_attributes_result_errors_1():
+ """Test error handling with unknown objective function."""
with pytest.raises(ValueError):
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15)
@@ -229,6 +248,7 @@ def test_attributes_result_errors_1():
def test_attributes_result_errors_2():
+ """Test error handling with unknown search ID."""
with pytest.raises(ValueError):
hyper = Hyperactive()
hyper.add_search(objective_function, search_space, n_iter=15)
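
As the tests above show, the results accessors are keyed by the objective function (and, in the commented-out tests, by search_id). A minimal usage sketch:

    import numbers

    import numpy as np

    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(-10, 10, 1))}


    def objective_function(opt):
        """Simple quadratic objective function."""
        return -opt["x1"] ** 2


    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=30)
    hyper.run()

    # Each accessor takes the objective function that produced the results.
    assert isinstance(hyper.best_score(objective_function), numbers.Number)
    assert isinstance(hyper.best_para(objective_function), dict)
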
diff --git a/tests/test_search_spaces.py b/tests/test_search_spaces.py
index f8f59d38..2727c8b6 100644
--- a/tests/test_search_spaces.py
+++ b/tests/test_search_spaces.py
@@ -1,15 +1,20 @@
-import sys, pytest
+"""Test module for search space functionality."""
+
+import sys
+
import numpy as np
import pandas as pd
+import pytest
from hyperactive import Hyperactive
-
if sys.platform.startswith("win"):
pytest.skip("skip these tests for windows", allow_module_level=True)
def test_search_space_0():
+ """Test search space with integer range."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -31,6 +36,8 @@ def objective_function(opt):
def test_search_space_1():
+ """Test search space with float range."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -52,6 +59,8 @@ def objective_function(opt):
def test_search_space_2():
+ """Test search space with numpy float range."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -74,6 +83,8 @@ def objective_function(opt):
def test_search_space_3():
+ """Test search space with function objects."""
+
def func1():
pass
@@ -105,6 +116,8 @@ def objective_function(opt):
def test_search_space_4():
+ """Test search space with class objects."""
+
class class1:
pass
@@ -136,6 +149,8 @@ def objective_function(opt):
def test_search_space_5():
+ """Test search space with initialized class instances."""
+
class class1:
def __init__(self):
pass
@@ -179,6 +194,8 @@ def objective_function(opt):
def test_search_space_6():
+ """Test search space with mixed parameter types."""
+
def objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
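
The search-space tests all exercise one idea: a search space is a plain dict mapping names to lists, and list entries may be arbitrary Python objects, not just numbers. A condensed sketch:

    import numpy as np

    from hyperactive import Hyperactive


    def func1():
        """Placeholder function used as a search-space value."""


    class class1:
        """Placeholder class used as a search-space value."""


    # Every dimension is a list; values may be numbers, strings, functions, or classes.
    search_space = {
        "x1": list(np.arange(-10, 10, 0.1)),
        "string0": ["str0", "str1"],
        "function0": [func1],
        "class0": [class1],
    }


    def objective_function(opt):
        """Objective that receives the original Python objects from the search space."""
        assert callable(opt["function0"])
        return -opt["x1"] ** 2


    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=20)
    hyper.run()
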
diff --git a/tests/test_warm_starts/__init__.py b/tests/test_warm_starts/__init__.py
index e69de29b..98ac7f85 100644
--- a/tests/test_warm_starts/__init__.py
+++ b/tests/test_warm_starts/__init__.py
@@ -0,0 +1 @@
+"""Test package for warm start functionality tests."""
diff --git a/tests/test_warm_starts/test_memory_warm_start.py b/tests/test_warm_starts/test_memory_warm_start.py
index 2aebbc05..b1c43e0f 100644
--- a/tests/test_warm_starts/test_memory_warm_start.py
+++ b/tests/test_warm_starts/test_memory_warm_start.py
@@ -1,46 +1,57 @@
-import time
-import pytest, sys
+"""Test module for memory warm start functionality."""
+
+import sys
+
import numpy as np
-import pandas as pd
+import pytest
from hyperactive import Hyperactive
-
if sys.platform.startswith("win"):
pytest.skip("skip these tests for windows", allow_module_level=True)
def func1():
+ """Test function 1 for search space."""
pass
def func2():
+ """Test function 2 for search space."""
pass
class class1:
+ """Test class for search space functionality."""
+
def __init__(self):
pass
class class2:
+ """Test class for search space functionality."""
+
def __init__(self):
pass
def class_f1():
+ """Return class1 for search space."""
return class1
def class_f2():
+ """Return class2 for search space."""
return class2
def numpy_f1():
+ """Return numpy array [0, 1] for search space."""
return np.array([0, 1])
def numpy_f2():
+ """Return numpy array [1, 0] for search space."""
return np.array([1, 0])
@@ -55,11 +66,13 @@ def numpy_f2():
def objective_function(opt):
+ """Return simple quadratic objective function for testing."""
score = -opt["x1"] * opt["x1"]
return score
def test_memory_warm_start_0():
+ """Test memory warm start from single job to single job."""
hyper0 = Hyperactive()
hyper0.add_search(objective_function, search_space, n_iter=15)
hyper0.run()
@@ -77,6 +90,7 @@ def test_memory_warm_start_0():
def test_memory_warm_start_1():
+ """Test memory warm start from multi-job to single job."""
hyper0 = Hyperactive(distribution="pathos")
hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper0.run()
@@ -94,6 +108,7 @@ def test_memory_warm_start_1():
def test_memory_warm_start_2():
+ """Test memory warm start from single job to multi-job."""
hyper0 = Hyperactive()
hyper0.add_search(objective_function, search_space, n_iter=15)
hyper0.run()
@@ -112,6 +127,7 @@ def test_memory_warm_start_2():
def test_memory_warm_start_3():
+ """Test memory warm start from multi-job to multi-job."""
hyper0 = Hyperactive(distribution="pathos")
hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper0.run()
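
All four memory warm start tests follow the same shape: run a search, extract its search data, and feed it into a second search so already-evaluated positions are served from memory instead of re-scored. A single-job sketch, assuming the memory_warm_start keyword matching the module name:

    import numpy as np

    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(-10, 10, 0.1))}


    def objective_function(opt):
        """Simple quadratic objective function."""
        return -opt["x1"] ** 2


    hyper0 = Hyperactive()
    hyper0.add_search(objective_function, search_space, n_iter=15)
    hyper0.run()
    search_data0 = hyper0.search_data(objective_function)

    # Seed the second run's memory with the first run's evaluations.
    hyper1 = Hyperactive()
    hyper1.add_search(
        objective_function, search_space, n_iter=15, memory_warm_start=search_data0
    )
    hyper1.run()
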
diff --git a/tests/test_warm_starts/test_warm_start.py b/tests/test_warm_starts/test_warm_start.py
index 3a2e2a8d..60c1a7ac 100644
--- a/tests/test_warm_starts/test_warm_start.py
+++ b/tests/test_warm_starts/test_warm_start.py
@@ -1,46 +1,57 @@
-import time
-import pytest, sys
+"""Test module for warm start functionality."""
+
+import sys
+
import numpy as np
-import pandas as pd
+import pytest
from hyperactive import Hyperactive
-
if sys.platform.startswith("win"):
pytest.skip("skip these tests for windows", allow_module_level=True)
def func1():
+ """Test function 1 for search space."""
pass
def func2():
+ """Test function 2 for search space."""
pass
class class1:
+ """Test class for search space functionality."""
+
def __init__(self):
pass
class class2:
+ """Test class for search space functionality."""
+
def __init__(self):
pass
def class_f1():
+ """Return class1 for search space."""
return class1
def class_f2():
+ """Return class2 for search space."""
return class2
def numpy_f1():
+ """Return numpy array [0, 1] for search space."""
return np.array([0, 1])
def numpy_f2():
+ """Return numpy array [1, 0] for search space."""
return np.array([1, 0])
@@ -55,11 +66,13 @@ def numpy_f2():
def objective_function(opt):
+ """Return simple quadratic objective function for testing."""
score = -opt["x1"] * opt["x1"]
return score
def test_warm_start_0():
+ """Test warm start from single job to single job."""
hyper0 = Hyperactive()
hyper0.add_search(objective_function, search_space, n_iter=15)
hyper0.run()
@@ -77,6 +90,7 @@ def test_warm_start_0():
def test_warm_start_1():
+ """Test warm start from multi-job to single job."""
hyper0 = Hyperactive(distribution="pathos")
hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper0.run()
@@ -94,6 +108,7 @@ def test_warm_start_1():
def test_warm_start_2():
+ """Test warm start from single job to multi-job."""
hyper0 = Hyperactive()
hyper0.add_search(objective_function, search_space, n_iter=15)
hyper0.run()
@@ -112,6 +127,7 @@ def test_warm_start_2():
def test_warm_start_3():
+ """Test warm start from multi-job to multi-job."""
hyper0 = Hyperactive(distribution="pathos")
hyper0.add_search(objective_function, search_space, n_iter=15, n_jobs=2)
hyper0.run()
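
The plain warm-start tests differ from the memory variant in what gets reused: the best parameters, injected as starting points through the initialize dict (assuming its "warm_start" key, which takes a list of parameter dicts). A single-job sketch:

    import numpy as np

    from hyperactive import Hyperactive

    search_space = {"x1": list(np.arange(-10, 10, 0.1))}


    def objective_function(opt):
        """Simple quadratic objective function."""
        return -opt["x1"] ** 2


    hyper0 = Hyperactive()
    hyper0.add_search(objective_function, search_space, n_iter=15)
    hyper0.run()
    best_para0 = hyper0.best_para(objective_function)

    # Start the second search from the best parameters found by the first.
    hyper1 = Hyperactive()
    hyper1.add_search(
        objective_function,
        search_space,
        n_iter=15,
        initialize={"warm_start": [best_para0]},
    )
    hyper1.run()
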
diff --git a/tests/test_warm_starts/test_warm_start_smbo.py b/tests/test_warm_starts/test_warm_start_smbo.py
index b818e58e..03c43944 100644
--- a/tests/test_warm_starts/test_warm_start_smbo.py
+++ b/tests/test_warm_starts/test_warm_start_smbo.py
@@ -1,54 +1,53 @@
-import time
-import pytest, sys
+# ruff: noqa: D100, D103
+import sys
+
import numpy as np
-import pandas as pd
+import pytest
from hyperactive import (
Hyperactive,
)
-
from hyperactive.optimizers import (
BayesianOptimizer,
- TreeStructuredParzenEstimators,
ForestOptimizer,
+ TreeStructuredParzenEstimators,
)
-
if sys.platform.startswith("win"):
pytest.skip("skip these tests for windows", allow_module_level=True)
-def func1():
+def _func1():
pass
-def func2():
+def _func2():
pass
-class class1:
+class _class1:
def __init__(self):
pass
-class class2:
+class _class2:
def __init__(self):
pass
-def class_f1():
- return class1
+def _class_f1():
+ return _class1
-def class_f2():
- return class2
+def _class_f2():
+ return _class2
-def numpy_f1():
+def _numpy_f1():
return np.array([0, 1])
-def numpy_f2():
+def _numpy_f2():
return np.array([1, 0])
@@ -56,13 +55,13 @@ def numpy_f2():
"x0": list(range(-3, 3)),
"x1": list(np.arange(-1, 1, 0.001)),
"string0": ["str0", "str1"],
- "function0": [func1, func2],
- "class0": [class_f1, class_f2],
- "numpy0": [numpy_f1, numpy_f2],
+ "function0": [_func1, _func2],
+ "class0": [_class_f1, _class_f2],
+ "numpy0": [_numpy_f1, _numpy_f2],
}
-def objective_function(opt):
+def _objective_function(opt):
score = -opt["x1"] * opt["x1"]
return score
@@ -80,15 +79,15 @@ def objective_function(opt):
@pytest.mark.parametrize("smbo_opt", smbo_opts)
def test_warm_start_smbo_0(smbo_opt):
hyper0 = Hyperactive()
- hyper0.add_search(objective_function, search_space, n_iter=n_iter)
+ hyper0.add_search(_objective_function, search_space, n_iter=n_iter)
hyper0.run()
- search_data0 = hyper0.search_data(objective_function)
+ search_data0 = hyper0.search_data(_objective_function)
smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
hyper1 = Hyperactive()
hyper1.add_search(
- objective_function,
+ _objective_function,
search_space,
n_iter=n_iter,
optimizer=smbo_opt_,
@@ -101,7 +100,7 @@ def test_warm_start_smbo_0(smbo_opt):
def test_warm_start_smbo_1(smbo_opt):
hyper0 = Hyperactive(distribution="pathos")
hyper0.add_search(
- objective_function,
+ _objective_function,
search_space,
n_iter=n_iter,
n_jobs=2,
@@ -109,12 +108,12 @@ def test_warm_start_smbo_1(smbo_opt):
)
hyper0.run()
- search_data0 = hyper0.search_data(objective_function)
+ search_data0 = hyper0.search_data(_objective_function)
smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
hyper1 = Hyperactive()
hyper1.add_search(
- objective_function, search_space, n_iter=n_iter, optimizer=smbo_opt_
+ _objective_function, search_space, n_iter=n_iter, optimizer=smbo_opt_
)
hyper1.run()
@@ -122,15 +121,15 @@ def test_warm_start_smbo_1(smbo_opt):
@pytest.mark.parametrize("smbo_opt", smbo_opts)
def test_warm_start_smbo_2(smbo_opt):
hyper0 = Hyperactive()
- hyper0.add_search(objective_function, search_space, n_iter=n_iter)
+ hyper0.add_search(_objective_function, search_space, n_iter=n_iter)
hyper0.run()
- search_data0 = hyper0.search_data(objective_function)
+ search_data0 = hyper0.search_data(_objective_function)
smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
hyper1 = Hyperactive(distribution="joblib")
hyper1.add_search(
- objective_function,
+ _objective_function,
search_space,
n_iter=n_iter,
n_jobs=2,
@@ -143,15 +142,15 @@ def test_warm_start_smbo_2(smbo_opt):
@pytest.mark.parametrize("smbo_opt", smbo_opts)
def test_warm_start_smbo_3(smbo_opt):
hyper0 = Hyperactive(distribution="pathos")
- hyper0.add_search(objective_function, search_space, n_iter=n_iter, n_jobs=2)
+ hyper0.add_search(_objective_function, search_space, n_iter=n_iter, n_jobs=2)
hyper0.run()
- search_data0 = hyper0.search_data(objective_function)
+ search_data0 = hyper0.search_data(_objective_function)
smbo_opt_ = smbo_opt(warm_start_smbo=search_data0)
hyper1 = Hyperactive(distribution="joblib")
hyper1.add_search(
- objective_function,
+ _objective_function,
search_space,
n_iter=n_iter,
n_jobs=2,