
Commit

updating organization of deephyper.analysis
Deathn0t committed Apr 4, 2024
1 parent 53bbc4a commit be33cec
Showing 16 changed files with 72 additions and 32 deletions.
3 changes: 2 additions & 1 deletion deephyper/__init__.py
@@ -1,5 +1,5 @@
"""
DeepHyper's software architecture is designed to be modular and extensible. It is built on top of the following main sub-packages:
DeepHyper's software architecture is designed to be modular and extensible. It is built on top of the following main subpackages:
* :mod:`deephyper.ensemble`: Tools to build ensembles of neural networks with uncertainty quantification.
* :mod:`deephyper.nas`: Tools to define neural architecture search space and evaluation strategy.
@@ -12,6 +12,7 @@
DeepHyper installation requires **Python >= 3.7**.
"""

import warnings
from deephyper.__version__ import __version__, __version_suffix__ # noqa: F401

5 changes: 3 additions & 2 deletions deephyper/analysis/__init__.py
@@ -1,7 +1,8 @@
"""
This analysis sub-package contains modules to analyze the results deephyper.
This analysis subpackage contains modules to analyze results returned by deephyper.
"""

from ._rank import rank
from ._matplotlib import figure_size, update_matplotlib_rc

__all__ = ["rank"]
__all__ = ["rank", "figure_size", "update_matplotlib_rc"]
18 changes: 18 additions & 0 deletions deephyper/analysis/hps/__init__.py
@@ -0,0 +1,18 @@
"""Subpackage for hyperparameter search analysis.
"""

from ._hps import (
filter_failed_objectives,
parameters_at_max,
plot_search_trajectory_single_objective_hps,
plot_worker_utilization,
read_results_from_csv,
)

__all__ = [
"filter_failed_objectives",
"parameters_at_max",
"plot_search_trajectory_single_objective_hps",
"plot_worker_utilization",
"read_results_from_csv",
]
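
For orientation, a minimal usage sketch of the relocated helpers, based only on the signatures visible in the _hps.py diff below; the results file name is hypothetical, and the order of the tuple returned by filter_failed_objectives is assumed:

import matplotlib.pyplot as plt

from deephyper.analysis.hps import (
    filter_failed_objectives,
    plot_search_trajectory_single_objective_hps,
    read_results_from_csv,
)

# Load the results of a finished hyperparameter search (hypothetical path).
results = read_results_from_csv("results.csv")

# Separate successful evaluations from failed ones ("F"-prefixed objectives);
# the (successes, failures) ordering of the returned tuple is an assumption.
results_ok, results_failed = filter_failed_objectives(results)

# Plot the single-objective search trajectory and display it.
fig, ax = plot_search_trajectory_single_objective_hps(results_ok, show_failures=False)
plt.show()
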
44 changes: 29 additions & 15 deletions deephyper/analysis/hpo.py → deephyper/analysis/hps/_hps.py
@@ -10,10 +10,24 @@
from matplotlib.ticker import MaxNLocator

from deephyper.analysis import rank
from deephyper.analysis._paxplot import pax_parallel
from deephyper.analysis.hps._paxplot import pax_parallel


def filter_failed_objectives(df: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFrame]:
def read_results_from_csv(file_path: str) -> pd.DataFrame:
"""Read the results of a Hyperparameter Search from a CSV file.
Args:
file_path (str): the path to the CSV file.
Returns:
pd.DataFrame: the results of a Hyperparameter Search.
"""
return pd.read_csv(file_path, index_col=None)


def filter_failed_objectives(
df: pd.DataFrame,
) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""Filter out lines from the DataFrame with failed objectives.
Args:
@@ -24,15 +24,15 @@ def filter_failed_objectives(df: pd.DataFrame) -> Tuple[pd.DataFrame, pd.DataFra
"""
# Single-Objective
if "objective" in df.columns:
if pd.api.types.is_string_dtype(df.objective):
mask = df.objective.str.startswith("F")
if pd.api.types.is_string_dtype(df["objective"]):
mask = df["objective"].str.startswith("F")

df_with_failures = df[mask]

df_without_failures = df[~mask]
df_without_failures.loc[
:, "objective"
] = df_without_failures.objective.astype(float)
df_without_failures.loc[:, "objective"] = df_without_failures[
"objective"
].astype(float)
else:
df_without_failures = df
df_with_failures = df[np.zeros(len(df), dtype=bool)]
@@ -77,31 +91,32 @@ def parameters_at_max(
return config, value


def plot_search_trajectory_single_objective_hpo(
results, show_failures: bool = True, ax=None, **kwargs
def plot_search_trajectory_single_objective_hps(
results, show_failures: bool = True, column="objective", ax=None, **kwargs
):
"""Plot the search trajectory of a Single-Objective Hyperparameter Search.
Args:
results (pd.DataFrame): the results of a Hyperparameter Search.
show_failures (bool, optional): whether to show the failed objectives. Defaults to ``True``.
column (str, optional): the column to use for the y-axis of the plot. Defaults to ``"objective"``.
ax (matplotlib.pyplot.axes): the axes to use for the plot.
Returns:
(matplotlib.pyplot.figure, matplotlib.pyplot.axes): the figure and axes of the plot.
"""

if results.objective.dtype != np.float64:
if results[column].dtype != np.float64:
x = np.arange(len(results))
mask_failed = np.where(results.objective.str.startswith("F"))[0]
mask_success = np.where(~results.objective.str.startswith("F"))[0]
mask_failed = np.where(results[column].str.startswith("F"))[0]
mask_success = np.where(~results[column].str.startswith("F"))[0]
x_success, x_failed = x[mask_success], x[mask_failed]
y_success = results.objective[mask_success].astype(float)
y_success = results[column][mask_success].astype(float)
else:
x = np.arange(len(results))
x_success = x
x_failed = np.array([])
y_success = results.objective
y_success = results[column]

y_min, y_max = y_success.min(), y_success.max()
y_min = y_min - 0.05 * (y_max - y_min)
@@ -223,7 +238,6 @@ def plot_worker_utilization(
ax.set_ylabel("Utilization")
else:
ax.set_ylabel("Active Workers")
ax.legend()
ax.grid(True)
ax.set_xlim(x.min(), x.max())

File renamed without changes.
3 changes: 2 additions & 1 deletion deephyper/core/cli/_new_problem.py
@@ -2,14 +2,15 @@
Create a DeepHyper Problem
--------------------------
Command line to create a new problem sub-package in a DeepHyper projet package.
Command line to create a new problem subpackage in a DeepHyper projet package.
It can be used with:
.. code-block:: console
$ deephyper new-problem hps problem_name
"""

import glob
import os
import pathlib
2 changes: 1 addition & 1 deletion deephyper/evaluator/__init__.py
@@ -1,5 +1,5 @@
"""
This evaluator sub-package provides a common interface to execute isolated tasks with different parallel backends and system properties. This interface is used by search algorithm to perform black-box optimization (the black-box being represented by the ``run``-function).
This evaluator subpackage provides a common interface to execute isolated tasks with different parallel backends and system properties. This interface is used by search algorithm to perform black-box optimization (the black-box being represented by the ``run``-function).
An ``Evaluator``, when instanciated, is bound to a ``run``-function which takes as first argument a dictionnary and optionally has other keyword-arguments. The ``run``-function has to return a Python serializable value (under ``pickle`` protocol). In it's most basic form the return value is a ``float``.
An example ``run``-function is:
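
A minimal sketch of such a run-function, consistent with the interface described above (a dictionary of suggested hyperparameters in, a float out); the "lr" key is hypothetical:

def run(config: dict) -> float:
    # "config" maps hyperparameter names to suggested values, e.g. {"lr": 1e-3}.
    lr = config["lr"]
    # Any picklable value may be returned; in the simplest case it is a float.
    return 1.0 - abs(lr - 1e-2)
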
3 changes: 2 additions & 1 deletion deephyper/evaluator/storage/__init__.py
@@ -1,5 +1,6 @@
"""This sub-package provides an interface to implement new storage clients. The base class defining this interface is the :class:`deephyper.evaluator.storage.Storage`. A storage in our langage is a synonym of memory. Different databases or memory systems can be used through this interface (e.g., key-value storage, relational database, etc.).
"""This subpackage provides an interface to implement new storage clients. The base class defining this interface is the :class:`deephyper.evaluator.storage.Storage`. A storage in our langage is a synonym of memory. Different databases or memory systems can be used through this interface (e.g., key-value storage, relational database, etc.).
"""

from deephyper.evaluator.storage._storage import Storage
from deephyper.evaluator.storage._memory_storage import MemoryStorage

9 changes: 5 additions & 4 deletions deephyper/nas/__init__.py
@@ -1,9 +1,10 @@
"""This sub-package his dedicated to the definition of neural architecture search space and evaluation strategy. The implementation is using Tensorflow 2.X and Keras API. The main concepts are:
"""This subpackage his dedicated to the definition of neural architecture search space and evaluation strategy. The implementation is using Tensorflow 2.X and Keras API. The main concepts are:
* :class:`deephyper.nas.KSearchSpace`: An object to define a search space of neural architectures.
* :mod:`deephyper.nas.run`: A sub-package to define the evaluation strategy of a neural architecture (e.g., training procedure).
* :mod:`deephyper.nas.operation`: A sub-package to define operations of the neural architecture search space.
* :mod:`deephyper.nas.node`: A sub-package to define nodes of the neural architecture search space which is represented as a direct acyclic graph.
* :mod:`deephyper.nas.run`: A subpackage to define the evaluation strategy of a neural architecture (e.g., training procedure).
* :mod:`deephyper.nas.operation`: A subpackage to define operations of the neural architecture search space.
* :mod:`deephyper.nas.node`: A subpackage to define nodes of the neural architecture search space which is represented as a direct acyclic graph.
"""

from ._nx_search_space import NxSearchSpace
from ._keras_search_space import KSearchSpace

3 changes: 2 additions & 1 deletion deephyper/nas/run/__init__.py
@@ -1,5 +1,6 @@
"""The :mod:`deephyper.nas.run` sub-package provides a set of functions which can evaluates configurations generated by search algorithms of DeepHyper.
"""The :mod:`deephyper.nas.run` subpackage provides a set of functions which can evaluates configurations generated by search algorithms of DeepHyper.
"""

from ._run_base_trainer import run_base_trainer
from ._run_distributed_base_trainer import run_distributed_base_trainer
from ._run_debug_arch import run_debug_arch
3 changes: 2 additions & 1 deletion deephyper/problem/__init__.py
@@ -1,8 +1,9 @@
"""This sub-package provides tools to define hyperparameter and neural architecture search problems. Some features of this module are based on the `ConfigSpace <https://automl.github.io/ConfigSpace/master/>`_ project. The main classes provided by this module are:
"""This subpackage provides tools to define hyperparameter and neural architecture search problems. Some features of this module are based on the `ConfigSpace <https://automl.github.io/ConfigSpace/master/>`_ project. The main classes provided by this module are:
- :class:`deephyper.problem.HpProblem`: A class to define a hyperparameter search problem.
- :class:`deephyper.problem.NaProblem`: A class to define a neural architecture search problem.
"""

from ConfigSpace import * # noqa: F401, F403

from ._hyperparameter import HpProblem
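
A short sketch of defining a hyperparameter problem with HpProblem, assuming the usual add_hyperparameter API; the parameter names and ranges are purely illustrative:

from deephyper.problem import HpProblem

problem = HpProblem()
# Integer range, log-scaled float range, and categorical choices (illustrative).
problem.add_hyperparameter((8, 128), "units")
problem.add_hyperparameter((1e-4, 1e-1, "log-uniform"), "learning_rate")
problem.add_hyperparameter(["relu", "tanh"], "activation")
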
2 changes: 1 addition & 1 deletion deephyper/search/__init__.py
@@ -1,4 +1,4 @@
"""This sub-package provides an interface to implement new search algorithms as well as some already implemented search algorithms. One module of this sub-package is specialized for hyperparameter optimization algorithms ``deephyper.search.hps`` and an other is specialized for neural architecture search ``deephyper.search.nas``.
"""This subpackage provides an interface to implement new search algorithms as well as some already implemented search algorithms. One module of this subpackage is specialized for hyperparameter optimization algorithms ``deephyper.search.hps`` and an other is specialized for neural architecture search ``deephyper.search.nas``.
The :class:`deephyper.search.Search` class provides the generic interface of a search.
2 changes: 1 addition & 1 deletion deephyper/search/hps/__init__.py
@@ -1,4 +1,4 @@
"""Sub-package for hyperparameter search algorithms.
"""Subpackage for hyperparameter search algorithms.
.. warning:: All search algorithms are MAXIMIZING the objective function. If you want to MINIMIZE the objective function, you have to return the negative of you objective.
"""
3 changes: 2 additions & 1 deletion deephyper/search/nas/__init__.py
@@ -1,7 +1,8 @@
"""Sub-package for neural architecture search algorithms.
"""Subpackage for neural architecture search algorithms.
.. warning:: All search algorithms are MAXIMIZING the objective function. If you want to MINIMIZE the objective function, you have to return the negative of you objective.
"""

from deephyper.search.nas._base import NeuralArchitectureSearch
from deephyper.search.nas._regevo import RegularizedEvolution
from deephyper.search.nas._agebo import AgEBO
2 changes: 1 addition & 1 deletion deephyper/sklearn/__init__.py
@@ -1,2 +1,2 @@
"""Sub-package providing tools for automl.
"""Subpackage providing tools for automl.
"""
2 changes: 1 addition & 1 deletion deephyper/test/__init__.py
@@ -1,4 +1,4 @@
"""Sub-package dedicated to reusable testing tools for DeepHyper"""
"""Subpackage dedicated to reusable testing tools for DeepHyper"""

from ._command import run
from ._parse_result import parse_result
