Commit 4a7057e

Merge branch 'optuna:master' into fix-lightGBM-argument

Alnusjaponica committed Jul 13, 2023 (2 parents: d6fdcb7 + 88acc1b)

Showing 39 changed files with 850 additions and 259 deletions.
2 changes: 0 additions & 2 deletions .github/workflows/checks-integration.yml
@@ -30,7 +30,6 @@ jobs:
sudo apt-get -y install openmpi-bin libopenmpi-dev libopenblas-dev
# TODO(Shinichi): Remove the version constraint on SQLAlchemy
# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
- name: Install
run: |
python -m pip install -U pip
@@ -42,7 +41,6 @@ jobs:
pip install --progress-bar off -U bayesmark
pip install --progress-bar off -U kurobako
pip install "sqlalchemy<2.0.0"
pip install "pytorch-lightning<2.0.0"
- name: Output installed packages
run: |
4 changes: 2 additions & 2 deletions .github/workflows/coverage.yml
@@ -42,7 +42,7 @@ jobs:
sudo apt-get update
sudo apt-get -y install openmpi-bin libopenmpi-dev libopenblas-dev
# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
# TODO(Shinichi): Remove the version constraint on Numpy
# TODO(c-bata): Remove the version constraint on fakeredis
- name: Install
run: |
@@ -56,7 +56,7 @@ jobs:
pip install --progress-bar off .[test]
pip install --progress-bar off .[optional]
pip install --progress-bar off .[integration] --extra-index-url https://download.pytorch.org/whl/cpu
pip install "pytorch-lightning<2.0.0"
pip install "numpy<1.24.0"
pip install "fakeredis<2.11.1"
echo 'import coverage; coverage.process_startup()' > sitecustomize.py
6 changes: 2 additions & 4 deletions .github/workflows/mac-tests.yml
@@ -41,7 +41,6 @@ jobs:
restore-keys: |
${{ runner.os }}-3.8-${{ env.cache-name }}-${{ hashFiles('**/pyproject.toml') }}
# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
# TODO(c-bata): Remove the version constraint on fakeredis
- name: Install
run: |
@@ -53,7 +52,6 @@ jobs:
optuna --version
pip install --progress-bar off .[test]
pip install --progress-bar off .[optional]
pip install "pytorch-lightning<2.0.0"
pip install "fakeredis<2.11.1"
- name: Output installed packages
@@ -105,7 +103,7 @@ jobs:
brew install open-mpi
brew install openblas
# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
# TODO(Shinichi): Remove the version constraint on Numpy
# TODO(c-bata): Remove the version constraint on fakeredis
- name: Install
run: |
@@ -119,7 +117,7 @@ jobs:
pip install --progress-bar off .[test]
pip install --progress-bar off .[optional]
pip install --progress-bar off .[integration]
pip install "pytorch-lightning<2.0.0"
pip install "numpy<1.24.0"
pip install "fakeredis<2.11.1"
- name: Output installed packages
6 changes: 3 additions & 3 deletions .github/workflows/tests-integration.yml
@@ -20,7 +20,7 @@ jobs:

strategy:
matrix:
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
python-version: ['3.8', '3.9', '3.10', '3.11']

steps:
- name: Checkout
@@ -46,7 +46,7 @@ jobs:
sudo apt-get update
sudo apt-get -y install openmpi-bin libopenmpi-dev libopenblas-dev
# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
# TODO(Shinichi): Remove the version constraint on Numpy
# TODO(c-bata): Remove the version constraint on fakeredis
- name: Install
run: |
@@ -60,7 +60,7 @@ jobs:
pip install --progress-bar off .[test]
pip install --progress-bar off .[optional]
pip install --progress-bar off .[integration] --extra-index-url https://download.pytorch.org/whl/cpu
pip install "pytorch-lightning<2.0.0"
pip install "numpy<1.24.0"
pip install "fakeredis<2.11.1"
- name: Output installed packages
2 changes: 1 addition & 1 deletion .github/workflows/tests-storage.yml
@@ -53,7 +53,7 @@ jobs:
--health-timeout 5s
--health-retries 5
redis:
image: redis:6.2.6
image: redis
ports:
- 6379:6379

6 changes: 2 additions & 4 deletions .github/workflows/windows-tests.yml
@@ -41,7 +41,6 @@ jobs:
restore-keys: |
${{ runner.os }}-3.9-${{ env.cache-name }}-${{ hashFiles('**/pyproject.toml') }}
# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
# TODO(c-bata): Remove the version constraint on fakeredis
- name: Install
run: |
@@ -53,7 +52,6 @@ jobs:
optuna --version
pip install --progress-bar off .[test]
pip install --progress-bar off .[optional]
pip install "pytorch-lightning<2.0.0"
pip install PyQt6 # Install PyQT for using QtAgg as matplotlib backend.
pip install "fakeredis<2.11.1"
@@ -109,7 +107,7 @@ jobs:
with:
mpi: "msmpi"

# TODO(Shinichi): Remove the version constraint on PyTorch Lightning
# TODO(Shinichi): Remove the version constraint on Numpy
# TODO(c-bata): Remove the version constraint on fakeredis
- name: Install
run: |
@@ -122,7 +120,7 @@ jobs:
pip install --progress-bar off .[test]
pip install --progress-bar off .[optional]
pip install --progress-bar off .[integration]
pip install "pytorch-lightning<2.0.0"
pip install "numpy<1.24.0"
pip install "distributed<2023.3.2"
pip install "fakeredis<2.11.1"
6 changes: 5 additions & 1 deletion benchmarks/asv/optimize.py
@@ -63,7 +63,11 @@ def time_optimize(self, args: str) -> None:
"inmemory, tpe, 1000",
"inmemory, cmaes, 1000",
"sqlite, random, 1000",
"cached_sqlite, random, 1000",
"sqlite, tpe, 1000",
"sqlite, cmaes, 1000",
"journal, random, 1000",
"journal, tpe, 1000",
"journal, cmaes, 1000",
)
param_names = ["storage, sampler, n_trials"]
timeout = 600
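
The new "journal, ..." entries in the benchmark matrix exercise Optuna's journal storage backend. For reference, a minimal sketch of running a study against a journal storage follows; the log-file path "./journal.log" and the toy objective are illustrative assumptions, not part of this commit.

import optuna

# A minimal sketch (not part of this commit): a journal storage backed by a
# local log file. The path "./journal.log" is an arbitrary example.
storage = optuna.storages.JournalStorage(
    optuna.storages.JournalFileStorage("./journal.log")
)


def objective(trial: optuna.Trial) -> float:
    x = trial.suggest_float("x", -10, 10)
    return x**2


study = optuna.create_study(storage=storage, sampler=optuna.samplers.TPESampler())
study.optimize(objective, n_trials=100)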
8 changes: 4 additions & 4 deletions docs/source/conf.py
@@ -16,14 +16,14 @@
# import sys
# sys.path.insert(0, os.path.abspath('.'))

import pkg_resources
import warnings

import plotly.io as pio
from sklearn.exceptions import ConvergenceWarning
from sphinx_gallery.sorting import FileNameSortKey

__version__ = pkg_resources.get_distribution("optuna").version
import optuna


# -- Project information -----------------------------------------------------

@@ -32,9 +32,9 @@
author = "Optuna Contributors."

# The short X.Y version
version = __version__
version = optuna.version.__version__
# The full version, including alpha/beta/rc tags
release = __version__
release = optuna.version.__version__

# -- General configuration ---------------------------------------------------

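For context, the docs configuration now takes the version string from the optuna package itself rather than querying pkg_resources. A minimal sketch of the equivalence, assuming optuna is installed:

import optuna
import pkg_resources

# Old lookup: ask setuptools' pkg_resources for the installed distribution.
old_version = pkg_resources.get_distribution("optuna").version

# New lookup: read the version attribute shipped with the package.
new_version = optuna.version.__version__

assert old_version == new_version
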
1 change: 1 addition & 0 deletions docs/source/reference/integration.rst
@@ -26,6 +26,7 @@ BoTorch
:nosignatures:

optuna.integration.BoTorchSampler
optuna.integration.botorch.logei_candidates_func
optuna.integration.botorch.qei_candidates_func
optuna.integration.botorch.qnei_candidates_func
optuna.integration.botorch.qehvi_candidates_func
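The newly listed logei_candidates_func can be handed to BoTorchSampler through its candidates_func argument. A minimal sketch, assuming the BoTorch integration dependencies are installed; the toy objective is an illustrative assumption:

import optuna
from optuna.integration import BoTorchSampler
from optuna.integration.botorch import logei_candidates_func

# A minimal sketch: single-objective optimization with the Log Expected
# Improvement candidates function documented by this commit.
sampler = BoTorchSampler(candidates_func=logei_candidates_func, n_startup_trials=10)


def objective(trial: optuna.Trial) -> float:
    x = trial.suggest_float("x", -10.0, 10.0)
    return (x - 2.0) ** 2


study = optuna.create_study(sampler=sampler)
study.optimize(objective, n_trials=30)
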
4 changes: 4 additions & 0 deletions optuna/artifacts/__init__.py
@@ -0,0 +1,4 @@
from optuna.artifacts._filesystem import FileSystemArtifactStore


__all__ = ["FileSystemArtifactStore"]
63 changes: 63 additions & 0 deletions optuna/artifacts/_filesystem.py
@@ -0,0 +1,63 @@
from __future__ import annotations

import os
import shutil
from typing import TYPE_CHECKING

from optuna.artifacts.exceptions import ArtifactNotFound


if TYPE_CHECKING:
from typing import BinaryIO


class FileSystemArtifactStore:
"""An artifact backend for file systems.
Example:
.. code-block:: python
import optuna
from optuna_dashboard.artifact import upload_artifact
from optuna_dashboard.artifact.file_system import FileSystemBackend
artifact_backend = FileSystemBackend("./artifacts")
def objective(trial: optuna.Trial) -> float:
... = trial.suggest_float("x", -10, 10)
file_path = generate_example_png(...)
upload_artifact(artifact_backend, trial, file_path)
return ...
"""

def __init__(self, base_path: str) -> None:
self._base_path = base_path

def open_reader(self, artifact_id: str) -> BinaryIO:
filepath = os.path.join(self._base_path, artifact_id)
try:
f = open(filepath, "rb")
except FileNotFoundError as e:
raise ArtifactNotFound("not found") from e
return f

def write(self, artifact_id: str, content_body: BinaryIO) -> None:
filepath = os.path.join(self._base_path, artifact_id)
with open(filepath, "wb") as f:
shutil.copyfileobj(content_body, f)

def remove(self, artifact_id: str) -> None:
filepath = os.path.join(self._base_path, artifact_id)
try:
os.remove(filepath)
except FileNotFoundError as e:
raise ArtifactNotFound("not found") from e


if TYPE_CHECKING:
    # A mypy assertion to ensure that FileSystemArtifactStore
    # implements all methods of the ArtifactStore protocol.
from ._protocol import ArtifactStore

_: ArtifactStore = FileSystemArtifactStore("")
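
For reference, a minimal sketch of exercising FileSystemArtifactStore directly, outside of a study; the "./artifacts" directory and the artifact id are illustrative assumptions, not part of this commit.

import io
import os

from optuna.artifacts import FileSystemArtifactStore

# "./artifacts" and "example-id" are arbitrary example values.
os.makedirs("./artifacts", exist_ok=True)
store = FileSystemArtifactStore("./artifacts")

# Write an artifact, read it back, then remove it.
store.write("example-id", io.BytesIO(b"hello artifact"))
with store.open_reader("example-id") as reader:
    assert reader.read() == b"hello artifact"
store.remove("example-id")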
59 changes: 59 additions & 0 deletions optuna/artifacts/_protocol.py
@@ -0,0 +1,59 @@
from __future__ import annotations

from typing import TYPE_CHECKING


try:
from typing import Protocol
except ImportError:
from typing_extensions import Protocol # type: ignore


if TYPE_CHECKING:
from typing import BinaryIO


class ArtifactStore(Protocol):
"""A protocol defining the interface for an artifact backend.
An artifact backend is responsible for managing the storage and retrieval
of artifact data. The backend should provide methods for opening, writing
and removing artifacts.
"""

def open_reader(self, artifact_id: str) -> BinaryIO:
"""Open the artifact identified by the artifact_id.
This method should return a binary file-like object in read mode, similar to
``open(..., mode="rb")``. If the artifact does not exist, an
:exc:`~optuna_dashboard.artifact.exceptions.ArtifactNotFound` exception
should be raised.
Args:
artifact_id: The identifier of the artifact to open.
Returns:
BinaryIO: A binary file-like object that can be read from.
"""
...

def write(self, artifact_id: str, content_body: BinaryIO) -> None:
"""Save the content to the backend.
Args:
artifact_id: The identifier of the artifact to write to.
content_body: The content to write to the artifact.
"""
...

def remove(self, artifact_id: str) -> None:
"""Remove the artifact identified by the artifact_id.
This method should delete the artifact from the backend. If the artifact does not
exist, an :exc:`~optuna_dashboard.artifact.exceptions.ArtifactNotFound` exception
may be raised.
Args:
artifact_id: The identifier of the artifact to remove.
"""
...
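
Because ArtifactStore is a structural Protocol, any class with matching methods conforms without inheriting from it. Below is a hypothetical in-memory store, written only to illustrate the shape of the interface; it is not part of this commit.

from __future__ import annotations

import io
from typing import BinaryIO

from optuna.artifacts.exceptions import ArtifactNotFound


class InMemoryArtifactStore:
    """A hypothetical in-memory store used only to illustrate the protocol."""

    def __init__(self) -> None:
        self._data: dict[str, bytes] = {}

    def open_reader(self, artifact_id: str) -> BinaryIO:
        if artifact_id not in self._data:
            raise ArtifactNotFound(f"Artifact {artifact_id} not found.")
        return io.BytesIO(self._data[artifact_id])

    def write(self, artifact_id: str, content_body: BinaryIO) -> None:
        self._data[artifact_id] = content_body.read()

    def remove(self, artifact_id: str) -> None:
        if artifact_id not in self._data:
            raise ArtifactNotFound(f"Artifact {artifact_id} not found.")
        del self._data[artifact_id]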
12 changes: 12 additions & 0 deletions optuna/artifacts/exceptions.py
@@ -0,0 +1,12 @@
from optuna.exceptions import OptunaError


class ArtifactNotFound(OptunaError):
"""Exception raised when an artifact is not found.
It is typically raised while calling
:meth:`~optuna_dashboard.artifact.protocol.ArtifactBackend.open` or
:meth:`~optuna_dashboard.artifact.protocol.ArtifactBackend.remove` methods.
"""

...
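
A short sketch of the intended failure mode, assuming the FileSystemArtifactStore from this commit, an example "./artifacts" directory, and a hypothetical missing artifact id:

from optuna.artifacts import FileSystemArtifactStore
from optuna.artifacts.exceptions import ArtifactNotFound

store = FileSystemArtifactStore("./artifacts")  # "./artifacts" is an example path

try:
    store.open_reader("missing-id")  # "missing-id" does not exist in the store
except ArtifactNotFound:
    print("No such artifact; regenerate or skip it.")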