Skip to content

Commit

Permalink
tuner: Renamed Optimizer to Tuner
Browse files Browse the repository at this point in the history
Fixes: #1200
  • Loading branch information
programmer290399 committed Oct 1, 2021
1 parent a434df7 commit 002d7f5
Show file tree
Hide file tree
Showing 9 changed files with 32 additions and 31 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Spacy `model_name_or_path` was changed to `model_name`. Functionality is the
  same; it still accepts a name or a path.
- Renamed `accuracy()` to `score()`.
- Renamed `Optimizer` to `Tuner`.
### Fixed
- Record object key properties are now always strings

Expand Down
10 changes: 5 additions & 5 deletions dffml/optimizer/__init__.py → dffml/tuner/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
# SPDX-License-Identifier: MIT
# Copyright (c) 2020 Intel Corporation
"""
This is the Optimizer plugin
This is the Tuner plugin
"""
from .optimizer import (
OptimizerConfig,
OptimizerContext,
Optimizer,
from .tuner import (
TunerConfig,
TunerContext,
Tuner,
)
from .parameter_grid import ParameterGrid
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
field,
)
from ..high_level.ml import train, score
from .optimizer import Optimizer, OptimizerContext
from .tuner import Tuner, TunerContext
from ..util.entrypoint import entrypoint
from ..source.source import BaseSource, Record
from ..accuracy.accuracy import AccuracyScorer, AccuracyContext
Expand All @@ -20,9 +20,9 @@ class ParameterGridConfig:
parameters: dict = field("Parameters to be optimized")


class ParameterGridContext(OptimizerContext):
class ParameterGridContext(TunerContext):
"""
Parameter Grid Optimizer
Parameter Grid Tuner
"""

async def optimize(
Expand Down Expand Up @@ -92,7 +92,7 @@ async def optimize(


@entrypoint("parameter_grid")
class ParameterGrid(Optimizer):
class ParameterGrid(Tuner):

CONFIG = ParameterGridConfig
CONTEXT = ParameterGridContext
18 changes: 9 additions & 9 deletions dffml/optimizer/optimizer.py → dffml/tuner/tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,12 @@


@config
class OptimizerConfig:
class TunerConfig:
pass


class OptimizerContext(abc.ABC, BaseDataFlowFacilitatorObjectContext):
def __init__(self, parent: "Optimizer") -> None:
class TunerContext(abc.ABC, BaseDataFlowFacilitatorObjectContext):
def __init__(self, parent: "Tuner") -> None:
self.parent = parent

@abc.abstractmethod
Expand Down Expand Up @@ -58,15 +58,15 @@ async def optimize(
raise NotImplementedError()


@base_entry_point("dffml.optimizer", "optimizer")
class Optimizer(BaseDataFlowFacilitatorObject):
@base_entry_point("dffml.tuner", "tuner")
class Tuner(BaseDataFlowFacilitatorObject):
"""
Abstract base class which should be derived from
and implemented using various optimizers.
and implemented using various tuners.
"""

CONFIG = OptimizerConfig
CONTEXT = OptimizerContext
CONFIG = TunerConfig
CONTEXT = TunerContext

def __call__(self) -> OptimizerContext:
def __call__(self) -> TunerContext:
return self.CONTEXT(self)
2 changes: 1 addition & 1 deletion docs/plugins/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,6 @@ open an `issue <https://github.com/intel/dffml/issues/new?assignees=&labels=docu
dffml_source
dffml_model
dffml_accuracy
dffml_optimizer
dffml_tuner
dffml_operation
dffml_service_cli
6 changes: 3 additions & 3 deletions examples/notebooks/tuning_models.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,7 @@
"execution_count": 13,
"source": [
"from dffml.accuracy import MeanSquaredErrorAccuracy\n",
"from dffml.optimizer import ParameterGrid\n",
"from dffml.tuner import ParameterGrid\n",
"\n",
"parameter_optim = ParameterGrid(\n",
" parameters = {\n",
Expand All @@ -358,8 +358,8 @@
" }\n",
")\n",
"async with contextlib.AsyncExitStack() as astack:\n",
" optimizer = await astack.enter_async_context(parameter_optim)\n",
" octx = await astack.enter_async_context(optimizer())\n",
" tuner = await astack.enter_async_context(parameter_optim)\n",
" octx = await astack.enter_async_context(tuner())\n",
"\n",
"tuned_accuracy1 = await octx.optimize(model, predict_feature, scorer, train_data, test_data)\n",
"print(\"Tuned Accuracy:\", tuned_accuracy1)"
Expand Down
6 changes: 0 additions & 6 deletions scripts/docs/templates/dffml_optimizer.rst

This file was deleted.

6 changes: 6 additions & 0 deletions scripts/docs/templates/dffml_tuner.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Tuner
=====

Tuners are implementations of :class:`dffml.tuner.Tuner`.
They abstract the usage of tuning methods.

6 changes: 3 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,9 +169,9 @@ class InstallException(Exception):
"mse = dffml.accuracy.mse:MeanSquaredErrorAccuracy",
"clf = dffml.accuracy.clf:ClassificationAccuracy",
],
# Optimizer
"dffml.optimizer": [
"parameter_grid = dffml.optimizer.parameter_grid:ParameterGrid",
# Tuner
"dffml.tuner": [
"parameter_grid = dffml.tuner.parameter_grid:ParameterGrid",
],
},
)

0 comments on commit 002d7f5

Please sign in to comment.