Skip to content

Commit

Permalink
refactor: deprecate cpu (#631)
Browse files Browse the repository at this point in the history
* refactor: deprecate cpu

* chore: update changelog

* refactor: removed CPU constant

* refactor: remove notebook_login function

* removed mentions of 'cpu' and 'notebook_login'
  • Loading branch information
LMMilliken committed Dec 14, 2022
1 parent c81dcff commit e535e6a
Show file tree
Hide file tree
Showing 6 changed files with 4 additions and 27 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Removed

- Remove `cpu` parameter from `create_run` function. ([#631](https://github.com/jina-ai/finetuner/pull/631))

- Remove `notebook_login` function. ([#631](https://github.com/jina-ai/finetuner/pull/631))

### Changed

- Adjust Finetuner based on API changes for Jina AI Cloud. ([#637](https://github.com/jina-ai/finetuner/pull/637))
Expand Down
13 changes: 0 additions & 13 deletions finetuner/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,15 +48,6 @@ def login(force: bool = False, interactive: Optional[bool] = None):
ft.login(force=force, interactive=interactive)


def notebook_login(force: bool = False):
warnings.warn(
message='Function `notebook_login` will be deprecated from Finetuner 0.7.0,'
'please use `login(interactive=True)` instead.',
category=DeprecationWarning,
)
ft.login(force=force, interactive=True)


def list_callbacks() -> Dict[str, callback.CallbackStubType]:
"""List available callbacks."""
return {
Expand Down Expand Up @@ -132,7 +123,6 @@ def fit(
scheduler_step: str = 'batch',
freeze: bool = False,
output_dim: Optional[int] = None,
cpu: bool = True,
device: str = 'cuda',
num_workers: int = 4,
to_onnx: bool = False,
Expand Down Expand Up @@ -196,8 +186,6 @@ def fit(
:param freeze: If set to `True`, will freeze all layers except the last one.
:param output_dim: The expected output dimension as `int`.
If set, will attach a projection head.
:param cpu: Whether to use the CPU. If set to `False` a GPU will be used.
Will be deprecated from 0.7.0.
:param device: Whether to use the CPU, if set to `cuda`, a Nvidia GPU will be used.
otherwise use `cpu` to run a cpu job.
:param num_workers: Number of CPU workers. If `cpu: False` this is the number of
Expand Down Expand Up @@ -240,7 +228,6 @@ def fit(
scheduler_step=scheduler_step,
freeze=freeze,
output_dim=output_dim,
cpu=cpu,
device=device,
num_workers=num_workers,
to_onnx=to_onnx,
Expand Down
1 change: 0 additions & 1 deletion finetuner/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
CONFIG = 'config'
FINETUNER_VERSION = 'finetuner_version'
DEVICE = 'device'
CPU = 'cpu'
CPUS = 'cpus'
GPUS = 'gpus'
NUM_WORKERS = 'num_workers'
Expand Down
8 changes: 0 additions & 8 deletions finetuner/experiment.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import warnings
from dataclasses import fields
from typing import Any, Dict, List, Optional, TextIO, Union

Expand All @@ -11,7 +10,6 @@
BATCH_SIZE,
CALLBACKS,
CONFIG,
CPU,
CREATED_AT,
DESCRIPTION,
DEVICE,
Expand Down Expand Up @@ -186,12 +184,6 @@ def create_run(
device = kwargs.get(DEVICE, 'cuda')
if device == 'cuda':
device = 'gpu'
if kwargs.get(CPU, True):
warnings.warn(
message='Parameter `cpu` will be deprecated from Finetuner 0.7.0,'
'please use `device="cpu" or `device="cuda" instead.`',
category=DeprecationWarning,
)

num_workers = kwargs.get(NUM_WORKERS, 4)
run = self._client.create_run(
Expand Down
4 changes: 0 additions & 4 deletions finetuner/finetuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,6 @@ def login(self, force: bool = False, interactive: Optional[bool] = None):
and create a default experiment.
:param force: If set to true, overwrite token and re-login.
:param interactive: If set to true, will use `notebook_login` as interactive
mode.
Note: Calling `login` is necessary for using finetuner.
"""
Expand Down Expand Up @@ -164,7 +162,6 @@ def create_run(
scheduler_step: str = 'batch',
freeze: bool = False,
output_dim: Optional[int] = None,
cpu: bool = True,
device: str = 'cuda',
num_workers: int = 4,
to_onnx: bool = False,
Expand Down Expand Up @@ -203,7 +200,6 @@ def create_run(
scheduler_step=scheduler_step,
freeze=freeze,
output_dim=output_dim,
cpu=cpu,
device=device,
num_workers=num_workers,
to_onnx=to_onnx,
Expand Down
1 change: 0 additions & 1 deletion tests/unit/test_experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,6 @@ def test_create_run_config():
freeze=False,
output_dim=None,
multi_modal=False,
cpu=False,
device='cuda',
)
assert config == expected_config

0 comments on commit e535e6a

Please sign in to comment.