Remove setting of PT_HPU_LAZY_MODE=2 in training_args.py (huggingface…
vivekgoe authored Jan 10, 2024
1 parent 4e67153 commit 979c132
Showing 3 changed files with 6 additions and 5 deletions.
2 changes: 1 addition & 1 deletion docs/Dockerfile
@@ -16,6 +16,6 @@ RUN npm install npm@9.8.1 -g && \
     npm install n -g && \
     n latest
 
-RUN git clone $clone_url && cd optimum-habana && git checkout $commit_sha
+RUN git clone $clone_url optimum-habana && cd optimum-habana && git checkout $commit_sha
 RUN python3 -m pip install --no-cache-dir --upgrade pip
 RUN python3 -m pip install --no-cache-dir ./optimum-habana[quality]
7 changes: 3 additions & 4 deletions optimum/habana/transformers/training_args.py
@@ -648,12 +648,11 @@ def _setup_devices(self) -> "torch.device":

         if self.use_lazy_mode:
             logger.info("Enabled lazy mode.")
-        # TODO: remove the block below when upgrade to SynapseAI 1.13 is done
-        # as eager mode will not be available anymore
         elif not self.torch_compile:
             if os.getenv("PT_HPU_LAZY_MODE", "1") != "0":
-                os.environ["PT_HPU_LAZY_MODE"] = "2"
-                logger.info("Enabled eager mode because use_lazy_mode=False.")
+                raise ValueError(
+                    "Lazy mode or compile mode not enabled => eager mode should be enabled using PT_HPU_LAZY_MODE=0"
+                )
 
         if self.deepspeed:
             # Need to do similar for Accelerator init
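With this change, eager mode is no longer enabled implicitly: when neither lazy mode nor torch.compile is active, training fails fast unless the user has exported PT_HPU_LAZY_MODE=0 before launch. A minimal standalone sketch of the new behavior (the function name is illustrative; the real check lives inside `_setup_devices` above):

```python
import os

def check_eager_mode_requested() -> None:
    # Mirrors the check added in this commit: if neither lazy mode nor
    # compile mode is enabled, eager mode must have been requested
    # explicitly via the environment, e.g.
    #   PT_HPU_LAZY_MODE=0 python your_training_script.py ...
    if os.getenv("PT_HPU_LAZY_MODE", "1") != "0":
        raise ValueError(
            "Lazy mode or compile mode not enabled => eager mode should be enabled using PT_HPU_LAZY_MODE=0"
        )
```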
2 changes: 2 additions & 0 deletions tests/test_trainer.py
@@ -29,6 +29,7 @@
 import numpy as np
 from huggingface_hub import HfFolder, delete_repo, list_repo_commits
 from parameterized import parameterized
+from pytest import mark
 from requests.exceptions import HTTPError
 from transformers import IntervalStrategy, PretrainedConfig, is_torch_available
 from transformers.hyperparameter_search import ALL_HYPERPARAMETER_SEARCH_BACKENDS
@@ -706,6 +707,7 @@ def setUp(self):
         self.n_epochs = args.num_train_epochs
         self.batch_size = args.train_batch_size
 
+    @mark.skip("Skip this test until PT_HPU_LAZY_MODE=0 is set as default for all tests")
     def test_eager_mode(self):
         train_dataset = RegressionDataset()
         eval_dataset = RegressionDataset()
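For context, pytest's unconditional `mark.skip` reports the test as skipped with the given reason on every run. A hypothetical alternative, not part of this commit, would be a conditional skip keyed on the environment variable:

```python
import os

from pytest import mark

# Hypothetical variant (not in this commit): skip only when eager mode
# was not requested, instead of skipping unconditionally.
@mark.skipif(
    os.getenv("PT_HPU_LAZY_MODE", "1") != "0",
    reason="eager mode requires PT_HPU_LAZY_MODE=0",
)
def test_eager_mode():
    ...
```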
