From 44d9053baa34438c3a4143f7cde626653d298c03 Mon Sep 17 00:00:00 2001
From: Marc
Date: Mon, 4 Nov 2019 13:54:38 +0100
Subject: [PATCH] Limit the number of pytorch threads to the available cpus

---
 allentune/modules/allennlp_runner.py | 4 ++++
 requirements.txt                     | 1 +
 2 files changed, 5 insertions(+)

diff --git a/allentune/modules/allennlp_runner.py b/allentune/modules/allennlp_runner.py
index c2fcd7d..efe5123 100644
--- a/allentune/modules/allennlp_runner.py
+++ b/allentune/modules/allennlp_runner.py
@@ -8,6 +8,7 @@ from typing import Optional
 
 import pandas as pd
+import torch
 from allennlp.commands.train import train_model
 from allennlp.common.params import Params, parse_overrides, with_fallback
 from allennlp.common.util import import_submodules
 
@@ -50,6 +51,9 @@ def train_func(config, reporter):
             logger.warning(f"No GPU specified, using CPU.")
             params_dict["trainer"]["cuda_device"] = -1
 
+        if args.cpus_per_trial > 0:
+            torch.set_num_threads(args.cpus_per_trial)
+
         params = Params(params_dict)
         logger.debug(f"AllenNLP Configuration: {params.as_dict()}")
 
diff --git a/requirements.txt b/requirements.txt
index eb85651..471edea 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,6 +3,7 @@ ray==0.6.2
 git+git://github.com/allenai/allennlp@27ebcf6ba3e02afe341a5e62cb1a7d5c6906c0c9
 seaborn
 pandas
+torch
 
 # Testing
 pytest
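
For reference, a minimal standalone sketch of the behaviour this patch adds: capping
PyTorch's intra-op thread count from a per-trial CPU allocation. The `--cpus-per-trial`
flag and argparse wiring below are illustrative assumptions, not taken from the patch;
only the `torch.set_num_threads` call mirrors the change itself.

    import argparse

    import torch

    parser = argparse.ArgumentParser()
    # Illustrative flag; a value of 0 leaves PyTorch's default thread count untouched.
    parser.add_argument("--cpus-per-trial", type=int, default=0)
    args = parser.parse_args()

    if args.cpus_per_trial > 0:
        # Cap PyTorch's intra-op thread pool so a single trial does not
        # claim every core when several trials share one machine.
        torch.set_num_threads(args.cpus_per_trial)

    print(f"PyTorch intra-op threads: {torch.get_num_threads()}")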