Skip to content

Commit

Permalink
Use THSetNumThreads instead of omp_set_num_threads
Browse files Browse the repository at this point in the history
Set OMP num threads to one in the data loader.

Fixes #81
Fixes #82
  • Loading branch information
colesbury committed Oct 17, 2016
1 parent d293c17 commit 3931bee
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 12 deletions.
15 changes: 3 additions & 12 deletions torch/csrc/Module.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -141,24 +141,15 @@ static PyObject * THPModule_initExtension(PyObject *self, PyObject *shm_manager_

/*
 * torch.get_num_threads(): returns the number of threads TH uses for
 * parallelized operations, as a Python int.
 *
 * Delegates to THGetNumThreads() so the answer comes from the TH library
 * itself rather than querying omp_get_max_threads() directly here; this
 * keeps the behavior consistent whether or not the extension was compiled
 * with OpenMP (the old #ifdef _OPENMP fallback returned a hard-coded 1).
 */
static PyObject * THPModule_getNumThreads(PyObject *module)
{
  return PyLong_FromLong(THGetNumThreads());
}

/*
 * torch.set_num_threads(n): sets the number of threads TH uses for
 * parallelized operations.
 *
 * Validates that the argument is a Python int (THPUtils_assert raises a
 * TypeError and returns on failure), then delegates to THSetNumThreads()
 * instead of calling omp_set_num_threads() directly — TH handles the
 * no-OpenMP build internally, so no #ifdef is needed here.
 *
 * Returns None on success via Py_RETURN_NONE; the old `return 0;` was a
 * bug, since returning NULL without setting an exception corrupts the
 * interpreter's error state.
 */
static PyObject * THPModule_setNumThreads(PyObject *module, PyObject *arg)
{
  THPUtils_assert(THPUtils_checkLong(arg), "set_num_threads expects an int, "
          "but got %s", THPUtils_typename(arg));
  THSetNumThreads((int)THPUtils_unpackLong(arg));
  Py_RETURN_NONE;
}

bool THPModule_isTensor(PyObject *obj)
Expand Down
1 change: 1 addition & 0 deletions torch/utils/data/dataloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def _processBatch(dataset, indices, collate_fn):


def _workerLoop(dataset, index_queue, data_queue, collate_fn):
torch.set_num_threads(1)
while True:
batch_indices = index_queue.get()

Expand Down

0 comments on commit 3931bee

Please sign in to comment.