
Commit

Start a new unstable development branch and remove multiprocess and dill in favour of futures
gb119 committed Mar 7, 2021
1 parent ed7a48d commit 8e149f8
Showing 20 changed files with 348 additions and 341 deletions.
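
The thread running through the folder changes below is a single API swap: the multiprocess/dill Pool with its imap() plus explicit close()/join() gives way to a concurrent.futures executor exposing map() and shutdown(). A minimal sketch of the new pattern, with a hypothetical worker function rather than code from this commit:

from concurrent import futures
from functools import partial

def scale(value, factor=1):
    """Stand-in for the per-item worker functions used in the folder code."""
    return value * factor

# One executor object replaces the Pool/imap/close/join bookkeeping.
executor = futures.ThreadPoolExecutor(max_workers=2)
results = list(executor.map(partial(scale, factor=3), range(5)))
executor.shutdown()
print(results)  # [0, 3, 6, 9, 12]
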
2 changes: 1 addition & 1 deletion Makefile
@@ -14,7 +14,7 @@ clean:
- find -name '__pycache__' -exec rm -rf {} \;

test:
pytest -n `python -c 'import os;print(min(8,os.cpu_count()))'`
pytest -s -n `python -c 'import os;print(min(8,os.cpu_count()))'`

check:
prospector -E -0 --profile-path=. -P .landscape.yml Stoner > prospector-report.txt
2 changes: 1 addition & 1 deletion Stoner/__init__.py
@@ -35,7 +35,7 @@
Options = _Options()


__version_info__ = ("0", "10", "0rc5")
__version_info__ = ("0", "11", "0dev")
__version__ = ".".join(__version_info__)

__homepath__ = pathlib.Path(__file__).parent.resolve()
6 changes: 6 additions & 0 deletions Stoner/folders/core.py
@@ -261,6 +261,7 @@ def __new__(cls, *args, **kargs):
self._root = "."
self._default_store = None
self.directory = None
self.executor = None
return self

def __init__(self, *args, **kargs):
@@ -696,6 +697,11 @@ def __clone__(self, other=None, attrs_only=False):
######## Methods to implement the MutableMapping abstract methods #########
######## And to provide a mapping interface that mainly access groups #####

def __del__(self):
"""Clean up the exececutor if it is defined."""
if self.executor:
self.executor.shutdown()

def __getitem__(self, name):
"""Try to get either a group or an object.
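
The new executor attribute and __del__ method give each folder ownership of one executor that is released when the folder goes away. A rough sketch of that ownership pattern, using a hypothetical stand-in class and a concurrent.futures thread pool:

from concurrent import futures

class FolderLike:
    """Hypothetical illustration of the executor-owning pattern added here."""

    def __init__(self):
        self.executor = None  # created lazily, e.g. by get_pool()

    def ensure_executor(self):
        if self.executor is None:
            self.executor = futures.ThreadPoolExecutor(max_workers=4)
        return self.executor

    def __del__(self):
        # Mirror of the new __del__ above: release worker threads/processes.
        if self.executor:
            self.executor.shutdown()
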
9 changes: 4 additions & 5 deletions Stoner/folders/each.py
@@ -356,9 +356,11 @@ def iter(self, func, *args, **kargs):
_byname = kargs.pop("_byname", False)
_serial = kargs.pop("_serial", False)
self._folder.fetch()  # Prefetch the folder in case we can do it in parallel
p, imap = get_pool(_serial)
self._folder.executor = get_pool(self._folder, _serial)
for ix, (f, ret) in enumerate(
imap(partial(_worker, func=func, args=args, kargs=kargs, byname=_byname), self._folder)
self._folder.executor.map(
partial(_worker, func=func, args=args, kargs=kargs, byname=_byname), self._folder
)
):
new_d = f
if self._folder.debug:
@@ -377,6 +379,3 @@ def iter(self, func, *args, **kargs):
name = self._folder.__names__()[ix]
self._folder.__setter__(name, new_d)
yield ret
if p is not None:
p.close()
p.join()
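
iter() now hands the partial-wrapped worker to the folder's executor via map(), which returns results in input order, so enumerate() still lines up with __names__() when writing results back. An illustrative sketch with a made-up worker and data (not this module's _worker):

from concurrent import futures
from functools import partial

def worker(value, offset=0):
    return value, value + offset  # (original, result), like the (f, ret) pairs in iter()

values = [1, 2, 3]
with futures.ThreadPoolExecutor(max_workers=2) as executor:
    for ix, (original, result) in enumerate(executor.map(partial(worker, offset=10), values)):
        print(ix, original, result)  # 0 1 11 / 1 2 12 / 2 3 13
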
7 changes: 2 additions & 5 deletions Stoner/folders/mixins.py
@@ -325,16 +325,13 @@ def fetch(self):
With multiprocess enabled this will parallel load the contents of the folder into memory.
"""
p, imap = get_pool()
for (f, name) in imap(
self.executor = get_pool(self)
for (f, name) in self.executor.map(
partial(_loader, loader=self.loader, typ=self._type, directory=self.directory), self.not_loaded
):
self.__setter__(
name, self.on_load_process(f)
) # This doesn't run on_load_process in parallel, but it's not expensive enough to make it worth it.
if p is not None:
p.close()
p.join()
return self

def getlist(self, **kargs):
38 changes: 25 additions & 13 deletions Stoner/folders/utils.py
@@ -10,19 +10,31 @@
"removeDisallowedFilenameChars",
]
import os.path as path
from os import cpu_count
import re
import string
import fnmatch
import pathlib
from multiprocessing.pool import ThreadPool
from concurrent import futures

from numpy import array
import multiprocess as multiprocessing

from Stoner.compat import string_types, _pattern_type
from Stoner.tools import get_option


class _fake_executor:

"""Minimal class to fake the bits of the executor protocol that we need."""

def __init__(self, *args, **kargs):
"""Fake constructor."""
self.map = map # set the map method

def shutdown(self):
"""Fake shutdown method."""


def pathsplit(pth):
"""Split pth into a sequence of individual parts with path.split."""
pth = pathlib.Path(pth)
@@ -105,27 +117,27 @@ def filter_files(files, patterns, keep=True):
return files


def get_pool(_serial=False):
"""Get a Pool and map implementation depending on options.
def get_pool(folder=None, _serial=False):
"""Get a concurrent.futures compatible executor.
Returns:
Pool(),map: Pool object if possible and map implementation.
(futures.Executor):
Executor on which to run the distributed job.
"""
if get_option("multiprocessing") and not _serial:
try:
if get_option("threading"):
p = ThreadPool(processes=int(multiprocessing.cpu_count() - 1))
executor = futures.ThreadPoolExecutor(max_workers=cpu_count())
else:
p = multiprocessing.Pool(int(multiprocessing.cpu_count() / 2))
imap = p.imap
executor = futures.ProcessPoolExecutor(max_workers=cpu_count())
except (ArithmeticError, AttributeError, LookupError, RuntimeError, NameError, OSError, TypeError, ValueError):
# Fallback to non-multiprocessing if necessary
p = None
imap = map
executor = None
else:
p = None
imap = map
return p, imap
executor = _fake_executor()
if getattr(folder, "executor", False):
folder.executor.shutdown()
return executor


def removeDisallowedFilenameChars(filename):
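
The point of _fake_executor is that callers can treat whatever get_pool() returns uniformly: ThreadPoolExecutor, ProcessPoolExecutor and the serial fallback all provide map() and shutdown(), so the old `if p is not None: p.close(); p.join()` branches disappear. A hypothetical usage sketch with a stand-in for the serial fallback:

class SerialExecutor:
    """Stand-in for _fake_executor: runs the job in-process with the builtin map."""

    def __init__(self, *args, **kargs):
        self.map = map  # same call shape as Executor.map

    def shutdown(self):
        """Nothing to release in the serial case."""

executor = SerialExecutor()
print(list(executor.map(lambda x: x ** 2, range(5))))  # [0, 1, 4, 9, 16]
executor.shutdown()
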
4 changes: 2 additions & 2 deletions Stoner/plot/formats.py
@@ -482,10 +482,10 @@ def customise_axes(self, ax, plot):
In the DefaultPlotStyle class this method is used to set SI units
plotting mode for all axes.
"""
ax.xaxis.set_major_locator(self.xlocater())
ax.yaxis.set_major_locator(self.ylocater())
ax.set_xticklabels(ax.get_xticks(), size=self.template_xtick__labelsize)
ax.set_yticklabels(ax.get_yticks(), size=self.template_ytick__labelsize)
ax.xaxis.set_major_locator(self.xlocater())
ax.yaxis.set_major_locator(self.ylocater())
if isinstance(self.xformatter, Formatter):
xformatter = self.xformatter
else:
14 changes: 7 additions & 7 deletions doc/samples/plot-folder-test.py
@@ -33,13 +33,13 @@ def extra(i, j, d):
d.xlabel = r"Field $\mu_0H\,$"
d.ylabel = "Abs. (arb)"
d.plt_legend(loc=3)
d.annotate_fit(FMR_Power, fontdict={"size": 8}, x=0.05, y=0.25)
d.annotate_fit(FMR_Power, mode="eng", fontdict={"size": 8}, x=0.05, y=0.25)


def do_fit(f):
"""Fit just one set of data."""
f.template = template
f["cut"] = f.threshold(1.75e5, rising=False, falling=True)
f["cut"] = f.threshold(0.75e5, rising=False, falling=True)
f["Frequency"] = (f // "Frequency").mean()
f.lmfit(
FMR_Power, result=True, header="Fit", bounds=lambda x, r: x < f["cut"]
@@ -109,9 +109,9 @@ def do_fit(f):

# Merge the two field signs into a single file, taking care of the error columns too
result = resfldr[0].clone
for c in [0, 2, 4, 6, 8, 9, 10]:
for c in [0, 1, 3, 5, 7]:
result.data[:, c] = (resfldr[1][:, c] + resfldr[0][:, c]) / 2.0
for c in [1, 3, 5, 7]:
for c in [2, 4, 6, 8]:
result.data[:, c] = gmean((resfldr[0][:, c], resfldr[1][:, c]), axis=0)

# Doing the Kittel fit with an orthogonal distance regression as we have x errors not y errors
@@ -121,8 +121,8 @@ def do_fit(f):
)
result.setas[-1] = "y"

result.template.yformatter = TexEngFormatter
result.template.xformatter = TexEngFormatter
# result.template.yformatter = TexEngFormatter
# result.template.xformatter = TexEngFormatter
result.labels = None
result.figure(figsize=(6, 8))
result.subplot(211)
@@ -133,7 +133,7 @@ def do_fit(f):

# Get alpha
result.subplot(212)
result.setas(y="Delta_H", e="Delta_H.stderr", x="Freq")
result.setas(y="Delta_H", e="Delta_H err", x="Freq")
result.y /= mu_0
result.e /= mu_0
result.lmfit(Linear, result=True, header="Width", output="report")
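
In the merge loop above, the sample now averages the value columns (indices 0, 1, 3, 5, 7) arithmetically and combines the error columns (2, 4, 6, 8) with a geometric mean. A worked sketch with made-up numbers, assuming scipy.stats.gmean as used in the sample:

import numpy as np
from scipy.stats import gmean

branch_a = np.array([10.0, 0.5])  # [value, error] from one field sign
branch_b = np.array([12.0, 0.8])  # [value, error] from the other field sign

merged_value = (branch_a[0] + branch_b[0]) / 2.0   # arithmetic mean of the values
merged_error = gmean([branch_a[1], branch_b[1]])   # sqrt(0.5 * 0.8), about 0.632
print(merged_value, merged_error)  # 11.0 0.632...
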
4 changes: 1 addition & 3 deletions scripts/VSManalysis_v2.py
@@ -217,9 +217,7 @@ def editData(Data, operations):
Data = Stoner.Data("EditedFiles/" + pathsplit[0] + "_edit.txt")
break
except ValueError:
timeout += (
1
) # if get 5 files unreadable in a row then finish the program
timeout += 1  # if we get 5 unreadable files in a row then finish the program
print("Could not read file ", path)
if timeout <= 5:
break
4 changes: 2 additions & 2 deletions tests/Stoner/Image/test_kerr.py
@@ -11,15 +11,15 @@
from Stoner import Data,__home__
import numpy as np
import pytest
import sys
from os import path
import os
import matplotlib.pyplot as plt

import warnings

import Stoner
Stoner.Options.multiprocessing = False
Stoner.Options.threading = True


#data arrays for testing - some useful small images for tests

1 change: 1 addition & 0 deletions tests/Stoner/Image/test_stack.py
@@ -12,6 +12,7 @@
import Stoner
import pytest
Stoner.Options.multiprocessing=False
Stoner.Options.threading=True

testdir=os.path.join(os.path.dirname(__file__),"coretestdata","testims")

77 changes: 44 additions & 33 deletions tests/Stoner/analysis/fitting/test_mixins.py
@@ -8,7 +8,7 @@
@author: phygbu
"""
import unittest
import pytest
import sys
import os.path as path
import numpy as np
@@ -19,56 +19,67 @@
sys.path.insert(0,pth)
from Stoner import Data

from Stoner.analysis.fitting.mixins import _curve_fit_result

def fit(x,a,b,c):
"""Fitting function"""
return a*x**2+b*x+c

class AnalysisMixins_test(unittest.TestCase):

"""Path to sample Data File"""
datadir=path.join(pth,"sample-data")
datadir=path.join(pth,"sample-data")


def setUp(self):
def test_cuve_fit():
x_data=np.linspace(-10,10,101)
y_data=0.01*x_data**2+0.3*x_data-2

x_data=np.linspace(-10,10,101)
y_data=0.01*x_data**2+0.3*x_data-2
y_data*=np.random.normal(size=101,loc=1.0,scale=0.01)
x_data+=np.random.normal(size=101,scale=0.02)

y_data*=np.random.normal(size=101,loc=1.0,scale=0.01)
x_data+=np.random.normal(size=101,scale=0.02)
sdata=Data(x_data,y_data,column_headers=["X","Y"])
sdata.setas="xy"
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=sdata.curve_fit(fit,p0=[0.02,0.2,2],output=output)
assert isinstance(res,fmt),"Failed to get expected output from curve_fit for {} (got {})".format(output,type(res))

self.data=Data(x_data,y_data,column_headers=["X","Y"])
self.data.setas="xy"
def test_lmfit():
x_data=np.linspace(-10,10,101)
y_data=0.01*x_data**2+0.3*x_data-2

y_data*=np.random.normal(size=101,loc=1.0,scale=0.01)
x_data+=np.random.normal(size=101,scale=0.02)

sdata=Data(x_data,y_data,column_headers=["X","Y"])
sdata.setas="xy"
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=sdata.lmfit(fit,p0=[0.02,0.2,2],output=output)
assert isinstance(res,fmt),"Failed to get expected output from lmfit for {} (got {})".format(output,type(res))

def test_cuve_fit(self):
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=self.data.curve_fit(fit,p0=[0.02,0.2,2],output=output)
self.assertTrue(isinstance(res,fmt),"Failed to get expected output from curve_fit for {} (got {})".format(output,type(res)))
def test_odr():
x_data=np.linspace(-10,10,101)
y_data=0.01*x_data**2+0.3*x_data-2

def test_lmfit(self):
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=self.data.lmfit(fit,p0=[0.02,0.2,2],output=output)
self.assertTrue(isinstance(res,fmt),"Failed to get expected output from lmfit for {} (got {})".format(output,type(res)))
y_data*=np.random.normal(size=101,loc=1.0,scale=0.01)
x_data+=np.random.normal(size=101,scale=0.02)

def test_odr(self):
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=self.data.odr(fit,p0=[0.02,0.2,2],output=output)
self.assertTrue(isinstance(res,fmt),"Failed to get expected output from idr for {} (got {})".format(output,type(res)))
sdata=Data(x_data,y_data,column_headers=["X","Y"])
sdata.setas="xy"
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=sdata.odr(fit,p0=[0.02,0.2,2],output=output)
assert isinstance(res,fmt),"Failed to get expected output from odr for {} (got {})".format(output,type(res))

def test_differential_evolution(self):
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=self.data.differential_evolution(fit,p0=[0.02,0.2,2],output=output)
self.assertTrue(isinstance(res,fmt),"Failed to get expected output from differential_evolution for {} (got {})".format(output,type(res)))
def test_differential_evolution():
x_data=np.linspace(-10,10,101)
y_data=0.01*x_data**2+0.3*x_data-2

y_data*=np.random.normal(size=101,loc=1.0,scale=0.01)
x_data+=np.random.normal(size=101,scale=0.02)

sdata=Data(x_data,y_data,column_headers=["X","Y"])
sdata.setas="xy"
for output,fmt in zip(["fit","row","full","dict","data"],[tuple,np.ndarray,tuple,dict,Data]):
res=sdata.differential_evolution(fit,p0=[0.02,0.2,2],output=output)
assert isinstance(res,fmt),"Failed to get expected output from differential_evolution for {} (got {})".format(output,type(res))

if __name__=="__main__": # Run some tests manually to allow debugging
test=AnalysisMixins_test("test_cuve_fit")
test.setUp()
unittest.main()
#test.test_apply()
pytest.main(["--pdb",__file__])

