New approximation methods: KNeighbors and RadiusNeighbors #145

Merged · 1 commit · Mar 30, 2021
4 changes: 2 additions & 2 deletions .travis.yml
@@ -46,13 +46,13 @@ install:
conda create --yes -n test python="3.6";
fi
- source activate test
- pip install numpy scipy matplotlib pip nose sphinx==1.4 gpy torch
- pip install numpy scipy matplotlib pip nose sphinx==1.4 gpy torch sklearn
- pip install setuptools
- pip install coveralls
- pip install coverage
- python setup.py install

script:
script:
- coverage run test.py

after_success:
2 changes: 1 addition & 1 deletion README.md
@@ -47,7 +47,7 @@
See the [**Examples**](#examples) section below and the [**Tutorials**](tutorials/README.md) to have an idea of the potential of this package.

## Dependencies and installation
**EZyRB** requires `numpy`, `scipy`, `matplotlib`, `vtk`, `nose` (for local
**EZyRB** requires `numpy`, `scipy`, `sklearn`, `matplotlib`, `vtk`, `nose` (for local
test) and `sphinx` (to generate the documentation). The code has been tested with
Python 3.5, but it should be compatible with Python 3. It can be
installed using `pip` or directly from the source code.
5 changes: 4 additions & 1 deletion ezyrb/__init__.py
@@ -1,7 +1,8 @@
__all__ = [
'database',
'reduction', 'pod',
'approximation', 'rbf', 'linear', 'gpr', 'ann'
'approximation', 'rbf', 'linear', 'gpr', 'ann',
'kneighbors_regressor', 'radius_neighbors_regressor'
]

from .meta import *
@@ -14,3 +15,5 @@
from .gpr import GPR
from .reducedordermodel import ReducedOrderModel
from .ann import ANN
from .kneighbors_regressor import KNeighborsRegressor
from .radius_neighbors_regressor import RadiusNeighborsRegressor
20 changes: 20 additions & 0 deletions ezyrb/kneighbors_regressor.py
@@ -0,0 +1,20 @@
"""
Wrapper for K-Neighbors Regressor.
"""

import numpy as np
from sklearn.neighbors import KNeighborsRegressor as Regressor

from .neighbors_regressor import NeighborsRegressor


class KNeighborsRegressor(NeighborsRegressor):
    """
    K-Neighbors Regressor.

    :param kwargs: arguments passed to the internal instance of
        KNeighborsRegressor.
    """

    def __init__(self, **kwargs):
        self.regressor = Regressor(**kwargs)
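
For context, a hypothetical snippet (not part of this diff): constructor keyword arguments are forwarded unchanged to the underlying `sklearn.neighbors.KNeighborsRegressor`, so the usual scikit-learn options apply.

```python
from ezyrb import KNeighborsRegressor

# kwargs are passed straight through to sklearn's KNeighborsRegressor
reg = KNeighborsRegressor(n_neighbors=20, algorithm='kd_tree')
print(reg.regressor.get_params()['n_neighbors'])   # 20
print(reg.regressor.get_params()['algorithm'])     # 'kd_tree'
```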
41 changes: 41 additions & 0 deletions ezyrb/neighbors_regressor.py
@@ -0,0 +1,41 @@
import numpy as np
from .approximation import Approximation


class NeighborsRegressor(Approximation):
    """
    A generic superclass for wrappers of *NeighborsRegressor from sklearn.

    :param kwargs: arguments passed to the internal instance of
        *NeighborsRegressor.
    """

    def __init__(self, **kwargs):
        raise NotImplementedError("This class must be extended, not used.")

    def fit(self, points, values):
        """
        Construct the interpolator given `points` and `values`.

        :param array_like points: the coordinates of the points.
        :param array_like values: the values in the points.
        """
        points = np.array(points).reshape(len(points), -1)
        values = np.array(values)

        self.regressor.fit(points, values)

    def predict(self, new_point):
        """
        Evaluate the interpolator at the given `new_point`.

        :param array_like new_point: the coordinates of the given points.
        :return: the interpolated values.
        :rtype: numpy.ndarray
        """
        if isinstance(new_point, (np.ndarray, list)):
            new_point = np.array(new_point).reshape(len(new_point), -1)
        else:
            new_point = np.array([new_point])

        return self.regressor.predict(new_point)
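
A quick sketch (illustrative only) of the reshaping performed by `fit` and `predict`, using the `KNeighborsRegressor` subclass added in this PR:

```python
from ezyrb import KNeighborsRegressor

reg = KNeighborsRegressor(n_neighbors=1)
# 1D parameter lists are reshaped internally to an (n_samples, 1) column
reg.fit([1, 2, 5], [2.0, 5.0, 7.0])
# a list of new points is reshaped the same way before calling sklearn
print(reg.predict([1.1, 4.8]))  # expected: [2. 7.]
```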
20 changes: 20 additions & 0 deletions ezyrb/radius_neighbors_regressor.py
@@ -0,0 +1,20 @@
"""
Wrapper for RadiusNeighborsRegressor.
"""

import numpy as np
from sklearn.neighbors import RadiusNeighborsRegressor as Regressor

from .neighbors_regressor import NeighborsRegressor


class RadiusNeighborsRegressor(NeighborsRegressor):
    """
    Radius Neighbors Regressor.

    :param kwargs: arguments passed to the internal instance of
        RadiusNeighborsRegressor.
    """

    def __init__(self, **kwargs):
        self.regressor = Regressor(**kwargs)
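
A short usage sketch mirroring the `test_with_db_predict` cases below, showing how the new regressors plug into the existing `ReducedOrderModel` pipeline as approximation methods:

```python
import numpy as np
from ezyrb import RadiusNeighborsRegressor, Database, POD, ReducedOrderModel

db = Database(np.array([1, 2, 3])[:, None], np.array([1, 5, 3])[:, None])
rom = ReducedOrderModel(db, POD(), RadiusNeighborsRegressor(radius=0.5))
rom.fit()
print(rom.predict([2]))  # expected to recover the snapshot value 5
```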
2 changes: 1 addition & 1 deletion setup.py
@@ -17,7 +17,7 @@
KEYWORDS='pod interpolation reduced-basis model-order-reduction'

REQUIRED = [
'future', 'numpy', 'scipy', 'matplotlib', 'GPy'
'future', 'numpy', 'scipy', 'matplotlib', 'GPy', 'sklearn'
]

EXTRAS = {
69 changes: 69 additions & 0 deletions tests/test_k_neighbors_regressor.py
@@ -0,0 +1,69 @@
import numpy as np

from unittest import TestCase
from ezyrb import KNeighborsRegressor, Database, POD, ReducedOrderModel

class TestKNeighbors(TestCase):
    def test_params(self):
        reg = KNeighborsRegressor(n_neighbors=20, algorithm='kd_tree')
        assert reg.regressor.get_params()['n_neighbors'] == 20
        assert reg.regressor.get_params()['algorithm'] == 'kd_tree'

    def test_fit_onescalarparam_scalarfunc(self):
        reg = KNeighborsRegressor()
        reg.fit([1], [20])
        assert reg.regressor.n_samples_fit_ == 1

    def test_fit_scalarparam_scalarfunc(self):
        reg = KNeighborsRegressor()
        reg.fit([1, 2, 5, 7, 2], [2, 5, 7, 83, 3])
        assert reg.regressor.n_samples_fit_ == 5

    def test_fit_biparam_scalarfunc(self):
        reg = KNeighborsRegressor()
        reg.fit([[1, 2], [6, 7], [8, 9]], [1, 5, 6])
        assert reg.regressor.n_samples_fit_ == 3

    def test_fit_biparam_bifunc(self):
        reg = KNeighborsRegressor()
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        assert reg.regressor.n_samples_fit_ == 3

    def test_kneighbors(self):
        reg = KNeighborsRegressor(n_neighbors=2)
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        neigh_idx = reg.regressor.kneighbors([[6, 6]], return_distance=False)[0]
        assert neigh_idx[0] == 1
        assert neigh_idx[1] == 2
        assert len(neigh_idx) == 2

    def test_predict(self):
        reg = KNeighborsRegressor(n_neighbors=1)
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        neigh_idx = reg.regressor.predict([[1, 2], [8, 9], [6, 7]])
        assert (neigh_idx[0] == [1, 0]).all()
        assert (neigh_idx[1] == [8, 6]).all()
        assert (neigh_idx[2] == [20, 5]).all()

    def test_with_db_predict(self):
        reg = KNeighborsRegressor(n_neighbors=1)
        pod = POD()
        db = Database(np.array([1, 2, 3])[:, None], np.array([1, 5, 3])[:, None])
        rom = ReducedOrderModel(db, pod, reg)

        rom.fit()
        assert rom.predict([1]) == 1
        assert rom.predict([2]) == 5
        assert rom.predict([3]) == 3

    def test_wrong1(self):
        # wrong number of params
        with self.assertRaises(Exception):
            reg = KNeighborsRegressor()
            reg.fit([[1, 2], [6,], [8, 9]], [[1, 0], [20, 5], [8, 6]])

    def test_wrong2(self):
        # wrong number of values
        with self.assertRaises(Exception):
            reg = KNeighborsRegressor()
            reg.fit([[1, 2], [6,], [8, 9]], [[20, 5], [8, 6]])
69 changes: 69 additions & 0 deletions tests/test_radius_neighbors_regressor.py
@@ -0,0 +1,69 @@
import numpy as np

from unittest import TestCase
from ezyrb import RadiusNeighborsRegressor, POD, Database, ReducedOrderModel

class TestRadius(TestCase):
    def test_params(self):
        reg = RadiusNeighborsRegressor(radius=3.0, algorithm='kd_tree')
        assert reg.regressor.get_params()['radius'] == 3.0
        assert reg.regressor.get_params()['algorithm'] == 'kd_tree'

    def test_fit_onescalarparam_scalarfunc(self):
        reg = RadiusNeighborsRegressor()
        reg.fit([1], [20])
        assert reg.regressor.n_samples_fit_ == 1

    def test_fit_scalarparam_scalarfunc(self):
        reg = RadiusNeighborsRegressor()
        reg.fit([1, 2, 5, 7, 2], [2, 5, 7, 83, 3])
        assert reg.regressor.n_samples_fit_ == 5

    def test_fit_biparam_scalarfunc(self):
        reg = RadiusNeighborsRegressor()
        reg.fit([[1, 2], [6, 7], [8, 9]], [1, 5, 6])
        assert reg.regressor.n_samples_fit_ == 3

    def test_fit_biparam_bifunc(self):
        reg = RadiusNeighborsRegressor()
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        assert reg.regressor.n_samples_fit_ == 3

    def test_radiusneighbors(self):
        reg = RadiusNeighborsRegressor(radius=3.0)
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        neigh_idx = reg.regressor.radius_neighbors([[7, 8]], return_distance=False)[0]
        assert neigh_idx[0] == 1
        assert neigh_idx[1] == 2
        assert len(neigh_idx) == 2

    def test_predict(self):
        reg = RadiusNeighborsRegressor(radius=0.5)
        reg.fit([[1, 2], [6, 7], [8, 9]], [[1, 0], [20, 5], [8, 6]])
        neigh_idx = reg.regressor.predict([[1, 2], [8, 9], [6, 7]])
        assert (neigh_idx[0] == [1, 0]).all()
        assert (neigh_idx[1] == [8, 6]).all()
        assert (neigh_idx[2] == [20, 5]).all()

    def test_with_db_predict(self):
        reg = RadiusNeighborsRegressor(radius=0.5)
        pod = POD()
        db = Database(np.array([1, 2, 3])[:, None], np.array([1, 5, 3])[:, None])
        rom = ReducedOrderModel(db, pod, reg)

        rom.fit()
        assert rom.predict([1]) == 1
        assert rom.predict([2]) == 5
        assert rom.predict([3]) == 3

    def test_wrong1(self):
        # wrong number of params
        with self.assertRaises(Exception):
            reg = RadiusNeighborsRegressor()
            reg.fit([[1, 2], [6,], [8, 9]], [[1, 0], [20, 5], [8, 6]])

    def test_wrong2(self):
        # wrong number of values
        with self.assertRaises(Exception):
            reg = RadiusNeighborsRegressor()
            reg.fit([[1, 2], [6,], [8, 9]], [[20, 5], [8, 6]])