Merge branch 'master' into joss-paper
thieu1995 committed Feb 23, 2024
2 parents ba12ebd + 9d667cd commit a9c12a7
Showing 9 changed files with 61 additions and 52 deletions.
2 changes: 1 addition & 1 deletion docs/requirements.txt
@@ -1,7 +1,7 @@
 # Defining the exact version will make sure things don't break
 sphinx==4.4.0
 sphinx_rtd_theme==1.0.0
-readthedocs-sphinx-search==0.1.1
+readthedocs-sphinx-search==0.3.2
 sphinxcontrib-bibtex==2.5.0
 numpy>=1.17.1
 scipy>=1.7.1
1 change: 1 addition & 0 deletions requirements.txt
@@ -3,3 +3,4 @@ scipy>=1.7.1
 pytest==7.1.2
 pytest-cov==4.0.0
 flake8>=4.0.1
+scikit-learn>=1.0.1
2 changes: 1 addition & 1 deletion setup.py
@@ -70,7 +70,7 @@ def readme():
     ],
     install_requires=["numpy>=1.15.1", "scipy>=1.7.1"],
     extras_require={
-        "dev": ["pytest>=7.0", "pytest-cov==4.0.0", "flake8>=4.0.1"],
+        "dev": ["pytest>=7.0", "pytest-cov==4.0.0", "flake8>=4.0.1", "scikit-learn>=1.0.1"],
     },
     python_requires='>=3.7',
 )
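Since scikit-learn now sits in the "dev" extras, a development install such as `pip install "permetrics[dev]"` (standard pip extras syntax) should pull in pytest, pytest-cov, flake8, and scikit-learn in one step, which is what the comparison tests below rely on.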
65 changes: 31 additions & 34 deletions tests/test_comparisons/test_sklearn_classification.py
@@ -5,10 +5,17 @@
 # --------------------------------------------------%
 
 import numpy as np
-from permetrics import ClassificationMetric
 import pytest
 from sklearn.metrics import accuracy_score, f1_score, fbeta_score, precision_score, recall_score
 
+from permetrics import ClassificationMetric
+
+
+def is_close_enough(x1, x2, eps=1e-5):
+    if abs(x1 - x2) <= eps:
+        return True
+    return False
+
 
 @pytest.fixture(scope="module") # scope: Call only 1 time at the beginning
 def data():
@@ -18,23 +25,23 @@ def data():
 
     # Example one-hot encoded y_true and y_pred
     y_true2 = np.array([[0, 1, 0], # Class 1
-        [1, 0, 0], # Class 0
-        [0, 0, 1], # Class 2
-        [0, 1, 0], # Class 1
-        [0, 0, 1]]) # Class 2
+                        [1, 0, 0], # Class 0
+                        [0, 0, 1], # Class 2
+                        [0, 1, 0], # Class 1
+                        [0, 0, 1]]) # Class 2
     y_pred2 = np.array([[0.1, 0.8, 0.1], # Predicted probabilities for Class 1, Class 0, Class 2
-        [0.7, 0.2, 0.1],
-        [0.2, 0.3, 0.5],
-        [0.3, 0.6, 0.1],
-        [0.1, 0.2, 0.7]])
+                        [0.7, 0.2, 0.1],
+                        [0.2, 0.3, 0.5],
+                        [0.3, 0.6, 0.1],
+                        [0.1, 0.2, 0.7]])
     cm2 = ClassificationMetric(y_true=y_true2, y_pred=y_pred2)
 
     y_true3 = np.array([0, 1, 2, 0, 2]) # Class 2
     y_pred3 = np.array([[0.1, 0.8, 0.1], # Predicted probabilities for Class 1, Class 0, Class 2
-        [0.7, 0.2, 0.1],
-        [0.2, 0.3, 0.5],
-        [0.3, 0.6, 0.1],
-        [0.1, 0.2, 0.7]])
+                        [0.7, 0.2, 0.1],
+                        [0.2, 0.3, 0.5],
+                        [0.3, 0.6, 0.1],
+                        [0.1, 0.2, 0.7]])
     cm3 = ClassificationMetric(y_true=y_true3, y_pred=y_pred3)
     return (y_true1, y_pred1), (y_true2, y_pred2), (y_true3, y_pred3), cm1, cm2, cm3

@@ -43,66 +50,56 @@ def test_AS(data):
     (y_true1, y_pred1), (y_true2, y_pred2), (y_true3, y_pred3), cm1, cm2, cm3 = data
     res11 = cm1.PS(average="micro")
     res12 = accuracy_score(y_true1, y_pred1)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
     # res21 = cm2.PS(average="micro")
     # res22 = accuracy_score(y_true2, y_pred2) # ValueError: Classification metrics can't handle a mix of multiclass and continuous-multioutput targets
-    # assert res21 == res22
+    # assert is_close_enough(res21, res22)
 
     # res31 = cm3.PS(average="micro")
     # res32 = accuracy_score(y_true3, y_pred3) # ValueError: Classification metrics can't handle a mix of multiclass and continuous-multioutput targets
-    # assert res31 == res32
-
-
-    # avg_paras = [None, "macro", "micro", "weighted"]
-    # outs = (dict, float, float, float)
-    #
-    # for idx, avg in enumerate(avg_paras):
-    #     for cm in data:
-    #         res = cm.PS(average=avg)
-    #         assert isinstance(res, outs[idx])
+    # assert is_close_enough(res31, res32)
 
 
 def test_F1S(data):
     (y_true1, y_pred1), (y_true2, y_pred2), (y_true3, y_pred3), cm1, cm2, cm3 = data
     res11 = cm1.F1S(average="micro")
     res12 = f1_score(y_true1, y_pred1, average="micro")
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
     res11 = cm1.F1S(average="macro")
     res12 = f1_score(y_true1, y_pred1, average="macro")
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_FBS(data):
     (y_true1, y_pred1), (y_true2, y_pred2), (y_true3, y_pred3), cm1, cm2, cm3 = data
     res11 = cm1.FBS(average="micro", beta=1.5)
     res12 = fbeta_score(y_true1, y_pred1, average="micro", beta=1.5)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
     res11 = cm1.FBS(average="macro", beta=2.0)
     res12 = fbeta_score(y_true1, y_pred1, average="macro", beta=2.0)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_PS(data):
     (y_true1, y_pred1), (y_true2, y_pred2), (y_true3, y_pred3), cm1, cm2, cm3 = data
     res11 = cm1.PS(average="micro")
     res12 = precision_score(y_true1, y_pred1, average="micro")
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
     res11 = cm1.PS(average="macro")
     res12 = precision_score(y_true1, y_pred1, average="macro")
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_RS(data):
     (y_true1, y_pred1), (y_true2, y_pred2), (y_true3, y_pred3), cm1, cm2, cm3 = data
     res11 = cm1.RS(average="micro")
     res12 = recall_score(y_true1, y_pred1, average="micro")
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
     res11 = cm1.RS(average="macro")
     res12 = recall_score(y_true1, y_pred1, average="macro")
-    assert res11 == res12
-
+    assert is_close_enough(res11, res12)
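The commented-out cases above fail because scikit-learn's classification metrics expect label vectors, while cm2 and cm3 are fed one-hot targets and probability rows. A minimal sketch (not part of this commit) of how such inputs could be reduced to labels that accuracy_score accepts:

import numpy as np
from sklearn.metrics import accuracy_score

# Same shapes as y_true2/y_pred2 in the fixture above.
y_true2 = np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1]])
y_pred2 = np.array([[0.1, 0.8, 0.1], [0.7, 0.2, 0.1], [0.2, 0.3, 0.5],
                    [0.3, 0.6, 0.1], [0.1, 0.2, 0.7]])

# accuracy_score(y_true2, y_pred2) raises the ValueError quoted above,
# because it sees one-hot truth plus continuous predictions. Reducing
# both to class indices makes the comparison possible:
labels_true = y_true2.argmax(axis=1)  # array([1, 0, 2, 1, 2])
labels_pred = y_pred2.argmax(axis=1)  # array([1, 0, 2, 1, 2])
print(accuracy_score(labels_true, labels_pred))  # 1.0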
5 changes: 3 additions & 2 deletions tests/test_comparisons/test_sklearn_clustering.py
@@ -5,12 +5,13 @@
 # --------------------------------------------------%
 
 import numpy as np
-from permetrics import ClusteringMetric
+import pytest
 from sklearn.metrics import mutual_info_score, normalized_mutual_info_score, \
     adjusted_rand_score, rand_score, \
     completeness_score, homogeneity_score, v_measure_score, \
     fowlkes_mallows_score, calinski_harabasz_score, davies_bouldin_score
-import pytest
+
+from permetrics import ClusteringMetric
 
 np.random.seed(42)

25 changes: 16 additions & 9 deletions tests/test_comparisons/test_sklearn_regression.py
@@ -5,10 +5,17 @@
 # --------------------------------------------------%
 
 import numpy as np
-from permetrics import RegressionMetric
+import pytest
 from sklearn.metrics import explained_variance_score, max_error, mean_absolute_error, \
     mean_squared_error, median_absolute_error, r2_score, mean_absolute_percentage_error
-import pytest
+
+from permetrics import RegressionMetric
+
+
+def is_close_enough(x1, x2, eps=1e-5):
+    if abs(x1 - x2) <= eps:
+        return True
+    return False
 
 
 @pytest.fixture(scope="module") # scope: Call only 1 time at the beginning
@@ -23,46 +30,46 @@ def test_EVS(data):
     y_true, y_pred, rm = data
     res11 = rm.EVS()
     res12 = explained_variance_score(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_ME(data):
     y_true, y_pred, rm = data
     res11 = rm.ME()
     res12 = max_error(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_MAE(data):
     y_true, y_pred, rm = data
     res11 = rm.MAE()
     res12 = mean_absolute_error(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_MSE(data):
     y_true, y_pred, rm = data
     res11 = rm.MSE()
     res12 = mean_squared_error(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_MedAE(data):
     y_true, y_pred, rm = data
     res11 = rm.MedAE()
     res12 = median_absolute_error(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_R2(data):
     y_true, y_pred, rm = data
     res11 = rm.R2()
     res12 = r2_score(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
 
 
 def test_MAPE(data):
     y_true, y_pred, rm = data
     res11 = rm.MAPE()
     res12 = mean_absolute_percentage_error(y_true, y_pred)
-    assert res11 == res12
+    assert is_close_enough(res11, res12)
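The switch from `assert res11 == res12` to `assert is_close_enough(res11, res12)` throughout these files guards against floating-point round-off rather than any real metric disagreement: permetrics and scikit-learn may order their arithmetic differently, so results can differ in the last bits. A small standard-library-only illustration:

import math

a = 0.1 + 0.2  # 0.30000000000000004 under IEEE-754 doubles
b = 0.3
print(a == b)                            # False: exact equality is brittle
print(abs(a - b) <= 1e-5)                # True: the is_close_enough check
print(math.isclose(a, b, abs_tol=1e-5))  # True: stdlib equivalent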
3 changes: 2 additions & 1 deletion tests/test_features/test_classification.py
@@ -5,9 +5,10 @@
 # --------------------------------------------------%
 
 import numpy as np
-from permetrics import ClassificationMetric
 import pytest
 
+from permetrics import ClassificationMetric
+
 
 @pytest.fixture(scope="module") # scope: Call only 1 time at the beginning
 def data():
3 changes: 2 additions & 1 deletion tests/test_features/test_clustering.py
@@ -5,9 +5,10 @@
 # --------------------------------------------------%
 
 import numpy as np
-from permetrics import ClusteringMetric
 import pytest
 
+from permetrics import ClusteringMetric
+
 np.random.seed(42)
 
 
7 changes: 4 additions & 3 deletions tests/test_features/test_regression.py
@@ -5,9 +5,10 @@
 # --------------------------------------------------%
 
 import numpy as np
-from permetrics import RegressionMetric
 import pytest
 
+from permetrics import RegressionMetric
+
 
 @pytest.fixture(scope="module") # scope: Call only 1 time at the beginning
 def data():
@@ -510,10 +511,10 @@ def test_NRMSE(data):
 def test_RSE(data):
     rm1, rm2 = data[0], data[1]
     # 1D
-    res = rm1.RSE(n_paras=5) # 5 data samples
+    res = rm1.RSE(n_paras=5)  # 5 data samples
     assert isinstance(res, (float))
     # ND
-    res = rm2.RSE(n_paras=6, multi_output=None) # 6 data samples
+    res = rm2.RSE(n_paras=6, multi_output=None)  # 6 data samples
     assert isinstance(res, (float))
     res = rm2.RSE(n_paras=6, multi_output="raw_values")
     assert isinstance(res, (list, tuple, np.ndarray))
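One follow-up worth noting: `is_close_enough` is now duplicated in both comparison modules. A common pytest refactoring (a hypothetical sketch, not something this commit does) would hoist the helper into a shared tests/conftest.py:

# Hypothetical tests/conftest.py -- not part of this commit.
import pytest

@pytest.fixture(scope="session")
def is_close_enough():
    # Expose the tolerance check as a fixture so every test module
    # can take it as an argument instead of re-defining it.
    def _check(x1, x2, eps=1e-5):
        return abs(x1 - x2) <= eps
    return _check

Test functions would then accept is_close_enough as a parameter, and the module-level copies could be deleted.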
