Skip to content

Commit

Permalink
bugfix auc (#7)
Browse files Browse the repository at this point in the history
  • Loading branch information
bkleyn committed Jul 14, 2021
1 parent 3a5b3d8 commit e7813ad
Show file tree
Hide file tree
Showing 4 changed files with 26 additions and 2 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@
CHANGELOG
=========

-------------------------------------------------------------------------------
July 12, 2021 1.2.2
-------------------------------------------------------------------------------

- Fixed bug with Area Under the Curve (AUC) metric when only one class exists.

-------------------------------------------------------------------------------
June 23, 2021 1.2.1
-------------------------------------------------------------------------------
Expand Down
2 changes: 1 addition & 1 deletion jurity/_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
# Copyright FMR LLC <opensource@fidelity.com>
# SPDX-License-Identifier: Apache-2.0

__version__ = "1.2.1"
__version__ = "1.2.2"
5 changes: 4 additions & 1 deletion jurity/recommenders/auc.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,10 @@ def get_score(self, actual_results: pd.DataFrame, predicted_results: pd.DataFram
@staticmethod
def _get_results(results: List[np.ndarray]) -> float:
results = np.concatenate(results)
return roc_auc_score(results[:, 0], results[:, 1])
if len(set(results[:, 0])) > 1:
return roc_auc_score(results[:, 0], results[:, 1])
else:
return np.nan

def _get_extended_results(self, results: List[np.ndarray]) -> dict:
auc = self._get_results(results)
Expand Down
15 changes: 15 additions & 0 deletions tests/test_reco_binary.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import unittest

import numpy as np
import pandas as pd

from jurity.recommenders import BinaryRecoMetrics
Expand Down Expand Up @@ -106,3 +107,17 @@ def test_auc_extended(self):

self.assertEqual(0.78125, results['auc'])
self.assertEqual(12, results['support'])

def test_auc_one_class(self):
    """AUC is undefined (NaN) when the actual clicks hold a single class."""
    auc_metric = BinaryRecoMetrics.AUC(click_column='click')

    # All actual clicks are 0, so the labels contain only one class.
    actual_data = {
        Constants.user_id: [1, 2, 3, 4],
        Constants.item_id: [1, 2, 0, 3],
        'click': [0, 0, 0, 0],
    }
    predicted_data = {
        Constants.user_id: [1, 2, 3, 4],
        Constants.item_id: [1, 2, 2, 3],
        'click': [0.1, 0.9, 0.1, 0.1],
    }

    score = auc_metric.get_score(pd.DataFrame(actual_data),
                                 pd.DataFrame(predicted_data))
    self.assertTrue(np.isnan(score))

0 comments on commit e7813ad

Please sign in to comment.