Skip to content

Commit

Permalink
Fix/roc (#263)
Browse files Browse the repository at this point in the history
* fix issue with roc metric

* Update CHANGELOG.md
  • Loading branch information
ethanwharris authored and MattPainter01 committed Jul 30, 2018
1 parent 84cbc92 commit d971ac7
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
### Removed
### Fixed
- Fixed a bug where checkpointers would not save the model in some cases
- Fixed a bug in the ROC metric that prevented it from working

## [0.1.4] - 2018-07-23
### Added
Expand Down
10 changes: 6 additions & 4 deletions tests/metrics/test_roc_auc_score.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,12 @@ class TestRocAucScore(unittest.TestCase):
@patch('sklearn.metrics')
def test_one_hot(self, mock_sklearn_metrics):
mock_sklearn_metrics.roc_auc_score = Mock()
metric = RocAucScore(one_hot_classes=3, one_hot_offset=1)
metric = RocAucScore(one_hot_classes=3, one_hot_offset=1).build()
metric.reset({torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float32})
metric.process({torchbearer.BATCH: 0, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float32,
res = metric.process({torchbearer.BATCH: 0, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float32,
torchbearer.Y_TRUE: torch.LongTensor([1, 2, 3]),
torchbearer.Y_PRED: torch.FloatTensor([[0.0, 0.0, 0.0], [1.1, 1.1, 1.1], [2.2, 2.2, 2.2]])})
self.assertTrue('roc_auc_score' in res)
mock_sklearn_metrics.roc_auc_score.assert_called_once()
self.assertTrue(np.array_equal(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]),
mock_sklearn_metrics.roc_auc_score.call_args_list[0][0][0]))
Expand All @@ -31,11 +32,12 @@ def test_one_hot(self, mock_sklearn_metrics):
@patch('sklearn.metrics')
def test_non_one_hot(self, mock_sklearn_metrics):
mock_sklearn_metrics.roc_auc_score = Mock()
metric = RocAucScore(one_hot_labels=False)
metric = RocAucScore(one_hot_labels=False).build()
metric.reset({torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float32})
metric.process({torchbearer.BATCH: 0, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float32,
res = metric.process({torchbearer.BATCH: 0, torchbearer.DEVICE: 'cpu', torchbearer.DATA_TYPE: torch.float32,
torchbearer.Y_TRUE: torch.LongTensor([[1, 1, 1], [2, 2, 2], [3, 3, 3]]),
torchbearer.Y_PRED: torch.FloatTensor([[0.0, 0.0, 0.0], [1.1, 1.1, 1.1], [2.2, 2.2, 2.2]])})
self.assertTrue('roc_auc_score' in res)
mock_sklearn_metrics.roc_auc_score.assert_called_once()
self.assertTrue(np.array_equal(np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3]]),
mock_sklearn_metrics.roc_auc_score.call_args_list[0][0][0]))
Expand Down
1 change: 1 addition & 0 deletions torchbearer/metrics/roc_auc_score.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

@metrics.default_for_key('roc_auc')
@metrics.default_for_key('roc_auc_score')
@metrics.to_dict
class RocAucScore(metrics.EpochLambda):
"""Area Under ROC curve metric.
Expand Down

0 comments on commit d971ac7

Please sign in to comment.