Skip to content

Commit

Permalink
0.8.3
Browse files Browse the repository at this point in the history
  • Loading branch information
juanbc committed Nov 28, 2022
1 parent 4491ebb commit 9a1d6ab
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 18 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
# Changelog of Scikit-Criteria

<!-- BODY -->

## Version 0.8.3

- Fixed a bug detected in the `EntropyWeighter`; it now works as the literature
  specifies.

## Version 0.8.2

- We bring back Python 3.7 because it is the version used in google.colab.
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "scikit-criteria"
version = "0.8.2"
version = "0.8.3"
authors = [{name = "Juan B Cabral & QuatroPe", email = "jbcabral@unc.edu.ar"}]
readme = "README.md"
license = {file = "LICENSE.txt"}
Expand Down
29 changes: 15 additions & 14 deletions skcriteria/preprocessing/weighters.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,38 +208,39 @@ def _weight_matrix(self, matrix, **kwargs):


def entropy_weights(matrix):
    """Calculate the weights as the complement of the entropy of each \
    criterion.

    It uses the underlying ``scipy.stats.entropy`` function which assumes that
    the values of the criteria are probabilities of a distribution.

    The logarithmic base to use is the number of rows/alternatives in the
    matrix.

    This routine will normalize the sum of the weights to 1.

    See Also
    --------
    scipy.stats.entropy :
        Calculate the entropy of a distribution for given probability values.

    Examples
    --------
    >>> from skcriteria.preprocess import entropy_weights
    >>> mtx = [[1, 2], [3, 4]]
    >>> entropy_weights(mtx)
    array([0.69786872, 0.30213128])

    """
    # Using the number of alternatives as the logarithmic base bounds every
    # per-criterion entropy in [0, 1], so its complement (1 - entropy) is a
    # non-negative divergence score: the less predictable a criterion is,
    # the more informative it is, and the larger its weight.
    base = len(matrix)
    entropy = scipy.stats.entropy(matrix, base=base, axis=0)
    entropy_divergence = 1 - entropy
    # Normalize so the returned weights sum to 1.
    return entropy_divergence / np.sum(entropy_divergence)


class EntropyWeighter(SKCWeighterABC):
"""Assigns the entropy of the criteria as weights.
"""Assigns the complement of the entropy of the criteria as weights.
It uses the underlying ``scipy.stats.entropy`` function which assumes that
the values of the criteria are probabilities of a distribution.
This transformer will normalize the criteria if they don't sum to 1.
The logarithmic base to use is the number of rows/alternatives in the
matrix.
This transformer will normalize the sum of the weights to 1.
See Also
--------
Expand Down
28 changes: 25 additions & 3 deletions tests/preprocessing/test_weighters.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,12 +215,12 @@ def test_EntropyWeighter_simple_matrix():
expected = skcriteria.mkdm(
matrix=[[1, 2], [4, 16]],
objectives=[min, max],
weights=[0.589239, 0.410761],
weights=[0.358889, 0.641111],
)

weighter = EntropyWeighter()

result = weighter.transform(dm)

assert result.aequals(expected, atol=1e-5)


Expand All @@ -235,7 +235,7 @@ def test_EntropyWeighter(decision_matrix):
min_objectives_proportion=0.5,
)

entropy = scipy.stats.entropy(dm.matrix, axis=0)
entropy = 1 - scipy.stats.entropy(dm.matrix, base=10, axis=0)

expected = skcriteria.mkdm(
matrix=dm.matrix,
Expand All @@ -252,6 +252,28 @@ def test_EntropyWeighter(decision_matrix):
assert result.equals(expected)


def test_EntropyWeighter_less_predictable_more_weight():
    """A less predictable (more dispersed) criterion must receive more weight."""
    # C0 is constant (zero dispersion), C1 takes three repeated values, and
    # C2 takes six distinct values: the weight assigned to each criterion
    # should grow with its variability.
    matrix = [
        [1, 20, 300],
        [1, 20, 400],
        [1, 30, 500],
        [1, 30, 600],
        [1, 40, 700],
        [1, 40, 800],
    ]
    dm = skcriteria.mkdm(
        matrix,
        objectives=[max, max, max],
        criteria=["C0", "C1", "C2"],
    )

    weights = EntropyWeighter().transform(dm).weights

    assert weights["C0"] < weights["C1"] < weights["C2"]


# =============================================================================
# CRITIC
# =============================================================================
Expand Down

0 comments on commit 9a1d6ab

Please sign in to comment.