-
Notifications
You must be signed in to change notification settings - Fork 35
/
classify_digits.py
executable file
·70 lines (57 loc) · 1.82 KB
/
classify_digits.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
#!/usr/bin/env python
"""
Example of using the hierarchical classifier to classify (a subset of) the digits data set.
Demonstrated some of the capabilities, e.g using a Pipeline as the base estimator,
defining a non-trivial class hierarchy, etc.
"""
from sklearn import svm
from sklearn.decomposition import TruncatedSVD
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn_hierarchical.classifier import HierarchicalClassifier
from sklearn_hierarchical.constants import ROOT
from sklearn_hierarchical.tests.fixtures import make_digits_dataset
# Used for seeding random state
RANDOM_STATE = 42
def classify_digits():
    """Demonstrate hierarchical leaf classification on a digits subset.

    Builds a two-level class hierarchy over five digit classes and fits a
    HierarchicalClassifier whose base estimator is a Pipeline:

        <ROOT>
          /   \
         A     B
        / \   /|\
       1   7 3 8 9

    Prints a classification report on a held-out test split; returns None.
    """
    # Two internal nodes ("A", "B") under ROOT; leaves are the digit labels.
    hierarchy = {
        ROOT: ["A", "B"],
        "A": [1, 7],
        "B": [3, 8, 9],
    }

    # Base estimator at every hierarchy node: dimensionality reduction
    # followed by an RBF SVM with probability estimates enabled.
    node_estimator = make_pipeline(
        TruncatedSVD(n_components=24),
        svm.SVC(gamma=0.001, kernel="rbf", probability=True),
    )

    clf = HierarchicalClassifier(
        base_estimator=node_estimator,
        class_hierarchy=hierarchy,
    )

    # Restrict the digits data to the five leaf classes; keep integer labels.
    X, y = make_digits_dataset(targets=[1, 7, 3, 8, 9], as_str=False)

    # Hold out 20% for evaluation; fixed seed for reproducibility.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=RANDOM_STATE,
    )

    clf.fit(X_train, y_train)
    y_pred = clf.predict(X_test)
    print("Classification Report:\n", classification_report(y_test, y_pred))
if __name__ == "__main__":
    # Run the demo only when executed as a script, not on import.
    classify_digits()