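"""Create and save alibi explainer models used for testing.

Each ``make_*`` helper trains a small model, wraps it in the corresponding
alibi explainer and, if ``dirname`` is given, saves the explainer there.
"""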
import argparse
import os
from pathlib import Path
from typing import Optional
import numpy as np
import tensorflow as tf
import xgboost
from alibi.datasets import fetch_adult
from alibi.explainers import ALE, AnchorImage, AnchorTabular, KernelShap, TreeShap
from sklearn.compose import ColumnTransformer
from sklearn.datasets import load_iris, load_wine
from sklearn.ensemble import RandomForestClassifier
from sklearn.impute import SimpleImputer
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import OneHotEncoder, StandardScaler
from sklearn.svm import SVC


def make_anchor_image(dirname: Optional[Path] = None) -> AnchorImage:
    # download a pre-trained CIFAR-10 ResNet-32 classifier
    url = "https://storage.googleapis.com/seldon-models/alibi-detect/classifier/"
    path_model = os.path.join(url, "cifar10", "resnet32", "model.h5")
    save_path = tf.keras.utils.get_file("resnet32", path_model)
    model = tf.keras.models.load_model(save_path)

    # drop the leading batch dimension: AnchorImage expects the shape of a
    # single image
    image_shape = model.get_layer(index=0).input_shape[0][1:]
    alibi_model = AnchorImage(predictor=model.predict, image_shape=image_shape)

    if dirname is not None:
        alibi_model.save(dirname)
    return alibi_model
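
# Usage sketch (illustrative, not executed here): the explainer anchors a
# single CIFAR-10 style image; `image` below is an assumed in-memory array.
#
#   explainer = make_anchor_image()
#   explanation = explainer.explain(image)  # `image`: a (32, 32, 3) array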


def make_kernel_shap(dirname: Optional[Path] = None) -> KernelShap:
    np.random.seed(0)

    # load data
    wine = load_wine()
    data = wine.data
    target = wine.target
    target_names = wine.target_names
    feature_names = wine.feature_names

    # train classifier on standardised features
    X_train, X_test, y_train, y_test = train_test_split(
        data, target, test_size=0.2, random_state=0
    )
    scaler = StandardScaler().fit(X_train)
    X_train_norm = scaler.transform(X_train)
    X_test_norm = scaler.transform(X_test)
    classifier = SVC(
        kernel="rbf",
        C=1,
        gamma=0.1,
        # "ovr": one binary classifier per class, trained with that class as
        # positive and the rest of the data as negative
        decision_function_shape="ovr",
        random_state=0,
    )
    classifier.fit(X_train_norm, y_train)

    # build the KernelShap explainer around the decision function
    pred_fcn = classifier.decision_function
    svm_explainer = KernelShap(pred_fcn)
    svm_explainer.fit(X_train_norm)

    if dirname is not None:
        svm_explainer.save(dirname)
    return svm_explainer
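
# Note: because the explainer wraps `decision_function`, the SHAP values are
# attributions on the SVM's one-vs-rest margin scores rather than on class
# probabilities. A rough usage sketch (names are illustrative):
#
#   explainer = make_kernel_shap()
#   explanation = explainer.explain(X_norm[:1])  # one standardised wine row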


def make_tree_shap(dirname: Optional[Path] = None) -> TreeShap:
    np.random.seed(0)

    # get X_train for explainer fit
    adult = fetch_adult()
    data = adult.data
    target = adult.target
    data_perm = np.random.permutation(np.c_[data, target])
    data = data_perm[:, :-1]
    target = data_perm[:, -1]
    idx = 30000
    X_train, y_train = data[:idx, :], target[:idx]
    X_test, y_test = data[idx + 1 :, :], target[idx + 1 :]

    d_train = xgboost.DMatrix(X_train, label=y_train)
    d_test = xgboost.DMatrix(X_test, label=y_test)
    params = {
        "eta": 0.01,
        "objective": "binary:logistic",
        "subsample": 0.5,
        "base_score": np.mean(y_train),
        "eval_metric": "logloss",
    }
    model = xgboost.train(
        params,
        d_train,
        5000,
        evals=[(d_test, "test")],
        verbose_eval=100,
        early_stopping_rounds=20,
    )

    tree_explainer = TreeShap(model, model_output="raw", task="classification")
    tree_explainer.fit(X_train)

    if dirname is not None:
        tree_explainer.save(dirname)
    return tree_explainer
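
# Note: with `model_output="raw"` and a `binary:logistic` objective, TreeShap
# attributes the model's margin (log-odds) output. A rough usage sketch:
#
#   explainer = make_tree_shap()
#   explanation = explainer.explain(X[:1])  # one row of adult-census features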


def make_ale(dirname: Optional[Path] = None) -> ALE:
    data = load_iris()
    feature_names = data.feature_names
    target_names = data.target_names
    X = data.data
    y = data.target
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.25, random_state=42
    )

    # train model
    lr = LogisticRegression(max_iter=200)
    lr.fit(X_train, y_train)

    # create explainer
    logit_fun_lr = lr.decision_function
    logit_ale_lr = ALE(
        logit_fun_lr, feature_names=feature_names, target_names=target_names
    )

    if dirname is not None:
        logit_ale_lr.save(dirname)
    return logit_ale_lr
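
# Usage sketch (illustrative): ALE profiles are computed over a batch of
# instances, here on the logit scale since the explainer wraps
# `decision_function`:
#
#   explainer = make_ale()
#   explanation = explainer.explain(X)  # X: (n_samples, 4) iris features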


def make_anchor_tabular(dirname: Optional[Path] = None) -> AnchorTabular:
    # train model
    iris_data = load_iris()
    clf = LogisticRegression(solver="liblinear", multi_class="ovr")
    clf.fit(iris_data.data, iris_data.target)

    # create explainer
    explainer = AnchorTabular(clf.predict, feature_names=iris_data.feature_names)
    explainer.fit(iris_data.data, disc_perc=(25, 50, 75))

    if dirname is not None:
        explainer.save(dirname)
    return explainer
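
# Usage sketch (illustrative):
#
#   explainer = make_anchor_tabular()
#   explanation = explainer.explain(iris_row)  # a single (4,) iris instance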


def make_anchor_tabular_income(dirname: Optional[Path] = None) -> AnchorTabular:
    # adapted from:
    # https://docs.seldon.io/projects/alibi/en/latest/examples/anchor_tabular_adult.html
    np.random.seed(0)

    # prepare data
    adult = fetch_adult()
    data = adult.data
    target = adult.target
    feature_names = adult.feature_names
    category_map = adult.category_map
    data_perm = np.random.permutation(np.c_[data, target])
    data = data_perm[:, :-1]
    target = data_perm[:, -1]

    # build model
    idx = 30000
    X_train, Y_train = data[:idx, :], target[:idx]
    X_test, Y_test = data[idx + 1 :, :], target[idx + 1 :]

    ordinal_features = [
        x for x in range(len(feature_names)) if x not in list(category_map.keys())
    ]
    ordinal_transformer = Pipeline(
        steps=[
            ("imputer", SimpleImputer(strategy="median")),
            ("scaler", StandardScaler()),
        ]
    )

    categorical_features = list(category_map.keys())
    categorical_transformer = Pipeline(
        steps=[
            ("imputer", SimpleImputer(strategy="median")),
            ("onehot", OneHotEncoder(handle_unknown="ignore")),
        ]
    )

    preprocessor = ColumnTransformer(
        transformers=[
            ("num", ordinal_transformer, ordinal_features),
            ("cat", categorical_transformer, categorical_features),
        ]
    )

    clf = RandomForestClassifier(n_estimators=50)
    model_pipeline = Pipeline(
        steps=[
            ("preprocess", preprocessor),
            ("classifier", clf),
        ]
    )
    model_pipeline.fit(X_train, Y_train)

    explainer = AnchorTabular(
        model_pipeline.predict, feature_names, categorical_names=category_map, seed=1
    )
    explainer.fit(X_train, disc_perc=[25, 50, 75])

    if dirname is not None:
        explainer.save(dirname)
    return explainer
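
# Usage sketch (illustrative): instances passed to `explain` should be in the
# same label-encoded format as `X_train`; the pipeline applies the one-hot
# encoding internally before prediction:
#
#   explainer = make_anchor_tabular_income()
#   explanation = explainer.explain(row)  # one label-encoded adult-census row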


def _main():
    args_parser = argparse.ArgumentParser(add_help=False)
    args_parser.add_argument(
        "--model",
        type=str,
        help="The model to create",
    )
    args_parser.add_argument(
        "--model_dir",
        type=Path,
        help="Where to save the model",
    )
    args = args_parser.parse_args()

    model_name = args.model
    model_dir = args.model_dir
    if model_name == "anchor_image":
        make_anchor_image(model_dir)
    elif model_name == "kernel_shap":
        make_kernel_shap(model_dir)
    elif model_name == "tree_shap":
        make_tree_shap(model_dir)
    elif model_name == "ale":
        make_ale(model_dir)
    elif model_name == "anchor_tabular":
        make_anchor_tabular(model_dir)
    elif model_name == "anchor_tabular_income":
        make_anchor_tabular_income(model_dir)
    else:
        raise ValueError(f"Unknown model: {model_name}")


if __name__ == "__main__":
    _main()
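
# Example invocations (the model names match the branches in `_main` above;
# the output directories are illustrative):
#
#   python make_test_models.py --model ale --model_dir ./ale-model
#   python make_test_models.py --model anchor_tabular_income --model_dir ./income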