test_finetuning.py
"""Tests for adapt.parameter_based.FineTuning."""

import numpy as np
import tensorflow as tf
from sklearn.base import clone
from adapt.utils import make_classification_da
from adapt.parameter_based import FineTuning
from tensorflow.keras.initializers import GlorotUniform
from tensorflow.keras.optimizers import Adam

np.random.seed(0)
tf.random.set_seed(0)

# Shared encoder and task networks, seeded for reproducibility.
encoder = tf.keras.Sequential()
encoder.add(tf.keras.layers.Dense(50, activation="relu", kernel_initializer=GlorotUniform(seed=0)))
encoder.add(tf.keras.layers.Dense(50, activation="relu", kernel_initializer=GlorotUniform(seed=0)))

task = tf.keras.Sequential()
task.add(tf.keras.layers.Dense(1, activation="sigmoid", kernel_initializer=GlorotUniform(seed=0)))

# Source/target classification data and a small random subset of target indices.
ind = np.random.choice(100, 10)
Xs, ys, Xt, yt = make_classification_da()
def test_finetune():
    # Source-only training: the model should transfer poorly to the target domain.
    model = FineTuning(encoder=encoder, task=task, loss="bce", optimizer=Adam(), random_state=0)
    model.fit(Xs, ys, epochs=100, verbose=0)
    assert np.mean((model.predict(Xt).ravel() > 0.5) == yt) < 0.7

    # training=False: the encoder is frozen, so its weights must not change.
    fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                            training=False,
                            loss="bce", optimizer=Adam(), random_state=0)
    fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)
    assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() == 0.
    assert np.mean((fine_tuned.predict(Xt).ravel() > 0.5) == yt) > 0.6
    assert np.mean((fine_tuned.predict(Xt).ravel() > 0.5) == yt) < 0.8

    # training=True: the whole encoder is fine-tuned, so its weights move
    # and target accuracy improves.
    fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                            training=True,
                            loss="bce", optimizer=Adam(), random_state=0)
    fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)
    assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() > 0.5
    assert np.mean((fine_tuned.predict(Xt).ravel() > 0.5) == yt) > 0.9

    # training given as a list sets trainability per encoder layer: here the
    # first layer stays frozen while the last one is updated.
    fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                            training=[True, False],
                            loss="bce", optimizer=Adam(), random_state=0)
    fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)
    assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() == 0.
    assert np.abs(fine_tuned.encoder_.get_weights()[-1] - model.encoder_.get_weights()[-1]).sum() > 0.5

    # training=[False]: the whole encoder remains frozen.
    fine_tuned = FineTuning(encoder=model.encoder_, task=model.task_,
                            training=[False],
                            loss="bce", optimizer=Adam(), random_state=0)
    fine_tuned.fit(Xt[ind], yt[ind], epochs=100, verbose=0)
    assert np.abs(fine_tuned.encoder_.get_weights()[0] - model.encoder_.get_weights()[0]).sum() == 0.
    assert np.abs(fine_tuned.encoder_.get_weights()[-1] - model.encoder_.get_weights()[-1]).sum() == 0.
def test_finetune_pretrain():
    # Smoke test: fitting with the pretraining step enabled.
    model = FineTuning(encoder=encoder, task=task, pretrain=True, pretrain__epochs=2,
                       loss="bce", optimizer=Adam(), random_state=0)
    model.fit(Xs, ys, epochs=1, verbose=0)
def test_clone():
    model = FineTuning(encoder=encoder, task=task,
                       loss="bce", optimizer=Adam(), random_state=0)
    model.fit(Xs, ys, epochs=1, verbose=0)
    # sklearn's clone should return an independent, fittable copy of the model.
    new_model = clone(model)
    new_model.fit(Xs, ys, epochs=1, verbose=0)
    new_model.predict(Xs)
    assert model is not new_model
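

# Optional entry point, added for convenience: these tests are normally collected
# by pytest, but this guard lets the module be run directly as a quick check.
if __name__ == "__main__":
    test_finetune()
    test_finetune_pretrain()
    test_clone()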