-
Notifications
You must be signed in to change notification settings - Fork 0
/
metrics.py
82 lines (73 loc) · 3.28 KB
/
metrics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import numpy as np
import scipy
import scipy.spatial  # bare `import scipy` does not load the spatial submodule used by cdist
import tensorflow as tf
def masked_softmax_cross_entropy(preds, labels, mask):
    """Softmax cross-entropy averaged over the masked entries.

    Args:
        preds: logits tensor.
        labels: one-hot labels, same shape as `preds`.
        mask: per-example mask; it is rescaled so the weighted mean
            equals a plain mean over the selected examples.

    Returns:
        Scalar loss tensor.
    """
    per_example_loss = tf.nn.softmax_cross_entropy_with_logits(logits=preds, labels=labels)
    weights = tf.cast(mask, dtype=tf.float32)
    # Normalize so masked-out entries contribute 0 and the rest average to 1.
    weights = weights / tf.reduce_mean(weights)
    return tf.reduce_mean(per_example_loss * weights)
def masked_accuracy(preds, labels, mask):
    """Classification accuracy averaged over the masked entries.

    Args:
        preds: logits tensor.
        labels: one-hot labels, same shape as `preds`.
        mask: per-example mask; rescaled so the weighted mean equals a
            plain mean over the selected examples.

    Returns:
        Scalar accuracy tensor.
    """
    hits = tf.cast(tf.equal(tf.argmax(preds, 1), tf.argmax(labels, 1)), tf.float32)
    weights = tf.cast(mask, dtype=tf.float32)
    # Normalize so masked-out entries contribute 0 and the rest average to 1.
    weights = weights / tf.reduce_mean(weights)
    return tf.reduce_mean(hits * weights)
def get_placeholder_by_name(name):
    """Return the tensor named `name` from the default graph, creating a
    placeholder on first use.

    `get_tensor_by_name` raises KeyError when no tensor with that name
    exists; only that case triggers creation. Other exceptions propagate
    instead of being silently swallowed by a bare `except:` (the original
    bug), which could mask unrelated graph errors.

    Args:
        name: tensor name without the ":0" output suffix.

    Returns:
        The existing tensor, or a new int32 placeholder named `name`.
    """
    try:
        return tf.get_default_graph().get_tensor_by_name(name + ":0")
    except KeyError:
        return tf.placeholder(tf.int32, name=name)
def _negative_hinge(outlayer, left_name, right_name, pos_margin, t, k):
    """Hinge term relu((pos_dist + gamma) - neg_dist) for one negative batch.

    Looks up the negative-pair index placeholders by name (they are fed
    externally during training), computes the L1 distance of each negative
    pair, and applies the margin against the positive distances.
    """
    neg_left = get_placeholder_by_name(left_name)
    neg_right = get_placeholder_by_name(right_name)
    neg_l_x = tf.nn.embedding_lookup(outlayer, neg_left)
    neg_r_x = tf.nn.embedding_lookup(outlayer, neg_right)
    neg_dist = tf.reduce_sum(tf.abs(neg_l_x - neg_r_x), 1)
    # Broadcast each positive pair's margin against its k negatives.
    return tf.nn.relu(tf.reshape(pos_margin, [t, 1]) - tf.reshape(neg_dist, [t, k]))


def align_loss(outlayer, ILL, gamma, k):
    """Margin-based alignment loss over aligned entity pairs.

    Args:
        outlayer: embedding matrix to look entities up in.
        ILL: array of shape (t, 2); column 0 holds left entity ids,
            column 1 the aligned right entity ids.
        gamma: margin separating positive and negative pair distances.
        k: number of negative samples per positive pair (placeholders
            "neg_left"/"neg_right" and "neg2_left"/"neg2_right" must be
            fed with t*k indices each).

    Returns:
        Scalar loss tensor, averaged over both negative batches.
    """
    left = ILL[:, 0]
    right = ILL[:, 1]
    t = len(ILL)
    left_x = tf.nn.embedding_lookup(outlayer, left)
    right_x = tf.nn.embedding_lookup(outlayer, right)
    # L1 distance of each aligned (positive) pair.
    pos_dist = tf.reduce_sum(tf.abs(left_x - right_x), 1)
    pos_margin = pos_dist + gamma
    # Two independently-sampled negative batches (left-corrupted and
    # right-corrupted), sharing the same positive margins.
    L1 = _negative_hinge(outlayer, "neg_left", "neg_right", pos_margin, t, k)
    L2 = _negative_hinge(outlayer, "neg2_left", "neg2_right", pos_margin, t, k)
    return (tf.reduce_sum(L1) + tf.reduce_sum(L2)) / (2.0 * k * t)
def _hits_counts(dist, top_k):
    """Count rows of `dist` whose diagonal entry ranks within each cutoff.

    dist[i, j] is the distance from query i to candidate j; the true
    match of query i is candidate i. argsort-of-argsort yields each
    entry's 0-based rank within its row, so the diagonal gives the rank
    of every true match.
    """
    ranks = dist.argsort(axis=1).argsort(axis=1).diagonal()
    return [int((ranks < cutoff).sum()) for cutoff in top_k]


def get_hits(vec, test_pair, top_k=(1, 10, 50, 100)):
    """Print and return Hits@k for entity alignment in both directions.

    Args:
        vec: embedding matrix indexable by entity id.
        test_pair: sequence of (left_id, right_id) aligned pairs.
        top_k: rank cutoffs at which to count hits.

    Returns:
        (top_lr, top_rl): hit counts per cutoff for left-to-right and
        right-to-left ranking. (The original returned None and only
        printed; returning the counts is backward-compatible and makes
        the metric usable programmatically.)
    """
    Lvec = np.array([vec[e1] for e1, e2 in test_pair])
    Rvec = np.array([vec[e2] for e1, e2 in test_pair])
    # Pairwise L1 (cityblock) distances: row i = distances from left
    # entity i to every right entity; the correct match is column i.
    sim = scipy.spatial.distance.cdist(Lvec, Rvec, metric='cityblock')
    top_lr = _hits_counts(sim, top_k)       # rank right entities per left
    top_rl = _hits_counts(sim.T, top_k)     # rank left entities per right
    print('For each left:')
    for cutoff, count in zip(top_k, top_lr):
        print('Hits@%d: %.2f%%' % (cutoff, count / len(test_pair) * 100))
    print('For each right:')
    for cutoff, count in zip(top_k, top_rl):
        print('Hits@%d: %.2f%%' % (cutoff, count / len(test_pair) * 100))
    return top_lr, top_rl
def get_combine_hits(se_vec, ae_vec, beta, test_pair, top_k=(1, 10, 50, 100)):
    """Evaluate Hits@k on a weighted concatenation of two embedding sets.

    Structural embeddings are scaled by `beta` and attribute embeddings
    by `1 - beta`, then concatenated feature-wise before ranking.

    Args:
        se_vec: structural embedding matrix, one row per entity.
        ae_vec: attribute embedding matrix, aligned row-for-row with se_vec.
        beta: weight in [0, 1] given to the structural embeddings.
        test_pair: sequence of (left_id, right_id) aligned pairs.
        top_k: rank cutoffs forwarded to `get_hits`.

    Returns:
        Whatever `get_hits` returns (the original discarded it; propagating
        the value is backward-compatible and lets callers use the metric).
    """
    combined = np.concatenate([se_vec * beta, ae_vec * (1.0 - beta)], axis=1)
    return get_hits(combined, test_pair, top_k)