Skip to content

Commit

Permalink
Refactored loss_modules (#653)
Browse files Browse the repository at this point in the history
  • Loading branch information
KushalP1 committed Mar 19, 2020
1 parent 5ffe45c commit 1b3105b
Showing 1 changed file with 47 additions and 79 deletions.
126 changes: 47 additions & 79 deletions ludwig/models/modules/loss_modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,46 +121,8 @@ def cross_entropy_sequence_loss(logits, targets, sequence_length):
def sampled_softmax_cross_entropy(output_placeholder, feature_hidden, logits,
vector_labels, class_weights,
class_biases, loss, num_classes):
output_exp = tf.cast(tf.expand_dims(output_placeholder, -1), tf.int64)
if loss['sampler'] == 'fixed_unigram':
sampled_values = tf.nn.fixed_unigram_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes,
unigrams=loss['class_counts'],
distortion=loss['distortion']
)
elif loss['sampler'] == 'uniform':
sampled_values = tf.nn.uniform_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes
)
elif loss['sampler'] == 'log_uniform':
sampled_values = tf.nn.log_uniform_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes
)
elif loss['sampler'] == 'learned_unigram':
sampled_values = tf.nn.fixed_unigram_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes,
unigrams=loss['class_counts'],
distortion=loss['distortion']
)
else:
raise ValueError('Unsupported sampler {}'.format(loss['sampler']))

output_exp = tf.cast(tf.expand_dims(output_placeholder, -1), tf.int64)
sampled_values = obtained_sampled_values(num_classes, output_exp, loss)
train_loss = tf.nn.sampled_softmax_loss(weights=tf.transpose(class_weights),
biases=class_biases,
labels=output_exp,
Expand Down Expand Up @@ -198,45 +160,7 @@ def sequence_sampled_softmax_cross_entropy(targets, targets_sequence_length,
# unpadded_targets = targets[:, :batch_max_seq_length]
# output_exp = tf.cast(tf.reshape(unpadded_targets, [-1, 1]), tf.int64)
output_exp = tf.cast(tf.reshape(targets, [-1, 1]), tf.int64)

if loss['sampler'] == 'fixed_unigram':
sampled_values = tf.nn.fixed_unigram_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes,
unigrams=loss['class_counts'],
distortion=loss['distortion']
)
elif loss['sampler'] == 'uniform':
sampled_values = tf.nn.uniform_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes
)
elif loss['sampler'] == 'log_uniform':
sampled_values = tf.nn.log_uniform_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes
)
elif loss['sampler'] == 'learned_unigram':
sampled_values = tf.nn.fixed_unigram_candidate_sampler(
true_classes=output_exp,
num_true=1,
num_sampled=loss['negative_samples'],
unique=loss['unique'],
range_max=num_classes,
unigrams=loss['class_counts'],
distortion=loss['distortion']
)
else:
raise ValueError('Unsupported sampler {}'.format(loss['sampler']))
sampled_values = obtained_sampled_values(num_classes, output_exp, loss)

def _sampled_loss(labels, logits):
labels = tf.cast(labels, tf.int64)
Expand Down Expand Up @@ -324,3 +248,47 @@ def loss_multilabel(logits, vector_labels, loss):
'sum': tf.contrib.layers.sum_regularizer,
'None': lambda x: None,
None: lambda x: None}



def obtained_sampled_values(num_classes, output_exp, loss):
    """Return candidate sampling values for a sampled softmax loss.

    Dispatches on ``loss['sampler']`` to the matching TensorFlow candidate
    sampler and returns its result (sampled_candidates, true_expected_count,
    sampled_expected_count), suitable for the ``sampled_values`` argument of
    ``tf.nn.sampled_softmax_loss``.

    :param num_classes: total number of classes (``range_max`` of the sampler).
    :param output_exp: int64 tensor of true class ids, shape [batch_size, 1].
    :param loss: dict with keys 'sampler', 'negative_samples', 'unique', and,
        for the fixed_unigram sampler, 'class_counts' and 'distortion'.
    :raises ValueError: if ``loss['sampler']`` is not one of
        'fixed_unigram', 'uniform', 'log_uniform', 'learned_unigram'.
    """
    sampler = loss['sampler']
    if sampler == 'fixed_unigram':
        # Unigram distribution supplied explicitly via class counts.
        sampled_values = tf.nn.fixed_unigram_candidate_sampler(
            true_classes=output_exp,
            num_true=1,
            num_sampled=loss['negative_samples'],
            unique=loss['unique'],
            range_max=num_classes,
            unigrams=loss['class_counts'],
            distortion=loss['distortion']
        )
    elif sampler == 'uniform':
        sampled_values = tf.nn.uniform_candidate_sampler(
            true_classes=output_exp,
            num_true=1,
            num_sampled=loss['negative_samples'],
            unique=loss['unique'],
            range_max=num_classes
        )
    elif sampler == 'log_uniform':
        sampled_values = tf.nn.log_uniform_candidate_sampler(
            true_classes=output_exp,
            num_true=1,
            num_sampled=loss['negative_samples'],
            unique=loss['unique'],
            range_max=num_classes
        )
    elif sampler == 'learned_unigram':
        # Bug fix: previously this branch called
        # tf.nn.fixed_unigram_candidate_sampler (duplicating the
        # 'fixed_unigram' case). The learned sampler estimates the unigram
        # distribution during training and takes no unigrams/distortion args.
        sampled_values = tf.nn.learned_unigram_candidate_sampler(
            true_classes=output_exp,
            num_true=1,
            num_sampled=loss['negative_samples'],
            unique=loss['unique'],
            range_max=num_classes
        )
    else:
        raise ValueError('Unsupported sampler {}'.format(loss['sampler']))
    return sampled_values

0 comments on commit 1b3105b

Please sign in to comment.