diff --git a/Sources/DeepLearning/Loss.swift b/Sources/DeepLearning/Loss.swift
index 1cfaa6298..eed80dc63 100644
--- a/Sources/DeepLearning/Loss.swift
+++ b/Sources/DeepLearning/Loss.swift
@@ -16,14 +16,38 @@
 import TensorFlow
 #endif
 
+/// Computes the mean squared error between predictions and expected values.
+///
+/// - Parameters:
+///   - predicted: Outputs from a neural network.
+///   - expected: Expected values, i.e. targets, that correspond to the correct output.
 @differentiable
 public func meanSquaredError<Scalar: TensorFlowFloatingPoint>(
     predicted: Tensor<Scalar>, expected: Tensor<Scalar>) -> Tensor<Scalar> {
     return (expected - predicted).squared().mean()
 }
 
+/// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
+///
+/// - Parameters:
+///   - logits: Unnormalized outputs from a neural network, one score per class.
+///   - labels: One-hot encoded values that correspond to the correct output.
 @differentiable
 public func softmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
     logits: Tensor<Scalar>, labels: Tensor<Scalar>) -> Tensor<Scalar> {
     return -(labels * logSoftmax(logits)).mean(alongAxes: 0).sum()
 }
+
+/// Computes the sigmoid cross entropy (binary cross entropy) between logits and labels.
+///
+/// - Parameters:
+///   - logits: Predicted probabilities, as single continuous values from `0` to `1`.
+///   - labels: Binary values (`0` or `1`) that correspond to the correct output.
+@differentiable
+public func sigmoidCrossEntropy<Scalar: TensorFlowFloatingPoint>(
+    logits: Tensor<Scalar>, labels: Tensor<Scalar>
+) -> Tensor<Scalar> {
+    let loss = labels * log(logits) +
+        (Tensor(1) - labels) * log(Tensor(1) - logits)
+    return -loss.mean(alongAxes: 0).sum()
+}
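
For reference, a minimal usage sketch of the two cross-entropy losses added above, assuming the Swift for TensorFlow `Tensor` API from the `TensorFlow` module; the tensor values and variable names are made up for illustration only.

```swift
import TensorFlow

// Batch of 2 examples with 3 classes: raw class scores and one-hot targets.
let logits = Tensor<Float>(shape: [2, 3], scalars: [2.0, 1.0, 0.1,
                                                    0.5, 2.5, 0.3])
let labels = Tensor<Float>(shape: [2, 3], scalars: [1, 0, 0,
                                                    0, 1, 0])
let categoricalLoss = softmaxCrossEntropy(logits: logits, labels: labels)

// Binary case: predicted probabilities in (0, 1) and 0/1 targets.
let probabilities = Tensor<Float>(shape: [3], scalars: [0.9, 0.2, 0.7])
let targets = Tensor<Float>(shape: [3], scalars: [1, 0, 1])
let binaryLoss = sigmoidCrossEntropy(logits: probabilities, labels: targets)

print(categoricalLoss)  // Scalar tensor: batch-averaged categorical cross entropy.
print(binaryLoss)       // Scalar tensor: batch-averaged binary cross entropy.
```

Note that both cross-entropy losses reduce with `mean(alongAxes: 0).sum()`, i.e. they average over the batch dimension (axis 0) and sum over the remaining class dimension, so each returns a scalar tensor.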