Sources/DeepLearning/Loss.swift: 24 additions & 0 deletions
@@ -16,14 +16,38 @@
import TensorFlow
#endif

/// Computes the mean squared error between predicted outputs and expected values.
///
/// - Parameters:
///   - predicted: Outputs predicted by a neural network.
///   - expected: Expected values, i.e. the correct outputs, with the same shape as `predicted`.
@differentiable
public func meanSquaredError<Scalar: TensorFlowFloatingPoint>(
    predicted: Tensor<Scalar>, expected: Tensor<Scalar>) -> Tensor<Scalar> {
    return (expected - predicted).squared().mean()
}
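
A minimal usage sketch for `meanSquaredError`; the tensor values and variable names below are illustrative assumptions, not part of the original change:

// Hypothetical 2x2 example; any floating-point tensors of matching shape work.
let predicted: Tensor<Float> = [[0.9, 0.2], [0.3, 0.8]]
let expected: Tensor<Float> = [[1.0, 0.0], [0.0, 1.0]]
let mse = meanSquaredError(predicted: predicted, expected: expected)
// mse is a scalar tensor: the mean of the element-wise squared differences (0.045 here).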

/// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
///
/// - Parameters:
///   - logits: Unnormalized per-class outputs from a neural network.
///   - labels: One-hot encoded values that correspond to the correct output.
@differentiable
public func softmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
    logits: Tensor<Scalar>, labels: Tensor<Scalar>) -> Tensor<Scalar> {
    return -(labels * logSoftmax(logits)).mean(alongAxes: 0).sum()
}
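
A sketch of how `softmaxCrossEntropy` might be called, assuming a hypothetical three-class batch of two examples (the values are illustrative only):

// Unnormalized per-class scores and matching one-hot targets.
let logits: Tensor<Float> = [[2.0, 1.0, 0.1], [0.5, 2.5, 0.3]]
let labels: Tensor<Float> = [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]
let loss = softmaxCrossEntropy(logits: logits, labels: labels)
// With one-hot labels this reduces to the batch mean of -log(softmax(logits)) at the true class.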

/// Computes the sigmoid cross entropy (binary cross entropy) between logits and labels.
///
/// - Parameters:
///   - logits: Predicted probabilities in the open interval `(0, 1)` (for example, sigmoid
///     outputs); `log` is applied to these values directly.
///   - labels: Values of `0` or `1` that correspond to the correct output.
@differentiable
public func sigmoidCrossEntropy<Scalar: TensorFlowFloatingPoint>(
    logits: Tensor<Scalar>, labels: Tensor<Scalar>
) -> Tensor<Scalar> {
    let loss = labels * log(logits) +
        (Tensor<Scalar>(1) - labels) * log(Tensor<Scalar>(1) - logits)
    return -loss.mean(alongAxes: 0).sum()
}
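
A sketch of calling `sigmoidCrossEntropy` as implemented above; note that, as written, the first argument must already contain probabilities strictly between `0` and `1`. The values and names below are illustrative assumptions:

// Predicted probabilities (e.g. sigmoid outputs) and 0/1 targets for a batch of two.
let probabilities: Tensor<Float> = [[0.8], [0.3]]
let targets: Tensor<Float> = [[1.0], [0.0]]
let bce = sigmoidCrossEntropy(logits: probabilities, labels: targets)
// Equals the batch mean of -[log(0.8), log(1 - 0.3)].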