Skip to content

Commit

Permalink
PR feedback batch #1.
Browse files Browse the repository at this point in the history
  • Loading branch information
bartchr808 committed May 18, 2019
1 parent 8c9f88d commit af4859a
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 32 deletions.
22 changes: 0 additions & 22 deletions stdlib/public/TensorFlow/Gradients.swift
Original file line number Diff line number Diff line change
Expand Up @@ -658,19 +658,7 @@ extension Tensor where Scalar : TensorFlowFloatingPoint {
v.unbroadcast(to: origShape)
})
}

/// VJP for `broadcast(like:)`.
///
/// Returns the primal result together with a pullback that unbroadcasts the
/// incoming cotangent back to `self`'s original shape.
@inlinable
func _vjpBroadcast<OtherScalar>(
  like other: Tensor<OtherScalar>
) -> (Tensor, (Tensor) -> Tensor)
  where OtherScalar : TensorFlowScalar {
  // Capture the original shape as a `Tensor<Int32>` up front so the pullback
  // closure does not retain `self`.
  return (broadcast(like: other), { [origShape = self.shapeTensor] v in
    // Fix: `origShape` is a shape tensor (the dimension sizes), so it must be
    // passed to `unbroadcast(toShape:)`. The previous `unbroadcast(like:)`
    // call would have matched the shape OF the shape tensor itself (`[rank]`),
    // producing a gradient with the wrong shape.
    v.unbroadcast(toShape: origShape)
  })
}
}

extension Tensor where Scalar : Numeric {
@inlinable
func _vjpUnbroadcast(
toShape shape: Tensor<Int32>
Expand All @@ -688,14 +676,4 @@ extension Tensor where Scalar : Numeric {
v.broadcast(to: origShape)
})
}

/// VJP for `unbroadcast(like:)`.
///
/// Returns the primal result together with a pullback that broadcasts the
/// incoming cotangent back to `self`'s original shape.
@inlinable
func _vjpUnbroadcast<OtherScalar>(
  like other: Tensor<OtherScalar>
) -> (Tensor, (Tensor) -> Tensor)
  where OtherScalar : TensorFlowScalar {
  // Capture the original shape as a `Tensor<Int32>` up front so the pullback
  // closure does not retain `self`.
  return (unbroadcast(like: other), { [origShape = self.shapeTensor] v in
    // Fix: `origShape` is a shape tensor (the dimension sizes), so it must be
    // passed to `broadcast(toShape:)`. The previous `broadcast(like:)` call
    // would have matched the shape OF the shape tensor itself (`[rank]`),
    // producing a gradient with the wrong shape.
    v.broadcast(toShape: origShape)
  })
}
}
16 changes: 6 additions & 10 deletions stdlib/public/TensorFlow/Ops.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1600,15 +1600,13 @@ public extension Tensor {

public extension Tensor {
@inlinable
// SWIFT_ENABLE_TENSORFLOW
@differentiable(wrt: self, vjp: _vjpBroadcast(toShape:)
where Scalar : TensorFlowFloatingPoint)
func broadcast(toShape shape: Tensor<Int32>) -> Tensor {
return Raw.broadcastTo(self, shape: shape)
}

@inlinable
// SWIFT_ENABLE_TENSORFLOW
@differentiable(wrt: self, vjp: _vjpBroadcast(to:)
where Scalar : TensorFlowFloatingPoint)
func broadcast(to shape: TensorShape) -> Tensor {
Expand All @@ -1618,8 +1616,7 @@ public extension Tensor {
/// Broadcast to the same shape as the specified `Tensor`.
/// - Precondition: The specified shape must be compatible for broadcasting.
@inlinable
// SWIFT_ENABLE_TENSORFLOW
@differentiable(wrt: self, vjp: _vjpBroadcast(like:)
@differentiable(wrt: self
where Scalar : TensorFlowFloatingPoint)
func broadcast<OtherScalar>(like other: Tensor<OtherScalar>) -> Tensor {
return broadcast(toShape: other.shapeTensor)
Expand All @@ -1628,8 +1625,8 @@ public extension Tensor {

public extension Tensor where Scalar : Numeric {
@inlinable
// SWIFT_ENABLE_TENSORFLOW
@differentiable(wrt: self, vjp: _vjpUnbroadcast(toShape:))
@differentiable(wrt: self, vjp: _vjpUnbroadcast(toShape:)
where Scalar : TensorFlowFloatingPoint)
func unbroadcast(toShape otherShape: Tensor<Int32>) -> Tensor {
let rankDiff = (rankTensor - otherShape.scalarCountTensor).rankLifted()
let ones: Tensor<Int32> = Raw.fill(dims: rankDiff, value: Tensor<Int32>(1))
Expand All @@ -1642,15 +1639,14 @@ public extension Tensor where Scalar : Numeric {
}

@inlinable
// SWIFT_ENABLE_TENSORFLOW
@differentiable(wrt: self, vjp: _vjpUnbroadcast(like:))
@differentiable(wrt: self where Scalar : TensorFlowFloatingPoint)
func unbroadcast<OtherScalar>(like other: Tensor<OtherScalar>) -> Tensor {
return unbroadcast(toShape: other.shapeTensor)
}

@inlinable
// SWIFT_ENABLE_TENSORFLOW
@differentiable(wrt: self, vjp: _vjpUnbroadcast(to:))
@differentiable(wrt: self, vjp: _vjpUnbroadcast(to:)
where Scalar : TensorFlowFloatingPoint)
func unbroadcast(to shape: TensorShape) -> Tensor {
return unbroadcast(toShape: Tensor<Int32>(shape.dimensions.map(Int32.init)))
}
Expand Down

0 comments on commit af4859a

Please sign in to comment.