From 21e7c951733bdfd42056c4a210754958379df610 Mon Sep 17 00:00:00 2001
From: Brennan Saeta
Date: Sat, 23 Feb 2019 23:29:57 +0000
Subject: [PATCH] Add default (identity) activations to the Dense layer initializers.

This change follows along with the defaulted values for Conv and other layers.
---
 Sources/DeepLearning/Layer.swift | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Sources/DeepLearning/Layer.swift b/Sources/DeepLearning/Layer.swift
index c52e8297f..7263f4a6a 100644
--- a/Sources/DeepLearning/Layer.swift
+++ b/Sources/DeepLearning/Layer.swift
@@ -137,7 +137,7 @@ public extension Dense where Scalar.RawSignificand: FixedWidthInteger {
     init(
         inputSize: Int,
         outputSize: Int,
-        activation: @escaping Activation,
+        activation: @escaping Activation = identity,
         generator: inout G
     ) {
         self.init(weight: Tensor(glorotUniform: [Int32(inputSize), Int32(outputSize)],
@@ -146,7 +146,7 @@ public extension Dense where Scalar.RawSignificand: FixedWidthInteger {
                   activation: activation)
     }
 
-    init(inputSize: Int, outputSize: Int, activation: @escaping Activation) {
+    init(inputSize: Int, outputSize: Int, activation: @escaping Activation = identity) {
        self.init(inputSize: inputSize, outputSize: outputSize, activation: activation,
                  generator: &PhiloxRandomNumberGenerator.global)
    }
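
Usage sketch (not part of the patch): with the defaulted activation, callers can omit the
parameter and get an identity (purely affine) Dense layer, mirroring the Conv initializers.
The snippet below is an illustration only; it assumes the module is imported under its
then-current name, DeepLearning, and that relu is available from the re-exported TensorFlow module.

    import DeepLearning

    // With the new default, the activation argument can be omitted entirely;
    // the layer then applies `identity`, i.e. a plain affine transform.
    let linear = Dense<Float>(inputSize: 4, outputSize: 2)

    // Passing an activation explicitly still works exactly as before.
    let hidden = Dense<Float>(inputSize: 4, outputSize: 2, activation: relu)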