✨ feat(core): Layer architecture #4

Merged
4 commits merged on Oct 10, 2022
2 changes: 1 addition & 1 deletion .github/workflows/mr_validation.yml
@@ -3,7 +3,7 @@ name: Swift
on: [push]

jobs:
build:
Tests:
runs-on: macos-latest

steps:
6 changes: 6 additions & 0 deletions CODEOWNERS
@@ -0,0 +1,6 @@
# Below is a list of MAKit code owners who should review changes
# before they are delivered into a release.

# Code ownership should propagate to contributors upon request.

* @jean-francoisreboud
17 changes: 5 additions & 12 deletions Sources/MAKit/Core/Function/Activation.swift
@@ -7,18 +7,11 @@

import Foundation

/// Activation function to be used in a Layer.
/// Activation function to be used in a layer.
open class ActivationFunction: Codable
{
let _name: String

/// Get the name of the function.
var name: String
{
get {
return _name
}
}
let name: String

/// Forward GPU kernel.
open var forwardKernel: String
@@ -47,7 +40,7 @@ open class ActivationFunction: Codable
///
public init(_ name: String)
{
_name = name
self.name = name
}

///
Expand All @@ -61,7 +54,7 @@ open class ActivationFunction: Codable
required public init(from decoder: Decoder) throws
{
let container = try decoder.container(keyedBy: Keys.self)
_name = try container.decode(String.self, forKey: .name)
name = try container.decode(String.self, forKey: .name)
}

///
@@ -78,7 +71,7 @@ open class ActivationFunction: Codable
public func encode(to encoder: Encoder) throws
{
var container = encoder.container(keyedBy: Keys.self)
try container.encode(_name, forKey: .name)
try container.encode(name, forKey: .name)
}

///
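
For orientation, a concrete activation could build on the simplified stored `name` property roughly as follows. This is a hypothetical sketch, not part of this diff: `MyReLU`, its kernel string, and the `import MAKit` line are placeholders, and only the initializer, `forwardKernel`, and `Codable` hooks visible above are assumed.

import MAKit

public class MyReLU: ActivationFunction
{
    /// Name of the GPU kernel assumed to implement the forward pass.
    public override var forwardKernel: String
    {
        get {
            return "forwardMyReLU"
        }
    }

    /// Register the activation name through the base initializer.
    public init()
    {
        super.init("MyReLU")
    }

    /// Decode the activation through the base Codable support.
    required public init(from decoder: Decoder) throws
    {
        try super.init(from: decoder)
    }
}
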
148 changes: 148 additions & 0 deletions Sources/MAKit/Core/Function/Normalization.swift
@@ -0,0 +1,148 @@
//
// Normalization.swift
// MAKit
//
// Created by Jean-François Reboud on 09/10/2022.
//

import Accelerate

/// Normalization function to be used in a layer.
class Normalization
{
/// Small epsilon added to the variance to avoid "divide by 0" errors.
static let _Ɛ: Double = 1e-5

///
/// Forward Gradient Checking CPU.
///
/// - Parameters:
/// - outs: The data to normalize.
/// - β: A bias to add to the normalization result.
/// - Ɣ: A weight to scale the normalization result.
/// - Returns: The data normalized.
///
static func forwardGC(outs: [Double],
β: Double,
Ɣ: Double) -> [Double]
{
let μ = vDSP.mean(outs)
let tmp1 = vDSP.add(-μ, outs)
let σ2 = vDSP.meanSquare(tmp1)
let xHat = vDSP.divide(tmp1, sqrt(σ2 + _Ɛ))
let outsNew = vDSP.add(β, vDSP.multiply(Ɣ, xHat))
return outsNew
}

///
/// Forward Training CPU.
///
/// - Parameters:
/// - outs: The data to normalize.
/// - β: A bias to add to the normalization result.
/// - Ɣ: A weight to scale the normalization result.
/// - Returns: (The data normalized,
/// The data normalized without taking into account the bias and the weight,
/// The average of the data,
/// The variance of the data).
///
static func forward(outs: [Double],
β: Double,
Ɣ: Double) -> (outsNew: [Double],
xHat: [Double],
μ: Double,
σ2: Double)
{

let μ = vDSP.mean(outs)
let tmp1 = vDSP.add(-μ, outs)
let σ2 = vDSP.meanSquare(tmp1)
let xHat = vDSP.divide(tmp1, sqrt(σ2 + _Ɛ))
let outsNew = vDSP.add(β, vDSP.multiply(Ɣ, xHat))

return (outsNew: outsNew,
xHat: xHat,
μ: μ,
σ2: σ2)
}

///
/// Forward Inference CPU.
///
/// - Parameters:
/// - outs: The data to normalize.
/// - μ: A global average of the data.
/// - σ2: A global variance of the data.
/// - β: A bias to add to the normalization result.
/// - Ɣ: A weight to scale the normalization result.
/// - Returns: The data normalized.
///
static func forward(outs: [Double],
μ: Double,
σ2: Double,
β: Double,
Ɣ: Double) -> [Double]
{
let tmp1 = vDSP.add(-μ, outs)
let xHat = vDSP.divide(tmp1, sqrt(σ2 + _Ɛ))
return vDSP.add(β, vDSP.multiply(Ɣ, xHat))
}

///
/// Backward Training CPU.
///
/// - Parameters:
/// - delta: The gradients to back propagate.
/// - xHat: The data normalized without taking into account the bias and the weight.
/// - σ2: The variance of the data.
/// - Ɣ: The weight that scaled the normalization result.
/// - Returns: (The gradient taking into account the normalization,
/// The gradient of β,
/// The gradient of Ɣ).
///
static func backward(delta: [Double],
xHat: [Double],
σ2: Double,
Ɣ: Double) -> (deltaNew: [Double],
dβ: Double,
dƔ: Double)
{
let nbElems = delta.count
let factor = 1.0 / (Double(nbElems) * sqrt(σ2 + _Ɛ))

let sum0 = vDSP.sum(delta)
let mult0 = vDSP.multiply(delta, xHat)

let sum1 = Ɣ * sum0
let sum2 = Ɣ * vDSP.sum(mult0)

let tmp1 = vDSP.add(
multiplication: (delta, Ɣ * Double(nbElems)),
multiplication: (xHat, -sum2))
let deltaNew = vDSP.add(
multiplication: (tmp1, factor), -factor * sum1)

let dƔ = vDSP.sum(mult0)
let dβ = sum0

return (deltaNew: deltaNew,
dβ: dβ,
dƔ: dƔ)
}

///
/// Backward Inference CPU.
///
/// - Parameters:
/// - delta: The gradients to back propagate.
/// - σ2: The variance of the data.
/// - Ɣ: The weight that scaled the normalization result.
/// - Returns: The gradient taking into account the normalization.
///
static func backward(delta: [Double],
σ2: Double,
Ɣ: Double) -> [Double]
{
return vDSP.multiply(Ɣ / sqrt(σ2 + _Ɛ), delta)
}
}
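
For orientation, the CPU paths above could be exercised like this from inside the MAKit module (the type is internal). This is a hypothetical usage sketch, not part of this diff: the sample values, β, and Ɣ are arbitrary, and only the static functions defined in this file are assumed. `forwardGC` follows the same recipe and is meant to back gradient checking of `backward`.

let outs: [Double] = [0.5, -1.0, 2.0, 0.25]
let β = 0.1
let Ɣ = 2.0

// Training pass: normalized outputs plus the statistics needed by backward.
let (outsNew, xHat, μ, σ2) = Normalization.forward(outs: outs, β: β, Ɣ: Ɣ)

// Back propagate some upstream gradients through the normalization.
let delta: [Double] = [1.0, 0.0, -0.5, 0.25]
let (deltaNew, dβ, dƔ) = Normalization.backward(
    delta: delta, xHat: xHat, σ2: σ2, Ɣ: Ɣ)

// Inference pass: μ and σ2 stand in for the running statistics.
let outsInfer = Normalization.forward(outs: outs, μ: μ, σ2: σ2, β: β, Ɣ: Ɣ)

print(outsNew, deltaNew, dβ, dƔ, outsInfer)
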
6 changes: 3 additions & 3 deletions Sources/MAKit/Core/Layer/Layer.swift
@@ -135,8 +135,8 @@ open class Layer: Codable
/// Create a layer.
///
/// - Parameters:
/// - layerPrev: Previous layer that has been queued in the `Model`.
/// - params: Contextual parameters of a Model.
/// - layerPrev: Previous layer that has been queued to the model.
/// - params: Contextual parameters linking to the model.
///
public init(layerPrev: Layer?, params: MAKit.Model.Params)
{
@@ -266,7 +266,7 @@ open class Layer: Codable
}

///
/// Get every layer (a "graph") between the very first of the `Model` until this.
/// Get every layer (a "graph") between the very first of the `Model` and this.
///
/// - Parameter layerPrev: The different layers found in the "graph".
///