8 changes: 4 additions & 4 deletions docs/neural-network/optimizers/stochastic.md
@@ -1,7 +1,7 @@
-<span style="float:right;"><a href="https://github.com/RubixML/ML/blob/master/src/NeuralNet/Optimizers/Stochastic.php">[source]</a></span>
+<span style="float:right;"><a href="https://github.com/RubixML/ML/blob/master/src/NeuralNet/Optimizers/Stochastic/Stochastic.php">[source]</a></span>
 
 # Stochastic
-A constant learning rate optimizer based on vanilla Stochastic Gradient Descent.
+A constant learning rate optimizer based on vanilla Stochastic Gradient Descent (SGD).
 
 ## Parameters
 | # | Name | Default | Type | Description |
@@ -10,7 +10,7 @@ A constant learning rate optimizer based on vanilla Stochastic Gradient Descent.
 
 ## Example
 ```php
-use Rubix\ML\NeuralNet\Optimizers\Stochastic;
+use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
 
 $optimizer = new Stochastic(0.01);
 ```
29 changes: 29 additions & 0 deletions src/NeuralNet/Optimizers/Base/Optimizer.php
@@ -0,0 +1,29 @@
<?php

namespace Rubix\ML\NeuralNet\Optimizers\Base;

use NDArray;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Stringable;

/**
 * Optimizer
 *
 * @category Machine Learning
 * @package Rubix/ML
 * @author Andrew DalPino
 * @author Samuel Akopyan <leumas.a@gmail.com>
 */
interface Optimizer extends Stringable
{
    /**
     * Take a step of gradient descent for a given parameter.
     *
     * @internal
     *
     * @param Parameter $param
     * @param NDArray $gradient
     * @return NDArray
     */
    public function step(Parameter $param, NDArray $gradient) : NDArray;
}
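For context, a minimal sketch of what a custom optimizer written against this interface could look like. The `DecayingRate` class, its `App` namespace, and its halving schedule are hypothetical, invented here only to illustrate the `step()` contract; only `NumPower::multiply()` is taken from the code in this PR.

```php
<?php

namespace App;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Rubix\ML\NeuralNet\Parameters\Parameter;

// Hypothetical optimizer: starts at a given rate and halves it on every step.
class DecayingRate implements Optimizer
{
    public function __construct(protected float $rate = 0.1)
    {
    }

    public function step(Parameter $param, NDArray $gradient) : NDArray
    {
        // The returned NDArray is the step that Parameter::update()
        // subtracts from the wrapped parameter.
        $step = NumPower::multiply($gradient, $this->rate);

        $this->rate /= 2.0;

        return $step;
    }

    public function __toString() : string
    {
        return "DecayingRate (rate: {$this->rate})";
    }
}
```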
69 changes: 69 additions & 0 deletions src/NeuralNet/Optimizers/Stochastic/Stochastic.php
@@ -0,0 +1,69 @@
<?php

namespace Rubix\ML\NeuralNet\Optimizers\Stochastic;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Rubix\ML\Exceptions\InvalidArgumentException;

/**
 * Stochastic
 *
 * A vanilla Stochastic Gradient Descent (SGD) optimizer with a constant learning rate.
 *
 * @category Machine Learning
 * @package Rubix/ML
 * @author Andrew DalPino
 * @author Samuel Akopyan <leumas.a@gmail.com>
 */
class Stochastic implements Optimizer
{
    /**
     * The learning rate that controls the global step size.
     *
     * @var float
     */
    protected float $rate;

    /**
     * @param float $rate
     * @throws InvalidArgumentException
     */
    public function __construct(float $rate = 0.01)
    {
        if ($rate <= 0.0) {
            throw new InvalidArgumentException("Learning rate must be greater than 0, $rate given.");
        }

        $this->rate = $rate;
    }

    /**
     * Take a step of gradient descent for a given parameter.
     *
     * @internal
     *
     * @param Parameter $param
     * @param NDArray $gradient
     * @return NDArray
     */
    public function step(Parameter $param, NDArray $gradient) : NDArray
    {
        return NumPower::multiply($gradient, $this->rate);
    }

    /**
     * Return the string representation of the object.
     *
     * @internal
     *
     * @return string
     */
    public function __toString() : string
    {
        return "Stochastic (rate: {$this->rate})";
    }
}
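A quick usage sketch of the relocated class, assuming the NumPower extension is loaded: with a constant learning rate the step is simply `rate * gradient`, so the values below follow directly.

```php
<?php

use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use Rubix\ML\NeuralNet\Parameters\Parameter;

$optimizer = new Stochastic(0.1);

$param = new Parameter(NumPower::array([[1.0, 2.0]]));

// step = rate * gradient = 0.1 * [[0.5, -0.5]] = [[0.05, -0.05]]
$step = $optimizer->step($param, NumPower::array([[0.5, -0.5]]));

print_r($step->toArray());
```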
99 changes: 99 additions & 0 deletions src/NeuralNet/Parameters/Parameter.php
@@ -0,0 +1,99 @@
<?php

namespace Rubix\ML\NeuralNet\Parameters;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;

/**
 * Parameter
 *
 * A wrapper over a NumPower NDArray that marks the array as a trainable
 * parameter and applies updates computed by an optimizer.
 *
 * @internal
 *
 * @category Machine Learning
 * @package Rubix/ML
 * @author Andrew DalPino
 * @author Samuel Akopyan <leumas.a@gmail.com>
 */
class Parameter
{
    /**
     * The auto-incrementing id.
     *
     * @var int
     */
    protected static int $counter = 0;

    /**
     * The unique identifier of the parameter.
     *
     * @var int
     */
    protected int $id;

    /**
     * The parameter.
     *
     * @var NDArray
     */
    protected NDArray $param;

    /**
     * @param NDArray $param
     */
    public function __construct(NDArray $param)
    {
        $this->id = self::$counter++;
        $this->param = $param;
    }

    /**
     * Return the unique identifier of the parameter.
     *
     * @return int
     */
    public function id() : int
    {
        return $this->id;
    }

    /**
     * Return the wrapped parameter.
     *
     * @return NDArray
     */
    public function param() : NDArray
    {
        return $this->param;
    }

    /**
     * Update the parameter with the gradient and optimizer.
     *
     * @param NDArray $gradient
     * @param Optimizer $optimizer
     */
    public function update(NDArray $gradient, Optimizer $optimizer) : void
    {
        $step = $optimizer->step($this, $gradient);

        $this->param = NumPower::subtract($this->param, $step);
    }

    /**
     * Perform a deep copy of the object upon cloning.
     */
    public function __clone() : void
    {
        $this->param = clone $this->param;
    }
}
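A short sketch of `Parameter` and the optimizer working together, using the same numbers as the `ParameterTest` below; `update()` replaces the wrapped NDArray with `param - rate * gradient`.

```php
<?php

use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use Rubix\ML\NeuralNet\Parameters\Parameter;

$param = new Parameter(NumPower::array([
    [5.0, 4.0],
    [-2.0, 6.0],
]));

$optimizer = new Stochastic(0.01); // the default rate

// param <- param - 0.01 * gradient
$param->update(NumPower::array([
    [2.0, 1.0],
    [1.0, -2.0],
]), $optimizer);

print_r($param->param()->toArray()); // [[4.98, 3.99], [-2.01, 6.02]]
```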
80 changes: 80 additions & 0 deletions tests/NeuralNet/Optimizers/Stochastic/StochasticTest.php
@@ -0,0 +1,80 @@
<?php

declare(strict_types=1);

namespace Rubix\ML\Tests\NeuralNet\Optimizers\Stochastic;

use Generator;
use NDArray;
use NumPower;
use PHPUnit\Framework\Attributes\CoversClass;
use PHPUnit\Framework\Attributes\DataProvider;
use PHPUnit\Framework\Attributes\Group;
use PHPUnit\Framework\Attributes\Test;
use PHPUnit\Framework\Attributes\TestDox;
use PHPUnit\Framework\TestCase;
use Rubix\ML\Exceptions\InvalidArgumentException;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;

#[Group('Optimizers')]
#[CoversClass(Stochastic::class)]
class StochasticTest extends TestCase
{
    protected Stochastic $optimizer;

    public static function stepProvider() : Generator
    {
        yield [
            new Parameter(NumPower::array([
                [0.1, 0.6, -0.4],
                [0.5, 0.6, -0.4],
                [0.1, 0.1, -0.7],
            ])),
            NumPower::array([
                [0.01, 0.05, -0.02],
                [-0.01, 0.02, 0.03],
                [0.04, -0.01, -0.5],
            ]),
            [
                [0.00001, 0.00005, -0.00002],
                [-0.00001, 0.00002, 0.00003],
                [0.00004, -0.00001, -0.0005],
            ],
        ];
    }

    protected function setUp() : void
    {
        $this->optimizer = new Stochastic(0.001);
    }

    #[Test]
    #[TestDox('Throws exception when constructed with invalid learning rate')]
    public function testConstructorWithInvalidRate() : void
    {
        $this->expectException(InvalidArgumentException::class);

        new Stochastic(0.0);
    }

    #[Test]
    #[TestDox('Can be cast to a string')]
    public function testToString() : void
    {
        self::assertEquals('Stochastic (rate: 0.001)', (string) $this->optimizer);
    }

    /**
     * @param Parameter $param
     * @param NDArray $gradient
     * @param list<list<float>> $expected
     */
    #[DataProvider('stepProvider')]
    public function testStep(Parameter $param, NDArray $gradient, array $expected) : void
    {
        $step = $this->optimizer->step(param: $param, gradient: $gradient);

        self::assertEqualsWithDelta($expected, $step->toArray(), 1e-7);
    }
}
49 changes: 49 additions & 0 deletions tests/NeuralNet/Parameters/ParameterTest.php
@@ -0,0 +1,49 @@
<?php

declare(strict_types=1);

namespace Rubix\ML\Tests\NeuralNet\Parameters;

use NumPower;
use PHPUnit\Framework\Attributes\CoversClass;
use PHPUnit\Framework\Attributes\Group;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use PHPUnit\Framework\TestCase;

#[Group('Parameters')]
#[CoversClass(Parameter::class)]
class ParameterTest extends TestCase
{
    protected Parameter $param;

    protected Optimizer $optimizer;

    protected function setUp() : void
    {
        $this->param = new Parameter(NumPower::array([
            [5, 4],
            [-2, 6],
        ]));

        $this->optimizer = new Stochastic();
    }

    public function testUpdate() : void
    {
        $gradient = NumPower::array([
            [2, 1],
            [1, -2],
        ]);

        $expected = [
            [4.98, 3.99],
            [-2.01, 6.02],
        ];

        $this->param->update(gradient: $gradient, optimizer: $this->optimizer);

        self::assertEqualsWithDelta($expected, $this->param->param()->toArray(), 1e-7);
    }
}