diff --git a/docs/neural-network/optimizers/stochastic.md b/docs/neural-network/optimizers/stochastic.md
index 3fc10afb5..4422e0ddc 100644
--- a/docs/neural-network/optimizers/stochastic.md
+++ b/docs/neural-network/optimizers/stochastic.md
@@ -1,7 +1,7 @@
-[source]
+[source]
# Stochastic
-A constant learning rate optimizer based on vanilla Stochastic Gradient Descent.
+A constant learning rate optimizer based on vanilla Stochastic Gradient Descent (SGD).
## Parameters
| # | Name | Default | Type | Description |
@@ -10,7 +10,7 @@ A constant learning rate optimizer based on vanilla Stochastic Gradient Descent.
## Example
```php
-use Rubix\ML\NeuralNet\Optimizers\Stochastic;
+use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
$optimizer = new Stochastic(0.01);
-```
\ No newline at end of file
+```
diff --git a/src/NeuralNet/Optimizers/Base/Optimizer.php b/src/NeuralNet/Optimizers/Base/Optimizer.php
new file mode 100644
index 000000000..0815cf0ce
--- /dev/null
+++ b/src/NeuralNet/Optimizers/Base/Optimizer.php
@@ -0,0 +1,29 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\Optimizers\Base;
+
+use NDArray;
+use Rubix\ML\NeuralNet\Parameters\Parameter;
+use Stringable;
+
+/**
+ * Optimizer
+ *
+ * The base interface for all gradient descent optimizers.
+ */
+interface Optimizer extends Stringable
+{
+    /**
+     * Take a step of gradient descent for a given parameter.
+     *
+     * @internal
+     *
+     * @param Parameter $param
+     * @param NDArray $gradient
+     * @return NDArray
+     */
+    public function step(Parameter $param, NDArray $gradient) : NDArray;
+}
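The `step` method receives the whole `Parameter` wrapper rather than only its tensor, which lets stateful optimizers key per-parameter buffers off of `Parameter::id()` (defined later in this diff). Below is a minimal sketch of such an implementor; the `ExampleMomentum` class is hypothetical and not part of this changeset, and it assumes a `NumPower::add()` counterpart to the `multiply`/`subtract` calls used elsewhere in the diff:

```php
use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Rubix\ML\NeuralNet\Parameters\Parameter;

// Hypothetical classical momentum optimizer that stores a velocity
// buffer per parameter, keyed by the parameter's unique id.
class ExampleMomentum implements Optimizer
{
    /** @var array<int,NDArray> Velocity buffers keyed by parameter id. */
    protected array $velocities = [];

    protected float $rate;

    protected float $decay;

    public function __construct(float $rate = 0.001, float $decay = 0.9)
    {
        $this->rate = $rate;
        $this->decay = $decay;
    }

    public function step(Parameter $param, NDArray $gradient) : NDArray
    {
        // Multiplying by 0.0 yields a zero tensor with the gradient's shape.
        $velocity = $this->velocities[$param->id()]
            ?? NumPower::multiply($gradient, 0.0);

        // v' = decay * v + rate * gradient
        $velocity = NumPower::add(
            NumPower::multiply($velocity, $this->decay),
            NumPower::multiply($gradient, $this->rate)
        );

        $this->velocities[$param->id()] = $velocity;

        return $velocity;
    }

    public function __toString() : string
    {
        return "ExampleMomentum (rate: {$this->rate}, decay: {$this->decay})";
    }
}
```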
diff --git a/src/NeuralNet/Optimizers/Stochastic/Stochastic.php b/src/NeuralNet/Optimizers/Stochastic/Stochastic.php
new file mode 100644
index 000000000..ffd9daf30
--- /dev/null
+++ b/src/NeuralNet/Optimizers/Stochastic/Stochastic.php
@@ -0,0 +1,69 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\Optimizers\Stochastic;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\Exceptions\InvalidArgumentException;
+use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
+use Rubix\ML\NeuralNet\Parameters\Parameter;
+
+/**
+ * Stochastic
+ *
+ * A constant learning rate optimizer based on vanilla Stochastic Gradient Descent (SGD).
+ */
+class Stochastic implements Optimizer
+{
+    /**
+     * The learning rate that controls the global step size.
+     *
+     * @var float
+     */
+    protected float $rate;
+
+    /**
+     * @param float $rate
+     * @throws InvalidArgumentException
+     */
+    public function __construct(float $rate = 0.01)
+    {
+        if ($rate <= 0.0) {
+            throw new InvalidArgumentException("Learning rate must be greater than 0, $rate given.");
+        }
+
+        $this->rate = $rate;
+    }
+
+    /**
+     * Take a step of gradient descent for a given parameter.
+     *
+     * @internal
+     *
+     * @param Parameter $param
+     * @param NDArray $gradient
+     * @return NDArray
+     */
+    public function step(Parameter $param, NDArray $gradient) : NDArray
+    {
+        return NumPower::multiply($gradient, $this->rate);
+    }
+
+    /**
+     * Return the string representation of the object.
+     *
+     * @internal
+     *
+     * @return string
+     */
+    public function __toString() : string
+    {
+        return "Stochastic (rate: {$this->rate})";
+    }
+}
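For reference, here is what a single step of the optimizer above computes, assuming the NumPower extension is loaded and using only the classes added in this diff. The step is simply the gradient scaled by the learning rate, and `Parameter::update()` subtracts it from the wrapped tensor:

```php
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use Rubix\ML\NeuralNet\Parameters\Parameter;

// One step of vanilla SGD: w' = w - rate * gradient.
$param = new Parameter(NumPower::array([[0.5, -0.2]]));

$optimizer = new Stochastic(0.1);

$gradient = NumPower::array([[1.0, 2.0]]);

$step = $optimizer->step($param, $gradient); // [[0.1, 0.2]]

$param->update($gradient, $optimizer); // param is now [[0.4, -0.4]]
```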
diff --git a/src/NeuralNet/Parameters/Parameter.php b/src/NeuralNet/Parameters/Parameter.php
new file mode 100644
index 000000000..efa7cf88a
--- /dev/null
+++ b/src/NeuralNet/Parameters/Parameter.php
@@ -0,0 +1,99 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\Parameters;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
+
+/**
+ * Parameter
+ *
+ * A wrapper that gives a trainable tensor a unique identity within the network.
+ */
+class Parameter
+{
+    /**
+     * The auto incrementing id.
+     *
+     * @var int
+     */
+    protected static int $counter = 0;
+
+    /**
+     * The unique identifier of the parameter.
+     *
+     * @var int
+     */
+    protected int $id;
+
+    /**
+     * The parameter.
+     *
+     * @var NDArray
+     */
+    protected NDArray $param;
+
+    /**
+     * @param NDArray $param
+     */
+    public function __construct(NDArray $param)
+    {
+        $this->id = self::$counter++;
+        $this->param = $param;
+    }
+
+    /**
+     * Return the unique identifier of the parameter.
+     *
+     * @return int
+     */
+    public function id() : int
+    {
+        return $this->id;
+    }
+
+    /**
+     * Return the wrapped parameter.
+     *
+     * @return NDArray
+     */
+    public function param() : NDArray
+    {
+        return $this->param;
+    }
+
+    /**
+     * Update the parameter with the gradient and optimizer.
+     *
+     * @param NDArray $gradient
+     * @param Optimizer $optimizer
+     */
+    public function update(NDArray $gradient, Optimizer $optimizer) : void
+    {
+        $step = $optimizer->step($this, $gradient);
+
+        $this->param = NumPower::subtract($this->param, $step);
+    }
+
+    /**
+     * Perform a deep copy of the object upon cloning.
+     */
+    public function __clone() : void
+    {
+        $this->param = clone $this->param;
+    }
+}
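One consequence of the hooks above worth noting: `__clone()` deep-copies the wrapped tensor but does not regenerate the id, so a clone shares its identity with the original while their values diverge independently. A small sketch, assuming NumPower's `NDArray` supports `clone` (which the hook above already relies on):

```php
use NumPower;
use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
use Rubix\ML\NeuralNet\Parameters\Parameter;

$original = new Parameter(NumPower::array([[1.0, 2.0]]));

// Deep-copies the NDArray; the id property is carried over as-is.
$copy = clone $original;

$copy->update(NumPower::array([[1.0, 1.0]]), new Stochastic(0.5));

$original->param()->toArray(); // still [[1.0, 2.0]]
$copy->param()->toArray();     // [[0.5, 1.5]]

var_dump($copy->id() === $original->id()); // bool(true)
```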
diff --git a/tests/NeuralNet/Optimizers/Stochastic/StochasticTest.php b/tests/NeuralNet/Optimizers/Stochastic/StochasticTest.php
new file mode 100644
index 000000000..57a50335f
--- /dev/null
+++ b/tests/NeuralNet/Optimizers/Stochastic/StochasticTest.php
@@ -0,0 +1,80 @@
+<?php
+
+namespace Rubix\ML\Tests\NeuralNet\Optimizers\Stochastic;
+
+use NDArray;
+use PHPUnit\Framework\Attributes\DataProvider;
+use PHPUnit\Framework\Attributes\Test;
+use PHPUnit\Framework\Attributes\TestDox;
+use PHPUnit\Framework\TestCase;
+use Rubix\ML\Exceptions\InvalidArgumentException;
+use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
+use Rubix\ML\NeuralNet\Parameters\Parameter;
+
+class StochasticTest extends TestCase
+{
+    protected Stochastic $optimizer;
+
+    protected function setUp() : void
+    {
+        $this->optimizer = new Stochastic(0.001);
+    }
+
+    #[Test]
+    #[TestDox('Throws exception when constructed with invalid learning rate')]
+    public function testConstructorWithInvalidRate() : void
+    {
+        $this->expectException(InvalidArgumentException::class);
+
+        new Stochastic(0.0);
+    }
+
+    #[Test]
+    #[TestDox('Can be cast to a string')]
+    public function testToString() : void
+    {
+        self::assertEquals('Stochastic (rate: 0.001)', (string) $this->optimizer);
+    }
+
+    /**
+     * @param Parameter $param
+     * @param NDArray $gradient
+     * @param list<list<float>> $expected
+     */
+    #[DataProvider('stepProvider')]
+    public function testStep(Parameter $param, NDArray $gradient, array $expected) : void
+    {
+        $step = $this->optimizer->step(param: $param, gradient: $gradient);
+
+        self::assertEqualsWithDelta($expected, $step->toArray(), 1e-7);
+    }
+}
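The hunk above references a `stepProvider` data provider that falls outside the visible excerpt, so its actual cases are unknown. Purely as a hypothetical illustration of the shape `testStep` expects, with expected values following step = rate * gradient at the 0.001 rate set in `setUp()`, a provider could look like the following (it would live inside `StochasticTest`, with `use Generator;` added to the imports):

```php
// Hypothetical cases only; the real provider in the PR is not shown here.
public static function stepProvider() : Generator
{
    yield [
        new Parameter(NumPower::array([
            [1.0, 2.0],
            [3.0, 4.0],
        ])),
        NumPower::array([
            [2.0, -4.0],
            [6.0, -8.0],
        ]),
        [
            [0.002, -0.004],
            [0.006, -0.008],
        ],
    ];
}
```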
diff --git a/tests/NeuralNet/Parameters/ParameterTest.php b/tests/NeuralNet/Parameters/ParameterTest.php
new file mode 100644
index 000000000..9d0af9321
--- /dev/null
+++ b/tests/NeuralNet/Parameters/ParameterTest.php
@@ -0,0 +1,49 @@
+<?php
+
+namespace Rubix\ML\Tests\NeuralNet\Parameters;
+
+use NumPower;
+use PHPUnit\Framework\TestCase;
+use Rubix\ML\NeuralNet\Optimizers\Stochastic\Stochastic;
+use Rubix\ML\NeuralNet\Parameters\Parameter;
+
+class ParameterTest extends TestCase
+{
+    protected Parameter $param;
+
+    protected Stochastic $optimizer;
+
+    protected function setUp() : void
+    {
+        $this->param = new Parameter(NumPower::array([
+            [5, 4],
+            [-2, 6],
+        ]));
+
+        $this->optimizer = new Stochastic();
+    }
+
+    public function testUpdate() : void
+    {
+        $gradient = NumPower::array([
+            [2, 1],
+            [1, -2],
+        ]);
+
+        $expected = [
+            [4.98, 3.99],
+            [-2.01, 6.02],
+        ];
+
+        $this->param->update(gradient: $gradient, optimizer: $this->optimizer);
+
+        self::assertEqualsWithDelta($expected, $this->param->param()->toArray(), 1e-7);
+    }
+}