diff --git a/docs/neural-network/hidden-layers/activation.md b/docs/neural-network/hidden-layers/activation.md
index a4e4cde73..57d4dc46c 100644
--- a/docs/neural-network/hidden-layers/activation.md
+++ b/docs/neural-network/hidden-layers/activation.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Activation
Activation layers apply a user-defined non-linear activation function to their inputs. They often work in conjunction with [Dense](dense.md) layers as a way to transform their output.
@@ -10,8 +10,8 @@ Activation layers apply a user-defined non-linear activation function to their i
## Example
```php
-use Rubix\ML\NeuralNet\Layers\Activation;
-use Rubix\ML\NeuralNet\ActivationFunctions\ReLU;
+use Rubix\ML\NeuralNet\Layers\Activation\Activation;
+use Rubix\ML\NeuralNet\ActivationFunctions\ReLU\ReLU;
$layer = new Activation(new ReLU());
-```
\ No newline at end of file
+```
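+
+Activation layers are typically placed directly after the [Dense](dense.md) layer whose output they transform. A minimal sketch of a hidden-layer stack, assuming the network accepts its hidden layers as an ordered list:
+
+```php
+use Rubix\ML\NeuralNet\Layers\Dense\Dense;
+use Rubix\ML\NeuralNet\Layers\Activation\Activation;
+use Rubix\ML\NeuralNet\ActivationFunctions\ReLU\ReLU;
+
+$hidden = [
+    new Dense(100),
+    new Activation(new ReLU()),
+];
+```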
diff --git a/docs/neural-network/hidden-layers/batch-norm.md b/docs/neural-network/hidden-layers/batch-norm.md
index 99fdefd22..373113e14 100644
--- a/docs/neural-network/hidden-layers/batch-norm.md
+++ b/docs/neural-network/hidden-layers/batch-norm.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Batch Norm
Batch Norm layers normalize the activations of the previous layer such that the mean activation is *close* to 0 and the standard deviation is *close* to 1. Adding Batch Norm reduces the amount of covariate shift within the network which makes it possible to use higher learning rates and thus converge faster under some circumstances.
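+
+As a sketch of the transformation (following the standard formulation in the referenced paper), each activation is normalized by the mini-batch mean $\mu_B$ and variance $\sigma_B^2$ and then rescaled by the learnable *gamma* and *beta* parameters:
+
+$$
+\hat{x} = \frac{x - \mu_B}{\sqrt{\sigma_B^2 + \epsilon}}, \qquad y = \gamma \hat{x} + \beta
+$$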
@@ -12,12 +12,12 @@ Batch Norm layers normalize the activations of the previous layer such that the
## Example
```php
-use Rubix\ML\NeuralNet\Layers\BatchNorm;
-use Rubix\ML\NeuralNet\Initializers\Constant;
-use Rubix\ML\NeuralNet\Initializers\Normal;
+use Rubix\ML\NeuralNet\Layers\BatchNorm\BatchNorm;
+use Rubix\ML\NeuralNet\Initializers\Constant\Constant;
+use Rubix\ML\NeuralNet\Initializers\Normal\Normal;
$layer = new BatchNorm(0.7, new Constant(0.), new Normal(1.));
```
## References
-[^1]: S. Ioffe et al. (2015). Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift.
\ No newline at end of file
+[^1]: S. Ioffe et al. (2015). Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift.
diff --git a/docs/neural-network/hidden-layers/dense.md b/docs/neural-network/hidden-layers/dense.md
index cf4a7bd4c..db382d0a0 100644
--- a/docs/neural-network/hidden-layers/dense.md
+++ b/docs/neural-network/hidden-layers/dense.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Dense
Dense (or *fully connected*) hidden layers are layers of neurons that connect to each node in the previous layer by a parameterized synapse. They perform a linear transformation on their input and are usually followed by an [Activation](activation.md) layer. The majority of the trainable parameters in a standard feed-forward neural network are contained within Dense hidden layers.
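+
+As a sketch, for a weight matrix $W$, optional bias vector $b$, and input $x$, the layer outputs the linear transformation:
+
+$$
+z = W x + b
+$$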
@@ -14,9 +14,9 @@ Dense (or *fully connected*) hidden layers are layers of neurons that connect to
## Example
```php
-use Rubix\ML\NeuralNet\Layers\Dense;
+use Rubix\ML\NeuralNet\Layers\Dense\Dense;
use Rubix\ML\NeuralNet\Initializers\He;
use Rubix\ML\NeuralNet\Initializers\Constant;
$layer = new Dense(100, 1e-4, true, new He(), new Constant(0.0));
-```
\ No newline at end of file
+```
diff --git a/docs/neural-network/hidden-layers/dropout.md b/docs/neural-network/hidden-layers/dropout.md
index 566f83bad..28414f8ca 100644
--- a/docs/neural-network/hidden-layers/dropout.md
+++ b/docs/neural-network/hidden-layers/dropout.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Dropout
Dropout is a regularization technique to reduce overfitting in neural networks by preventing complex co-adaptations on training data. It works by temporarily disabling output nodes during each training pass. It also acts as an efficient way of performing model averaging with the parameters of neural networks.
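+
+As a sketch of the *inverted dropout* formulation used here, each input is multiplied by a Bernoulli mask and the surviving activations are scaled up so that the expected output remains unchanged at inference time:
+
+$$
+y_i = \frac{m_i}{1 - p} \, x_i, \qquad m_i \sim \text{Bernoulli}(1 - p)
+$$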
@@ -10,10 +10,10 @@ Dropout is a regularization technique to reduce overfitting in neural networks b
## Example
```php
-use Rubix\ML\NeuralNet\Layers\Dropout;
+use Rubix\ML\NeuralNet\Layers\Dropout\Dropout;
$layer = new Dropout(0.2);
```
## References
-[^1]: N. Srivastava et al. (2014). Dropout: A Simple Way to Prevent Neural Networks from Overfitting.
\ No newline at end of file
+[^1]: N. Srivastava et al. (2014). Dropout: A Simple Way to Prevent Neural Networks from Overfitting.
diff --git a/docs/neural-network/hidden-layers/noise.md b/docs/neural-network/hidden-layers/noise.md
index 7979549af..4d29732cb 100644
--- a/docs/neural-network/hidden-layers/noise.md
+++ b/docs/neural-network/hidden-layers/noise.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Noise
This layer adds random Gaussian noise to the inputs with a user-defined standard deviation. Noise added to neural network activations acts as a regularizer by indirectly adding a penalty to the weights through the cost function in the output layer.
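+
+As a sketch, with user-defined standard deviation $\sigma$, the layer outputs:
+
+$$
+y = x + \epsilon, \qquad \epsilon \sim \mathcal{N}(0, \sigma^2)
+$$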
@@ -10,10 +10,10 @@ This layer adds random Gaussian noise to the inputs with a user-defined standard
## Example
```php
-use Rubix\ML\NeuralNet\Layers\Noise;
+use Rubix\ML\NeuralNet\Layers\Noise\Noise;
$layer = new Noise(1e-3);
```
## References
-[^1]: C. Gulcehre et al. (2016). Noisy Activation Functions.
\ No newline at end of file
+[^1]: C. Gulcehre et al. (2016). Noisy Activation Functions.
diff --git a/docs/neural-network/hidden-layers/placeholder1d.md b/docs/neural-network/hidden-layers/placeholder1d.md
new file mode 100644
index 000000000..f70575eee
--- /dev/null
+++ b/docs/neural-network/hidden-layers/placeholder1d.md
@@ -0,0 +1,17 @@
+[source]
+
+# Placeholder 1D
+
+The Placeholder 1D input layer represents the future input values of a mini-batch (matrix) of one-dimensional tensors (vectors) to the neural network. It performs shape validation on the input and then forwards it unchanged to the next layer.
+
+## Parameters
+| # | Name | Default | Type | Description |
+|---|---|---|---|---|
+| 1 | inputs | | int | The number of input nodes (features). |
+
+## Example
+```php
+use Rubix\ML\NeuralNet\Layers\Placeholder1D\Placeholder1D;
+
+$layer = new Placeholder1D(10);
+```
diff --git a/docs/neural-network/hidden-layers/prelu.md b/docs/neural-network/hidden-layers/prelu.md
index baaef2f32..22a5b4762 100644
--- a/docs/neural-network/hidden-layers/prelu.md
+++ b/docs/neural-network/hidden-layers/prelu.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# PReLU
Parametric Rectified Linear Units are leaky rectifiers whose *leakage* coefficient is learned during training. Unlike standard [Leaky ReLUs](../activation-functions/leaky-relu.md) whose leakage remains constant, PReLU layers can adjust the leakage to better suit the model on a per-node basis.
@@ -14,8 +14,8 @@ $$
## Example
```php
-use Rubix\ML\NeuralNet\Layers\PReLU;
-use Rubix\ML\NeuralNet\Initializers\Normal;
+use Rubix\ML\NeuralNet\Layers\PReLU\PReLU;
+use Rubix\ML\NeuralNet\Initializers\Normal\Normal;
$layer = new PReLU(new Normal(0.5));
```
diff --git a/docs/neural-network/hidden-layers/swish.md b/docs/neural-network/hidden-layers/swish.md
index e91138566..29e6677f7 100644
--- a/docs/neural-network/hidden-layers/swish.md
+++ b/docs/neural-network/hidden-layers/swish.md
@@ -1,4 +1,4 @@
-[source]
+[source]
# Swish
Swish is a parametric activation layer that utilizes smooth rectified activation functions. The trainable *beta* parameter allows each activation function in the layer to tailor its output to the training set by interpolating between the linear function and ReLU.
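+
+As a sketch, with the trainable *beta* parameter and the logistic sigmoid $\sigma$:
+
+$$
+\text{Swish}(x) = x \cdot \sigma(\beta x)
+$$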
@@ -10,8 +10,8 @@ Swish is a parametric activation layer that utilizes smooth rectified activation
## Example
```php
-use Rubix\ML\NeuralNet\Layers\Swish;
-use Rubix\ML\NeuralNet\Initializers\Constant;
+use Rubix\ML\NeuralNet\Layers\Swish\Swish;
+use Rubix\ML\NeuralNet\Initializers\Constant\Constant;
$layer = new Swish(new Constant(1.0));
```
diff --git a/phpunit.xml b/phpunit.xml
index 22063bc22..379cdc0a2 100644
--- a/phpunit.xml
+++ b/phpunit.xml
@@ -83,5 +83,6 @@
+
diff --git a/src/NeuralNet/FeedForwards/FeedForward.php b/src/NeuralNet/FeedForwards/FeedForward.php
new file mode 100644
index 000000000..155a0ec00
--- /dev/null
+++ b/src/NeuralNet/FeedForwards/FeedForward.php
@@ -0,0 +1,275 @@
+
+ */
+class FeedForward extends Network
+{
+ /**
+ * The input layer to the network.
+ *
+ * @var Input
+ */
+ protected Input $input;
+
+ /**
+ * The hidden layers of the network.
+ *
+     * @var list<Hidden>
+ */
+ protected array $hidden = [
+ //
+ ];
+
+ /**
+ * The pathing of the backward pass through the hidden layers.
+ *
+     * @var list<Hidden>
+ */
+ protected array $backPass = [
+ //
+ ];
+
+ /**
+ * The output layer of the network.
+ *
+ * @var Output
+ */
+ protected Output $output;
+
+ /**
+ * The gradient descent optimizer used to train the network.
+ *
+ * @var Optimizer
+ */
+ protected Optimizer $optimizer;
+
+ /**
+ * @param Input $input
+ * @param Hidden[] $hidden
+ * @param Output $output
+ * @param Optimizer $optimizer
+ */
+ public function __construct(Input $input, array $hidden, Output $output, Optimizer $optimizer)
+ {
+ $hidden = array_values($hidden);
+
+ $backPass = array_reverse($hidden);
+
+ $this->input = $input;
+ $this->hidden = $hidden;
+ $this->output = $output;
+ $this->optimizer = $optimizer;
+ $this->backPass = $backPass;
+ }
+
+ /**
+ * Return the input layer.
+ *
+ * @return Input
+ */
+ public function input() : Input
+ {
+ return $this->input;
+ }
+
+ /**
+ * Return an array of hidden layers indexed left to right.
+ *
+     * @return list<Hidden>
+ */
+ public function hidden() : array
+ {
+ return $this->hidden;
+ }
+
+ /**
+ * Return the output layer.
+ *
+ * @return Output
+ */
+ public function output() : Output
+ {
+ return $this->output;
+ }
+
+ /**
+ * Return all the layers in the network.
+ *
+     * @return Traversable<Layer>
+ */
+ public function layers() : Traversable
+ {
+ yield $this->input;
+
+ yield from $this->hidden;
+
+ yield $this->output;
+ }
+
+ /**
+ * Return the number of trainable parameters in the network.
+ *
+ * @return int
+ */
+ public function numParams() : int
+ {
+ $numParams = 0;
+
+ foreach ($this->layers() as $layer) {
+ if ($layer instanceof Parametric) {
+ foreach ($layer->parameters() as $parameter) {
+ $numParams += $parameter->param()->size();
+ }
+ }
+ }
+
+ return $numParams;
+ }
+
+ /**
+ * Initialize the parameters of the layers and warm the optimizer cache.
+ */
+ public function initialize() : void
+ {
+ $fanIn = 1;
+
+ foreach ($this->layers() as $layer) {
+ $fanIn = $layer->initialize($fanIn);
+ }
+
+ if ($this->optimizer instanceof Adaptive) {
+ foreach ($this->layers() as $layer) {
+ if ($layer instanceof Parametric) {
+ foreach ($layer->parameters() as $param) {
+ $this->optimizer->warm($param);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Run an inference pass and return the activations at the output layer.
+ *
+ * @param Dataset $dataset
+ * @return NDArray
+ */
+ public function infer(Dataset $dataset) : NDArray
+ {
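+        // Samples arrive as an [n, features] matrix; transpose to [features, n] so each column is a sample.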
+ $input = NumPower::transpose(NumPower::array($dataset->samples()), [1, 0]);
+
+ foreach ($this->layers() as $layer) {
+ $input = $layer->infer($input);
+ }
+
+ return NumPower::transpose($input, [1, 0]);
+ }
+
+ /**
+ * Perform a forward and backward pass of the network in one call. Returns
+ * the loss from the backward pass.
+ *
+ * @param Labeled $dataset
+ * @return float
+ */
+ public function roundtrip(Labeled $dataset) : float
+ {
+ $input = NumPower::transpose(NumPower::array($dataset->samples()), [1, 0]);
+
+ $this->feed($input);
+
+ $loss = $this->backpropagate($dataset->labels());
+
+ return $loss;
+ }
+
+ /**
+     * Feed a batch through the network and return a matrix of activations at the output layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function feed(NDArray $input) : NDArray
+ {
+ foreach ($this->layers() as $layer) {
+ $input = $layer->forward($input);
+ }
+
+ return $input;
+ }
+
+ /**
+ * Backpropagate the gradient of the cost function and return the loss.
+ *
+     * @param list<string|int|float> $labels
+ * @return float
+ */
+ public function backpropagate(array $labels) : float
+ {
+ [$gradient, $loss] = $this->output->back($labels, $this->optimizer);
+
+ foreach ($this->backPass as $layer) {
+ $gradient = $layer->back($gradient, $this->optimizer);
+ }
+
+ return $loss;
+ }
+
+ /**
+ * Export the network architecture as a graph in dot format.
+ *
+ * @return Encoding
+ */
+ public function exportGraphviz() : Encoding
+ {
+ $dot = 'digraph Tree {' . PHP_EOL;
+ $dot .= ' node [shape=box, fontname=helvetica];' . PHP_EOL;
+
+ $layerNum = 0;
+
+ foreach ($this->layers() as $layer) {
+ ++$layerNum;
+
+ $dot .= " N$layerNum [label=\"$layer\",style=\"rounded\"]" . PHP_EOL;
+
+ if ($layerNum > 1) {
+ $parentId = $layerNum - 1;
+
+ $dot .= " N{$parentId} -> N{$layerNum};" . PHP_EOL;
+ }
+ }
+
+ $dot .= '}';
+
+ return new Encoding($dot);
+ }
+}
diff --git a/src/NeuralNet/Initializers/He/HeNormal.php b/src/NeuralNet/Initializers/He/HeNormal.php
index 3d68844e4..193c7ff16 100644
--- a/src/NeuralNet/Initializers/He/HeNormal.php
+++ b/src/NeuralNet/Initializers/He/HeNormal.php
@@ -35,7 +35,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
$stdDev = sqrt(2 / $fanOut);
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/LeCun/LeCunNormal.php b/src/NeuralNet/Initializers/LeCun/LeCunNormal.php
index 81d8add56..3fc5832bc 100644
--- a/src/NeuralNet/Initializers/LeCun/LeCunNormal.php
+++ b/src/NeuralNet/Initializers/LeCun/LeCunNormal.php
@@ -36,7 +36,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
$stdDev = sqrt(1 / $fanOut);
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/Normal/Normal.php b/src/NeuralNet/Initializers/Normal/Normal.php
index 08c77ff38..acb4ad050 100644
--- a/src/NeuralNet/Initializers/Normal/Normal.php
+++ b/src/NeuralNet/Initializers/Normal/Normal.php
@@ -43,7 +43,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
{
$this->validateFanInFanOut(fanIn: $fanIn, fanOut: $fanOut);
- return NumPower::normal(size: [$fanOut, $fanIn], scale: $this->stdDev);
+ return NumPower::normal(size: [$fanOut, $fanIn], loc: 0.0, scale: $this->stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/Normal/TruncatedNormal.php b/src/NeuralNet/Initializers/Normal/TruncatedNormal.php
index c0c90196d..af9ed43fe 100644
--- a/src/NeuralNet/Initializers/Normal/TruncatedNormal.php
+++ b/src/NeuralNet/Initializers/Normal/TruncatedNormal.php
@@ -44,7 +44,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
{
$this->validateFanInFanOut(fanIn: $fanIn, fanOut: $fanOut);
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $this->stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $this->stdDev);
}
/**
diff --git a/src/NeuralNet/Initializers/Xavier/XavierNormal.php b/src/NeuralNet/Initializers/Xavier/XavierNormal.php
index dfe5bc956..428c74e49 100644
--- a/src/NeuralNet/Initializers/Xavier/XavierNormal.php
+++ b/src/NeuralNet/Initializers/Xavier/XavierNormal.php
@@ -36,7 +36,7 @@ public function initialize(int $fanIn, int $fanOut) : NDArray
$stdDev = sqrt(2 / ($fanOut + $fanIn));
- return NumPower::truncatedNormal(size: [$fanOut, $fanIn], scale: $stdDev);
+ return NumPower::truncatedNormal(size: [$fanOut, $fanIn], loc: 0.0, scale: $stdDev);
}
/**
diff --git a/src/NeuralNet/Layers/Activation/Activation.php b/src/NeuralNet/Layers/Activation/Activation.php
new file mode 100644
index 000000000..4394350b4
--- /dev/null
+++ b/src/NeuralNet/Layers/Activation/Activation.php
@@ -0,0 +1,184 @@
+
+ */
+class Activation implements Hidden
+{
+ /**
+ * The function that computes the output of the layer.
+ *
+ * @var ActivationFunction
+ */
+ protected ActivationFunction $activationFn;
+
+ /**
+ * The width of the layer.
+ *
+ * @var positive-int|null
+ */
+ protected ?int $width = null;
+
+ /**
+ * The memorized input matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * The memorized activation matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $output = null;
+
+ /**
+ * @param ActivationFunction $activationFn
+ */
+ public function __construct(ActivationFunction $activationFn)
+ {
+ $this->activationFn = $activationFn;
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ return $this->width;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $fanIn;
+
+ $this->width = $fanOut;
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ $output = $this->activationFn->activate($input);
+
+ $this->input = $input;
+ $this->output = $output;
+
+ return $output;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $this->activationFn->activate($input);
+ }
+
+ /**
+ * Calculate the gradient and update the parameters of the layer.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ if (!$this->input or !$this->output) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ $input = $this->input;
+ $output = $this->output;
+
+ $this->input = $this->output = null;
+
+ return new Deferred(
+ [$this, 'gradient'],
+ [$input, $output, $prevGradient]
+ );
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @param NDArray $output
+ * @param Deferred $prevGradient
+ * @return NDArray
+ */
+ public function gradient(NDArray $input, NDArray $output, Deferred $prevGradient) : NDArray
+ {
+ return NumPower::multiply(
+ $this->activationFn->differentiate($input),
+ $prevGradient()
+ );
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Activation (activation fn: {$this->activationFn})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Base/Contracts/Hidden.php b/src/NeuralNet/Layers/Base/Contracts/Hidden.php
new file mode 100644
index 000000000..f903e3916
--- /dev/null
+++ b/src/NeuralNet/Layers/Base/Contracts/Hidden.php
@@ -0,0 +1,28 @@
+
+ */
+interface Hidden extends Layer
+{
+ /**
+ * Calculate the gradient and update the parameters of the layer.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred;
+}
diff --git a/src/NeuralNet/Layers/Base/Contracts/Input.php b/src/NeuralNet/Layers/Base/Contracts/Input.php
new file mode 100644
index 000000000..f0d755253
--- /dev/null
+++ b/src/NeuralNet/Layers/Base/Contracts/Input.php
@@ -0,0 +1,18 @@
+
+ */
+interface Input extends Layer
+{
+ //
+}
diff --git a/src/NeuralNet/Layers/Base/Contracts/Layer.php b/src/NeuralNet/Layers/Base/Contracts/Layer.php
new file mode 100644
index 000000000..10cf17b6e
--- /dev/null
+++ b/src/NeuralNet/Layers/Base/Contracts/Layer.php
@@ -0,0 +1,57 @@
+
+ */
+interface Layer extends Stringable
+{
+ /**
+     * The width of the layer, i.e. the number of neurons or computation nodes.
+ *
+ * @internal
+ *
+ * @return positive-int
+ */
+ public function width() : int;
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int;
+
+ /**
+ * Feed the input forward to the next layer in the network.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray;
+
+ /**
+ * Forward pass during inference.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray;
+}
diff --git a/src/NeuralNet/Layers/Base/Contracts/Output.php b/src/NeuralNet/Layers/Base/Contracts/Output.php
new file mode 100644
index 000000000..49e11bb4b
--- /dev/null
+++ b/src/NeuralNet/Layers/Base/Contracts/Output.php
@@ -0,0 +1,29 @@
+
+ */
+interface Output extends Layer
+{
+ /**
+ * Compute the gradient and loss at the output.
+ *
+ * @param (string|int|float)[] $labels
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return mixed[]
+ */
+ public function back(array $labels, Optimizer $optimizer) : array;
+}
diff --git a/src/NeuralNet/Layers/Base/Contracts/Parametric.php b/src/NeuralNet/Layers/Base/Contracts/Parametric.php
new file mode 100644
index 000000000..ed772c85d
--- /dev/null
+++ b/src/NeuralNet/Layers/Base/Contracts/Parametric.php
@@ -0,0 +1,33 @@
+
+ */
+interface Parametric
+{
+ /**
+ * Return the parameters of the layer.
+ *
+ * @return Generator<\Rubix\ML\NeuralNet\Parameter>
+ */
+ public function parameters() : Generator;
+
+ /**
+ * Restore the parameters on the layer from an associative array.
+ *
+ * @param Parameter[] $parameters
+ */
+ public function restore(array $parameters) : void;
+}
diff --git a/src/NeuralNet/Layers/BatchNorm/BatchNorm.php b/src/NeuralNet/Layers/BatchNorm/BatchNorm.php
new file mode 100644
index 000000000..a15b1fac5
--- /dev/null
+++ b/src/NeuralNet/Layers/BatchNorm/BatchNorm.php
@@ -0,0 +1,389 @@
+
+ */
+class BatchNorm implements Hidden, Parametric
+{
+ /**
+ * The decay rate of the previous running averages of the global mean and variance.
+ *
+ * @var float
+ */
+ protected float $decay;
+
+ /**
+ * The initializer for the beta parameter.
+ *
+ * @var Initializer
+ */
+ protected Initializer $betaInitializer;
+
+ /**
+ * The initializer for the gamma parameter.
+ *
+ * @var Initializer
+ */
+ protected Initializer $gammaInitializer;
+
+ /**
+     * The width of the layer, i.e. the number of neurons.
+ *
+ * @var positive-int|null
+ */
+ protected ?int $width = null;
+
+ /**
+ * The learnable centering parameter.
+ *
+ * @var Parameter|null
+ */
+ protected ?Parameter $beta = null;
+
+ /**
+ * The learnable scaling parameter.
+ *
+ * @var Parameter|null
+ */
+ protected ?Parameter $gamma = null;
+
+ /**
+ * The running mean of each input dimension.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $mean = null;
+
+ /**
+ * The running variance of each input dimension.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $variance = null;
+
+ /**
+ * A cache of inverse standard deviations calculated during the forward pass.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $stdInv = null;
+
+ /**
+ * A cache of normalized inputs to the layer.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $xHat = null;
+
+ /**
+ * Row-wise or column-wise normalization.
+ *
+ * @var int
+ */
+ protected const int AXIS_SAMPLES = 0;
+ protected const int AXIS_FEATURES = 1;
+
+ /**
+ * @param float $decay
+ * @param Initializer|null $betaInitializer
+ * @param Initializer|null $gammaInitializer
+ * @throws InvalidArgumentException
+ */
+ public function __construct(float $decay = 0.1, ?Initializer $betaInitializer = null, ?Initializer $gammaInitializer = null)
+ {
+ if ($decay < 0.0 or $decay > 1.0) {
+ throw new InvalidArgumentException("Decay must be between 0 and 1, $decay given.");
+ }
+
+ $this->decay = $decay;
+ $this->betaInitializer = $betaInitializer ?? new Constant(0.0);
+ $this->gammaInitializer = $gammaInitializer ?? new Constant(1.0);
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ return $this->width;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $fanIn;
+
+ // Initialize beta and gamma as vectors of length fanOut
+ // We request a [fanOut, 1] NDArray and then flatten to 1-D
+ $betaMat = $this->betaInitializer->initialize(1, $fanOut);
+ $gammaMat = $this->gammaInitializer->initialize(1, $fanOut);
+
+ $beta = NumPower::flatten($betaMat);
+ $gamma = NumPower::flatten($gammaMat);
+
+ $this->beta = new Parameter($beta);
+ $this->gamma = new Parameter($gamma);
+
+ $this->width = $fanOut;
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ if (!$this->beta or !$this->gamma) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ [$m, $n] = $input->shape();
+
+        // Per-feature (row-wise) mean over the mini-batch (axis 1), length m
+ $sum = NumPower::sum($input, 1);
+ $mean = NumPower::divide($sum, $n);
+
+ // Center the input: broadcast mean to [m, n]
+ $centered = NumPower::subtract($input, NumPower::reshape($mean, [$m, 1]));
+
+        // Per-feature (row-wise) variance over the mini-batch (axis 1)
+ $centeredSq = NumPower::multiply($centered, $centered);
+ $varSum = NumPower::sum($centeredSq, 1);
+ $variance = NumPower::divide($varSum, $n);
+ $variance = NumPower::clip($variance, EPSILON, PHP_FLOAT_MAX);
+
+ // Inverse std from clipped variance
+ $stdInv = NumPower::reciprocal(NumPower::sqrt($variance));
+
+ // Normalize: (x - mean) * stdInv
+ $xHat = NumPower::multiply($centered, NumPower::reshape($stdInv, [$m, 1]));
+
+ // Initialize running stats if needed
+ if (!$this->mean or !$this->variance) {
+ $this->mean = $mean;
+ $this->variance = $variance;
+ }
+
+ // Update running mean/variance using exponential moving average (EMA)
+ // Convention: running = running*(1 - decay) + current*decay
+ $this->mean = NumPower::add(
+ NumPower::multiply($this->mean, 1.0 - $this->decay),
+ NumPower::multiply($mean, $this->decay)
+ );
+
+ $this->variance = NumPower::add(
+ NumPower::multiply($this->variance, 1.0 - $this->decay),
+ NumPower::multiply($variance, $this->decay)
+ );
+
+ $this->stdInv = $stdInv;
+ $this->xHat = $xHat;
+
+ // gamma * xHat + beta (per-column scale/shift) using NDArray ops
+ return NumPower::add(NumPower::multiply($xHat, $this->gamma->param()), $this->beta->param());
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ if (!$this->mean or !$this->variance or !$this->beta or !$this->gamma) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ $m = $input->shape()[0];
+
+ // Use clipped variance for numerical stability during inference
+ $varianceClipped = NumPower::clip($this->variance, EPSILON, PHP_FLOAT_MAX);
+ $xHat = NumPower::divide(
+ NumPower::subtract($input, NumPower::reshape($this->mean, [$m, 1])),
+ NumPower::reshape(NumPower::sqrt($varianceClipped), [$m, 1])
+ );
+
+ return NumPower::add(
+ NumPower::multiply(
+ $xHat,
+ $this->gamma->param()
+ ),
+ $this->beta->param()
+ );
+ }
+
+ /**
+ * Calculate the errors and gradients of the layer and update the parameters.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ if (!$this->beta or !$this->gamma) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ if (!$this->stdInv or !$this->xHat) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ $dOut = $prevGradient();
+ // Sum across samples (axis 0) for parameter gradients
+ $dBeta = NumPower::sum($dOut, self::AXIS_SAMPLES);
+ $dGamma = NumPower::sum(NumPower::multiply($dOut, $this->xHat), self::AXIS_SAMPLES);
+ $gamma = $this->gamma->param();
+
+ $this->beta->update($dBeta, $optimizer);
+ $this->gamma->update($dGamma, $optimizer);
+
+ $stdInv = $this->stdInv;
+ $xHat = $this->xHat;
+
+ $this->stdInv = $this->xHat = null;
+
+ return new Deferred(
+ [$this, 'gradient'],
+ [$dOut, $gamma, $stdInv, $xHat]
+ );
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @internal
+ *
+ * @param NDArray $dOut
+ * @param NDArray $gamma
+ * @param NDArray $stdInv
+ * @param NDArray $xHat
+ * @return NDArray
+ */
+ public function gradient(NDArray $dOut, NDArray $gamma, NDArray $stdInv, NDArray $xHat) : NDArray
+ {
+ $dXHat = NumPower::multiply($dOut, $gamma);
+ $xHatSigma = NumPower::sum(NumPower::multiply($dXHat, $xHat), self::AXIS_FEATURES);
+ $dXHatSigma = NumPower::sum($dXHat, self::AXIS_FEATURES);
+
+ $m = $dOut->shape()[0];
+
+ // Compute gradient per formula: dX = (dXHat * m - dXHatSigma - xHat * xHatSigma) * (stdInv / m)
+ return NumPower::multiply(
+ NumPower::subtract(
+ NumPower::subtract(
+ NumPower::multiply($dXHat, $m),
+ NumPower::reshape($dXHatSigma, [$m, 1])
+ ),
+ NumPower::multiply($xHat, NumPower::reshape($xHatSigma, [$m, 1]))
+ ),
+ NumPower::reshape(NumPower::divide($stdInv, $m), [$m, 1])
+ );
+ }
+
+ /**
+ * Return the parameters of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return Generator
+ */
+ public function parameters() : Generator
+ {
+ if (!$this->beta or !$this->gamma) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ yield 'beta' => $this->beta;
+ yield 'gamma' => $this->gamma;
+ }
+
+ /**
+ * Restore the parameters in the layer from an associative array.
+ *
+ * @internal
+ *
+ * @param Parameter[] $parameters
+ */
+ public function restore(array $parameters) : void
+ {
+ $this->beta = $parameters['beta'];
+ $this->gamma = $parameters['gamma'];
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Batch Norm (decay: {$this->decay}, beta initializer: {$this->betaInitializer},"
+ . " gamma initializer: {$this->gammaInitializer})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Binary/Binary.php b/src/NeuralNet/Layers/Binary/Binary.php
new file mode 100644
index 000000000..37b6f145b
--- /dev/null
+++ b/src/NeuralNet/Layers/Binary/Binary.php
@@ -0,0 +1,222 @@
+
+ */
+class Binary implements Output
+{
+ /**
+ * The labels of either of the possible outcomes.
+ *
+ * @var float[]
+ */
+ protected array $classes = [
+ //
+ ];
+
+ /**
+ * The function that computes the loss of erroneous activations.
+ *
+ * @var ClassificationLoss
+ */
+ protected ClassificationLoss $costFn;
+
+ /**
+ * The sigmoid activation function.
+ *
+ * @var Sigmoid
+ */
+ protected Sigmoid $sigmoid;
+
+ /**
+ * The memorized input matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * The memorized activation matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $output = null;
+
+ /**
+ * @param string[] $classes
+ * @param ClassificationLoss|null $costFn
+ * @throws InvalidArgumentException
+ */
+ public function __construct(array $classes, ?ClassificationLoss $costFn = null)
+ {
+ $classes = array_values(array_unique($classes));
+
+ if (count($classes) !== 2) {
+ throw new InvalidArgumentException('Number of classes must be 2, ' . count($classes) . ' given.');
+ }
+
+ $classes = [
+ $classes[0] => 0.0,
+ $classes[1] => 1.0,
+ ];
+
+ $this->classes = $classes;
+ $this->costFn = $costFn ?? new CrossEntropy();
+ $this->sigmoid = new Sigmoid();
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ return 1;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @param positive-int $fanIn
+ * @throws InvalidArgumentException
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ if ($fanIn !== 1) {
+ throw new InvalidArgumentException("Fan in must be equal to 1, $fanIn given.");
+ }
+
+ return 1;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ $output = $this->sigmoid->activate($input);
+
+ $this->input = $input;
+ $this->output = $output;
+
+ return $output;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $this->sigmoid->activate($input);
+ }
+
+ /**
+ * Compute the gradient and loss at the output.
+ *
+ * @param string[] $labels
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return (Deferred|float)[]
+ */
+ public function back(array $labels, Optimizer $optimizer) : array
+ {
+ if (!$this->input or !$this->output) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ $expected = [];
+
+ foreach ($labels as $label) {
+ $expected[] = $this->classes[$label];
+ }
+
+ $expected = NumPower::array([$expected]);
+
+ $input = $this->input;
+ $output = $this->output;
+
+ $gradient = new Deferred([$this, 'gradient'], [$input, $output, $expected]);
+
+ $loss = $this->costFn->compute($output, $expected);
+
+ $this->input = $this->output = null;
+
+ return [$gradient, $loss];
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @param NDArray $input
+ * @param NDArray $output
+ * @param NDArray $expected
+ * @return NDArray
+ */
+ public function gradient(NDArray $input, NDArray $output, NDArray $expected) : NDArray
+ {
+ $n = $output->shape()[1];
+
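+        // Shortcut: a sigmoid output paired with a cross entropy loss reduces to (output - expected) / n.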
+ if ($this->costFn instanceof CrossEntropy) {
+ return NumPower::divide(
+ NumPower::subtract($output, $expected),
+ $n
+ );
+ }
+
+ $dLoss = NumPower::divide(
+ $this->costFn->differentiate($output, $expected),
+ $n
+ );
+
+ return NumPower::multiply(
+ $this->sigmoid->differentiate($output),
+ $dLoss
+ );
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Binary (cost function: {$this->costFn})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Continuous/Continuous.php b/src/NeuralNet/Layers/Continuous/Continuous.php
new file mode 100644
index 000000000..7a07e9735
--- /dev/null
+++ b/src/NeuralNet/Layers/Continuous/Continuous.php
@@ -0,0 +1,157 @@
+
+ */
+class Continuous implements Output
+{
+ /**
+ * The function that computes the loss of erroneous activations.
+ *
+ * @var RegressionLoss
+ */
+ protected RegressionLoss $costFn;
+
+ /**
+ * The memorized input matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * @param RegressionLoss|null $costFn
+ */
+ public function __construct(?RegressionLoss $costFn = null)
+ {
+ $this->costFn = $costFn ?? new LeastSquares();
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ return 1;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @param positive-int $fanIn
+ * @throws InvalidArgumentException
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ if ($fanIn !== 1) {
+ throw new InvalidArgumentException("Fan in must be equal to 1, $fanIn given.");
+ }
+
+ return 1;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ $this->input = $input;
+
+ return $input;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $input;
+ }
+
+ /**
+ * Compute the gradient and loss at the output.
+ *
+ * @param (int|float)[] $labels
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return (Deferred|float)[]
+ */
+ public function back(array $labels, Optimizer $optimizer) : array
+ {
+ if (!$this->input) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ $expected = NumPower::array([$labels]);
+
+ $input = $this->input;
+
+ $gradient = new Deferred([$this, 'gradient'], [$input, $expected]);
+
+ $loss = $this->costFn->compute($input, $expected);
+
+ $this->input = null;
+
+ return [$gradient, $loss];
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @param NDArray $input
+ * @param NDArray $expected
+ * @return NDArray
+ */
+ public function gradient(NDArray $input, NDArray $expected) : NDArray
+ {
+ $n = $input->shape()[1];
+
+ return NumPower::divide(
+ $this->costFn->differentiate($input, $expected),
+ $n
+ );
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Continuous (cost function: {$this->costFn})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Dense/Dense.php b/src/NeuralNet/Layers/Dense/Dense.php
new file mode 100644
index 000000000..ee62d1e75
--- /dev/null
+++ b/src/NeuralNet/Layers/Dense/Dense.php
@@ -0,0 +1,348 @@
+
+ */
+class Dense implements Hidden, Parametric
+{
+ /**
+ * The number of nodes in the layer.
+ *
+ * @var positive-int
+ */
+ protected int $neurons;
+
+ /**
+ * The amount of L2 regularization applied to the weights.
+ *
+ * @var float
+ */
+ protected float $l2Penalty;
+
+ /**
+ * Should the layer include a bias parameter?
+ *
+ * @var bool
+ */
+ protected bool $bias;
+
+ /**
+ * The weight initializer.
+ *
+ * @var Initializer
+ */
+ protected Initializer $weightInitializer;
+
+ /**
+ * The bias initializer.
+ *
+ * @var Initializer
+ */
+ protected Initializer $biasInitializer;
+
+ /**
+ * The weights.
+ *
+ * @var Parameter|null
+ */
+ protected ?Parameter $weights = null;
+
+ /**
+ * The biases.
+ *
+ * @var Parameter|null
+ */
+ protected ?Parameter $biases = null;
+
+ /**
+ * The memorized inputs to the layer.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * @param int $neurons
+ * @param float $l2Penalty
+ * @param bool $bias
+ * @param Initializer|null $weightInitializer
+ * @param Initializer|null $biasInitializer
+ * @throws InvalidArgumentException
+ */
+ public function __construct(
+ int $neurons,
+ float $l2Penalty = 0.0,
+ bool $bias = true,
+ ?Initializer $weightInitializer = null,
+ ?Initializer $biasInitializer = null
+ ) {
+ if ($neurons < 1) {
+ throw new InvalidArgumentException("Number of neurons must be greater than 0, $neurons given.");
+ }
+
+ if ($l2Penalty < 0.0) {
+            throw new InvalidArgumentException("L2 penalty must be 0 or greater, $l2Penalty given.");
+ }
+
+ $this->neurons = $neurons;
+ $this->l2Penalty = $l2Penalty;
+ $this->bias = $bias;
+ $this->weightInitializer = $weightInitializer ?? new HeUniform();
+ $this->biasInitializer = $biasInitializer ?? new Constant(0.0);
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ return $this->neurons;
+ }
+
+ /**
+ * Return the weight matrix.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ public function weights() : NDArray
+ {
+ if (!$this->weights) {
+ throw new RuntimeException('Layer is not initialized');
+ }
+
+ return $this->weights->param();
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $this->neurons;
+
+ $weights = $this->weightInitializer->initialize($fanIn, $fanOut);
+
+ $this->weights = new Parameter($weights);
+
+ if ($this->bias) {
+ // Initialize biases as a vector of length fanOut
+ $biasMat = $this->biasInitializer->initialize(1, $fanOut);
+ $biases = NumPower::flatten($biasMat);
+
+ $this->biases = new Parameter($biases);
+ }
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ * @internal
+ *
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ if (!$this->weights) {
+ throw new RuntimeException('Layer is not initialized');
+ }
+
+ $output = NumPower::matmul($this->weights->param(), $input);
+
+ if ($this->biases) {
+ // Reshape bias vector [fanOut] to column [fanOut, 1] to match output [fanOut, n]
+ $bias = NumPower::reshape($this->biases->param(), [$this->neurons, 1]);
+ // Manual “broadcast”: [neurons, n] + [neurons, 1]
+ $output = NumPower::add($output, $bias);
+ }
+
+ $this->input = $input;
+
+ return $output;
+ }
+
+ /**
+ * Compute an inference pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ * @internal
+ *
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ if (!$this->weights) {
+ throw new RuntimeException('Layer is not initialized');
+ }
+
+ $output = NumPower::matmul($this->weights->param(), $input);
+
+ if ($this->biases) {
+ // Reshape bias vector [fanOut] to column [fanOut, 1] to match output [fanOut, n]
+ $bias = NumPower::reshape($this->biases->param(), [$this->neurons, 1]);
+ // Manual “broadcast”: [neurons, n] + [neurons, 1]
+ $output = NumPower::add($output, $bias);
+ }
+
+ return $output;
+ }
+
+ /**
+ * Calculate the gradient and update the parameters of the layer.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ if (!$this->weights) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ if (!$this->input) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ /** @var NDArray $dOut */
+ $dOut = $prevGradient();
+
+ $inputT = NumPower::transpose($this->input, [1, 0]);
+
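+        // dOut [neurons, n] times inputT [n, fanIn] gives dW [neurons, fanIn], the same shape as the weights.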
+ $dW = NumPower::matmul($dOut, $inputT);
+
+ $weights = $this->weights->param();
+
+ if ($this->l2Penalty) {
+ $dW = NumPower::add(
+ $dW,
+ NumPower::multiply($weights, $this->l2Penalty)
+ );
+ }
+
+ $this->weights->update($dW, $optimizer);
+
+ if ($this->biases) {
+ // Sum gradients over the batch dimension to obtain a bias gradient
+ // with the same shape as the bias vector [neurons]
+ $dB = NumPower::sum($dOut, axis: 1);
+
+ $this->biases->update($dB, $optimizer);
+ }
+
+ $this->input = null;
+
+ return new Deferred([$this, 'gradient'], [$weights, $dOut]);
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @internal
+ *
+ * @param NDArray $weights
+ * @param NDArray $dOut
+ * @return NDArray
+ */
+ public function gradient(NDArray $weights, NDArray $dOut) : NDArray
+ {
+ $weightsT = NumPower::transpose($weights, [1, 0]);
+
+ return NumPower::matmul($weightsT, $dOut);
+ }
+
+ /**
+ * Return the parameters of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return Generator
+ */
+ public function parameters() : Generator
+ {
+ if (!$this->weights) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ yield 'weights' => $this->weights;
+
+ if ($this->biases) {
+ yield 'biases' => $this->biases;
+ }
+ }
+
+ /**
+ * Restore the parameters in the layer from an associative array.
+ *
+ * @internal
+ *
+ * @param Parameter[] $parameters
+ */
+ public function restore(array $parameters) : void
+ {
+ $this->weights = $parameters['weights'];
+ $this->biases = $parameters['biases'] ?? null;
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Dense (neurons: {$this->neurons}, l2 penalty: {$this->l2Penalty},"
+ . ' bias: ' . Params::toString($this->bias) . ','
+ . " weight initializer: {$this->weightInitializer},"
+ . " bias initializer: {$this->biasInitializer})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Dropout/Dropout.php b/src/NeuralNet/Layers/Dropout/Dropout.php
new file mode 100644
index 000000000..45d88e57a
--- /dev/null
+++ b/src/NeuralNet/Layers/Dropout/Dropout.php
@@ -0,0 +1,201 @@
+
+ */
+class Dropout implements Hidden
+{
+ /**
+ * The ratio of neurons that are dropped during each training pass.
+ *
+ * @var float
+ */
+ protected float $ratio;
+
+ /**
+ * The scaling coefficient.
+ *
+ * @var float
+ */
+ protected float $scale;
+
+ /**
+ * The width of the layer.
+ *
+ * @var positive-int|null
+ */
+ protected ?int $width = null;
+
+ /**
+ * The memoized dropout mask.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $mask = null;
+
+ /**
+ * @param float $ratio
+ * @throws InvalidArgumentException
+ */
+ public function __construct(float $ratio = 0.5)
+ {
+ if ($ratio <= 0.0 or $ratio >= 1.0) {
+ throw new InvalidArgumentException("Ratio must be between 0 and 1, $ratio given.");
+ }
+
+ $this->ratio = $ratio;
+ $this->scale = 1.0 / (1.0 - $ratio);
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ return $this->width;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $fanIn;
+
+ $this->width = $fanOut;
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ // Build dropout mask using NumPower's uniform RNG. Each unit is kept
+ // with probability (1 - ratio) and scaled by $this->scale.
+ $shape = $input->shape();
+
+ // Uniform random numbers in [0, 1) with same shape as input
+ $rand = NumPower::uniform($shape, 0.0, 1.0);
+
+ // mask = (rand > ratio) * scale
+ $mask = NumPower::greater($rand, $this->ratio);
+ $mask = NumPower::multiply($mask, $this->scale);
+
+ $output = NumPower::multiply($input, $mask);
+
+ $this->mask = $mask;
+
+ return $output;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $input;
+ }
+
+ /**
+ * Calculate the gradients of the layer and update the parameters.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ if (!$this->mask) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ $mask = $this->mask;
+
+ $this->mask = null;
+
+ return new Deferred([$this, 'gradient'], [$prevGradient, $mask]);
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param NDArray $mask
+ * @return NDArray
+ */
+ public function gradient(Deferred $prevGradient, NDArray $mask) : NDArray
+ {
+ /** @var NDArray $dOut */
+ $dOut = $prevGradient();
+
+ return NumPower::multiply($dOut, $mask);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Dropout (ratio: {$this->ratio})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Multiclass/Multiclass.php b/src/NeuralNet/Layers/Multiclass/Multiclass.php
new file mode 100644
index 000000000..b6e33a5ac
--- /dev/null
+++ b/src/NeuralNet/Layers/Multiclass/Multiclass.php
@@ -0,0 +1,229 @@
+
+ */
+class Multiclass implements Output
+{
+ /**
+ * The unique class labels.
+ *
+ * @var string[]
+ */
+ protected array $classes = [
+ //
+ ];
+
+ /**
+ * The function that computes the loss of erroneous activations.
+ *
+ * @var ClassificationLoss
+ */
+ protected ClassificationLoss $costFn;
+
+ /**
+ * The softmax activation function.
+ *
+ * @var Softmax
+ */
+ protected Softmax $softmax;
+
+ /**
+ * The memorized input matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * The memorized activation matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $output = null;
+
+ /**
+ * @param string[] $classes
+ * @param ClassificationLoss|null $costFn
+ * @throws InvalidArgumentException
+ */
+ public function __construct(array $classes, ?ClassificationLoss $costFn = null)
+ {
+ $classes = array_values(array_unique($classes));
+
+ if (count($classes) < 2) {
+ throw new InvalidArgumentException('Number of classes'
+ . ' must be greater than 1, ' . count($classes)
+ . ' given.');
+ }
+
+ $this->classes = $classes;
+ $this->costFn = $costFn ?? new CrossEntropy();
+ $this->softmax = new Softmax();
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ return max(1, count($this->classes));
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @param positive-int $fanIn
+ * @throws InvalidArgumentException
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = count($this->classes);
+
+ if ($fanIn !== $fanOut) {
+ throw new InvalidArgumentException('Fan in must be'
+ . " equal to fan out, $fanOut expected but"
+ . " $fanIn given.");
+ }
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ $output = $this->softmax->activate($input);
+
+ $this->input = $input;
+ $this->output = $output;
+
+ return $output;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @param NDArray $input
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $this->softmax->activate($input);
+ }
+
+ /**
+ * Compute the gradient and loss at the output.
+ *
+ * @param string[] $labels
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return array
+ */
+ public function back(array $labels, Optimizer $optimizer) : array
+ {
+ if (!$this->input or !$this->output) {
+ throw new RuntimeException('Must perform forward pass'
+ . ' before backpropagating.');
+ }
+
+ $expected = [];
+
+ foreach ($labels as $label) {
+ $dist = [];
+
+ foreach ($this->classes as $class) {
+ $dist[] = $class == $label ? 1.0 : 0.0;
+ }
+
+ $expected[] = $dist;
+ }
+
+ $expected = NumPower::array($expected);
+
+ $input = $this->input;
+ $output = $this->output;
+
+ $gradient = new Deferred([$this, 'gradient'], [$input, $output, $expected]);
+
+ $loss = $this->costFn->compute($output, $expected);
+
+ $this->input = $this->output = null;
+
+ return [$gradient, $loss];
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @param NDArray $input
+ * @param NDArray $output
+ * @param NDArray $expected
+ * @return NDArray
+ */
+ public function gradient(NDArray $input, NDArray $output, NDArray $expected) : NDArray
+ {
+ $n = array_product($output->shape());
+
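+        // Shortcut: a softmax output paired with a cross entropy loss reduces to (output - expected) / n.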
+ if ($this->costFn instanceof CrossEntropy) {
+ return NumPower::divide(
+ NumPower::subtract($output, $expected),
+ $n
+ );
+ }
+
+ $dLoss = NumPower::divide(
+ $this->costFn->differentiate($output, $expected),
+ $n
+ );
+
+ return NumPower::multiply(
+ $this->softmax->differentiate($output),
+ $dLoss
+ );
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Multiclass (cost function: {$this->costFn})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Noise/Noise.php b/src/NeuralNet/Layers/Noise/Noise.php
new file mode 100644
index 000000000..934265bb3
--- /dev/null
+++ b/src/NeuralNet/Layers/Noise/Noise.php
@@ -0,0 +1,157 @@
+
+ */
+class Noise implements Hidden
+{
+ /**
+     * The amount (standard deviation) of the Gaussian noise to add to the inputs.
+ *
+ * @var float
+ */
+ protected float $stdDev;
+
+ /**
+ * The width of the layer.
+ *
+ * @var positive-int|null
+ */
+ protected ?int $width = null;
+
+ /**
+ * @param float $stdDev
+ * @throws InvalidArgumentException
+ */
+ public function __construct(float $stdDev)
+ {
+ if ($stdDev < 0.0) {
+ throw new InvalidArgumentException("Standard deviation must be 0 or greater, $stdDev given.");
+ }
+
+ $this->stdDev = $stdDev;
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ return $this->width;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $fanIn;
+
+ $this->width = $fanOut;
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ if ($this->stdDev === 0.0) {
+ return $input;
+ }
+
+ $shape = $input->shape();
+
+ // Gaussian noise with mean 0 and standard deviation $this->stdDev
+ $noise = NumPower::normal(size: $shape, loc: 0.0, scale: $this->stdDev);
+
+ return NumPower::add($input, $noise);
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $input;
+ }
+
+ /**
+ * Calculate the gradients of the layer and update the parameters.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ return $prevGradient;
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Noise (std dev: {$this->stdDev})";
+ }
+}
diff --git a/src/NeuralNet/Layers/PReLU/PReLU.php b/src/NeuralNet/Layers/PReLU/PReLU.php
new file mode 100644
index 000000000..a8986cce4
--- /dev/null
+++ b/src/NeuralNet/Layers/PReLU/PReLU.php
@@ -0,0 +1,287 @@
+
+ */
+class PReLU implements Hidden, Parametric
+{
+ /**
+ * The initializer of the alpha (leakage) parameter.
+ *
+ * @var Initializer
+ */
+ protected Initializer $initializer;
+
+ /**
+ * The width of the layer.
+ *
+ * @var positive-int|null
+ */
+ protected ?int $width = null;
+
+ /**
+ * The parameterized leakage coefficients.
+ *
+ * @var Parameter|null
+ */
+ protected ?Parameter $alpha = null;
+
+ /**
+ * The memoized input matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * @param Initializer|null $initializer
+ */
+ public function __construct(?Initializer $initializer = null)
+ {
+ $this->initializer = $initializer ?? new Constant(0.25);
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ return $this->width;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $fanIn;
+
+ // Initialize alpha as a vector of length fanOut (one alpha per neuron)
+ // Using shape [fanOut, 1] then flattening to [fanOut]
+ $alphaMat = $this->initializer->initialize(1, $fanOut);
+ $alpha = NumPower::flatten($alphaMat);
+
+ $this->width = $fanOut;
+ $this->alpha = new Parameter($alpha);
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ $this->input = $input;
+
+ return $this->activate($input);
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $this->activate($input);
+ }
+
+ /**
+ * Calculate the gradient and update the parameters of the layer.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ if (!$this->alpha) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ if (!$this->input) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ /** @var NDArray $dOut */
+ $dOut = $prevGradient();
+
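+ // df/dalpha = min(x, 0), so dL/dalpha = dOut * min(x, 0) summed over the batch.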
+ // Negative part of the input (values <= 0), used for dL/dalpha
+ $negativeInput = NumPower::minimum($this->input, 0.0);
+
+ $dAlphaFull = NumPower::multiply($dOut, $negativeInput);
+
+ // Sum over the batch axis (axis = 1) to obtain a gradient vector [width]
+ $dAlpha = NumPower::sum($dAlphaFull, axis: 1);
+
+ $this->alpha->update($dAlpha, $optimizer);
+
+ $input = $this->input;
+
+ $this->input = null;
+
+ return new Deferred([$this, 'gradient'], [$input, $dOut]);
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @param NDArray $dOut
+ * @return NDArray
+ */
+ public function gradient(NDArray $input, NDArray $dOut) : NDArray
+ {
+ $derivative = $this->differentiate($input);
+
+ return NumPower::multiply($derivative, $dOut);
+ }
+
+ /**
+ * Return the parameters of the layer.
+ *
+ * @internal
+ *
+ * @throws \RuntimeException
+ * @return Generator
+ */
+ public function parameters() : Generator
+ {
+ if (!$this->alpha) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ yield 'alpha' => $this->alpha;
+ }
+
+ /**
+ * Restore the parameters in the layer from an associative array.
+ *
+ * @internal
+ *
+ * @param Parameter[] $parameters
+ */
+ public function restore(array $parameters) : void
+ {
+ $this->alpha = $parameters['alpha'];
+ }
+
+ /**
+ * Compute the leaky ReLU activation function and return a matrix.
+ *
+ * @param NDArray $input
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ protected function activate(NDArray $input) : NDArray
+ {
+ if (!$this->alpha) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
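+ // PReLU: f(x) = max(x, 0) + alpha * min(x, 0), with one alpha per neuron.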
+ // Reshape alpha vector [width] to column [width, 1] for broadcasting
+ $alphaCol = NumPower::reshape($this->alpha->param(), [$this->width(), 1]);
+
+ $positiveActivation = NumPower::maximum($input, 0.0);
+
+ $negativeActivation = NumPower::multiply(
+ NumPower::minimum($input, 0.0),
+ $alphaCol,
+ );
+
+ return NumPower::add($positiveActivation, $negativeActivation);
+ }
+
+ /**
+ * Calculate the derivative of the activation function with respect to the given input.
+ *
+ * @param NDArray $input
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ protected function differentiate(NDArray $input) : NDArray
+ {
+ if (!$this->alpha) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
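+ // Derivative: f'(x) = 1 where x > 0, alpha where x <= 0.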
+ // Reshape alpha vector [width] to column [width, 1] for broadcasting
+ $alphaCol = NumPower::reshape($this->alpha->param(), [$this->width(), 1]);
+
+ $positivePart = NumPower::greater($input, 0.0);
+
+ $negativePart = NumPower::multiply(
+ NumPower::lessEqual($input, 0.0),
+ $alphaCol,
+ );
+
+ return NumPower::add($positivePart, $negativePart);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "PReLU (initializer: {$this->initializer})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Placeholder1D/Placeholder1D.php b/src/NeuralNet/Layers/Placeholder1D/Placeholder1D.php
new file mode 100644
index 000000000..45f8fc49d
--- /dev/null
+++ b/src/NeuralNet/Layers/Placeholder1D/Placeholder1D.php
@@ -0,0 +1,108 @@
+
+ */
+class Placeholder1D implements Input
+{
+ /**
+ * The number of input nodes. i.e. feature inputs.
+ *
+ * @var positive-int
+ */
+ protected int $inputs;
+
+ /**
+ * @param int $inputs
+ * @throws InvalidArgumentException
+ */
+ public function __construct(int $inputs)
+ {
+ if ($inputs < 1) {
+ throw new InvalidArgumentException("Number of input nodes must be greater than 0, $inputs given.");
+ }
+
+ $this->inputs = $inputs;
+ }
+
+ /**
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ return $this->inputs;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ return $this->inputs;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @param NDArray $input
+ * @throws InvalidArgumentException
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
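+ // Expects input of shape [features, batch]; validate the feature dimension.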
+ $shape = $input->shape();
+
+ if (empty($shape) || $shape[0] !== $this->inputs) {
+ $features = $shape[0] ?? 0;
+
+ throw new InvalidArgumentException(
+ 'The number of features and input nodes must be equal,'
+ . " {$this->inputs} expected but {$features} given.");
+ }
+
+ return $input;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $this->forward($input);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Placeholder 1D (inputs: {$this->inputs})";
+ }
+}
diff --git a/src/NeuralNet/Layers/Swish/Swish.php b/src/NeuralNet/Layers/Swish/Swish.php
new file mode 100644
index 000000000..fcb00fa44
--- /dev/null
+++ b/src/NeuralNet/Layers/Swish/Swish.php
@@ -0,0 +1,303 @@
+
+ */
+class Swish implements Hidden, Parametric
+{
+ /**
+ * The initializer of the beta parameter.
+ *
+ * @var Initializer
+ */
+ protected Initializer $initializer;
+
+ /**
+ * The sigmoid activation function.
+ *
+ * @var Sigmoid
+ */
+ protected Sigmoid $sigmoid;
+
+ /**
+ * The width of the layer.
+ *
+ * @var positive-int|null
+ */
+ protected ?int $width = null;
+
+ /**
+ * The parameterized scaling factors.
+ *
+ * @var Parameter|null
+ */
+ protected ?Parameter $beta = null;
+
+ /**
+ * The memoized input matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $input = null;
+
+ /**
+ * The memorized activation matrix.
+ *
+ * @var NDArray|null
+ */
+ protected ?NDArray $output = null;
+
+ /**
+ * @param Initializer|null $initializer
+ */
+ public function __construct(?Initializer $initializer = null)
+ {
+ $this->initializer = $initializer ?? new Constant(1.0);
+ $this->sigmoid = new Sigmoid();
+ }
+
+ /**
+ * Return the width of the layer.
+ *
+ * @internal
+ *
+ * @throws RuntimeException
+ * @return positive-int
+ */
+ public function width() : int
+ {
+ if ($this->width === null) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ return $this->width;
+ }
+
+ /**
+ * Initialize the layer with the fan in from the previous layer and return
+ * the fan out for this layer.
+ *
+ * @internal
+ *
+ * @param positive-int $fanIn
+ * @return positive-int
+ */
+ public function initialize(int $fanIn) : int
+ {
+ $fanOut = $fanIn;
+
+ // Initialize beta as a vector of length fanOut (one beta per neuron)
+ // Using shape [fanOut, 1] then flattening to [fanOut]
+ $betaMat = $this->initializer->initialize(1, $fanOut);
+ $beta = NumPower::flatten($betaMat);
+
+ $this->width = $fanOut;
+ $this->beta = new Parameter($beta);
+
+ return $fanOut;
+ }
+
+ /**
+ * Compute a forward pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function forward(NDArray $input) : NDArray
+ {
+ $this->input = $input;
+
+ $this->output = $this->activate($input);
+
+ return $this->output;
+ }
+
+ /**
+ * Compute an inferential pass through the layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function infer(NDArray $input) : NDArray
+ {
+ return $this->activate($input);
+ }
+
+ /**
+ * Calculate the gradient and update the parameters of the layer.
+ *
+ * @internal
+ *
+ * @param Deferred $prevGradient
+ * @param Optimizer $optimizer
+ * @throws RuntimeException
+ * @return Deferred
+ */
+ public function back(Deferred $prevGradient, Optimizer $optimizer) : Deferred
+ {
+ if (!$this->beta) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ if (!$this->input or !$this->output) {
+ throw new RuntimeException('Must perform forward pass before backpropagating.');
+ }
+
+ /** @var NDArray $dOut */
+ $dOut = $prevGradient();
+
+ // Gradient of the loss with respect to beta:
+ // dL/dbeta = sum over the batch of dL/dy * dy/dbeta.
+ // This uses the simplified approximation dL/dbeta ~ sum(dOut * input);
+ // the exact term would be dOut * input * output * (1 - sigmoid(beta * input)).
+ $dBetaFull = NumPower::multiply($dOut, $this->input);
+
+ // Sum over the batch axis (axis = 1) to obtain a gradient vector [width]
+ $dBeta = NumPower::sum($dBetaFull, axis: 1);
+
+ $this->beta->update($dBeta, $optimizer);
+
+ $input = $this->input;
+ $output = $this->output;
+
+ $this->input = $this->output = null;
+
+ return new Deferred([$this, 'gradient'], [$input, $output, $dOut]);
+ }
+
+ /**
+ * Calculate the gradient for the previous layer.
+ *
+ * @internal
+ *
+ * @param NDArray $input
+ * @param NDArray $output
+ * @param NDArray $dOut
+ * @return NDArray
+ */
+ public function gradient(NDArray $input, NDArray $output, NDArray $dOut) : NDArray
+ {
+ $derivative = $this->differentiate($input, $output);
+
+ return NumPower::multiply($derivative, $dOut);
+ }
+
+ /**
+ * Return the parameters of the layer.
+ *
+ * @internal
+ *
+ * @throws \RuntimeException
+ * @return Generator
+ */
+ public function parameters() : Generator
+ {
+ if (!$this->beta) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ yield 'beta' => $this->beta;
+ }
+
+ /**
+ * Restore the parameters in the layer from an associative array.
+ *
+ * @internal
+ *
+ * @param Parameter[] $parameters
+ */
+ public function restore(array $parameters) : void
+ {
+ $this->beta = $parameters['beta'];
+ }
+
+ /**
+ * Compute the Swish activation function and return a matrix.
+ *
+ * @param NDArray $input
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ protected function activate(NDArray $input) : NDArray
+ {
+ if (!$this->beta) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
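+ // Swish: f(x) = x * sigmoid(beta * x), with one beta per neuron.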
+ // Reshape beta vector [width] to column [width, 1] for broadcasting
+ $betaCol = NumPower::reshape($this->beta->param(), [$this->width(), 1]);
+
+ $zHat = NumPower::multiply($betaCol, $input);
+
+ $activated = $this->sigmoid->activate($zHat);
+
+ return NumPower::multiply($activated, $input);
+ }
+
+ /**
+ * Calculate the derivative of the activation function from the memoized input and output.
+ *
+ * @param NDArray $input
+ * @param NDArray $output
+ * @throws RuntimeException
+ * @return NDArray
+ */
+ protected function differentiate(NDArray $input, NDArray $output) : NDArray
+ {
+ if (!$this->beta) {
+ throw new RuntimeException('Layer has not been initialized.');
+ }
+
+ // Original formulation:
+ // derivative = (output / input) * (1 - output) + output
+ // Implemented with NumPower operations to avoid an explicit ones matrix.
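+ // Note: output / input equals sigmoid(beta * x), so this matches the
+ // beta = 1 (SiLU) derivative; an input of exactly 0 would divide by zero.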
+ $term1 = NumPower::divide($output, $input);
+ $oneMinusOutput = NumPower::subtract(1.0, $output);
+
+ $product = NumPower::multiply($term1, $oneMinusOutput);
+
+ return NumPower::add($product, $output);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @internal
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return "Swish (initializer: {$this->initializer})";
+ }
+}
diff --git a/src/NeuralNet/Networks/Network.php b/src/NeuralNet/Networks/Network.php
new file mode 100644
index 000000000..6554940b3
--- /dev/null
+++ b/src/NeuralNet/Networks/Network.php
@@ -0,0 +1,275 @@
+
+ */
+class Network
+{
+ /**
+ * The input layer to the network.
+ *
+ * @var Input
+ */
+ protected Input $input;
+
+ /**
+ * The hidden layers of the network.
+ *
+ * @var list<Hidden>
+ */
+ protected array $hidden = [
+ //
+ ];
+
+ /**
+ * The pathing of the backward pass through the hidden layers.
+ *
+ * @var list<Hidden>
+ */
+ protected array $backPass = [
+ //
+ ];
+
+ /**
+ * The output layer of the network.
+ *
+ * @var Output
+ */
+ protected Output $output;
+
+ /**
+ * The gradient descent optimizer used to train the network.
+ *
+ * @var Optimizer
+ */
+ protected Optimizer $optimizer;
+
+ /**
+ * @param Input $input
+ * @param Hidden[] $hidden
+ * @param Output $output
+ * @param Optimizer $optimizer
+ */
+ public function __construct(Input $input, array $hidden, Output $output, Optimizer $optimizer)
+ {
+ $hidden = array_values($hidden);
+
+ $backPass = array_reverse($hidden);
+
+ $this->input = $input;
+ $this->hidden = $hidden;
+ $this->output = $output;
+ $this->optimizer = $optimizer;
+ $this->backPass = $backPass;
+ }
+
+ /**
+ * Return the input layer.
+ *
+ * @return Input
+ */
+ public function input() : Input
+ {
+ return $this->input;
+ }
+
+ /**
+ * Return an array of hidden layers indexed left to right.
+ *
+ * @return list<Hidden>
+ */
+ public function hidden() : array
+ {
+ return $this->hidden;
+ }
+
+ /**
+ * Return the output layer.
+ *
+ * @return Output
+ */
+ public function output() : Output
+ {
+ return $this->output;
+ }
+
+ /**
+ * Return all the layers in the network.
+ *
+ * @return Traversable<Layer>
+ */
+ public function layers() : Traversable
+ {
+ yield $this->input;
+
+ yield from $this->hidden;
+
+ yield $this->output;
+ }
+
+ /**
+ * Return the number of trainable parameters in the network.
+ *
+ * @return int
+ */
+ public function numParams() : int
+ {
+ $numParams = 0;
+
+ foreach ($this->layers() as $layer) {
+ if ($layer instanceof Parametric) {
+ foreach ($layer->parameters() as $parameter) {
+ $numParams += $parameter->param()->size();
+ }
+ }
+ }
+
+ return $numParams;
+ }
+
+ /**
+ * Initialize the parameters of the layers and warm the optimizer cache.
+ */
+ public function initialize() : void
+ {
+ $fanIn = 1;
+
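+ // Each layer's fan out becomes the next layer's fan in.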
+ foreach ($this->layers() as $layer) {
+ $fanIn = $layer->initialize($fanIn);
+ }
+
+ if ($this->optimizer instanceof Adaptive) {
+ foreach ($this->layers() as $layer) {
+ if ($layer instanceof Parametric) {
+ foreach ($layer->parameters() as $param) {
+ $this->optimizer->warm($param);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Run an inference pass and return the activations at the output layer.
+ *
+ * @param Dataset $dataset
+ * @return NDArray
+ */
+ public function infer(Dataset $dataset) : NDArray
+ {
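+ // Samples arrive as [batch, features]; the network operates on [features, batch].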
+ $input = NumPower::transpose(NumPower::array($dataset->samples()), [1, 0]);
+
+ foreach ($this->layers() as $layer) {
+ $input = $layer->infer($input);
+ }
+
+ return NumPower::transpose($input, [1, 0]);
+ }
+
+ /**
+ * Perform a forward and backward pass of the network in one call. Returns
+ * the loss from the backward pass.
+ *
+ * @param Labeled $dataset
+ * @return float
+ */
+ public function roundtrip(Labeled $dataset) : float
+ {
+ $input = NumPower::transpose(NumPower::array($dataset->samples()), [1, 0]);
+
+ $this->feed($input);
+
+ $loss = $this->backpropagate($dataset->labels());
+
+ return $loss;
+ }
+
+ /**
+ * Feed a batch through the network and return a matrix of activations at the output layer.
+ *
+ * @param NDArray $input
+ * @return NDArray
+ */
+ public function feed(NDArray $input) : NDArray
+ {
+ foreach ($this->layers() as $layer) {
+ $input = $layer->forward($input);
+ }
+
+ return $input;
+ }
+
+ /**
+ * Backpropagate the gradient of the cost function and return the loss.
+ *
+ * @param list<string|int|float> $labels
+ * @return float
+ */
+ public function backpropagate(array $labels) : float
+ {
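+ // The output layer produces the initial gradient; hidden layers consume it in reverse order.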
+ [$gradient, $loss] = $this->output->back($labels, $this->optimizer);
+
+ foreach ($this->backPass as $layer) {
+ $gradient = $layer->back($gradient, $this->optimizer);
+ }
+
+ return $loss;
+ }
+
+ /**
+ * Export the network architecture as a graph in dot format.
+ *
+ * @return Encoding
+ */
+ public function exportGraphviz() : Encoding
+ {
+ $dot = 'digraph Tree {' . PHP_EOL;
+ $dot .= ' node [shape=box, fontname=helvetica];' . PHP_EOL;
+
+ $layerNum = 0;
+
+ foreach ($this->layers() as $layer) {
+ ++$layerNum;
+
+ $dot .= " N$layerNum [label=\"$layer\",style=\"rounded\"]" . PHP_EOL;
+
+ if ($layerNum > 1) {
+ $parentId = $layerNum - 1;
+
+ $dot .= " N{$parentId} -> N{$layerNum};" . PHP_EOL;
+ }
+ }
+
+ $dot .= '}';
+
+ return new Encoding($dot);
+ }
+}
diff --git a/src/NeuralNet/Snapshots/Snapshot.php b/src/NeuralNet/Snapshots/Snapshot.php
new file mode 100644
index 000000000..033224d5c
--- /dev/null
+++ b/src/NeuralNet/Snapshots/Snapshot.php
@@ -0,0 +1,90 @@
+
+ */
+class Snapshot
+{
+ /**
+ * The parametric layers of the network.
+ *
+ * @var Parametric[]
+ */
+ protected array $layers;
+
+ /**
+ * The parameters corresponding to each layer in the network at the time of the snapshot.
+ *
+ * @var list<Parameter[]>
+ */
+ protected array $parameters;
+
+ /**
+ * Take a snapshot of the network.
+ *
+ * @param Network $network
+ * @return Snapshot
+ */
+ public static function take(Network $network) : self
+ {
+ $layers = $parameters = [];
+
+ foreach ($network->layers() as $layer) {
+ if ($layer instanceof Parametric) {
+ $params = [];
+
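+ // Clone each parameter so subsequent training updates cannot mutate the snapshot.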
+ foreach ($layer->parameters() as $key => $parameter) {
+ $params[$key] = clone $parameter;
+ }
+
+ $layers[] = $layer;
+ $parameters[] = $params;
+ }
+ }
+
+ return new self($layers, $parameters);
+ }
+
+ /**
+ * Class constructor.
+ *
+ * @param Parametric[] $layers
+ * @param list<Parameter[]> $parameters
+ * @throws InvalidArgumentException
+ */
+ public function __construct(array $layers, array $parameters)
+ {
+ if (count($layers) !== count($parameters)) {
+ throw new InvalidArgumentException('Number of layers and parameter groups must be equal.');
+ }
+
+ $this->layers = $layers;
+ $this->parameters = $parameters;
+ }
+
+ /**
+ * Restore the network parameters.
+ */
+ public function restore() : void
+ {
+ foreach ($this->layers as $i => $layer) {
+ $layer->restore($this->parameters[$i]);
+ }
+ }
+}
diff --git a/src/Traits/AssertsShapes.php b/src/Traits/AssertsShapes.php
index 7fabc316f..88fe23c1e 100644
--- a/src/Traits/AssertsShapes.php
+++ b/src/Traits/AssertsShapes.php
@@ -4,7 +4,7 @@
namespace Rubix\ML\Traits;
-use InvalidArgumentException;
+use Rubix\ML\Exceptions\InvalidArgumentException;
use NDArray;
/**
@@ -29,7 +29,7 @@ trait AssertsShapes
protected function assertSameShape(NDArray $output, NDArray $target) : void
{
if ($output->shape() !== $target->shape()) {
- throw new InvalidArgumentException('Output and target must have identical shapes.');
+ throw new InvalidArgumentException('Output and target must have the same shape.');
}
}
}
diff --git a/tests/Helpers/GraphvizTest.php b/tests/Helpers/GraphvizTest.php
index da1c70b99..3bef96a06 100644
--- a/tests/Helpers/GraphvizTest.php
+++ b/tests/Helpers/GraphvizTest.php
@@ -6,6 +6,8 @@
use PHPUnit\Framework\Attributes\CoversClass;
use PHPUnit\Framework\Attributes\Group;
+use PHPUnit\Framework\Attributes\Test;
+use PHPUnit\Framework\Attributes\TestDox;
use Rubix\ML\Encoding;
use Rubix\ML\Helpers\Graphviz;
use PHPUnit\Framework\TestCase;
@@ -14,9 +16,8 @@
#[CoversClass(GraphvizTest::class)]
class GraphvizTest extends TestCase
{
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Converts a DOT graph description to an image encoding')]
public function dotToImage() : void
{
// Almost always skip this test, needed to appease Stan.
@@ -56,6 +57,6 @@ public function dotToImage() : void
$encoding = Graphviz::dotToImage($dot, 'png');
- $this->assertInstanceOf(Encoding::class, $encoding);
+ self::assertInstanceOf(Encoding::class, $encoding);
}
}
diff --git a/tests/NeuralNet/FeedForwardTest.php b/tests/NeuralNet/FeedForwardTest.php
index c68ae47be..a060975e0 100644
--- a/tests/NeuralNet/FeedForwardTest.php
+++ b/tests/NeuralNet/FeedForwardTest.php
@@ -14,11 +14,14 @@
use Rubix\ML\NeuralNet\ActivationFunctions\ReLU;
use Rubix\ML\NeuralNet\CostFunctions\CrossEntropy;
use PHPUnit\Framework\TestCase;
-
-/**
- * @group NeuralNet
- * @covers \Rubix\ML\NeuralNet\FeedForward
- */
+use PHPUnit\Framework\Attributes\Before;
+use PHPUnit\Framework\Attributes\CoversClass;
+use PHPUnit\Framework\Attributes\Group;
+use PHPUnit\Framework\Attributes\Test;
+use PHPUnit\Framework\Attributes\TestDox;
+
+#[Group('NeuralNet')]
+#[CoversClass(FeedForward::class)]
class FeedForwardTest extends TestCase
{
/**
@@ -46,9 +49,7 @@ class FeedForwardTest extends TestCase
*/
protected $output;
- /**
- * @before
- */
+ #[Before]
protected function setUp() : void
{
$this->dataset = Labeled::quick([
@@ -72,50 +73,44 @@ protected function setUp() : void
$this->network = new FeedForward($this->input, $this->hidden, $this->output, new Adam(0.001));
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Builds a feed-forward network instance')]
public function build() : void
{
$this->assertInstanceOf(FeedForward::class, $this->network);
$this->assertInstanceOf(Network::class, $this->network);
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Returns all hidden and output layers')]
public function layers() : void
{
$this->assertCount(5, iterator_to_array($this->network->layers()));
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Returns the input layer')]
public function input() : void
{
$this->assertInstanceOf(Placeholder1D::class, $this->network->input());
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Returns the hidden layers')]
public function hidden() : void
{
$this->assertCount(5, $this->network->hidden());
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Returns the output layer')]
public function networkOutput() : void
{
$this->assertInstanceOf(Output::class, $this->network->output());
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Reports the correct number of parameters after initialization')]
public function numParams() : void
{
$this->network->initialize();
@@ -123,9 +118,8 @@ public function numParams() : void
$this->assertEquals(103, $this->network->numParams());
}
- /**
- * @test
- */
+ #[Test]
+ #[TestDox('Performs a roundtrip pass and returns a loss value')]
public function roundtrip() : void
{
$this->network->initialize();
diff --git a/tests/NeuralNet/FeedForwards/FeedForwardTest.php b/tests/NeuralNet/FeedForwards/FeedForwardTest.php
new file mode 100644
index 000000000..84226fc70
--- /dev/null
+++ b/tests/NeuralNet/FeedForwards/FeedForwardTest.php
@@ -0,0 +1,133 @@
+ protected function setUp() : void
+ {
+ $this->dataset = Labeled::quick([
+ [1.0, 2.5],
+ [0.1, 0.0],
+ [0.002, -6.0],
+ ], ['yes', 'no', 'maybe']);
+
+ $this->input = new Placeholder1D(2);
+
+ $this->hidden = [
+ new Dense(10),
+ new Activation(new ReLU()),
+ new Dense(5),
+ new Activation(new ReLU()),
+ new Dense(3),
+ ];
+
+ $this->output = new Multiclass(['yes', 'no', 'maybe'], new CrossEntropy());
+
+ $this->network = new FeedForward($this->input, $this->hidden, $this->output, new Adam(0.001));
+ }
+
+ #[Test]
+ #[TestDox('Builds a feed-forward network instance')]
+ public function build() : void
+ {
+ self::assertInstanceOf(FeedForward::class, $this->network);
+ self::assertInstanceOf(Network::class, $this->network);
+ }
+
+ #[Test]
+ #[TestDox('Returns all hidden and output layers')]
+ public function layers() : void
+ {
+ self::assertCount(5, iterator_to_array($this->network->layers()));
+ }
+
+ #[Test]
+ #[TestDox('Returns the input layer')]
+ public function input() : void
+ {
+ self::assertInstanceOf(Placeholder1D::class, $this->network->input());
+ }
+
+ #[Test]
+ #[TestDox('Returns the hidden layers')]
+ public function hidden() : void
+ {
+ self::assertCount(5, $this->network->hidden());
+ }
+
+ #[Test]
+ #[TestDox('Returns the output layer')]
+ public function networkOutput() : void
+ {
+ self::assertInstanceOf(Output::class, $this->network->output());
+ }
+
+ #[Test]
+ #[TestDox('Reports the correct number of parameters after initialization')]
+ public function numParams() : void
+ {
+ $this->network->initialize();
+
+ self::assertEquals(103, $this->network->numParams());
+ }
+
+ #[Test]
+ #[TestDox('Performs a roundtrip pass and returns a loss value')]
+ public function roundtrip() : void
+ {
+ $this->network->initialize();
+
+ $loss = $this->network->roundtrip($this->dataset);
+
+ self::assertIsFloat($loss);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Activation/ActivationTest.php b/tests/NeuralNet/Layers/Activation/ActivationTest.php
new file mode 100644
index 000000000..2c203ad18
--- /dev/null
+++ b/tests/NeuralNet/Layers/Activation/ActivationTest.php
@@ -0,0 +1,181 @@
+
+ */
+ public static function forwardProvider() : array
+ {
+ return [
+ [
+ NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]),
+ [
+ [1.0, 2.5, 0.0],
+ [0.1, 0.0, 3.0],
+ [0.002, 0.0, 0.0],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function backProvider() : array
+ {
+ return [
+ [
+ NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]),
+ NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]),
+ [
+ [0.25, 0.7, 0.0],
+ [0.5, 0.0, 0.01],
+ [0.25, 0, 0.0],
+ ],
+ ],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Activation(new ReLU());
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ self::assertEquals('Activation (activation fn: ReLU)', (string) $this->layer);
+ }
+
+ #[Test]
+ #[TestDox('Initializes width equal to fan-in')]
+ public function testInitializeSetsWidth() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ self::assertEquals($this->fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward activations')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(NDArray $input, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $forward = $this->layer->forward($input);
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes backpropagated gradients after forward pass')]
+ #[DataProvider('backProvider')]
+ public function testBack(NDArray $input, NDArray $prevGrad, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Forward pass to set internal input/output state
+ $this->layer->forward($input);
+
+ $gradient = $this->layer
+ ->back(prevGradient: new Deferred(fn: fn () => $prevGrad), optimizer: $this->optimizer)
+ ->compute();
+
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes inference activations')]
+ #[DataProvider('forwardProvider')]
+ public function testInfer(NDArray $input, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $infer = $this->layer->infer($input);
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes gradient correctly given input, output, and previous gradient')]
+ #[DataProvider('backProvider')]
+ public function testGradient(NDArray $input, NDArray $prevGrad, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Produce output to pass explicitly to gradient
+ $output = $this->layer->forward($input);
+
+ $gradient = $this->layer->gradient(
+ $input,
+ $output,
+ new Deferred(fn: fn () => $prevGrad)
+ );
+
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/Layers/BatchNorm/BatchNormTest.php b/tests/NeuralNet/Layers/BatchNorm/BatchNormTest.php
new file mode 100644
index 000000000..dd5380941
--- /dev/null
+++ b/tests/NeuralNet/Layers/BatchNorm/BatchNormTest.php
@@ -0,0 +1,393 @@
+
+ */
+ public static function initializeProvider() : array
+ {
+ return [
+ 'fanIn=3' => [3],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function forwardProvider() : array
+ {
+ return [
+ 'expectedForward' => [[
+ [-0.1251222, 1.2825031, -1.1573808],
+ [-0.6708631, -0.7427414, 1.4136046],
+ [0.7974158, -1.4101899, 0.6127743],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function backProvider() : array
+ {
+ return [
+ 'expectedGradient' => [[
+ [-0.0644587, 0.0272710, 0.0371877],
+ [0.1137590, -0.1099670, -0.0037919],
+ [-0.1190978, -0.0108703, 0.1299681],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function inferProvider() : array
+ {
+ return [
+ 'expectedInfer' => [[
+ [-0.1251222, 1.2825031, -1.1573808],
+ [-0.6708631, -0.7427414, 1.4136046],
+ [0.7974158, -1.4101899, 0.6127743],
+ ]],
+ ];
+ }
+
+ /**
+ * Additional inputs to validate behavior across different batch sizes.
+ *
+ * @return array
+ */
+ public static function batchInputsProvider() : array
+ {
+ return [
+ 'batch1x3' => [[
+ [2.0, -1.0, 0.0],
+ ]],
+ 'batch2x3' => [[
+ [1.0, 2.0, 3.0],
+ [3.0, 3.0, 3.0],
+ ]],
+ 'batch4x3' => [[
+ [0.5, -0.5, 1.5],
+ [10.0, -10.0, 0.0],
+ [7.2, 3.3, -2.4],
+ [-1.0, -2.0, 4.0],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function gradientProvider() : array
+ {
+ return [
+ 'expectedGradient' => [[
+ [-0.0644587, 0.0272710, 0.0371877],
+ [0.1137590, -0.1099670, -0.0037919],
+ [-0.1190978, -0.0108703, 0.1299681],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function badDecayProvider() : array
+ {
+ return [
+ 'negative' => [-0.01],
+ 'greaterThanOne' => [1.01],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new BatchNorm(
+ decay: 0.9,
+ betaInitializer: new Constant(0.0),
+ gammaInitializer: new Constant(1.0)
+ );
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ self::assertEquals(
+ 'Batch Norm (decay: 0.9, beta initializer: Constant (value: 0), gamma initializer: Constant (value: 1))',
+ (string) $this->layer
+ );
+ }
+
+ #[Test]
+ #[TestDox('Initializes width and returns fan out')]
+ #[DataProvider('initializeProvider')]
+ public function testInitialize(int $fanIn) : void
+ {
+ $fanOut = $this->layer->initialize($fanIn);
+ self::assertEquals($fanIn, $fanOut);
+ self::assertEquals($fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward pass')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates and returns gradient for previous layer')]
+ #[DataProvider('backProvider')]
+ public function testBack(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+ $this->layer->forward($this->input);
+
+ $gradient = $this->layer->back(
+ prevGradient: $this->prevGrad,
+ optimizer: $this->optimizer
+ )->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Infers using running statistics')]
+ #[DataProvider('inferProvider')]
+ public function testInfer(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+ // Perform a forward pass to set running mean/variance
+ $this->layer->forward($this->input);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes forward pass (row-wise) with zero mean and unit variance per sample for various batch sizes')]
+ #[DataProvider('batchInputsProvider')]
+ public function testForwardStatsMultipleBatches(array $input) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $forward = $this->layer->forward(NumPower::array($input));
+ $out = $forward->toArray();
+
+ // Check per-row mean ~ 0 and variance ~ 1 (allow 0 for degenerate rows)
+ $this->assertRowwiseStats($input, $out, true);
+ }
+
+ #[Test]
+ #[TestDox('Infers (row-wise) with zero mean and unit variance per sample for various batch sizes')]
+ #[DataProvider('batchInputsProvider')]
+ public function testInferStatsMultipleBatches(array $input) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Perform a forward pass on the same input to initialize running stats
+ $this->layer->forward(NumPower::array($input));
+
+ $infer = $this->layer->infer(NumPower::array($input));
+ $out = $infer->toArray();
+
+ $this->assertRowwiseStats($input, $out, false);
+ }
+
+ #[Test]
+ #[TestDox('Throws when width is requested before initialization')]
+ public function testWidthThrowsBeforeInitialize() : void
+ {
+ $layer = new BatchNorm();
+ $this->expectException(RubixRuntimeException::class);
+ $layer->width();
+ }
+
+ #[Test]
+ #[TestDox('Constructor rejects invalid decay values')]
+ #[DataProvider('badDecayProvider')]
+ public function testConstructorRejectsInvalidDecay(float $decay) : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ new BatchNorm(decay: $decay);
+ }
+
+ #[Test]
+ #[TestDox('Yields trainable parameters beta and gamma')]
+ public function testParameters() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $params = iterator_to_array($this->layer->parameters());
+
+ self::assertArrayHasKey('beta', $params);
+ self::assertArrayHasKey('gamma', $params);
+ self::assertInstanceOf(TrainableParameter::class, $params['beta']);
+ self::assertInstanceOf(TrainableParameter::class, $params['gamma']);
+
+ self::assertEquals([0.0, 0.0, 0.0], $params['beta']->param()->toArray());
+ self::assertEquals([1.0, 1.0, 1.0], $params['gamma']->param()->toArray());
+ }
+
+ #[Test]
+ #[TestDox('Restores parameters from array')]
+ public function testRestore() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $betaNew = new TrainableParameter(NumPower::full([3], 2.0));
+ $gammaNew = new TrainableParameter(NumPower::full([3], 3.0));
+
+ $this->layer->restore([
+ 'beta' => $betaNew,
+ 'gamma' => $gammaNew,
+ ]);
+
+ $restored = iterator_to_array($this->layer->parameters());
+ self::assertSame($betaNew, $restored['beta']);
+ self::assertSame($gammaNew, $restored['gamma']);
+ self::assertEquals([2.0, 2.0, 2.0], $restored['beta']->param()->toArray());
+ self::assertEquals([3.0, 3.0, 3.0], $restored['gamma']->param()->toArray());
+ }
+
+ #[Test]
+ #[TestDox('Computes gradient for previous layer directly')]
+ #[DataProvider('gradientProvider')]
+ public function testGradient(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Compute forward-time caches manually to pass into gradient()
+ $input = $this->input;
+ $rows = $input->shape()[0];
+ $meanArr = [];
+ $varArr = [];
+ $stdInvArr = [];
+
+ for ($i = 0; $i < $rows; $i++) {
+ $row = $input->toArray()[$i];
+ $meanArr[$i] = NumPower::mean($row);
+ $varArr[$i] = NumPower::variance($row);
+ $stdInvArr[$i] = 1.0 / sqrt($varArr[$i]);
+ }
+
+ $mean = NumPower::array($meanArr);
+ $stdInv = NumPower::array($stdInvArr);
+
+ $xHat = NumPower::multiply(
+ NumPower::subtract(NumPower::transpose($input, [1, 0]), $mean),
+ $stdInv
+ );
+ $xHat = NumPower::transpose($xHat, [1, 0]);
+
+ // Use provided prevGrad as dOut and current gamma parameter
+ $dOut = ($this->prevGrad)();
+ $gamma = iterator_to_array($this->layer->parameters())['gamma']->param();
+
+ $gradient = $this->layer->gradient($dOut, $gamma, $stdInv, $xHat);
+
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ /**
+ * @param array<array<float>> $inputRows
+ * @param array<array<float>> $outRows
+ */
+ private function assertRowwiseStats(array $inputRows, array $outRows, bool $checkMean) : void
+ {
+ foreach ($outRows as $i => $row) {
+ $mean = array_sum($row) / count($row);
+ $var = 0.0;
+ foreach ($row as $v) {
+ $var += ($v - $mean) * ($v - $mean);
+ }
+ $var /= count($row);
+
+ $orig = $inputRows[$i];
+ $origMean = array_sum($orig) / count($orig);
+ $origVar = 0.0;
+ foreach ($orig as $ov) {
+ $origVar += ($ov - $origMean) * ($ov - $origMean);
+ }
+ $origVar /= count($orig);
+
+ $expectedVar = $origVar < 1e-12 ? 0.0 : 1.0;
+
+ if ($checkMean) {
+ self::assertEqualsWithDelta(0.0, $mean, 1e-7);
+ }
+
+ if ($expectedVar === 0.0) {
+ self::assertLessThan(1e-6, $var);
+ } else {
+ self::assertEqualsWithDelta(1.0, $var, 1e-6);
+ }
+ }
+ }
+}
diff --git a/tests/NeuralNet/Layers/Binary/BinaryTest.php b/tests/NeuralNet/Layers/Binary/BinaryTest.php
new file mode 100644
index 000000000..645d7c86b
--- /dev/null
+++ b/tests/NeuralNet/Layers/Binary/BinaryTest.php
@@ -0,0 +1,192 @@
+
+ */
+ public static function forwardProvider() : array
+ {
+ return [
+ [
+ [
+ [0.7310585, 0.9241418, 0.4750207],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function backProvider() : array
+ {
+ return [
+ [
+ [
+ [0.2436861, -0.0252860, 0.1583402],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<string>}>
+ */
+ public static function badClassesProvider() : array
+ {
+ return [
+ 'empty' => [[]],
+ 'single' => [['hot']],
+ 'duplicatesToOne' => [['hot', 'hot']],
+ 'threeUnique' => [['hot', 'cold', 'warm']],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ ]);
+
+ $this->labels = ['hot', 'cold', 'hot'];
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Binary(classes: ['hot', 'cold'], costFn: new CrossEntropy());
+ }
+
+ #[Test]
+ #[TestDox('Returns string representation')]
+ public function testToString() : void
+ {
+ $this->layer->initialize(1);
+
+ self::assertEquals('Binary (cost function: Cross Entropy)', (string) $this->layer);
+ }
+
+ #[Test]
+ #[TestDox('Initializes and reports width')]
+ public function testInitializeWidth() : void
+ {
+ $this->layer->initialize(1);
+ self::assertEquals(1, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Constructor rejects invalid classes arrays')]
+ #[DataProvider('badClassesProvider')]
+ public function testConstructorRejectsInvalidClasses(array $classes) : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ new Binary(classes: $classes, costFn: new CrossEntropy());
+ }
+
+ #[Test]
+ #[TestDox('Constructor accepts classes arrays that dedupe to exactly 2 labels')]
+ public function testConstructorAcceptsDuplicateClassesThatDedupeToTwo() : void
+ {
+ $layer = new Binary(classes: ['hot', 'cold', 'hot'], costFn: new CrossEntropy());
+ // Should initialize without throwing and report correct width
+ $layer->initialize(1);
+ self::assertEquals(1, $layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward pass')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(array $expected) : void
+ {
+ $this->layer->initialize(1);
+
+ $forward = $this->layer->forward($this->input);
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates and returns gradient for previous layer')]
+ #[DataProvider('backProvider')]
+ public function testBack(array $expectedGradient) : void
+ {
+ $this->layer->initialize(1);
+ $this->layer->forward($this->input);
+
+ [$computation, $loss] = $this->layer->back(labels: $this->labels, optimizer: $this->optimizer);
+
+ self::assertInstanceOf(Deferred::class, $computation);
+ self::assertIsFloat($loss);
+
+ $gradient = $computation->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expectedGradient, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes gradient directly given input, output, expected, and batch size')]
+ #[DataProvider('backProvider')]
+ public function testGradient(array $expectedGradient) : void
+ {
+ $this->layer->initialize(1);
+
+ $input = $this->input;
+ $output = $this->layer->forward($input);
+
+ // Build expected NDArray (1, batch) using the Binary classes mapping: hot=>0.0, cold=>1.0
+ $expected = [];
+ foreach ($this->labels as $label) {
+ $expected[] = ($label === 'cold') ? 1.0 : 0.0;
+ }
+ $expected = NumPower::array([$expected]);
+
+ $batchSize = count($this->labels);
+
+ $gradient = $this->layer->gradient($input, $output, $expected, $batchSize);
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expectedGradient, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes inference activations')]
+ #[DataProvider('forwardProvider')]
+ public function testInfer(array $expected) : void
+ {
+ $this->layer->initialize(1);
+
+ $infer = $this->layer->infer($this->input);
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Continuous/ContinuousTest.php b/tests/NeuralNet/Layers/Continuous/ContinuousTest.php
new file mode 100644
index 000000000..39592cdcb
--- /dev/null
+++ b/tests/NeuralNet/Layers/Continuous/ContinuousTest.php
@@ -0,0 +1,159 @@
+
+ */
+ public static function forwardProvider() : array
+ {
+ return [
+ [
+ [
+ [2.5, 0.0, -6.0],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array
+ */
+ public static function gradientProvider() : array
+ {
+ return [
+ [
+ [
+ [0.8333333, 0.8333333, -32.0],
+ ],
+ ],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->input = NumPower::array([
+ [2.5, 0.0, -6.0],
+ ]);
+
+ $this->labels = [0.0, -2.5, 90.0];
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Continuous(new LeastSquares());
+ }
+
+ #[Test]
+ #[TestDox('Returns string representation')]
+ public function testToString() : void
+ {
+ $this->layer->initialize(1);
+
+ self::assertEquals('Continuous (cost function: Least Squares)', (string) $this->layer);
+ }
+
+ #[Test]
+ #[TestDox('Initializes and reports width')]
+ public function testInitializeWidth() : void
+ {
+ $this->layer->initialize(1);
+ self::assertEquals(1, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Initialize rejects fan-in not equal to 1')]
+ public function testInitializeRejectsInvalidFanIn() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->layer->initialize(2);
+ }
+
+ #[Test]
+ #[TestDox('Computes forward pass')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(array $expected) : void
+ {
+ $this->layer->initialize(1);
+
+ $forward = $this->layer->forward($this->input);
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates and returns gradient for previous layer')]
+ #[DataProvider('gradientProvider')]
+ public function testBack(array $expectedGradient) : void
+ {
+ $this->layer->initialize(1);
+ $this->layer->forward($this->input);
+
+ [$computation, $loss] = $this->layer->back(labels: $this->labels, optimizer: $this->optimizer);
+
+ self::assertInstanceOf(Deferred::class, $computation);
+ self::assertIsFloat($loss);
+
+ $gradient = $computation->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expectedGradient, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes gradient directly given input and expected')]
+ #[DataProvider('gradientProvider')]
+ public function testGradient(array $expectedGradient) : void
+ {
+ $this->layer->initialize(1);
+
+ $input = $this->input;
+ $expected = NumPower::array([$this->labels]);
+
+ $gradient = $this->layer->gradient($input, $expected);
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expectedGradient, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes inference activations')]
+ #[DataProvider('forwardProvider')]
+ public function testInfer(array $expected) : void
+ {
+ $this->layer->initialize(1);
+
+ $infer = $this->layer->infer($this->input);
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Dense/DenseTest.php b/tests/NeuralNet/Layers/Dense/DenseTest.php
new file mode 100644
index 000000000..d8c920aa3
--- /dev/null
+++ b/tests/NeuralNet/Layers/Dense/DenseTest.php
@@ -0,0 +1,308 @@
+ * @return array<array{array<array<float>>, array<float>, array<array<float>>}>
+ */
+ public static function forwardProvider() : array
+ {
+ return [
+ [
+ // weights 2x3
+ [
+ [1.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0],
+ ],
+ // biases length-2
+ [0.0, 0.0],
+ // expected forward output 2x3 for the fixed input in setUp()
+ // input = [
+ // [1.0, 2.5, -0.1],
+ // [0.1, 0.0, 3.0],
+ // [0.002, -6.0, -0.5],
+ // ];
+ // so W * input = first two rows of input
+ [
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array<array{array<array<float>>, array<float>, array<array<float>>, array<array<float>>}>
+ */
+ public static function backProvider() : array
+ {
+ return [
+ [
+ // weights 2x3
+ [
+ [1.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0],
+ ],
+ // biases length-2
+ [0.0, 0.0],
+ // prev gradient 2x3
+ [
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ],
+ // expected gradient for previous layer 3x3
+ [
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ [0.0, 0.0, 0.0],
+ ],
+ ],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Dense(
+ neurons: 2,
+ l2Penalty: 0.0,
+ bias: true,
+ weightInitializer: new HeUniform(),
+ biasInitializer: new Constant(0.0)
+ );
+
+ srand(self::RANDOM_SEED);
+ }
+
+ #[Test]
+ #[TestDox('Throws an exception for invalid constructor arguments')]
+ public function testConstructorValidation() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+
+ new Dense(
+ neurons: 0,
+ l2Penalty: -0.1,
+ bias: true,
+ weightInitializer: new HeUniform(),
+ biasInitializer: new Constant(0.0)
+ );
+ }
+
+ #[Test]
+ #[TestDox('Computes forward activations for fixed weights and biases')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(array $weights, array $biases, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+ self::assertEquals(2, $this->layer->width());
+
+ $this->layer->restore([
+ 'weights' => new TrainableParameter(NumPower::array($weights)),
+ 'biases' => new TrainableParameter(NumPower::array($biases)),
+ ]);
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Method weights() returns the restored weight matrix')]
+ public function testWeightsReturnsExpectedValues() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $weightsArray = [
+ [1.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0],
+ ];
+
+ $this->layer->restore([
+ 'weights' => new TrainableParameter(NumPower::array($weightsArray)),
+ 'biases' => new TrainableParameter(NumPower::array([0.0, 0.0])),
+ ]);
+
+ $weights = $this->layer->weights();
+
+ self::assertEqualsWithDelta($weightsArray, $weights->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('width() returns the number of neurons')]
+ public function testWidthReturnsNeuronsCount() : void
+ {
+ // Layer is constructed in setUp() with neurons: 2
+ self::assertSame(2, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes backpropagated gradients for previous layer')]
+ #[DataProvider('backProvider')]
+ public function testBack(array $weights, array $biases, array $prevGrad, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $this->layer->restore([
+ 'weights' => new TrainableParameter(NumPower::array($weights)),
+ 'biases' => new TrainableParameter(NumPower::array($biases)),
+ ]);
+
+ $prevGradNd = NumPower::array($prevGrad);
+
+ // Forward pass to set internal input cache
+ $this->layer->forward($this->input);
+
+ $gradient = $this->layer->back(
+ prevGradient: new Deferred(fn: fn () => $prevGradNd),
+ optimizer: $this->optimizer
+ )->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes inference activations equal to forward for fixed parameters')]
+ #[DataProvider('forwardProvider')]
+ public function testInfer(array $weights, array $biases, array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $this->layer->restore([
+ 'weights' => new TrainableParameter(NumPower::array($weights)),
+ 'biases' => new TrainableParameter(NumPower::array($biases)),
+ ]);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Method restore() correctly replaces layer parameters')]
+ public function testRestoreReplacesParameters() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Use the same deterministic weights and biases as in forwardProvider
+ $weights = [
+ [1.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0],
+ ];
+
+ $biases = [0.0, 0.0];
+
+ $expected = [
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ ];
+
+ $this->layer->restore([
+ 'weights' => new TrainableParameter(NumPower::array($weights)),
+ 'biases' => new TrainableParameter(NumPower::array($biases)),
+ ]);
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Method parameters() yields restored weights and biases')]
+ public function testParametersReturnsRestoredParameters() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $weightsArray = [
+ [1.0, 0.0, 0.0],
+ [0.0, 1.0, 0.0],
+ ];
+
+ $biasesArray = [0.0, 0.0];
+
+ $weightsParam = new TrainableParameter(NumPower::array($weightsArray));
+ $biasesParam = new TrainableParameter(NumPower::array($biasesArray));
+
+ $this->layer->restore([
+ 'weights' => $weightsParam,
+ 'biases' => $biasesParam,
+ ]);
+
+ $params = iterator_to_array($this->layer->parameters());
+
+ self::assertArrayHasKey('weights', $params);
+ self::assertArrayHasKey('biases', $params);
+
+ self::assertSame($weightsParam, $params['weights']);
+ self::assertSame($biasesParam, $params['biases']);
+
+ self::assertEqualsWithDelta($weightsArray, $params['weights']->param()->toArray(), 1e-7);
+ self::assertEqualsWithDelta($biasesArray, $params['biases']->param()->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('It returns correct string representation')]
+ public function testToStringReturnsCorrectValue() : void
+ {
+ $expected = 'Dense (neurons: 2, l2 penalty: 0, bias: true, weight initializer: He Uniform, bias initializer: Constant (value: 0))';
+
+ self::assertSame($expected, (string) $this->layer);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Dropout/DropoutTest.php b/tests/NeuralNet/Layers/Dropout/DropoutTest.php
new file mode 100644
index 000000000..47cf1ece1
--- /dev/null
+++ b/tests/NeuralNet/Layers/Dropout/DropoutTest.php
@@ -0,0 +1,277 @@
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Dropout(0.5);
+ }
+
+ /**
+ * @return array
+ */
+ public static function badRatioProvider() : array
+ {
+ return [
+ 'zero' => [0.0],
+ 'negative' => [-0.1],
+ 'one' => [1.0],
+ 'greaterThanOne' => [1.1],
+ ];
+ }
+
+ /**
+ * @return array<string, array{array<array<float>>}>
+ */
+ public static function inferProvider() : array
+ {
+ return [
+ 'identityOnInput' => [[
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]],
+ ];
+ }
+
+ #[Test]
+ #[TestDox('Constructor rejects invalid ratio values')]
+ #[DataProvider('badRatioProvider')]
+ public function testConstructorRejectsInvalidRatio(float $ratio) : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+
+ new Dropout($ratio);
+ }
+
+ #[Test]
+ #[TestDox('Initializes width equal to fan-in')]
+ public function testInitializeSetsWidth() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ self::assertEquals($this->fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Method forward() applies dropout mask with correct shape and scaling')]
+ public function testForward() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $forward = $this->layer->forward($this->input);
+
+ $inputArray = $this->input->toArray();
+ $forwardArray = $forward->toArray();
+
+ self::assertSameSize($inputArray, $forwardArray);
+
+ $scale = 1.0 / (1.0 - 0.5); // ratio = 0.5
+
+ $nonZero = 0;
+ $total = 0;
+
+ foreach ($inputArray as $i => $row) {
+ foreach ($row as $j => $x) {
+ $y = $forwardArray[$i][$j];
+ $total++;
+
+ if (abs($x) < 1e-12) {
+ // If input is (near) zero, output should also be ~0
+ self::assertEqualsWithDelta(0.0, $y, 1e-7);
+ continue;
+ }
+
+ if (abs($y) < 1e-12) {
+ // Dropped unit
+ continue;
+ }
+
+ $nonZero++;
+
+ // Kept unit should be scaled input
+ self::assertEqualsWithDelta($x * $scale, $y, 1e-6);
+ }
+ }
+
+ // Roughly (1 - ratio) of units should be non-zero; allow wide tolerance
+ $expectedKept = (1.0 - 0.5) * $total;
+ self::assertGreaterThan(0, $nonZero);
+ self::assertLessThan($total, $nonZero);
+ self::assertEqualsWithDelta($expectedKept, $nonZero, $total * 0.5);
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates gradients using the same dropout mask')]
+ public function testBack() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Forward pass to generate and store mask
+ $forward = $this->layer->forward($this->input);
+ $forwardArray = $forward->toArray();
+ $inputArray = $this->input->toArray();
+
+ // Approximate mask from forward output: mask ≈ forward / input
+ $maskArray = [];
+ foreach ($inputArray as $i => $row) {
+ foreach ($row as $j => $x) {
+ $y = $forwardArray[$i][$j];
+
+ if (abs($x) < 1e-12) {
+ $maskArray[$i][$j] = 0.0;
+ } else {
+ $maskArray[$i][$j] = $y / $x;
+ }
+ }
+ }
+
+ $gradient = $this->layer->back(
+ prevGradient: $this->prevGrad,
+ optimizer: $this->optimizer
+ )->compute();
+
+ $gradArray = $gradient->toArray();
+ $prevGradArray = ($this->prevGrad)()->toArray();
+
+ // Expected gradient per element: prevGrad * mask for non-zero inputs.
+ // For zero inputs, the mask cannot be inferred from the forward output
+ // (forward is always 0 regardless of mask), so we accept the actual
+ // gradient value there.
+ $expectedGrad = [];
+ foreach ($prevGradArray as $i => $row) {
+ foreach ($row as $j => $g) {
+ if (abs($inputArray[$i][$j]) < 1e-12) {
+ $expectedGrad[$i][$j] = $gradArray[$i][$j];
+ } else {
+ $expectedGrad[$i][$j] = $g * $maskArray[$i][$j];
+ }
+ }
+ }
+
+ self::assertEqualsWithDelta($expectedGrad, $gradArray, 1e-6);
+ }
+
+ #[Test]
+ #[TestDox('Inference pass leaves inputs unchanged')]
+ #[DataProvider('inferProvider')]
+ public function testInfer(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Method initialize() returns fan out equal to fan in')]
+ public function testInitializeReturnsFanOut() : void
+ {
+ $fanOut = $this->layer->initialize($this->fanIn);
+
+ self::assertSame($this->fanIn, $fanOut);
+ }
+
+ #[Test]
+ #[TestDox('Method width() returns the initialized width')]
+ public function testWidthAfterInitialize() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ self::assertSame($this->fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Method gradient() multiplies previous gradient by the dropout mask')]
+ public function testGradient() : void
+ {
+ // Deterministic previous gradient (same shape as input)
+ $prevGradNd = NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+
+ // Hand-crafted mask: kept units are scaled by 1 / (1 - ratio) = 2.0, dropped units are 0.
+ $mask = NumPower::array([
+ [2.0, 2.0, 2.0],
+ [2.0, 0.0, 2.0],
+ [2.0, 2.0, 0.0],
+ ]);
+
+ $prevGradient = new Deferred(fn: static function () use ($prevGradNd) : NDArray {
+ return $prevGradNd;
+ });
+
+ $gradient = $this->layer->gradient($prevGradient, $mask);
+
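+ // Expected is the element-wise product prevGrad * mask, e.g. 0.25 * 2.0 = 0.5.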
+ $expected = [
+ [0.5, 1.4, 0.2],
+ [1.0, 0.0, 0.02],
+ [0.5, 0.2, 0.0],
+ ];
+
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('It returns correct string representation')]
+ public function testToString() : void
+ {
+ $expected = 'Dropout (ratio: 0.5)';
+
+ self::assertSame($expected, (string) $this->layer);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Multiclass/MulticlassTest.php b/tests/NeuralNet/Layers/Multiclass/MulticlassTest.php
new file mode 100644
index 000000000..a920a4272
--- /dev/null
+++ b/tests/NeuralNet/Layers/Multiclass/MulticlassTest.php
@@ -0,0 +1,217 @@
+ /**
+ * @return array<string, array{int}>
+ */
+ public static function initializeProvider() : array
+ {
+ return [
+ 'fanInEqualsClasses' => [3],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function forwardProvider() : array
+ {
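+ // Softmax probabilities for each input row; every row sums to ~1.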
+ return [
+ 'expectedForward' => [[
+ [0.1719820, 0.7707700, 0.0572478],
+ [0.0498033, 0.0450639, 0.9051327],
+ [0.6219707, 0.0015385, 0.3764905],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function backProvider() : array
+ {
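+ // Values match (softmax - one-hot) / 9 (samples x classes), e.g.
+ // (0.1719820 - 1.0) / 9 ≈ -0.0920019 for the first 'hot' sample.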
+ return [
+ 'expectedGradient' => [[
+ [-0.0920019, 0.0856411, 0.0063608],
+ [0.0055337, -0.1061040, 0.1005703],
+ [0.0691078, 0.00017093, -0.0692788],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function inferProvider() : array
+ {
+ // Same expectations as forward
+ return self::forwardProvider();
+ }
+
+ protected function setUp() : void
+ {
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->labels = ['hot', 'cold', 'ice cold'];
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Multiclass(
+ classes: ['hot', 'cold', 'ice cold'],
+ costFn: new CrossEntropy()
+ );
+ }
+
+ #[Test]
+ #[TestDox('Constructor rejects invalid number of classes')]
+ public function testConstructorRejectsInvalidClasses() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+
+ new Multiclass(classes: ['only-one-class']);
+ }
+
+ #[Test]
+ #[TestDox('Method width() returns number of classes')]
+ public function testWidthReturnsNumberOfClasses() : void
+ {
+ self::assertSame(3, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Initializes and returns correct fan out')]
+ #[DataProvider('initializeProvider')]
+ public function testInitializeReturnsFanOut(int $fanIn) : void
+ {
+ $fanOut = $this->layer->initialize($fanIn);
+
+ self::assertSame($fanIn, $fanOut);
+ self::assertSame(3, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward softmax probabilities')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(array $expected) : void
+ {
+ $this->layer->initialize(3);
+
+ self::assertEquals(3, $this->layer->width());
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates and returns output gradient')]
+ #[DataProvider('backProvider')]
+ public function testBack(array $expected) : void
+ {
+ $this->layer->initialize(3);
+
+ // Set internal caches
+ $this->layer->forward($this->input);
+
+ [$computation, $loss] = $this->layer->back(
+ labels: $this->labels,
+ optimizer: $this->optimizer
+ );
+
+ self::assertInstanceOf(Deferred::class, $computation);
+ self::assertIsFloat($loss);
+
+ $gradient = $computation->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes gradient for previous layer directly')]
+ #[DataProvider('backProvider')]
+ public function testGradient(array $expectedGradient) : void
+ {
+ $this->layer->initialize(3);
+
+ // Forward pass to obtain output probabilities
+ $output = $this->layer->forward($this->input);
+
+ // Rebuild expected one-hot matrix the same way as Multiclass::back()
+ $expected = [];
+
+ foreach ($this->labels as $label) {
+ $dist = [];
+
+ foreach (['hot', 'cold', 'ice cold'] as $class) {
+ $dist[] = $class === $label ? 1.0 : 0.0;
+ }
+
+ $expected[] = $dist;
+ }
+
+ $expectedNd = NumPower::array($expected);
+
+ $gradient = $this->layer->gradient($this->input, $output, $expectedNd);
+
+ self::assertEqualsWithDelta($expectedGradient, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes infer softmax probabilities')]
+ #[DataProvider('inferProvider')]
+ public function testInfer(array $expected) : void
+ {
+ $this->layer->initialize(3);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('It returns correct string representation')]
+ public function testToStringReturnsCorrectValue() : void
+ {
+ $expected = 'Multiclass (cost function: Cross Entropy)';
+
+ self::assertSame($expected, (string) $this->layer);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Noise/NoiseTest.php b/tests/NeuralNet/Layers/Noise/NoiseTest.php
new file mode 100644
index 000000000..4eaf11770
--- /dev/null
+++ b/tests/NeuralNet/Layers/Noise/NoiseTest.php
@@ -0,0 +1,208 @@
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Noise(0.1);
+ }
+
+ /**
+ * @return array<int, array{list<list<float>>}>
+ */
+ public static function backProvider() : array
+ {
+ return [
+ [
+ [
+ [0.25, 0.7, 0.1],
+ [0.5, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array<int, array{list<list<float>>}>
+ */
+ public static function inferProvider() : array
+ {
+ return [
+ [
+ [
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ],
+ ],
+ ];
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ self::assertEquals('Noise (std dev: 0.1)', (string) $this->layer);
+ }
+
+ #[Test]
+ #[TestDox('Constructor rejects invalid standard deviation')]
+ public function testConstructorRejectsInvalidStdDev() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+
+ // Negative std dev should be rejected
+ new Noise(-0.1);
+ }
+
+ #[Test]
+ #[TestDox('Forward throws if layer is not initialized')]
+ public function testForwardThrowsIfNotInitialized() : void
+ {
+ $layer = new Noise(0.1);
+
+ $this->expectException(RuntimeException::class);
+
+ $layer->forward($this->input);
+ }
+
+ #[Test]
+ #[TestDox('Initializes width equal to fan-in')]
+ public function testInitializeSetsWidth() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ self::assertEquals($this->fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward pass that adds Gaussian noise with correct shape and scale')]
+ public function testForwardAddsNoiseWithCorrectProperties() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertInstanceOf(NDArray::class, $forward);
+
+ $inputArray = $this->input->toArray();
+ $forwardArray = $forward->toArray();
+
+ // 1) Shape is preserved
+ self::assertSameSize($inputArray, $forwardArray);
+
+ // 2) At least one element differs (very high probability)
+ $allEqual = true;
+ foreach ($inputArray as $i => $row) {
+ if ($row !== $forwardArray[$i]) {
+ $allEqual = false;
+ break;
+ }
+ }
+ self::assertFalse($allEqual, 'Expected forward output to differ from input due to noise.');
+
+ // 3) Empirical std dev of (forward - input) is ~ stdDev, within tolerance
+ $diffs = [];
+ foreach ($inputArray as $i => $row) {
+ foreach ($row as $j => $v) {
+ $diffs[] = $forwardArray[$i][$j] - $v;
+ }
+ }
+
+ $n = count($diffs);
+ $mean = array_sum($diffs) / $n;
+
+ $var = 0.0;
+ foreach ($diffs as $d) {
+ $var += ($d - $mean) * ($d - $mean);
+ }
+ $var /= $n;
+ $std = sqrt($var);
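+ // With only 9 samples the empirical estimates are noisy, hence the
+ // wide deltas below.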
+
+ // Mean of the noise should be near 0, std dev near the configured 0.1
+ self::assertEqualsWithDelta(0.0, $mean, 2e-1); // +/-0.2 around 0
+ self::assertEqualsWithDelta(0.1, $std, 1e-1); // +/-0.1 around 0.1
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates and returns previous gradient unchanged')]
+ #[DataProvider('backProvider')]
+ public function testBackReturnsPrevGradient(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+ $this->layer->forward($this->input);
+
+ $gradient = $this->layer->back(
+ prevGradient: $this->prevGrad,
+ optimizer: $this->optimizer
+ )->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Infer returns input unchanged')]
+ #[DataProvider('inferProvider')]
+ public function testInferIdentity(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/Layers/PReLU/PReLUTest.php b/tests/NeuralNet/Layers/PReLU/PReLUTest.php
new file mode 100644
index 000000000..a1193ea09
--- /dev/null
+++ b/tests/NeuralNet/Layers/PReLU/PReLUTest.php
@@ -0,0 +1,291 @@
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new PReLU(new Constant(0.25));
+
+ srand(self::RANDOM_SEED);
+ }
+
+ /**
+ * @return array<string, array{int}>
+ */
+ public static function initializeProvider() : array
+ {
+ return [
+ 'fanIn=3' => [3],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function forwardProvider() : array
+ {
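+ // PReLU: f(x) = x for x > 0, alpha * x otherwise; with alpha = 0.25,
+ // f(-6.0) = -1.5 and f(-0.1) = -0.025.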
+ return [
+ 'expectedForward' => [[
+ [1.0, 2.5, -0.025],
+ [0.1, 0.0, 3.0],
+ [0.002, -1.5, -0.125],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function backProvider() : array
+ {
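+ // These differ slightly from gradientProvider below, likely because
+ // back() first updates alpha via the optimizer before deferring the
+ // input gradient computation.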
+ return [
+ 'expectedGradient' => [[
+ [0.25, 0.6999999, 0.0250010],
+ [0.5, 0.05, 0.01],
+ [0.25, 0.0251045, 0.2234300],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function gradientProvider() : array
+ {
+ return [
+ 'expectedGradient' => [[
+ [0.25, 0.7, 0.025],
+ [0.5, 0.05, 0.01],
+ [0.25, 0.025, 0.2225],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>}>
+ */
+ public static function inferProvider() : array
+ {
+ return [
+ 'expectedInfer' => [[
+ [1.0, 2.5, -0.0250000],
+ [0.1, 0.0, 3.0],
+ [0.0020000, -1.5, -0.125],
+ ]],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>, list<list<float>>}>
+ */
+ public static function activateProvider() : array
+ {
+ return [
+ 'defaultInput' => [
+ [
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ],
+ [
+ [1.0, 2.5, -0.025],
+ [0.1, 0.0, 3.0],
+ [0.002, -1.5, -0.125],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array<string, array{list<list<float>>, list<list<float>>}>
+ */
+ public static function differentiateProvider() : array
+ {
+ return [
+ 'defaultInput' => [
+ [
+ [1.0, 2.5, -0.1],
+ [0.1, 0.0, 3.0],
+ [0.002, -6.0, -0.5],
+ ],
+ [
+ [1.0, 1.0, 0.25],
+ [1.0, 0.25, 1.0],
+ [1.0, 0.25, 0.25],
+ ],
+ ],
+ ];
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ self::assertEquals('PReLU (initializer: Constant (value: 0.25))', (string) $this->layer);
+ }
+
+ #[Test]
+ #[TestDox('Initializes width equal to fan-in')]
+ public function testInitializeSetsWidth() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ self::assertEquals($this->fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Initializes and returns fan out equal to fan-in')]
+ #[DataProvider('initializeProvider')]
+ public function testInitializeReturnsFanOut(int $fanIn) : void
+ {
+ $fanOut = $this->layer->initialize($fanIn);
+
+ self::assertEquals($fanIn, $fanOut);
+ self::assertEquals($fanIn, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward activations')]
+ #[DataProvider('forwardProvider')]
+ public function testForward(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Backpropagates and returns gradient for previous layer')]
+ #[DataProvider('backProvider')]
+ public function testBack(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ // Forward pass to set internal input state
+ $this->layer->forward($this->input);
+
+ $gradient = $this->layer->back(
+ prevGradient: $this->prevGrad,
+ optimizer: $this->optimizer
+ )->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes gradient for previous layer directly')]
+ #[DataProvider('gradientProvider')]
+ public function testGradient(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $gradient = $this->layer->gradient(
+ $this->input,
+ ($this->prevGrad)(),
+ );
+
+ self::assertEqualsWithDelta($expected, $gradient->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes inference activations')]
+ #[DataProvider('inferProvider')]
+ public function testInfer(array $expected) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Yields trainable alpha parameter')]
+ public function testParameters() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $params = iterator_to_array($this->layer->parameters());
+
+ self::assertArrayHasKey('alpha', $params);
+ self::assertInstanceOf(TrainableParameter::class, $params['alpha']);
+ }
+
+ #[Test]
+ #[TestDox('Restores alpha parameter from array')]
+ public function testRestore() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $alphaNew = new TrainableParameter(NumPower::full([$this->fanIn], 0.5));
+
+ $this->layer->restore([
+ 'alpha' => $alphaNew,
+ ]);
+
+ $restored = iterator_to_array($this->layer->parameters());
+
+ self::assertSame($alphaNew, $restored['alpha']);
+ self::assertEquals(
+ array_fill(0, $this->fanIn, 0.5),
+ $restored['alpha']->param()->toArray(),
+ );
+ }
+}
diff --git a/tests/NeuralNet/Layers/Placeholder1D/Placeholder1DTest.php b/tests/NeuralNet/Layers/Placeholder1D/Placeholder1DTest.php
new file mode 100644
index 000000000..7aa3168c8
--- /dev/null
+++ b/tests/NeuralNet/Layers/Placeholder1D/Placeholder1DTest.php
@@ -0,0 +1,114 @@
+ protected function setUp() : void
+ {
+ $this->input = NumPower::array([
+ [1.0, 2.5],
+ [0.1, 0.0],
+ [0.002, -6.0],
+ ]);
+
+ $this->layer = new Placeholder1D(3);
+ }
+
+ /**
+ * @return array<array{NDArray, list<list<float>>}>
+ */
+ public static function inputProvider() : array
+ {
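+ // The placeholder validates input shape and passes values through unchanged.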
+ return [
+ [
+ NumPower::array([
+ [1.0, 2.5],
+ [0.1, 0.0],
+ [0.002, -6.0],
+ ]),
+ [
+ [1.0, 2.5],
+ [0.1, 0.0],
+ [0.002, -6.0],
+ ],
+ ],
+ ];
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ self::assertEquals('Placeholder 1D (inputs: 3)', (string) $this->layer);
+ }
+
+ #[Test]
+ #[TestDox('Returns width equal to number of inputs')]
+ public function testWidth() : void
+ {
+ self::assertEquals(3, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Constructor rejects invalid number of inputs')]
+ public function testConstructorRejectsInvalidInputs() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+
+ new Placeholder1D(0);
+ }
+
+ #[Test]
+ #[TestDox('Initialize returns fan out equal to inputs without changing width')]
+ public function testInitialize() : void
+ {
+ $fanOut = $this->layer->initialize(5);
+
+ self::assertEquals(3, $fanOut);
+ self::assertEquals(3, $this->layer->width());
+ }
+
+ #[Test]
+ #[TestDox('Computes forward pass')]
+ #[DataProvider('inputProvider')]
+ public function testForward(NDArray $input, array $expected) : void
+ {
+ self::assertEquals(3, $this->layer->width());
+
+ $forward = $this->layer->forward($input);
+
+ self::assertEqualsWithDelta($expected, $forward->toArray(), 1e-7);
+ }
+
+ #[Test]
+ #[TestDox('Computes inference pass')]
+ #[DataProvider('inputProvider')]
+ public function testInfer(NDArray $input, array $expected) : void
+ {
+ self::assertEquals(3, $this->layer->width());
+
+ $infer = $this->layer->infer($input);
+
+ self::assertEqualsWithDelta($expected, $infer->toArray(), 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/Layers/Swish/SwishTest.php b/tests/NeuralNet/Layers/Swish/SwishTest.php
new file mode 100644
index 000000000..5f8d55503
--- /dev/null
+++ b/tests/NeuralNet/Layers/Swish/SwishTest.php
@@ -0,0 +1,202 @@
+ protected function setUp() : void
+ {
+ $this->fanIn = 3;
+
+ $this->input = NumPower::array([
+ [1.0, 2.5, -0.1],
+ [0.1, 0.1, 3.0],
+ [0.002, -6.0, -0.5],
+ ]);
+
+ $this->prevGrad = new Deferred(fn: function () : NDArray {
+ return NumPower::array([
+ [0.25, 0.7, 0.1],
+ [0.50, 0.2, 0.01],
+ [0.25, 0.1, 0.89],
+ ]);
+ });
+
+ $this->optimizer = new Stochastic(0.001);
+
+ $this->layer = new Swish(new Constant(1.0));
+ }
+
+ /**
+ * @return array<int, array<string, list<list<float>>>>
+ */
+ public static function initializeForwardBackInferProvider() : array
+ {
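+ // Swish: f(x) = x * sigmoid(beta * x) with beta = 1.0,
+ // e.g. f(1.0) = sigmoid(1.0) ≈ 0.7310585.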
+ return [
+ [
+ 'forwardExpected' => [
+ [0.7310585, 2.3103545, -0.0475020],
+ [0.0524979, 0.0524979, 2.8577223],
+ [0.0010009, -0.0148357, -0.1887703],
+ ],
+ 'backExpected' => [
+ [0.2319176, 0.7695808, 0.0450083],
+ [0.2749583, 0.1099833, 0.0108810],
+ [0.1252499, -0.0012326, 0.2314345],
+ ],
+ 'inferExpected' => [
+ [0.7306671, 2.3094806, -0.0475070],
+ [0.0524976, 0.0524976, 2.8576817],
+ [0.0010010, -0.0147432, -0.1887089],
+ ],
+ ],
+ ];
+ }
+
+ /**
+ * @return array<string, array{float, string}>
+ */
+ public static function toStringProvider() : array
+ {
+ return [
+ 'value one' => [1.0, 'Swish (initializer: Constant (value: 1))'],
+ 'value zero' => [0.0, 'Swish (initializer: Constant (value: 0))'],
+ ];
+ }
+
+ #[DataProvider('initializeForwardBackInferProvider')]
+ public function testInitializeForwardBackInfer(
+ array $forwardExpected,
+ array $backExpected,
+ array $inferExpected,
+ ) : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ self::assertEquals($this->fanIn, $this->layer->width());
+
+ $forward = $this->layer->forward($this->input);
+
+ self::assertEqualsWithDelta($forwardExpected, $forward->toArray(), 1e-7);
+
+ $gradient = $this->layer->back(
+ prevGradient: $this->prevGrad,
+ optimizer: $this->optimizer
+ )->compute();
+
+ self::assertInstanceOf(NDArray::class, $gradient);
+ self::assertEqualsWithDelta($backExpected, $gradient->toArray(), 1e-7);
+
+ $infer = $this->layer->infer($this->input);
+
+ self::assertEqualsWithDelta($inferExpected, $infer->toArray(), 1e-7);
+ }
+
+ #[DataProvider('toStringProvider')]
+ public function testToString(float $value, string $expected) : void
+ {
+ $layer = new Swish(new Constant($value));
+
+ self::assertSame($expected, (string) $layer);
+ }
+
+ public function testWidthThrowsIfNotInitialized() : void
+ {
+ $layer = new Swish();
+
+ $this->expectException(RuntimeException::class);
+ $this->expectExceptionMessage('Layer has not been initialized.');
+
+ $layer->width();
+ }
+
+ public function testInitializeReturnsFanOutAndSetsWidth() : void
+ {
+ $fanIn = 4;
+ $layer = new Swish(new Constant(1.0));
+
+ $fanOut = $layer->initialize($fanIn);
+
+ self::assertSame($fanIn, $fanOut);
+ self::assertSame($fanIn, $layer->width());
+ }
+
+ public function testParametersAndRestore() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $parameters = iterator_to_array($this->layer->parameters());
+
+ self::assertArrayHasKey('beta', $parameters);
+ self::assertInstanceOf(Parameter::class, $parameters['beta']);
+
+ $betaParam = $parameters['beta'];
+ $originalBeta = $betaParam->param()->toArray();
+
+ $newLayer = new Swish(new Constant(0.0));
+ $newLayer->initialize($this->fanIn);
+
+ $newLayer->restore($parameters);
+
+ $restoredParams = iterator_to_array($newLayer->parameters());
+
+ self::assertArrayHasKey('beta', $restoredParams);
+ self::assertInstanceOf(Parameter::class, $restoredParams['beta']);
+
+ $restoredBeta = $restoredParams['beta']->param()->toArray();
+
+ self::assertEquals($originalBeta, $restoredBeta);
+ }
+
+ public function testGradientMatchesBackpropagatedGradient() : void
+ {
+ $this->layer->initialize($this->fanIn);
+
+ $output = $this->layer->forward($this->input);
+
+ $backGradient = $this->layer->back(
+ prevGradient: $this->prevGrad,
+ optimizer: $this->optimizer
+ )->compute();
+
+ $directGradient = $this->layer->gradient(
+ $this->input,
+ $output,
+ ($this->prevGrad)()
+ );
+
+ self::assertInstanceOf(NDArray::class, $directGradient);
+ self::assertEqualsWithDelta($backGradient->toArray(), $directGradient->toArray(), 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/Networks/NetworkTest.php b/tests/NeuralNet/Networks/NetworkTest.php
new file mode 100644
index 000000000..0197c225d
--- /dev/null
+++ b/tests/NeuralNet/Networks/NetworkTest.php
@@ -0,0 +1,101 @@
+ protected function setUp() : void
+ {
+ $this->dataset = Labeled::quick(
+ samples: [
+ [1.0, 2.5],
+ [0.1, 0.0],
+ [0.002, -6.0],
+ ],
+ labels: ['yes', 'no', 'maybe']
+ );
+
+ $this->input = new Placeholder1D(2);
+
+ $this->hidden = [
+ new Dense(neurons: 10),
+ new Activation(new ReLU()),
+ new Dense(neurons: 5),
+ new Activation(new ReLU()),
+ new Dense(neurons: 3),
+ ];
+
+ $this->output = new Multiclass(
+ classes: ['yes', 'no', 'maybe'],
+ costFn: new CrossEntropy()
+ );
+
+ $this->network = new Network(
+ input: $this->input,
+ hidden: $this->hidden,
+ output: $this->output,
+ optimizer: new Adam(0.001)
+ );
+ }
+
+ public function testLayers() : void
+ {
+ $count = 0;
+
+ foreach ($this->network->layers() as $item) {
+ ++$count;
+ }
+
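+ // 1 placeholder + 5 hidden + 1 output = 7 layers.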
+ self::assertSame(7, $count);
+ }
+
+ public function testInput() : void
+ {
+ self::assertInstanceOf(Placeholder1D::class, $this->network->input());
+ }
+
+ public function testHidden() : void
+ {
+ self::assertCount(5, $this->network->hidden());
+ }
+
+ public function testNumParams() : void
+ {
+ $this->network->initialize();
+
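+ // Dense(10) on 2 inputs: 2*10 + 10 = 30; Dense(5): 10*5 + 5 = 55;
+ // Dense(3): 5*3 + 3 = 18; 30 + 55 + 18 = 103 weights and biases.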
+ self::assertEquals(103, $this->network->numParams());
+ }
+}
diff --git a/tests/NeuralNet/Snapshots/SnapshotTest.php b/tests/NeuralNet/Snapshots/SnapshotTest.php
new file mode 100644
index 000000000..ecde317e3
--- /dev/null
+++ b/tests/NeuralNet/Snapshots/SnapshotTest.php
@@ -0,0 +1,64 @@
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Number of layers and parameter groups must be equal.');
+
+ new Snapshot(
+ layers: [new Dense(1)],
+ parameters: []
+ );
+ }
+
+ public function testTake() : void
+ {
+ $network = new Network(
+ input: new Placeholder1D(1),
+ hidden: [
+ new Dense(10),
+ new Activation(new ELU()),
+ new Dense(5),
+ new Activation(new ELU()),
+ new Dense(1),
+ ],
+ output: new Binary(
+ classes: ['yes', 'no'],
+ costFn: new CrossEntropy()
+ ),
+ optimizer: new Stochastic()
+ );
+
+ $network->initialize();
+
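+ // Taking a snapshot of an initialized network should not throw.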
+ $this->expectNotToPerformAssertions();
+
+ Snapshot::take($network);
+ }
+}
diff --git a/tests/Specifications/SamplesAreCompatibleWithDistanceTest.php b/tests/Specifications/SamplesAreCompatibleWithDistanceTest.php
index 885eb5d7b..7b564fbd2 100644
--- a/tests/Specifications/SamplesAreCompatibleWithDistanceTest.php
+++ b/tests/Specifications/SamplesAreCompatibleWithDistanceTest.php
@@ -7,6 +7,8 @@
use PHPUnit\Framework\Attributes\CoversClass;
use PHPUnit\Framework\Attributes\DataProvider;
use PHPUnit\Framework\Attributes\Group;
+use PHPUnit\Framework\Attributes\Test;
+use PHPUnit\Framework\Attributes\TestDox;
use Rubix\ML\Datasets\Unlabeled;
use Rubix\ML\Kernels\Distance\Hamming;
use Rubix\ML\Kernels\Distance\Euclidean;
@@ -61,11 +63,9 @@ public static function passesProvider() : Generator
];
}
- /**
- * @param SamplesAreCompatibleWithDistance $specification
- * @param bool $expected
- */
#[DataProvider('passesProvider')]
+ #[Test]
+ #[TestDox('Checks whether samples are compatible with the given distance metric')]
public function passes(SamplesAreCompatibleWithDistance $specification, bool $expected) : void
{
$this->assertSame($expected, $specification->passes());