1 change: 1 addition & 0 deletions docs/src/api/layers.md
@@ -8,6 +8,7 @@
Collect
StatefulLayer
DelayLayer
NonlinearFeaturesLayer
```

## Readout Layers
2 changes: 1 addition & 1 deletion src/ReservoirComputing.jl
@@ -44,7 +44,7 @@ include("extensions/reca.jl")

export ReservoirComputer
export ESNCell, StatefulLayer, LinearReadout, ReservoirChain, Collect, collectstates,
DelayLayer
DelayLayer, NonlinearFeaturesLayer
export SVMReadout
export Pad, Extend, NLAT1, NLAT2, NLAT3, PartialSquare, ExtendedSquare
export StandardRidge
80 changes: 80 additions & 0 deletions src/layers/basic.jl
@@ -337,3 +337,83 @@ function (dl::DelayLayer)(inp::AbstractVecOrMat, ps, st::NamedTuple)

return inp_with_delay, (history = history, clock = clock, rng = st.rng)
end

@doc raw"""
NonlinearFeaturesLayer(features...; include_input=true)

Layer that builds a feature vector by applying one or more user-defined
functions to a single input vector and concatenating the results. Intended to
be used as a `state_modifier` (for example, after a `DelayLayer`) to construct
NGRC/NVAR-style feature maps.

At each call, for an input vector `x`, the layer:

1. Optionally includes `x` itself (if `include_input=true`).
2. Applies each function in `features` to `x`.
3. Returns the vertical concatenation of all results.

## Arguments

- `features...`: One or more functions `f(x)` that map a vector to a vector.
Each function is called as `f(inp)` and must return an `AbstractVector`.

## Keyword arguments

- `include_input`: If `true` (default), the original input vector `inp` is
  included as the first block in the feature vector. If `false`, the output
  contains only the concatenation of `f(inp)` for each `f` in `features`.

## Inputs

- `inp :: AbstractVector`
  The current input vector, typically the output of a `DelayLayer` or a
  reservoir state.

## Returns

- `out :: AbstractVector`
  Concatenation of:
    - the original input `inp` (if `include_input=true`), and
    - the outputs of each function in `features` applied to `inp`.
- `st :: NamedTuple`
  The input state, returned unchanged (this layer is stateless).

## Parameters

- None. `NonlinearFeaturesLayer` has no trainable parameters.

## States

- None. `initialstates` returns an empty `NamedTuple`.
"""
@concrete struct NonlinearFeaturesLayer <: AbstractLuxLayer
features
include_input <: StaticBool
end

function NonlinearFeaturesLayer(features...; include_input::BoolType = True())
feats = features isa Tuple ? features : (features,)

return NonlinearFeaturesLayer(feats, static(include_input))
end

function initialparameters(rng::AbstractRNG, l::NonlinearFeaturesLayer)
return NamedTuple()
end

function initialstates(rng::AbstractRNG, l::NonlinearFeaturesLayer)
return NamedTuple()
end

function (nfl::NonlinearFeaturesLayer)(inp::AbstractVector, ps, st)
feature_vector = Any[]
if known(nfl.include_input) === true
push!(feature_vector, inp)
end
for func in nfl.features
nonlin_feat = func(inp)
push!(feature_vector, nonlin_feat)
end
out = vcat(feature_vector...)

return out, st
end
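
For orientation, a minimal usage sketch of the new layer as documented above; the feature functions and input values are illustrative, and `initialparameters`/`initialstates` are called directly, as the tests below do:

```julia
using Random

# Two illustrative elementwise feature maps.
sq = x -> x .^ 2
cb = x -> x .^ 3

nfl = NonlinearFeaturesLayer(sq, cb; include_input = true)

rng = Random.MersenneTwister(0)
ps = initialparameters(rng, nfl)   # NamedTuple(): no trainable parameters
st = initialstates(rng, nfl)       # NamedTuple(): stateless

x = Float32[1, 2, 3]
y, _ = nfl(x, ps, st)
# y == Float32[1, 2, 3, 1, 4, 9, 1, 8, 27]
# layout: [input block; squares; cubes]
```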
84 changes: 84 additions & 0 deletions test/layers/test_basic.jl
@@ -248,3 +248,87 @@ end
@test_throws AssertionError dl(badx, ps, st)
end
end

@testset "NonlinearFeaturesLayer" begin
rng = MersenneTwister(123)

@testset "constructor & flags" begin
sq = x -> x .^ 2
cb = x -> x .^ 3

nfl = NonlinearFeaturesLayer(sq, cb; include_input = true)
@test length(nfl.features) == 2
ii = ReservoirComputing.known(nfl.include_input)
@test ii === true

nfl2 = NonlinearFeaturesLayer(sq; include_input = false)
@test length(nfl2.features) == 1
ii2 = ReservoirComputing.known(nfl2.include_input)
@test ii2 === false

nfl_id = NonlinearFeaturesLayer(; include_input = true)
@test length(nfl_id.features) == 0
ii_id = ReservoirComputing.known(nfl_id.include_input)
@test ii_id === true
end

@testset "initialparameters / initialstates" begin
sq = x -> x .^ 2
nfl = NonlinearFeaturesLayer(sq; include_input = true)

ps = initialparameters(rng, nfl)
st = initialstates(rng, nfl)

@test ps == NamedTuple()
@test st == NamedTuple()
end

@testset "forward: include_input=true" begin
sq = x -> x .^ 2
cb = x -> x .^ 3
nfl = NonlinearFeaturesLayer(sq, cb; include_input = true)

ps = initialparameters(rng, nfl)
st = initialstates(rng, nfl)

x = Float32[1, 2, 3]

y, st2 = nfl(x, ps, st)

@test length(y) == 3 * 3
@test y[1:3] == x
@test y[4:6] == x .^ 2
@test y[7:9] == x .^ 3

@test st2 === st
end

@testset "forward: include_input=false" begin
sq = x -> x .^ 2
cb = x -> x .^ 3
nfl = NonlinearFeaturesLayer(sq, cb; include_input = false)

ps = initialparameters(rng, nfl)
st = initialstates(rng, nfl)

x = Float32[1, 2, 3]

y, _ = nfl(x, ps, st)

@test length(y) == 2 * 3
@test y[1:3] == x .^ 2
@test y[4:6] == x .^ 3
end

@testset "identity case: no features, include_input=true" begin
nfl = NonlinearFeaturesLayer(; include_input = true)
ps = initialparameters(rng, nfl)
st = initialstates(rng, nfl)

x = rand(rng, Float32, 4)

y, _ = nfl(x, ps, st)

@test y == x
end
end
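
As a further sketch of the NGRC/NVAR-style use mentioned in the docstring: here the delay embedding is assembled by hand rather than with a `DelayLayer` (whose constructor is not part of this diff), and the quadratic-monomial feature function is an illustrative assumption, not an API provided by the package.

```julia
# NVAR-style quadratic monomials: all unique products x[i] * x[j] with i <= j.
quad = x -> [x[i] * x[j] for i in 1:length(x) for j in i:length(x)]

nfl = NonlinearFeaturesLayer(quad; include_input = true)
ps, st = NamedTuple(), NamedTuple()   # the layer has no parameters or states

# Hand-rolled delay embedding of a scalar series u with two lags;
# in the intended setup a DelayLayer would supply this vector.
u = Float32[0.1, 0.2, 0.4, 0.8]
k = 3
x = [u[k], u[k - 1], u[k - 2]]   # [u_t, u_{t-1}, u_{t-2}]

features, _ = nfl(x, ps, st)
# features = [linear block (3 entries); quadratic block (6 entries)],
# i.e. a 9-dimensional NGRC feature vector for a linear/ridge readout.
```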