2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "ReservoirComputing"
uuid = "7c2d2b1e-3dd4-11ea-355a-8f6a8116e294"
authors = ["Francesco Martinuzzi"]
version = "0.10.9"
version = "0.10.10"

[deps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
2 changes: 2 additions & 0 deletions docs/src/api/inits.md
@@ -8,6 +8,8 @@
informed_init
minimal_init
chebyshev_mapping
logistic_mapping
modified_lm
```

## Reservoirs
3 changes: 2 additions & 1 deletion src/ReservoirComputing.jl
@@ -38,7 +38,8 @@ include("reca/reca_input_encodings.jl")
export NLADefault, NLAT1, NLAT2, NLAT3
export StandardStates, ExtendedStates, PaddedStates, PaddedExtendedStates
export StandardRidge
export scaled_rand, weighted_init, informed_init, minimal_init, chebyshev_mapping
export scaled_rand, weighted_init, informed_init, minimal_init, chebyshev_mapping,
logistic_mapping, modified_lm
export rand_sparse, delay_line, delay_line_backward, cycle_jumps,
simple_cycle, pseudo_svd, chaotic_init
export RNN, MRNN, GRU, GRUParams, FullyGated, Minimal
223 changes: 204 additions & 19 deletions src/esn/esn_inits.jl
@@ -308,7 +308,8 @@ Subsequent rows are generated by applying the mapping:
from WeightInitializers.
- `T`: Type of the elements in the reservoir matrix.
Default is `Float32`.
- `dims`: Dimensions of the matrix. Should follow `res_size x in_size`. Here, res_size is assumed to be K+1.
- `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.
`res_size` is assumed to be K+1.

# Keyword arguments

@@ -362,6 +363,189 @@ function chebyshev_mapping(rng::AbstractRNG, ::Type{T}, dims::Integer...;
return return_sparse ? sparse(input_matrix) : input_matrix
end

@doc raw"""
    logistic_mapping([rng], [T], dims...;
        amplitude=0.3, sine_divisor=5.9, logistic_parameter=3.7,
        return_sparse=false)

Generate an input weight matrix using the logistic mapping [^wang2022]. The first
row is initialized using a sine function:

```math
W(1, j) = \text{amplitude} \cdot \sin\left(\frac{j \pi}{\text{sine\_divisor} \cdot \text{in\_size}}\right)
```

for each input index `j`, with `in_size` being the number of columns provided in `dims`. Subsequent rows
are generated recursively using the logistic map recurrence:

```math
W(i + 1, j) = \text{logistic\_parameter} \cdot W(i, j) \cdot (1 - W(i, j))
```

# Arguments
- `rng`: Random number generator. Default is `Utils.default_rng()`
from WeightInitializers.
- `T`: Type of the elements in the reservoir matrix.
Default is `Float32`.
- `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.

# Keyword arguments
- `amplitude`: Scaling parameter used in the sine initialization of the
first row. Default is 0.3.
- `sine_divisor`: Divisor used to scale the sine argument in the first-row
  initialization. Default is 5.9.
- `logistic_parameter`: The parameter in the logistic mapping recurrence that
governs the dynamics. Default is 3.7.
- `return_sparse`: If `true`, returns the resulting matrix as a sparse matrix.
Default is `false`.

# Examples

```jldoctest
julia> logistic_mapping(8, 3)
8×3 Matrix{Float32}:
0.0529682 0.104272 0.1523
0.185602 0.345578 0.477687
0.559268 0.836769 0.923158
0.912003 0.50537 0.262468
0.296938 0.924893 0.716241
0.772434 0.257023 0.751987
0.650385 0.70656 0.69006
0.841322 0.767132 0.791346

```

[^wang2022]: Wang, Heshan, et al. "Echo state network with logistic
mapping and bias dropout for time series prediction."
Neurocomputing 489 (2022): 196-210.
"""
function logistic_mapping(rng::AbstractRNG, ::Type{T}, dims::Integer...;
amplitude::AbstractFloat=0.3, sine_divisor::AbstractFloat=5.9,
logistic_parameter::AbstractFloat=3.7,
return_sparse::Bool=false) where {T <: Number}
input_matrix = DeviceAgnostic.zeros(rng, T, dims...)
num_rows, num_columns = dims[1], dims[2]
for col in 1:num_columns
input_matrix[1, col] = amplitude * sin(col * pi / (sine_divisor * num_columns))
end
for row in 2:num_rows
for col in 1:num_columns
previous_value = input_matrix[row - 1, col]
input_matrix[row, col] = logistic_parameter * previous_value *
(1 - previous_value)
end
end

return return_sparse ? sparse(input_matrix) : input_matrix
end
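
A quick way to convince yourself of the construction (an aside, not part of the diff): every row after the first should satisfy the logistic recurrence column-wise. A minimal sketch, assuming only the exported initializer above and its default `logistic_parameter=3.7`:

```julia
using ReservoirComputing

# Each entry below row 1 is the logistic map applied to the entry above it.
W = logistic_mapping(8, 3)
r = 3.7f0  # default logistic_parameter, narrowed to the matrix eltype
for i in 2:size(W, 1), j in 1:size(W, 2)
    @assert W[i, j] ≈ r * W[i - 1, j] * (1 - W[i - 1, j])
end
```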

@doc raw"""
modified_lm([rng], [T], dims...;
factor, amplitude=0.3, sine_divisor=5.9, logistic_parameter=2.35,
return_sparse=true)

Generate an input weight matrix based on the logistic mapping [^viehweg2025]. The
matrix is built so that each input is transformed into a high-dimensional feature
space via a recursive logistic map. For each input, a chain of weights is generated
as follows:
- The first element of the chain is initialized using a sine function:

```math
W(1, j) = \text{amplitude} \cdot \sin\left(\frac{(j - 1) \pi}{\text{factor} \cdot n \cdot \text{sine\_divisor}}\right)
```
where `j` is the index corresponding to the input and `n` is the number of inputs.
Note the `j - 1` offset, which matches the implementation below: the first input's
chain is seeded at zero, so its block is empty in the sparse examples further down.

- Subsequent elements are recursively computed using the logistic mapping:

```math
W(i + 1, j) = \text{logistic\_parameter} \cdot W(i, j) \cdot (1 - W(i, j))
```

The resulting matrix has dimensions `(factor * in_size) x in_size`, where
`in_size` corresponds to the number of columns provided in `dims`.
If the provided number of rows does not match `factor * in_size`, the number of
rows is overridden and a warning is emitted.

# Arguments
- `rng`: Random number generator. Default is `Utils.default_rng()`
from WeightInitializers.
- `T`: Type of the elements in the reservoir matrix.
Default is `Float32`.
- `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.

# Keyword arguments
- `factor`: The number of logistic map iterations (chain length) per input,
determining the number of rows per input.
- `amplitude`: Scaling parameter A for the sine-based initialization of
the first element in each logistic chain. Default is 0.3.
- `sine_divisor`: Divisor B used to scale the sine argument in the
  first-element initialization. Default is 5.9.
- `logistic_parameter`: The parameter r in the logistic recurrence that governs
the chain dynamics. Default is 2.35.
- `return_sparse`: If `true`, returns the resulting matrix as a sparse matrix.
Default is `true`.

# Examples

```jldoctest
julia> modified_lm(20, 10; factor=2)
20×10 SparseArrays.SparseMatrixCSC{Float32, Int64} with 18 stored entries:
⎡⢠⠀⠀⠀⠀⎤
⎢⠀⢣⠀⠀⠀⎥
⎢⠀⠀⢣⠀⠀⎥
⎢⠀⠀⠀⢣⠀⎥
⎣⠀⠀⠀⠀⢣⎦

julia> modified_lm(12, 4; factor=3)
12×4 SparseArrays.SparseMatrixCSC{Float32, Int64} with 9 stored entries:
⋅ ⋅ ⋅ ⋅
⋅ ⋅ ⋅ ⋅
⋅ ⋅ ⋅ ⋅
⋅ 0.0133075 ⋅ ⋅
⋅ 0.0308564 ⋅ ⋅
⋅ 0.070275 ⋅ ⋅
⋅ ⋅ 0.0265887 ⋅
⋅ ⋅ 0.0608222 ⋅
⋅ ⋅ 0.134239 ⋅
⋅ ⋅ ⋅ 0.0398177
⋅ ⋅ ⋅ 0.0898457
⋅ ⋅ ⋅ 0.192168

```

[^viehweg2025]: Viehweg, Johannes, Constanze Poll, and Patrick Mäder.
"Deterministic Reservoir Computing for Chaotic Time Series Prediction."
arXiv preprint arXiv:2501.15615 (2025).
"""
function modified_lm(rng::AbstractRNG, ::Type{T}, dims::Integer...;
factor::Integer, amplitude::AbstractFloat=0.3,
sine_divisor::AbstractFloat=5.9, logistic_parameter::AbstractFloat=2.35,
return_sparse::Bool=true) where {T <: Number}
num_columns = dims[2]
expected_num_rows = factor * num_columns
if dims[1] != expected_num_rows
@warn """\n
Provided dims[1] ($(dims[1])) is not equal to factor*num_columns ($expected_num_rows).
Overriding number of rows to $expected_num_rows. \n
"""
end
output_matrix = DeviceAgnostic.zeros(rng, T, expected_num_rows, num_columns)
for col in 1:num_columns
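        # each input column owns a chain of `factor` consecutive rows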
base_row = (col - 1) * factor + 1
output_matrix[base_row, col] = amplitude * sin(((col - 1) * pi) /
(factor * num_columns * sine_divisor))
for j in 1:(factor - 1)
current_row = base_row + j
previous_value = output_matrix[current_row - 1, col]
output_matrix[current_row, col] = logistic_parameter * previous_value *
(1 - previous_value)
end
end

return return_sparse ? sparse(output_matrix) : output_matrix
end
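
The block layout above can be checked directly (again a sketch, not part of the changeset): each input column should populate only its own chain of `factor` rows.

```julia
# Densify the (default) sparse output to index freely.
W = Matrix(modified_lm(12, 4; factor=3))
for j in 1:4
    chain = ((j - 1) * 3 + 1):(j * 3)  # rows owned by input j
    @assert all(iszero, W[setdiff(1:12, chain), j])
end
```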

### reservoirs

"""
@@ -752,23 +936,6 @@ function get_sparsity(M, dim)
return size(M[M .!= 0], 1) / (dim * dim - size(M[M .!= 0], 1)) #nonzero/zero elements
end

function digital_chaotic_adjacency(rng::AbstractRNG, bit_precision::Integer;
extra_edge_probability::AbstractFloat=0.1)
matrix_order = 2^(2 * bit_precision)
adjacency_matrix = zeros(Int, matrix_order, matrix_order)
for row_index in 1:(matrix_order - 1)
adjacency_matrix[row_index, row_index + 1] = 1
end
adjacency_matrix[matrix_order, 1] = 1
for row_index in 1:matrix_order, column_index in 1:matrix_order
if row_index != column_index && rand(rng) < extra_edge_probability
adjacency_matrix[row_index, column_index] = 1
end
end

return adjacency_matrix
end

"""
chaotic_init([rng], [T], dims...;
extra_edge_probability=T(0.1), spectral_radius=one(T),
@@ -859,11 +1026,29 @@ function chaotic_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
return return_sparse_matrix ? sparse(reservoir_matrix) : reservoir_matrix
end

function digital_chaotic_adjacency(rng::AbstractRNG, bit_precision::Integer;
extra_edge_probability::AbstractFloat=0.1)
matrix_order = 2^(2 * bit_precision)
adjacency_matrix = DeviceAgnostic.zeros(rng, Int, matrix_order, matrix_order)
for row_index in 1:(matrix_order - 1)
adjacency_matrix[row_index, row_index + 1] = 1
end
adjacency_matrix[matrix_order, 1] = 1
for row_index in 1:matrix_order, column_index in 1:matrix_order
if row_index != column_index && rand(rng) < extra_edge_probability
adjacency_matrix[row_index, column_index] = 1
end
end

return adjacency_matrix
end
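
The relocated helper builds a directed ring of order `2^(2 * bit_precision)` and then adds extra edges at random. A hedged check of the ring backbone, assuming the internal (unexported) function is reached through the module:

```julia
using Random

# With the extra-edge probability forced to zero, only the ring remains.
A = ReservoirComputing.digital_chaotic_adjacency(Random.default_rng(), 2;
    extra_edge_probability=0.0)
@assert size(A) == (16, 16)                  # 2^(2 * 2) nodes
@assert all(A[i, i + 1] == 1 for i in 1:15)  # forward ring edges
@assert A[16, 1] == 1                        # wrap-around edge
@assert sum(A) == 16                         # no extra edges at p = 0
```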

### fallbacks
# fallbacks for initializers; eventually to remove once migrated to WeightInitializers.jl
for initializer in (:rand_sparse, :delay_line, :delay_line_backward, :cycle_jumps,
:simple_cycle, :pseudo_svd, :chaotic_init,
:scaled_rand, :weighted_init, :informed_init, :minimal_init, :chebyshev_mapping)
:scaled_rand, :weighted_init, :informed_init, :minimal_init, :chebyshev_mapping,
:logistic_mapping, :modified_lm)
@eval begin
function ($initializer)(dims::Integer...; kwargs...)
return $initializer(Utils.default_rng(), Float32, dims...; kwargs...)
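For one concrete initializer, the `@eval` loop above generates a fallback equivalent to the following (a hand-expanded illustration of the visible method, not additional source; any further generated methods sit in the collapsed portion of the hunk):

```julia
# Positional fallback: no rng or eltype given, so defaults are filled in.
function logistic_mapping(dims::Integer...; kwargs...)
    return logistic_mapping(Utils.default_rng(), Float32, dims...; kwargs...)
end
```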
4 changes: 3 additions & 1 deletion test/esn/test_inits.jl
@@ -32,7 +32,9 @@ input_inits = [
weighted_init,
minimal_init,
minimal_init(; sampling_type=:irrational),
chebyshev_mapping
chebyshev_mapping,
logistic_mapping,
modified_lm(; factor=4)
]

@testset "Reservoir Initializers" begin