Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/src/api/inits.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
```@docs
scaled_rand
weighted_init
weighted_minimal
informed_init
minimal_init
chebyshev_mapping
Expand Down
2 changes: 1 addition & 1 deletion src/ReservoirComputing.jl
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ export NLADefault, NLAT1, NLAT2, NLAT3, PartialSquare, ExtendedSquare
export StandardStates, ExtendedStates, PaddedStates, PaddedExtendedStates
export StandardRidge
export scaled_rand, weighted_init, informed_init, minimal_init, chebyshev_mapping,
logistic_mapping, modified_lm
logistic_mapping, modified_lm, weighted_minimal
export rand_sparse, delay_line, delay_line_backward, cycle_jumps,
simple_cycle, pseudo_svd, chaotic_init, low_connectivity, double_cycle,
selfloop_cycle, selfloop_feedback_cycle, selfloop_delayline_backward,
Expand Down
126 changes: 121 additions & 5 deletions src/esn/esn_inits.jl
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,120 @@ function weighted_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
return return_init_as(Val(return_sparse), layer_matrix)
end

"""
weighted_minimal([rng], [T], dims...;
weight=0.1, return_sparse=false,
sampling_type=:no_sample)

Create and return a minimal weighted input layer matrix.
This initializer generates a weighted input matrix with equal, deterministic
elements in the same construction as [`weighted_init`](@ref),
inspired by [^lu2017].

Please note that this initializer computes its own reservoir size! If
the computed reservoir size is different than the provided one it will raise a
warning.

# Arguments

- `rng`: Random number generator. Default is `Utils.default_rng()`
from WeightInitializers.
- `T`: Type of the elements in the reservoir matrix.
Default is `Float32`.
- `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.

# Keyword arguments

- `weight`: The value for all the weights in the input matrix.
Defaults to `0.1`.
- `return_sparse`: flag for returning a `sparse` matrix.
Default is `false`.
- `sampling_type`: Sampling that decides the distribution of `weight` negative numbers.
If set to `:no_sample` the sign is unchanged. If set to `:bernoulli_sample!` then each
`weight` can be positive with a probability set by `positive_prob`. If set to
`:irrational_sample!` the `weight` is negative if the decimal number of the
irrational number chosen is odd. Default is `:no_sample`.
- `positive_prob`: probability of the `weight` being positive when `sampling_type` is
set to `:bernoulli_sample!`. Default is 0.5.
- `irrational`: Irrational number whose decimals decide the sign of `weight`.
Default is `pi`.
- `start`: Which place after the decimal point the counting starts for the `irrational`
sign counting. Default is 1.

# Examples

```jldoctest
julia> res_input = weighted_minimal(8, 3)
┌ Warning: Reservoir size has changed!
│ Computed reservoir size (6) does not equal the provided reservoir size (8).
│ Using computed value (6). Make sure to modify the reservoir initializer accordingly.
└ @ ReservoirComputing ~/.julia/dev/ReservoirComputing/src/esn/esn_inits.jl:159
6×3 Matrix{Float32}:
0.1 0.0 0.0
0.1 0.0 0.0
0.0 0.1 0.0
0.0 0.1 0.0
0.0 0.0 0.1
0.0 0.0 0.1

julia> res_input = weighted_minimal(9, 3; weight=0.99)
9×3 Matrix{Float32}:
0.99 0.0 0.0
0.99 0.0 0.0
0.99 0.0 0.0
0.0 0.99 0.0
0.0 0.99 0.0
0.0 0.99 0.0
0.0 0.0 0.99
0.0 0.0 0.99
0.0 0.0 0.99

julia> res_input = weighted_minimal(9, 3; sampling_type=:bernoulli_sample!)
9×3 Matrix{Float32}:
0.1 -0.0 -0.0
-0.1 -0.0 -0.0
0.1 -0.0 0.0
-0.0 0.1 0.0
0.0 0.1 -0.0
0.0 0.1 0.0
-0.0 -0.0 -0.1
-0.0 -0.0 0.1
0.0 -0.0 0.1
```

[^lu2017]: Lu, Zhixin, et al.
"Reservoir observers: Model-free inference of unmeasured variables in
chaotic systems."
Chaos: An Interdisciplinary Journal of Nonlinear Science 27.4 (2017): 041102.
"""
function weighted_minimal(rng::AbstractRNG, ::Type{T}, dims::Integer...;
        weight::Number=T(0.1), return_sparse::Bool=false,
        sampling_type=:no_sample, kwargs...) where {T <: Number}
    throw_sparse_error(return_sparse)
    approx_res_size, in_size = dims
    # Integer division: each input dimension drives a block of `q` consecutive
    # reservoir rows, so the usable reservoir size is the largest multiple of
    # `in_size` not exceeding the requested size. `div` is exact integer
    # arithmetic (no float round-trip as with `Int(floor(...))`).
    q = div(approx_res_size, in_size)
    res_size = q * in_size
    if res_size != approx_res_size
        @warn """Reservoir size has changed!\n
        Computed reservoir size ($res_size) does not equal the \
        provided reservoir size ($approx_res_size). \n
        Using computed value ($res_size). Make sure to modify the \
        reservoir initializer accordingly. \n
        """
    end
    layer_matrix = DeviceAgnostic.zeros(rng, T, res_size, in_size)
    for idx in 1:in_size
        # Fill this column's block of `q` rows in place with the uniform
        # weight; broadcast assignment avoids allocating a temporary vector.
        layer_matrix[((idx - 1) * q + 1):(idx * q), idx] .= T(weight)
    end
    # Resolve the sign-sampling strategy (e.g. :no_sample, :bernoulli_sample!)
    # by name from this module and apply it in place to the weights.
    f_sample = getfield(@__MODULE__, sampling_type)
    f_sample(rng, layer_matrix; kwargs...)
    return return_init_as(Val(return_sparse), layer_matrix)
end

"""
informed_init([rng], [T], dims...;
scaling=0.1, model_in_size, gamma=0.5)
Expand Down Expand Up @@ -174,7 +288,8 @@ end

"""
minimal_init([rng], [T], dims...;
sampling_type=:bernoulli, weight=0.1, irrational=pi, start=1, p=0.5)
sampling_type=:bernoulli_sample!, weight=0.1, irrational=pi,
start=1, p=0.5)

Create a layer matrix with uniform weights determined by `weight` [^rodan2010].
The sign difference is randomly determined by the `sampling` chosen.
Expand All @@ -191,7 +306,7 @@ The sign difference is randomly determined by the `sampling` chosen.

- `weight`: The weight used to fill the layer matrix. Default is 0.1.
- `sampling_type`: The sampling parameters used to generate the input matrix.
Default is `:bernoulli`.
Default is `:bernoulli_sample!`.
- `irrational`: Irrational number chosen for sampling if `sampling_type=:irrational`.
Default is `pi`.
- `start`: Starting value for the irrational sample. Default is 1
Expand Down Expand Up @@ -1737,9 +1852,10 @@ end
#fallbacks for initializers #eventually to remove once migrated to WeightInitializers.jl
for initializer in (:rand_sparse, :delay_line, :delay_line_backward, :cycle_jumps,
:simple_cycle, :pseudo_svd, :chaotic_init, :scaled_rand, :weighted_init,
:informed_init, :minimal_init, :chebyshev_mapping, :logistic_mapping, :modified_lm,
:low_connectivity, :double_cycle, :selfloop_cycle, :selfloop_feedback_cycle,
:selfloop_delayline_backward, :selfloop_forward_connection, :forward_connection)
:weighted_minimal, :informed_init, :minimal_init, :chebyshev_mapping,
:logistic_mapping, :modified_lm, :low_connectivity, :double_cycle, :selfloop_cycle,
:selfloop_feedback_cycle, :selfloop_delayline_backward, :selfloop_forward_connection,
:forward_connection)
@eval begin
function ($initializer)(dims::Integer...; kwargs...)
return $initializer(Utils.default_rng(), Float32, dims...; kwargs...)
Expand Down
1 change: 1 addition & 0 deletions test/esn/test_inits.jl
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ reservoir_inits = [
input_inits = [
scaled_rand,
weighted_init,
weighted_minimal,
minimal_init,
minimal_init(; sampling_type=:irrational_sample!),
chebyshev_mapping,
Expand Down
Loading