Commit 832612c

adding chaotic and chebyshev inits
1 parent: 5d65ff8

2 files changed, 115 insertions(+), 4 deletions(-)


src/ReservoirComputing.jl

Lines changed: 3 additions & 2 deletions
@@ -38,8 +38,9 @@ include("reca/reca_input_encodings.jl")
 export NLADefault, NLAT1, NLAT2, NLAT3
 export StandardStates, ExtendedStates, PaddedStates, PaddedExtendedStates
 export StandardRidge
-export scaled_rand, weighted_init, informed_init, minimal_init
-export rand_sparse, delay_line, delay_line_backward, cycle_jumps, simple_cycle, pseudo_svd
+export scaled_rand, weighted_init, informed_init, minimal_init, chebyshev_mapping
+export rand_sparse, delay_line, delay_line_backward, cycle_jumps,
+    simple_cycle, pseudo_svd, chaotic_init
 export RNN, MRNN, GRU, GRUParams, FullyGated, Minimal
 export train
 export ESN, HybridESN, KnowledgeModel, DeepESN
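
With the new exports in place, both initializers can be called like the existing ones, for example through the dims-only fallbacks extended at the bottom of `src/esn/esn_inits.jl` in this commit (default RNG and `Float32` eltype). A minimal sketch; the sizes below are illustrative only:

```julia
using ReservoirComputing

# Chebyshev-mapped input layer, 8 reservoir rows x 3 input columns.
# Returned as a sparse matrix since return_sparse defaults to true.
W_in = chebyshev_mapping(8, 3)

# Chaotic reservoir with 16 = 2^(2*2) units, so the requested order is kept.
W_res = chaotic_init(16)
```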

src/esn/esn_inits.jl

Lines changed: 112 additions & 2 deletions
@@ -282,6 +282,68 @@ function _create_irrational(irrational::Irrational, start::Int, res_size::Int,
     return T.(input_matrix)
 end
 
+"""
+    chebyshev_mapping([rng], [T], dims...;
+        amplitude=one(T), sine_divisor=one(T),
+        chebyshev_parameter=one(T), return_sparse=true)
+
+Generate a Chebyshev-mapped matrix [^xie2024]. The first row is initialized
+using a sine function and subsequent rows are generated iteratively
+via the Chebyshev mapping. The first row is defined as:
+
+```math
+w(1, j) = amplitude * sin(j * π / (sine_divisor * n_cols))
+```
+
+for j = 1, 2, …, n_cols (with n_cols typically equal to K + 1, where K is the
+number of input layer neurons). Subsequent rows are generated by applying the
+mapping:
+
+```math
+w(i+1, j) = cos(chebyshev_parameter * acos(w(i, j)))
+```
+
+# Arguments
+
+  - `rng`: Random number generator. Default is `Utils.default_rng()`
+    from WeightInitializers.
+  - `T`: Type of the elements in the reservoir matrix.
+    Default is `Float32`.
+  - `dims`: Dimensions of the matrix. Should follow `res_size x in_size`.
+
+# Keyword arguments
+
+  - `amplitude`: Scaling factor used to initialize the first row.
+    This parameter adjusts the amplitude of the sine function. Default is one.
+  - `sine_divisor`: Divisor applied in the sine function's phase. Default is one.
+  - `chebyshev_parameter`: Control parameter for the Chebyshev mapping in
+    subsequent rows. This parameter influences the distribution of the
+    matrix elements. Default is one.
+  - `return_sparse`: If `true`, the matrix is returned as a sparse matrix.
+    Default is `true`.
+
+[^xie2024]: Xie, Minzhi, Qianxue Wang, and Simin Yu.
+    "Time Series Prediction of ESN Based on Chebyshev Mapping and Strongly
+    Connected Topology."
+    Neural Processing Letters 56.1 (2024): 30.
+"""
+function chebyshev_mapping(rng::AbstractRNG, ::Type{T}, dims::Integer...;
+        amplitude::AbstractFloat = one(T), sine_divisor::AbstractFloat = one(T),
+        chebyshev_parameter::AbstractFloat = one(T),
+        return_sparse::Bool = true) where {T <: Number}
+    input_matrix = DeviceAgnostic.zeros(rng, T, dims...)
+    n_rows, n_cols = dims[1], dims[2]
+
+    for idx_cols in 1:n_cols
+        input_matrix[1, idx_cols] = amplitude * sin(idx_cols * pi / (sine_divisor * n_cols))
+    end
+    for idx_rows in 2:n_rows
+        for idx_cols in 1:n_cols
+            input_matrix[idx_rows, idx_cols] = cos(chebyshev_parameter * acos(input_matrix[idx_rows - 1, idx_cols]))
+        end
+    end
+
+    return return_sparse ? sparse(input_matrix) : input_matrix
+end
+
 ### reservoirs
 
 """
@@ -672,11 +734,59 @@ function get_sparsity(M, dim)
     return size(M[M .!= 0], 1) / (dim * dim - size(M[M .!= 0], 1)) #nonzero/zero elements
 end
 
+function digital_chaotic_adjacency(rng::AbstractRNG, bit_precision::Integer;
+        extra_edge_probability::AbstractFloat = 0.1)
+    matrix_order = 2^(2 * bit_precision)
+    adjacency_matrix = zeros(Int, matrix_order, matrix_order)
+    for row_index in 1:(matrix_order - 1)
+        adjacency_matrix[row_index, row_index + 1] = 1
+    end
+    adjacency_matrix[matrix_order, 1] = 1
+    for row_index in 1:matrix_order, column_index in 1:matrix_order
+        if row_index != column_index && rand(rng) < extra_edge_probability
+            adjacency_matrix[row_index, column_index] = 1
+        end
+    end
+
+    return adjacency_matrix
+end
+
+function chaotic_init(rng::AbstractRNG, ::Type{T}, dims::Integer...;
+        extra_edge_probability::AbstractFloat = T(0.1), spectral_radius::AbstractFloat = one(T),
+        return_sparse_matrix::Bool = true) where {T <: Number}
+    requested_order = first(dims)
+    if length(dims) > 1 && dims[2] != requested_order
+        @warn "Using dims[1] = $requested_order for the chaotic reservoir matrix order."
+    end
+    D_estimate = log2(requested_order) / 2
+    D_floor = max(floor(Int, D_estimate), 1)
+    D_ceil = ceil(Int, D_estimate)
+    candidate_order_floor = 2^(2 * D_floor)
+    candidate_order_ceil = 2^(2 * D_ceil)
+    chosen_bit_precision = abs(candidate_order_floor - requested_order) <= abs(candidate_order_ceil - requested_order) ? D_floor : D_ceil
+    actual_matrix_order = 2^(2 * chosen_bit_precision)
+    if actual_matrix_order != requested_order
+        @warn "Chaotic reservoir matrix order adjusted from $requested_order to $actual_matrix_order based on computed bit precision = $chosen_bit_precision."
+    end
+
+    random_weight_matrix = T(2) * rand(rng, T, actual_matrix_order, actual_matrix_order) .- T(1)
+    adjacency_matrix = digital_chaotic_adjacency(rng, chosen_bit_precision; extra_edge_probability = extra_edge_probability)
+    reservoir_matrix = random_weight_matrix .* adjacency_matrix
+    current_spectral_radius = maximum(abs, eigvals(reservoir_matrix))
+    if current_spectral_radius != 0
+        reservoir_matrix .*= spectral_radius / current_spectral_radius
+    end
+
+    return return_sparse_matrix ? sparse(reservoir_matrix) : reservoir_matrix
+end
+
 ### fallbacks
 #fallbacks for initializers #eventually to remove once migrated to WeightInitializers.jl
 for initializer in (:rand_sparse, :delay_line, :delay_line_backward, :cycle_jumps,
-        :simple_cycle, :pseudo_svd,
-        :scaled_rand, :weighted_init, :informed_init, :minimal_init)
+        :simple_cycle, :pseudo_svd, :chaotic_init,
+        :scaled_rand, :weighted_init, :informed_init, :minimal_init, :chebyshev_mapping)
     @eval begin
         function ($initializer)(dims::Integer...; kwargs...)
             return $initializer(Utils.default_rng(), Float32, dims...; kwargs...)
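
`chaotic_init` builds a digital-chaos reservoir: `digital_chaotic_adjacency` wires a directed ring over `2^(2 * bit_precision)` nodes (keeping the graph strongly connected) plus random extra edges with probability `extra_edge_probability`, the nonzero entries receive uniform weights in [-1, 1], and the matrix is rescaled to the requested `spectral_radius`. Since the order must equal `2^(2D)` for an integer `D`, other requested sizes are snapped to the nearest admissible order with a warning. A brief sketch, with an illustrative size, seed, and radius:

```julia
using ReservoirComputing, LinearAlgebra, Random

# Requesting 60 units: the nearest admissible order 2^(2D) is 64 (D = 3),
# so the initializer warns and returns a 64x64 matrix.
W = chaotic_init(Xoshiro(7), Float32, 60;
    spectral_radius = 1.2, return_sparse_matrix = false)

size(W)                   # (64, 64)
maximum(abs, eigvals(W))  # ≈ 1.2 after rescaling
```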
