Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Memristive network with memory workflow #2

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 19 additions & 10 deletions conn2res/reservoir.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import itertools as itr
import numpy as np
import numpy.ma as ma
from numpy.linalg import (inv, matrix_rank)
from numpy.linalg import (pinv, matrix_rank)

import matplotlib.pyplot as plt

Expand Down Expand Up @@ -348,7 +348,7 @@ def solveVi(self, Ve, Vgr=None, G=None, **kwargs):
# inverse matrix A_II
A_II = A[np.ix_(self._I, self._I)]
# print(matrix_rank(A_II, hermitian=check_symmetric(A_II)))
A_II_inv = inv(A_II)
A_II_inv = pinv(A_II)
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was getting errors from trying to invert a singular matrix (maybe multiple rows of all zeroes?). So I switched to the pseudoinverse to avoid this problem.


# matrix HI
H_IE = np.dot(G[np.ix_(self._I, self._E)], Ve)
Expand Down Expand Up @@ -406,7 +406,7 @@ def simulate(self, Vext, ic=None, mode='forward'):

Parameters
----------
ext_input : (time, N_external_nodes) numpy.ndarray
Vext : (time, N_external_nodes) numpy.ndarray
External input signal
N_external_nodes: number of external (input) nodes
ic : (N_internal_nodes,) numpy.ndarray
Expand Down Expand Up @@ -445,7 +445,10 @@ def simulate(self, Vext, ic=None, mode='forward'):
# get voltage at internal nodes
Vi.append(self.iterate(Ve))

return np.asarray(Vi)
V = np.zeros((len(Vi), self._n_nodes))
V[:, self._I] = np.asarray(Vi)
V[:, self._E] = Vext
return V
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

EchoStateNetwork returns a matrix that is size (n_times, n_nodes), while MemristiveReservoir was returning (n_times, n_internal_nodes). This messes up the indexing when extracting readout nodes, for example, because those indices were created relative to all existing nodes, not just the internal nodes.



def iterate(self, Ve, tol=5e-2, iters=100):
Expand Down Expand Up @@ -617,8 +620,10 @@ def __init__(self, vA=0.17, vB=0.22, tc=0.32e-3, NMSS=10000,\
self.NMSS = self.init_property(NMSS, noise) # constant
self.Woff = self.init_property(Woff, noise) # constant
self.Won = self.init_property(Won, noise) # constant
self._Ga = mask(self, np.divide(self.Woff, self.NMSS)) # constant
self._Gb = mask(self, np.divide(self.Won, self.NMSS)) # constant
self._Ga = mask(self, np.divide(self.Woff, self.NMSS,
where=self.NMSS != 0)) # constant
self._Gb = mask(self, np.divide(self.Won, self.NMSS,
where=self.NMSS != 0)) # constant
Comment on lines +724 to +727
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was getting some divide by zero errors and infinites in the resulting matrices because of this. I added a condition to avoid dividing by zero here, and in some other places.


self._Nb = self.init_property(Nb, noise)
self._G = self._Nb * (self._Gb - self._Ga) + self.NMSS * self._Ga
Expand All @@ -644,13 +649,15 @@ def dG(self, V, G=None, dt=1e-4):

# set Nb values
if G is not None:
Nb = mask(self, (G - self.NMSS * self._Ga)/(self._Gb - self._Ga))
Gdiff1 = G - self.NMSS * self._Ga
Gdiff2 = self._Gb - self._Ga
Nb = mask(self, np.divide(Gdiff1, Gdiff2, where=Gdiff2 != 0))

else:
Nb = self._Nb

# ration of dt to characterictic time of the device tc
alpha = dt/self.tc
        # ratio of dt to characteristic time of the device tc
alpha = np.divide(dt, self.tc, where=self.tc != 0)

# compute Pa
exponent = -1 * (V - self.vA) / self.VT
Expand Down Expand Up @@ -740,4 +747,6 @@ def check_square(a):

def reservoir(name, **kwargs):
    """Factory that instantiates a reservoir by class name.

    Parameters
    ----------
    name : str
        Name of the reservoir class to build. Recognized values are
        'EchoStateNetwork' and 'MSSNetwork'.
    **kwargs
        Keyword arguments forwarded unchanged to the selected
        reservoir's constructor.

    Returns
    -------
    EchoStateNetwork or MSSNetwork
        A new reservoir instance, or None implicitly if `name` does
        not match a known class.
    """
    if name == 'EchoStateNetwork':
        return EchoStateNetwork(**kwargs)
    if name == 'MSSNetwork':
        return MSSNetwork(**kwargs)
2 changes: 1 addition & 1 deletion conn2res/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import numpy as np
import pandas as pd
import scipy as sp
import mdp
# import mdp

from sklearn import metrics
from sklearn.model_selection import ParameterGrid
Expand Down
44 changes: 35 additions & 9 deletions conn2res/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@

from . import iodata, reservoir, coding

def memory_capacity(conn, input_nodes, output_nodes, readout_modules=None,

def memory_capacity(conn, input_nodes, output_nodes, rsn_mapping=None,
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I changed this to take in the raw mappings and index with the output nodes later. This is because output nodes will sometimes get adjusted in this method when randomly selecting the ground node.

readout_nodes=None, resname='EchoStateNetwork',
alphas=None, input_gain=1.0, tau_max=20, plot_res=False,
plot_title=None):
Expand Down Expand Up @@ -50,23 +51,48 @@ def memory_capacity(conn, input_nodes, output_nodes, readout_modules=None,
w_in = np.zeros((1, n_reservoir_nodes))
w_in[:,input_nodes] = input_gain

# if using MSSNetwork, must restructure nodes and input
if resname == 'MSSNetwork':
# select random node as ground from output nodes
gr_nodes = np.random.choice(output_nodes, 1)
output_nodes = np.setdiff1d(output_nodes, gr_nodes)

# remove ground node from readout_nodes if necessary
if readout_nodes is not None:
readout_nodes = np.setdiff1d(readout_nodes, gr_nodes)

# second dimension should be along the input nodes
x = np.tile(x, (1, len(input_nodes)))
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

MemristiveReservoir.simulate expects an input of size (n_examples, n_external_nodes). This is different from EchoStateNetwork, which only requires that the input's second dimension align with the first dimension of w_in.


# establish readout modules
readout_modules = rsn_mapping[output_nodes]

# evaluate network performance across various dynamical regimes
if alphas is None: alphas = np.linspace(0,2,11)

df = []
for alpha in alphas:
for alpha in alphas[1:]:

print(f'\n----------------------- alpha = {alpha} -----------------------')

# instantiate an Echo State Network object
network = reservoir.reservoir(name=resname,
w_ih=w_in,
w_hh=alpha*conn.copy(),
activation_function='tanh'
)
if resname == 'EchoStateNetwork':
# instantiate an Echo State Network object
network = reservoir.reservoir(name=resname,
w_ih=w_in,
w_hh=alpha * conn.copy(),
activation_function='tanh'
)
elif resname == 'MSSNetwork':
# instantiate an MSS Network object
network = reservoir.reservoir(name=resname,
w=alpha * conn.copy(),
i_nodes=output_nodes,
e_nodes=input_nodes,
gr_nodes=gr_nodes
)

# simulate reservoir states; select only output nodes
rs = network.simulate(ext_input=x)[:,output_nodes]
rs = network.simulate(x)[:, output_nodes]
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In MSSNetwork, this input is called Vext, not ext_input. I'm not sure whether it's better to avoid explicitly passing a parameter-value pair, or to change the API of MSSNetwork to match EchoStateNetwork; I chose the former for now. Also, there's currently no ability to choose 'forward' or 'backward' mode. Let me know if this is important to add.


# remove first tau_max points from reservoir states
rs = rs[tau_max:]
Expand Down
21 changes: 11 additions & 10 deletions examples/memory_capacity.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,17 +35,18 @@
# define different sets of output nodes
rsn_mapping = np.load(os.path.join(DATA_DIR, 'rsn_mapping.npy'))

# evaluate the memory capacity of an echo state network (default)
# Evaluate the memory capacity of an echo state network or
# metastable switch memristor network
from conn2res import workflows

MC = workflows.memory_capacity(conn=conn,
input_nodes=input_nodes,
output_nodes=output_nodes,
readout_modules=rsn_mapping[output_nodes],
resname='EchoStateNetwork',
alphas=np.linspace(0,4,21),
input_gain=1.0,
tau_max=16,
plot_res=True,
)
input_nodes=input_nodes,
output_nodes=output_nodes,
rsn_mapping=rsn_mapping,
resname='MSSNetwork',
alphas=np.linspace(0, 4, 21),
input_gain=1.0,
tau_max=16,
plot_res=True,
)