"""
A minimalistic Echo State Networks demo with Mackey-Glass (delay 17) data
in "plain" scientific Python.
by Mantas Lukosevicius 2012
http://minds.jacobs-university.de/mantas
---
Modified by Xavier Hinaut: 19 November 2015.
http://www.xavierhinaut.com
Modified by Remy Portelas: 30 May 2016
https://github.com/rPortelas/ip_in_esn
Modified by Xiao Wei: 20 Oct 2020
https://goodgoodstudy.blog.csdn.net/article/details/109226320
"""
# Third-party scientific stack.
import numpy as np
import matplotlib.pyplot as plt
import scipy.linalg  # NOTE(review): appears unused (only np.linalg is called below) — confirm before removing
# Standard library.
import random
def set_seed(seed=None):
    """Seed the pseudo-random generators used by this script.

    Parameters
    ----------
    seed : int or None
        Seed value; when None, a time-based seed is derived so that
        every run differs.

    Returns
    -------
    int
        The seed that was used (or attempted).

    Notes
    -----
    Seeds both the stdlib ``random`` module and ``np.random``: the
    reservoir weights below are drawn with ``np.random``, so seeding
    only ``random`` (as the original code did) never actually made
    runs reproducible.
    """
    if seed is None:
        import time
        # Microsecond clock folded into a 12-digit integer.
        seed = int((time.time() * 10**6) % 10**12)
    try:
        random.seed(seed)
        # Legacy np.random.seed only accepts seeds < 2**32, while the
        # time-based seed above can be up to 12 digits — reduce it.
        np.random.seed(seed % (2**32))
        print("Seed used for random values:", seed)
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # still propagate.
        print("!!! WARNING !!!: Seed was not set correctly.")
    return seed
def sigmoid(x, mode=None):
    """Apply the reservoir activation function to ``x``.

    Parameters
    ----------
    x : float or ndarray
        Pre-activation value(s).
    mode : str or None, optional
        'tanh' or 'fermi'. Defaults to the module-level
        ``activation_function_mode``, preserving the original
        global-driven behaviour for existing callers.

    Returns
    -------
    float or ndarray
        ``tanh(x)`` (range [-1, 1]) or the logistic ``1/(1+exp(-x))``
        (range [0, 1]).

    Raises
    ------
    Exception
        If the selected mode is neither 'tanh' nor 'fermi'.
    """
    if mode is None:
        # Backward-compatible fallback to the script-wide setting.
        mode = activation_function_mode
    if mode == 'tanh':
        return np.tanh(x)
    elif mode == 'fermi':
        return ( 1 / ( 1 + np.exp(-x)))
    else:
        raise Exception("ERROR: 'activation_function_mode' was not " +
                        "set correctly.")
def plot_activity(x, epoch):
    """Plot the spatio-temporal distribution of reservoir outputs.

    Draws a histogram of all neuron outputs collected over one epoch
    (figure id ``epoch + 42``) and annotates it with summary
    statistics (mean/median/min/max/std).

    Parameters
    ----------
    x : ndarray
        Recorded reservoir outputs — presumably shape
        (trainLen, resSize); confirm against the caller.
    epoch : int
        Training epoch index, used for the figure id and the title.
    """
    plt.figure(epoch + 42).clear()
    # tanh outputs live in [-1, 1]; fermi (logistic) outputs in [0, 1].
    if activation_function_mode == 'tanh':
        plt.hist(x.ravel(), bins=200)
        plt.xlim(-1, +1)
    else:
        plt.hist(x.ravel(), bins=100)
        plt.xlim(0, +1)
    plt.xlabel('neurons outputs')
    plt.ylabel('number of neurons')
    # Summary statistics for the title. Renamed from ``min``/``max`` so
    # the locals no longer shadow the builtins of the same name.
    mean = str(round(x.mean(), 2))
    med = str(round(np.median(x), 2))
    x_min = str(round(x.min(), 2))
    x_max = str(round(x.max(), 2))
    std_dev = str(round(x.std(), 2))
    plt.title('Spatio-temporal distribution of the reservoir at epoch ' +
              str(epoch) + '\n mean = ' + mean + ' median = ' + med +
              ' min = ' + x_min + ' max = ' + x_max +
              ' std_dev = ' + std_dev)
def plot_neuron_activity(x, epoch):
    """Histogram of one neuron's outputs over all timesteps of an epoch.

    Figure id ``epoch + 84`` keeps these plots separate from the
    population histograms drawn by ``plot_activity``.
    """
    fig = plt.figure(epoch + 84)
    fig.clear()
    # tanh outputs span [-1, 1]; fermi outputs span [0, 1].
    if activation_function_mode == 'tanh':
        n_bins, lo = 200, -1
    else:
        n_bins, lo = 100, 0
    plt.hist(x.ravel(), bins=n_bins)
    plt.xlim(lo, +1)
    plt.xlabel('neuron outputs')
    plt.ylabel('number of timesteps')
    plt.title('The output distribution of a single randomly chosen neuron '
              + 'at epoch ' + str(epoch))
# --- Experiment configuration -------------------------------------------
trainLen = 4000      # number of training timesteps
prepareLen = 2000    # washout: initial states discarded before regression
testLen = 2000       # number of test/generation timesteps

# Mackey-Glass (delay 17) time series, one value per line.
data = np.loadtxt('../datasets/MackeyGlass_t17.txt')
print(data.shape)

# 'generative': feed the network its own output back during testing;
# 'prediction': feed the true next value instead.
mode = 'generative'
# Reservoir activation: 'tanh' (range [-1,1]) or 'fermi' (logistic, [0,1]).
activation_function_mode = 'tanh'
# Which signals feed the readout Wout: input+bias+states, or bias+states.
wout_mode = 'entries bias and resOut'
# Toggle intrinsic-plasticity adaptation of the reservoir neurons.
ip_mode = 'intrinsic plasticity off'
ip_update_mode = 'leaky neurons ignored'
# Number of passes over the training data: IP needs several epochs to
# adapt the per-neuron gain/bias; without IP a single pass suffices.
if ip_mode == 'intrinsic plasticity on':
    if activation_function_mode == 'tanh':
        nb_epoch = 3
    else:
        nb_epoch = 41
else:
    nb_epoch = 1

inSize = outSize = 1   # scalar input / output
resSize = 100          # reservoir size (number of neurons)
a = 0.3                # leaking rate

if ip_mode == 'intrinsic plasticity on':
    spectral_radius = 1.
    reg = 0.02         # ridge-regression regularization
    lr = 0.001         # IP learning rate
    if activation_function_mode == 'tanh':
        m = 0.         # target mean of neuron outputs (Gaussian IP rule)
        sigma = 0.2    # target standard deviation
        var = np.square(sigma)
    else :
        m = 0.2        # target mean for the fermi (exponential) IP rule
    # Per-neuron IP parameters, plus per-timestep mean traces for plotting
    # (epoch 0 does not adapt, hence nb_epoch-1 recorded epochs).
    ip_gain = np.ones((resSize, 1))
    record_ip_gain = np.zeros(((nb_epoch-1) * trainLen, 1))
    record_ip_bias = np.zeros(((nb_epoch-1) * trainLen, 1))
    ip_bias = np.zeros((resSize, 1))
else :
    spectral_radius = 1.25
    reg = 1e-8

input_scaling = 1.

our_seed = None  # None -> a time-based seed is derived
set_seed(our_seed)
# --- Random weight initialization ---------------------------------------
# Input weights in [-0.5, 0.5) times the scaling; the first column
# multiplies the constant bias input of 1.
Win = (np.random.rand(resSize,1+inSize)-0.5) * input_scaling
# Recurrent weights, rescaled below to the desired spectral radius.
W = np.random.rand(resSize,resSize)-0.5
print('Computing spectral radius...', end=' ')
# Spectral radius = largest absolute eigenvalue of W.
rhoW = max(abs(np.linalg.eig(W)[0]))
print('done.')
W *= spectral_radius / rhoW

# Design matrix: one column of collected readout signals per timestep.
if wout_mode == 'entries bias and resOut':
    X = np.zeros((1+inSize+resSize,trainLen))
elif wout_mode == 'resOut and bias only':
    X = np.zeros((1+resSize,trainLen))
else :
    raise Exception("ERROR: 'wout_mode' was not set correctly.")

recorded_res_out = np.zeros((trainLen, resSize))
# Index of one neuron to monitor. np.random.random_integers was removed
# in NumPy >= 1.20; randint(1, resSize) draws from the same inclusive
# range [1, resSize-1] that random_integers(resSize - 1) produced.
choosen_neuron = np.random.randint(1, resSize)
neuron_out_records = np.zeros(trainLen)
# Targets: the series shifted one step ahead, as a (1, trainLen) row.
Yt = data[None,1:trainLen+1]
# Reservoir state column vector.
x = np.zeros((resSize,1))
# --- Run the reservoir over the training data ---------------------------
for epoch in range(nb_epoch):
    for t in range(trainLen):
        u = data[t]
        # Pre-activation: input (with a bias of 1) plus recurrent drive.
        res_in = np.dot( Win, np.vstack((1,u))) + np.dot( W, x )
        if ip_mode == 'intrinsic plasticity on':
            # Per-neuron gain/bias shape the drive before the
            # nonlinearity; x is the leaky-integrated state.
            res_out = sigmoid( ip_gain * res_in + ip_bias )
            x = (1-a) * x + a * res_out
            # Epoch 0 only collects states; IP adaptation starts at epoch 1.
            if epoch != 0:
                if activation_function_mode == 'tanh':
                    # Gaussian-target IP gradient (target mean m,
                    # variance var). NOTE(review): presumably the
                    # Triesch-style IP rule — confirm against the
                    # reference derivation.
                    if ip_update_mode == 'leaky neurons ignored':
                        d_ip_bias = (-lr) * ((-(m / var)) + (res_out / var) *
                                             ((2 * var) + 1 - np.square(res_out) + m * res_out))
                    elif ip_update_mode == 'leaky neurons treated':
                        # Same rule applied to the leaky state x instead
                        # of the instantaneous output.
                        d_ip_bias = (-lr) * ((-(m / var)) + (x / var) *
                                             ((2 * var) + 1 - np.square(x) + m * x))
                    else:
                        raise Exception("ERROR: 'ip_update_mode' was not "
                                        "set correctly.")
                else:
                    # Exponential-target IP rule for fermi neurons
                    # (target mean m).
                    if ip_update_mode == 'leaky neurons ignored':
                        d_ip_bias = lr * (1 - (2 + (1/m)) * res_out +
                                          (np.square(res_out) / m))
                    elif ip_update_mode == 'leaky neurons treated':
                        d_ip_bias = lr * (1 - (2 + (1/m)) * x +
                                          (np.square(x) / m))
                    else:
                        raise Exception("ERROR: 'ip_update_mode' was not "
                                        "set correctly.")
                ip_bias += d_ip_bias
                # Gain update is coupled to the bias update.
                ip_gain += (lr / ip_gain) + (d_ip_bias * res_in)
                # Record mean gain/bias; flat index starts at 0 for epoch 1.
                record_ip_bias[t + (trainLen * (epoch-1)),0] = ip_bias.mean()
                record_ip_gain[t + (trainLen * (epoch-1)),0] = ip_gain.mean()
        elif ip_mode == 'intrinsic plasticity off':
            res_out = sigmoid(res_in)
            x = (1-a) * x + a * res_out
        else:
            raise Exception("ERROR: 'ip_mode' was not set correctly.")
        # Track the one randomly chosen neuron's output over time.
        neuron_out_records[t] = np.round(res_out[choosen_neuron],2)
        # Only the states of the final epoch feed the readout regression.
        if epoch == nb_epoch - 1 :
            if wout_mode == 'entries bias and resOut':
                X[:,t] = np.vstack((1,u,x))[:,0]
            elif wout_mode == 'resOut and bias only':
                X[:,t] = np.vstack((1,x))[:,0]
            else:
                raise Exception("ERROR: 'wout_mode' was not set correctly.")
        recorded_res_out[t] = res_out[:,0]
    # Per-epoch diagnostic histograms (every epoch for tanh, every 20th
    # for fermi since that setting trains for many more epochs).
    if activation_function_mode == 'tanh':
        plot_activity(recorded_res_out, epoch)
        plot_neuron_activity(neuron_out_records, epoch)
    if activation_function_mode == 'fermi':
        if (epoch % 20 == 0):
            plot_activity(recorded_res_out, epoch)
            plot_neuron_activity(neuron_out_records, epoch)
# Plot how the mean IP gain and bias evolved over the adaptation epochs.
if ip_mode == 'intrinsic plasticity on':
    plt.figure(10).clear()
    plt.plot( record_ip_gain, label='gain' )
    plt.plot( record_ip_bias, label='bias' )
    plt.legend()
    plt.ylabel('mean value')
    plt.xlabel('number of timesteps')
    plt.title('Evolution of the mean of gain and bias '
              'relative to intrinsic plasticity')
print(X.shape, Yt.shape)
# --- Train the readout with ridge regression ----------------------------
# Discard the first `prepareLen` columns (washout) so the regression only
# sees states that have forgotten the arbitrary initial reservoir state.
X = X[:, prepareLen:]
Yt = Yt[:, prepareLen:]
X_T = X.T

# Wout = Yt X^T (X X^T + reg I)^-1. The feature count depends on which
# signals feed the readout; collapsing the two original branches removes
# the duplicated regression formula.
if wout_mode == 'entries bias and resOut':
    n_features = 1 + inSize + resSize
elif wout_mode == 'resOut and bias only':
    n_features = 1 + resSize
else :
    raise Exception("ERROR: 'wout_mode' was not set correctly.")
# Solve the regularized normal equations with np.linalg.solve instead of
# forming an explicit inverse: cheaper and numerically better-behaved.
# A is symmetric, so  B @ inv(A) == solve(A, B.T).T  with B = Yt @ X_T.
A = np.dot(X, X_T) + reg * np.eye(n_features)
Wout = np.linalg.solve(A, np.dot(X, Yt.T)).T
# --- Test phase: run the trained network --------------------------------
Y = np.zeros((outSize,testLen))
# The last training point primes the generator.
u = data[trainLen]
for t in range(testLen):
    res_in = np.dot( Win, np.vstack((1,u)) ) + np.dot( W, x )
    # IP parameters are frozen here; they only adapt during training.
    if ip_mode == 'intrinsic plasticity on':
        res_out = sigmoid(ip_gain * res_in + ip_bias)
    elif ip_mode == 'intrinsic plasticity off':
        res_out = sigmoid(res_in)
    else:
        raise Exception("ERROR: 'ip_mode' was not set correctly.")
    x = (1-a) * x + a * res_out
    # Linear readout on the same signals used during the regression.
    if wout_mode == 'entries bias and resOut':
        y = np.dot( Wout, np.vstack((1,u,x)) )
    elif wout_mode == 'resOut and bias only':
        y = np.dot( Wout, np.vstack((1,x)))
    else :
        raise Exception("ERROR: 'wout_mode' was not set correctly.")
    Y[:,t] = y
    if mode == 'generative':
        # Free-run: the network's own output becomes the next input.
        u = y
    elif mode == 'prediction':
        # Teacher-forced one-step-ahead prediction.
        u = data[trainLen+t+1]
    else:
        raise Exception("ERROR: 'mode' was not set correctly.")
# --- Error measurement ---------------------------------------------------
errorLen = 1000  # number of test timesteps the error is measured over
# Per-timestep squared error between the target series and the network
# output, averaged into a mean squared error.
mse_for_each_t = np.square( data[trainLen+1:trainLen+errorLen+1] -
                            Y[0,0:errorLen] )
mse = np.mean(mse_for_each_t)  # same as np.sum(...)/errorLen, but clearer
print('MSE = ' + str( mse ))
# Reference errors from Mantas Lukosevicius' original demo, for comparison.
print('compared to max default (Mantas) error 2.91524629066e-07'
      '(For prediction / 100 Neurons)')
print('ratio compared to (Mantas) error ' + str(mse/2.91524629066e-07) +
      ' (For prediction / 100 Neurons)')
print("")
print('compared to max default (Mantas) error 4.06986845044e-06 '
      '(For generation / 1000 Neurons)')
print('compared to max default (Mantas) error 2.02529702465e-08 '
      '(For prediction / 1000 Neurons)')
# --- Plots: target vs. generated signal, state heatmap -------------------
plt.figure()
plt.plot(data[trainLen+1:trainLen+errorLen+1].T)
plt.plot(Y[0,0:errorLen])

# NOTE(review): mid-file third-party import, kept as in the original script.
import seaborn as sns
# Heatmap of the first 1000 collected state vectors (rows = features).
plt.figure(figsize=(20,10))
ax = sns.heatmap(X[:,:1000])

plt.figure()
plt.plot(Yt.T, color='k', label='y')
# Overlay a single collected signal (row index 9) against the target.
for i in range(9,10):
    plt.plot(X[i],label=str(i))
plt.legend()
plt.show()
# Repost notice from the scraped page ("please credit the original URL
# when reposting"): https://lol.8miu.com/read-35926.html