-
-
Notifications
You must be signed in to change notification settings - Fork 1k
/
regression_kernel_ridge_modular.py
55 lines (42 loc) · 1.57 KB
/
regression_kernel_ridge_modular.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
#!/usr/bin/env python
###########################################################################
# kernel ridge regression
###########################################################################
from numpy import *
#from pylab import plot, show, legend
# Two example parameter sets, each in signature order:
# (n, n_test, x_range, x_range_test, noise_var, width, tau, seed)
parameter_list = [[20,100,6,10,0.5,1, 0.5,1], [20,100,6,10,0.5,1, 2,2]]
def regression_kernel_ridge_modular (n=100,n_test=100, \
    x_range=6,x_range_test=10,noise_var=0.5,width=1, tau=1e-6, seed=1):
    """Train kernel ridge regression on a noisy 1-D sine wave and predict
    on an evenly spaced test grid.

    Parameters:
        n: number of training points (drawn uniformly from [0, x_range)).
        n_test: number of evenly spaced test points.
        x_range: upper bound of the training input range.
        x_range_test: upper bound of the test input range.
        noise_var: scale of the Gaussian noise added to the training targets.
        width: width of the Gaussian kernel.
        tau: ridge regularization constant.
        seed: numpy RNG seed for reproducible results.

    Returns:
        (out, kernel, krr): predicted labels on the test grid, the Gaussian
        kernel (initialized train-vs-test), and the trained regressor.
    """
    from shogun.Features import RegressionLabels, RealFeatures
    from shogun.Kernel import GaussianKernel
    from shogun.Regression import KernelRidgeRegression

    # Reproducible results.
    random.seed(seed)

    # Easy regression data: one-dimensional noisy sine wave.
    # FIX: the original re-assigned n, n_test, x_range_test and noise_var
    # here, silently ignoring the caller's arguments (and parameter_list);
    # the parameters are now honored.
    X=random.rand(1,n)*x_range
    X_test=array([[float(i)/n_test*x_range_test for i in range(n_test)]])
    Y_test=sin(X_test)
    Y=sin(X)+random.randn(n)*noise_var

    # Shogun representation of labels and features.
    labels=RegressionLabels(Y[0])
    feats_train=RealFeatures(X)
    feats_test=RealFeatures(X_test)

    kernel=GaussianKernel(feats_train, feats_train, width)
    krr=KernelRidgeRegression(tau, kernel, labels)
    krr.train(feats_train)

    # Re-initialize the kernel train-vs-test before applying the model.
    kernel.init(feats_train, feats_test)
    out = krr.apply().get_labels()

    # Optional visualization (kept from the original example):
    #plot(X[0],Y[0],'x') # training observations
    #plot(X_test[0],Y_test[0],'-') # ground truth of test
    #plot(X_test[0],out, '-') # mean predictions of test
    #legend(["training", "ground truth", "mean predictions"])
    #show()

    return out,kernel,krr
# Run the demo with the first example parameter set when executed as a script.
if __name__ == '__main__':
    print('KRR')
    demo_args = parameter_list[0]
    regression_kernel_ridge_modular(*demo_args)