Commit 6980d9a

Merge pull request #45 from Saurabh7/gpregres

add some options and refactor

karlnapf committed Jun 30, 2014
2 parents 9bc285c + 71ce75f
Showing 5 changed files with 80 additions and 73 deletions.
11 changes: 7 additions & 4 deletions demos/classifier/gaussian_process.py
@@ -1,13 +1,15 @@
 import numpy as np
 import modshogun as sg
 
-def classify_gp(features, labels, kernel, domain, lik, learn, returnValues=True):
+def classify_gp(features, labels, kernel, domain, lik, learn, scale, returnValues=True):
     mean = sg.ZeroMean()
     inf = sg.EPInferenceMethod(kernel, features, mean, labels, lik)
+    inf.set_scale(scale)
     gp = sg.GaussianProcessBinaryClassification(inf)
-    best_width=0
+    best_width=0.0
     best_param=0
     best_degree=0
+    best_scale=0.0
 
     if learn == 'ML2':
         grad = sg.GradientEvaluation(gp, features, labels, sg.GradientCriterion(), False)
@@ -19,9 +21,10 @@ def classify_gp(features, labels, kernel, domain, lik, learn, returnValues=True)
             best_width=sg.GaussianKernel.obtain_from_generic(inf.get_kernel()).get_width()
         except:
             pass
+        best_scale = inf.get_scale()
     gp.train()
 
-    size = 100
+    size = 50
     x1 = np.linspace(domain['horizontal'][0], domain['horizontal'][1], size)
     y1 = np.linspace(domain['vertical'][0], domain['vertical'][1], size)
     x, y = np.meshgrid(x1, y1)
@@ -34,4 +37,4 @@ def classify_gp(features, labels, kernel, domain, lik, learn, returnValues=True)
     out = gp.apply(test).get_labels()
     z = out.reshape((size, size))
     z = np.transpose(z)
-    return x, y, z, best_width, best_param
+    return x, y, z, best_width, best_param, best_scale
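
For context, a minimal driver sketch for the extended classify_gp signature. The toy data, kernel width, likelihood choice and domain below are illustrative assumptions, not part of this commit; the import assumes the script is run from demos/classifier.

import numpy as np
import modshogun as sg
from gaussian_process import classify_gp

# Two Gaussian blobs as toy binary-classification data (illustrative only).
np.random.seed(0)
X = np.hstack((np.random.randn(2, 20) - 1.5, np.random.randn(2, 20) + 1.5))
y = np.hstack((-np.ones(20), np.ones(20)))

features = sg.RealFeatures(X)
labels = sg.BinaryLabels(y)
kernel = sg.GaussianKernel(10, 2.0)    # cache size, assumed starting width
lik = sg.LogitLikelihood()             # assumed likelihood for EP binary classification
domain = {'horizontal': [-4, 4], 'vertical': [-4, 4]}

# learn='ML2' triggers gradient-based model selection; 0.1 seeds the new scale option.
x, y_grid, z, best_width, best_param, best_scale = classify_gp(
    features, labels, kernel, domain, lik, 'ML2', 0.1)
print('best width %.3f, best scale %.3f' % (best_width, best_scale))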
14 changes: 13 additions & 1 deletion demos/classifier/gp.py
@@ -52,6 +52,13 @@ def handler(request):
                     'argument_label': 'Degree',
                     'argument_default': '2',
                     'argument_explain': 'The degree to use in the PolynomialKernel'},
+                {
+                    'argument_type': 'decimal',
+                    'argument_name': 'scale',
+                    'argument_label': 'Kernel scaling',
+                    'argument_default' : '0.1',
+                    'argument_explain': 'The scale for kernel'},
+
                 {
                     'argument_type': 'button-group',
                     'argument_items': [{'button_name': 'classify',
@@ -112,15 +119,20 @@ def classify(request):
         learn = request.POST["learn"]
     except ValueError as e:
         return HttpResponse(json.dumps({"status": e.message}))
+    try:
+        scale = float(request.POST["scale"])
+    except:
+        raise ValueError("Scale is not correct")
     try:
         domain = json.loads(request.POST['axis_domain'])
-        x, y, z, width, param = gaussian_process.classify_gp(features, labels, kernel, domain, lik, learn)
+        x, y, z, width, param, best_scale = gaussian_process.classify_gp(features, labels, kernel, domain, lik, learn, scale)
     except Exception as e:
         return HttpResponse(json.dumps({"status": repr(e)}))
 
     return HttpResponse(json.dumps({ 'status': 'ok',
                                      'best_width': float(width),
                                      'best_param': float(param),
+                                     'best_scale': float(best_scale),
                                      'domain': [np.min(z), np.max(z)],
                                      'z': z.tolist() }))
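
For reference, the JSON contract that classify now hands back to the template, with best_scale added. All values below are made-up placeholders; only the keys come from the diff above (best_param is presumably the learned non-Gaussian kernel parameter, such as the polynomial degree).

# Shape of the success payload produced by classify() -- illustrative values only.
response = {
    'status': 'ok',
    'best_width': 2.0,               # learned GaussianKernel width, 0.0 if not applicable
    'best_param': 0.0,               # learned kernel parameter for non-Gaussian kernels
    'best_scale': 0.35,              # kernel scale reported by the inference method
    'domain': [-1.0, 1.0],           # [np.min(z), np.max(z)] over the prediction grid
    'z': [[0.1, -0.2], [0.3, 0.4]],  # size x size grid of predictions (z.tolist())
}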

112 changes: 44 additions & 68 deletions demos/regression/gp.py
@@ -47,12 +47,25 @@ def entrance(request):
             'argument_default': '0.1',
             'argument_explain': 'The noise level of the training points'
             },
+            {
+                'argument_type': 'decimal',
+                'argument_name': 'scale',
+                'argument_label': 'Kernel scaling',
+                'argument_default' : '0.1',
+                'argument_explain': 'The scale for kernel'},
+
+            {
+                'argument_type': 'select',
+                'argument_label': 'Learn parameters',
+                'argument_name': 'learn',
+                'argument_items':['No',
+                                  'ML2'],
+                'argument_explain':'Learn parameters using model selection'},
+
             {
                 'argument_type': 'button-group',
                 'argument_items': [{'button_name': 'TrainGP',
                                     'button_type': 'json_up_down_load'},
-                                   {'button_name': 'UseML2',
-                                    'button_type': 'json_up_down_load'},
                                    {'button_name': 'clear'}]
             }
         ]
@@ -92,17 +105,6 @@ def gaussian_process(request):
 
     return HttpResponse(json.dumps(result))
 
-def gaussian_process_ml2(request):
-    result = []
-    try:
-        arguments = _read_toy_data_ml2(request)
-        result = _process_ml2(*arguments)
-    except:
-        raise ValueError("Argument Error")
-
-    return HttpResponse(json.dumps(result))
-
-
 def _read_toy_data(request):
     y_set = []
     x_set = []
@@ -111,6 +113,7 @@ def _read_toy_data(request):
         y_set.append(float(pt["y"]))
         x_set.append(float(pt["x"]))
     noise_level = float(request.POST['noise_level'])
+    scale = float(request.POST['scale'])
     domain = json.loads(request.POST['axis_domain'])
 
     labels = np.array(y_set, dtype = np.float64)
@@ -123,30 +126,14 @@ def _read_toy_data(request):
     feat_train = sg.RealFeatures(examples)
     labels = sg.RegressionLabels(labels)
     kernel = get_kernel(request, feat_train)
-    return (feat_train, labels, noise_level, kernel, domain)
-
-def _read_toy_data_ml2(request):
-    y_set = []
-    x_set = []
-    toy_data = json.loads(request.POST['point_set'])
-    for pt in toy_data:
-        y_set.append(float(pt["y"]))
-        x_set.append(float(pt["x"]))
-    domain = json.loads(request.POST['axis_domain'])
-
-    labels = np.array(y_set, dtype = np.float64)
-    num = len(x_set)
-    if num == 0:
-        raise Http404
-    examples = np.zeros((1, num))
-    for i in xrange(num):
-        examples[0,i] = x_set[i]
-    feat_train = sg.RealFeatures(examples)
-    labels = sg.RegressionLabels(labels)
-    return (feat_train, labels, domain)
+    try:
+        learn = request.POST["learn"]
+    except ValueError as e:
+        return HttpResponse(json.dumps({"status": e.message}))
 
+    return (feat_train, labels, noise_level, scale, kernel, domain, learn)
 
-def _process(feat_train, labels, noise_level, kernel, domain):
+def _process(feat_train, labels, noise_level, scale, kernel, domain, learn):
     n_dimensions = 1
 
     likelihood = sg.GaussianLikelihood()
@@ -159,6 +146,7 @@ def _process(feat_train, labels, noise_level, kernel, domain):
     covar = SECF
     zmean = sg.ZeroMean()
     inf = sg.ExactInferenceMethod(SECF, feat_train, zmean, labels, likelihood)
+    inf.set_scale(scale)
 
     # location of unispaced predictions
     x_test = np.array([np.linspace(domain['horizontal'][0],
@@ -167,6 +155,22 @@ def _process(feat_train, labels, noise_level, kernel, domain):
     feat_test = sg.RealFeatures(x_test)
 
     gp = sg.GaussianProcessRegression(inf)
+
+    best_width=0.0
+    best_scale=0.0
+    best_sigma=0.0
+
+    if learn == 'ML2':
+        grad = sg.GradientEvaluation(gp, feat_train, labels, sg.GradientCriterion(), False)
+        grad.set_function(inf)
+        grad_search = sg.GradientModelSelection(grad)
+        best_combination = grad_search.select_model()
+        best_combination.apply_to_machine(gp)
+        best_scale = inf.get_scale()
+        best_sigma= sg.GaussianLikelihood.obtain_from_generic(inf.get_model()).get_sigma()
+        if kernel.get_name() == 'GaussianKernel':
+            best_width = sg.GaussianKernel.obtain_from_generic(inf.get_kernel()).get_width()
+
     gp.train()
 
     # gp.set_return_type(sg.GaussianProcessRegression.GP_RETURN_COV)
@@ -179,37 +183,9 @@ def _process(feat_train, labels, noise_level, kernel, domain):
         result.append({'x': feat_test.get_feature_matrix()[0][i],
                        'y': predictions[i],
                        'range_upper': predictions[i]+2*np.sqrt(covariance[i]),
-                       'range_lower': predictions[i]-2*np.sqrt(covariance[i])})
+                       'range_lower': predictions[i]-2*np.sqrt(covariance[i]),
+                       'best_width': float(best_width),
+                       'best_scale': float(best_scale),
+                       'best_sigma': float(best_sigma)
+                       })
     return result
-
-
-def _process_ml2(feats_train, labels_train, domain):
-    n_dimensions = 1
-    inf = sg.ExactInferenceMethod(sg.GaussianKernel(10, 32.), feats_train, sg.ZeroMean(), labels_train, sg.GaussianLikelihood())
-    gp=sg.GaussianProcessRegression(inf)
-    grad=sg.GradientEvaluation(gp, feats_train, labels_train, sg.GradientCriterion(), False)
-    grad.set_function(inf)
-    grad_search=sg.GradientModelSelection(grad)
-    best_combination=grad_search.select_model()
-    best_combination.apply_to_machine(gp)
-    best_width=sg.GaussianKernel.obtain_from_generic(inf.get_kernel()).get_width()
-    best_scale=inf.get_scale()
-    best_sigma=sg.GaussianLikelihood.obtain_from_generic(inf.get_model()).get_sigma()
-    gp.train()
-    # location of unispaced predictions
-    x_test = np.array([np.linspace(domain['horizontal'][0],
-                                   domain['horizontal'][1],
-                                   feats_train.get_num_vectors())])
-    feats_test = sg.RealFeatures(x_test)
-    # gp.set_return_type(sg.GaussianProcessRegression.GP_RETURN_COV)
-    covariance=gp.get_variance_vector(feats_test)
-    # gp.set_return_type(sg.GaussianProcessRegression.GP_RETURN_MEANS)
-    predictions = gp.get_mean_vector(feats_test)
-    result=[]
-    for i in xrange(len(feats_test.get_feature_matrix()[0])):
-        result.append({'x': feats_test.get_feature_matrix()[0][i],
-                       'y': predictions[i],
-                       'range_upper': predictions[i]+2*np.sqrt(covariance[i]),
-                       'range_lower': predictions[i]-2*np.sqrt(covariance[i])})
-    return result
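
As a standalone illustration of the ML2 branch that _process now owns: the Shogun calls mirror the hunks above, while the toy data and the initial width, scale and noise values are assumptions made for this sketch.

import numpy as np
import modshogun as sg

# Toy 1-D regression problem (illustrative only).
np.random.seed(0)
X = np.array([np.linspace(0.0, 5.0, 40)])
y = np.sin(X[0]) + 0.1 * np.random.randn(40)

feat_train = sg.RealFeatures(X)
labels = sg.RegressionLabels(y)

kernel = sg.GaussianKernel(10, 2.0)    # cache size, assumed starting width
likelihood = sg.GaussianLikelihood()
inf = sg.ExactInferenceMethod(kernel, feat_train, sg.ZeroMean(), labels, likelihood)
inf.set_scale(0.1)                     # the new kernel scaling option
gp = sg.GaussianProcessRegression(inf)

# ML2: maximise the marginal likelihood via gradient-based model selection.
grad = sg.GradientEvaluation(gp, feat_train, labels, sg.GradientCriterion(), False)
grad.set_function(inf)
grad_search = sg.GradientModelSelection(grad)
best_combination = grad_search.select_model()
best_combination.apply_to_machine(gp)

best_width = sg.GaussianKernel.obtain_from_generic(inf.get_kernel()).get_width()
best_scale = inf.get_scale()
best_sigma = sg.GaussianLikelihood.obtain_from_generic(inf.get_model()).get_sigma()

gp.train()

# Predictive mean and variance on a test grid, as in _process.
feat_test = sg.RealFeatures(np.array([np.linspace(0.0, 5.0, 40)]))
predictions = gp.get_mean_vector(feat_test)
covariance = gp.get_variance_vector(feat_test)
print('width %.3f, scale %.3f, sigma %.3f' % (best_width, best_scale, best_sigma))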

3 changes: 3 additions & 0 deletions templates/classifier/gp.html
@@ -14,6 +14,9 @@
         if (data['best_width'] != 0){
             $('#sigma').val(data['best_width']);}
 
+        if (data['best_scale'] != 0){
+            $('#scale').val(data['best_scale']);}
+
         var z = data['z'];
         var domain = data['domain'];
         var minimum = Math.floor(domain[0]);
13 changes: 13 additions & 0 deletions templates/regression/gaussian_process.html
@@ -14,6 +14,19 @@
     function TrainGP(data)
     {
         json = $.parseJSON(data);
+
+        if (json[0].best_width != 0){
+            $('#sigma').val(json[0].best_width);
+            console.log(json[0].best_width)
+        }
+
+        if (json[0].best_sigma != 0){
+            $('#noise_level').val(json[0].best_sigma);}
+
+        if (json[0].best_scale != 0){
+            $('#scale').val(json[0].best_scale);}
+
+
         if (svg.selectAll(".line")[0].length == 0)
         {
             svg.append("path")
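
The regression endpoint now attaches the learned hyper-parameters to every element of the list that TrainGP reads above. One element might look like this; the numbers are made up, the keys come from _process in the diff.

# One element of the JSON list built by _process() -- illustrative values only.
point = {
    'x': 1.25,
    'y': 0.94,             # predictive mean
    'range_upper': 1.31,   # mean + 2 * sqrt(predictive variance)
    'range_lower': 0.57,   # mean - 2 * sqrt(predictive variance)
    'best_width': 2.0,     # learned kernel width (0.0 when learning is off)
    'best_scale': 0.35,    # learned kernel scale
    'best_sigma': 0.11,    # learned noise level
}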
