-
Notifications
You must be signed in to change notification settings - Fork 6
/
A_Main.m
64 lines (52 loc) · 1.76 KB
/
A_Main.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
% Machine Learning Toolbox by Jingwei Too - 10/12/2020
%---Input-------------------------------------------------------------
% feat  : feature vector matrix (Instances x Features)
% label : label matrix (Instances x 1)
% opts  : parameter settings
% opts.tf    : validation type (1 = hold-out, 2 = k-fold, 3 = leave-one-out)
% opts.ho    : ratio of testing data in hold-out validation
% opts.kfold : number of folds in k-fold cross-validation
%---Output-------------------------------------------------------------
% ML     : machine learning model (contains several results)
% ML.acc : classification accuracy
% ML.con : confusion matrix
% ML.t   : computational time (s)
%----------------------------------------------------------------------
%% Example 1: K-nearest neighbor (KNN) with k-fold cross-validation
% Reset opts so fields set by a previously run section (e.g. opts.ho,
% opts.fun, opts.nSplit) do not leak into this example -- MATLAB %% cells
% share the base workspace.
clear opts
% Parameter settings
opts.tf    = 2;    % 2 = k-fold cross-validation
opts.kfold = 10;   % number of folds
opts.k     = 5;    % k-value in KNN
% Load data (provides feat & label -- TODO confirm variable names in iris.mat)
load iris.mat;
% Classification
ML = jml('knn',feat,label,opts);
% Accuracy
accuracy = ML.acc;
% Confusion matrix
confmat = ML.con;
%% Example 2: Multi-class support vector machine (MSVM) with hold-out validation
% Reset opts: without this, running Example 1 first leaves stale
% opts.kfold and opts.k fields in the struct passed to jml.
clear opts
% Parameter settings
opts.tf  = 1;      % 1 = hold-out validation
opts.ho  = 0.3;    % ratio of testing data
opts.fun = 'r';    % radial basis kernel function in SVM
% Load data (provides feat & label -- TODO confirm variable names in iris.mat)
load iris.mat;
% Classification
ML = jml('msvm',feat,label,opts);
% Accuracy
accuracy = ML.acc;
% Confusion matrix
confmat = ML.con;
%% Example 3: Decision Tree (DT) with leave-one-out validation
% Reset opts: without this, running Example 2 first leaves stale
% opts.ho and opts.fun fields in the struct passed to jml.
clear opts
% Parameter settings
opts.tf     = 3;    % 3 = leave-one-out validation
opts.nSplit = 50;   % number of splits in DT
% Load data (provides feat & label -- TODO confirm variable names in iris.mat)
load iris.mat;
% Classification
ML = jml('dt',feat,label,opts);
% Accuracy
accuracy = ML.acc;
% Confusion matrix
confmat = ML.con;