darknet19cv.m
function [t,x] = resnet101cv(path,optimizer,augmentation,numfold)
%RESNET101CV  K-fold cross-validated transfer learning with a pretrained ResNet-101.
% path        : path to the dataset folder (one subfolder per class; folder names become labels)
% optimizer   : 'adam', 'rmsprop', or 'sgdm'
% augmentation: 0 for no data augmentation, 1 to enable data augmentation
% numfold     : number of cross-validation folds (2, 3, 4, 5, ...)
% Example:
%   [t,x] = resnet101cv('G:\new researches\dataset224','adam',0,5);
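% Outputs (assembled at the end of this function):
%   t : [AUC, accuracy, sensitivity, specificity, precision, recall, F-measure, G-mean]
%   x : {t, path, optimizer, augmentation, numfold}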
digitDatasetPath = fullfile(path);
imds = imageDatastore(digitDatasetPath, ...
'IncludeSubfolders',true,'LabelSource','foldernames');
% Class distribution of the full dataset (displayed in the command window)
total_split=countEachLabel(imds)
% Number of Images
num_images=length(imds.Labels);
% Visualize a few random images (optional; uncomment to preview the dataset)
perm=randperm(num_images,6);
% for idx=1:length(perm)
%     subplot(2,3,idx);
%     imshow(imread(imds.Files{perm(idx)}));
%     title(sprintf('%s',imds.Labels(perm(idx))))
% end
%% K-fold Cross-Validation
% Number of folds
num_folds=numfold;
% Loop for each fold
for fold_idx=1:num_folds
fprintf('Processing fold %d of %d\n',fold_idx,num_folds);
% Test Indices for current fold
test_idx=fold_idx:num_folds:num_images;
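% The split is interleaved: fold k tests images k, k+num_folds, k+2*num_folds, ...
% (e.g. with 5 folds, fold 2 tests images 2, 7, 12, ...).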
% Test cases for current fold
imdsTest = subset(imds,test_idx);
labeltest=countEachLabel(imdsTest);
% Train indices for current fold
train_idx=setdiff(1:length(imds.Files),test_idx);
% Train cases for current fold
imdsTrain = subset(imds,train_idx);
labeltrain= countEachLabel(imdsTrain);
% Load the pretrained ResNet-101 and extract its layer graph
net=resnet101;
lgraph = layerGraph(net);
clear net;
% Number of categories
numClasses = numel(categories(imdsTrain.Labels));
% New Learnable Layer
newLearnableLayer = fullyConnectedLayer(numClasses, ...
'Name','new_fc', ...
'WeightLearnRateFactor',10, ...
'BiasLearnRateFactor',10);
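% The learn-rate factors of 10 let the new layer train faster than the pretrained
% layers, which keep the base learning rate set in the training options below.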
% Replacing the last layers with new layers
lgraph = replaceLayer(lgraph,'fc1000',newLearnableLayer);
newsoftmaxLayer = softmaxLayer('Name','new_softmax');
lgraph = replaceLayer(lgraph,'fc1000_softmax',newsoftmaxLayer);
newClassLayer = classificationLayer('Name','new_classoutput');
lgraph = replaceLayer(lgraph,'ClassificationLayer_fc1000',newClassLayer);
% Training options: a small mini-batch size is used because the dataset is small
options = trainingOptions(optimizer,...
'MaxEpochs',20,'MiniBatchSize',8,...
'Shuffle','every-epoch', ...
'InitialLearnRate',1e-4, ...
'Verbose',false,...
'Plots','training-progress');
%'LearnRateSchedule','piecewise'
%'OutputFcn',@(info)savetrainingplot(info)
if augmentation==1
% Data augmentation
augmenter = imageDataAugmenter( ...
'RandRotation',[-5 5],'RandXReflection',1,...
'RandYReflection',1,'RandXShear',[-0.05 0.05],'RandYShear',[-0.05 0.05]);
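% The augmenter applies random +/-5 degree rotations, horizontal and vertical
% reflections, and +/-5% shears on the fly as images are read; no augmented
% copies are written to disk.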
% Resizing all training images to [224 224] for ResNet architecture
auimds = augmentedImageDatastore([224 224],imdsTrain,'DataAugmentation',augmenter);
% Training
netTransfer = trainNetwork(auimds,lgraph,options);
else
netTransfer = trainNetwork(imdsTrain,lgraph,options);
end
% Resize all test images to [224 224] to match the ResNet-101 input size
augtestimds = augmentedImageDatastore([224 224],imdsTest);
% Predicted labels and posterior probabilities for the current fold's test cases
[predicted_labels(test_idx),posterior(test_idx,:)] = classify(netTransfer,augtestimds);
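% Because the assignments are indexed by test_idx, predicted_labels and posterior
% accumulate one prediction and one posterior row per image across all folds.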
% Save the trained ResNet-101 network and the split of the current fold
save(sprintf('ResNet101_%d_among_%d_folds',fold_idx,num_folds),'netTransfer','test_idx','train_idx','labeltest','labeltrain');
delete(findall(0)) % close all open figures (including the training-progress plot) before the next fold
% Clearing unnecessary variables
clearvars -except optimizer path augmentation total_split numfold fold_idx num_folds num_images predicted_labels posterior imds netTransfer;
end
%analyzeNetwork(netTransfer)
%% Performance Study
% Actual Labels
actual_labels=imds.Labels;
% Confusion Matrix
%figure;
%plotconfusion(actual_labels,predicted_labels')
%title('Confusion Matrix');
%ROC CURVE
test_labels=double(imds.Labels);
% ROC Curve - Our target class is the first class in this scenario
[fp_rate,tp_rate,T,AUC]=perfcurve(test_labels,posterior(:,1),1);
%figure;
%plot(fp_rate,tp_rate,'b-');
%grid on;
%xlabel('False Positive Rate');
%ylabel('Detection Rate');
% Area under the ROC curve value
AUC
% Confusion-matrix-based metrics (class 1 is treated as the positive class)
ACTUAL=actual_labels;
PREDICTED=predicted_labels';
% Logical index of the positive class (the first class in the label list)
idx = (ACTUAL==total_split.Label(1));
p = length(ACTUAL(idx));                  % number of positive cases
n = length(ACTUAL(~idx));                 % number of negative cases
N = p+n;                                  % total number of cases
tp = sum(ACTUAL(idx)==PREDICTED(idx));    % true positives
tn = sum(ACTUAL(~idx)==PREDICTED(~idx));  % true negatives
fp = n-tn;                                % false positives
fn = p-tp;                                % false negatives
tp_rate = tp/p;
tn_rate = tn/n;
accuracy = (tp+tn)/N;
sensitivity = tp_rate;
specificity = tn_rate;
precision = tp/(tp+fp);
recall = sensitivity;
f_measure = 2*((precision*recall)/(precision + recall));
gmean = sqrt(tp_rate*tn_rate);
t=[AUC,accuracy,sensitivity,specificity,precision,recall,f_measure,gmean];
x={t,path,optimizer,augmentation,numfold};
end