first commit

0 parents commit 57dd408738c5f404a1e8314ac1e31374af88201e @bytefish committed Aug 10, 2011
@@ -0,0 +1,18 @@
+# facerec #
+
+This repository contains code for performing face recognition with GNU Octave and Python. It implements:
+
+* Eigenfaces
+* Fisherfaces
+* k-fold Cross Validation
+* Leave-One-Out Cross Validation
+* k-Nearest Neighbor
+* ...
+
+The classes and function definition files are commented and include examples. Most of this is also discussed at [http://www.bytefish.de/blog/fisherfaces](http://www.bytefish.de/blog/fisherfaces).
+
+## m ##
+GNU Octave implementation.
+
+## py ##
+Python implementation.
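+
+## usage ##
+
+A minimal sketch of a Fisherfaces run in GNU Octave (the data path is a placeholder, point read_images at a folder of face images; see the example script for more):
+
+    % load function files from subfolders as well
+    addpath(genpath("."));
+    % read images (placeholder path)
+    [X y width height names] = read_images("/path/to/images");
+    % learn Fisherfaces, predict with 1-NN
+    fun_fisherface = @(X,y) fisherfaces(X,y);
+    fun_predict = @(model, Xtest) fisherfaces_predict(model, Xtest, 1);
+    % validate with a Leave-One-Out Cross Validation
+    cv0 = LeaveOneOutCV(X, y, fun_fisherface, fun_predict, 1)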
@@ -0,0 +1,25 @@
+Copyright (c) 2011, philipp <bytefish[at]gmx.de>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the organization nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
@@ -0,0 +1,109 @@
+% load function files from subfolders as well
+addpath (genpath ("."));
+
+% load data
+[X y width height names] = read_images("/home/philipp/facerec/data/yalefaces_recognition");
+
+% Fisherfaces example, like the Python version
+fun_fisherface = @(X,y) fisherfaces(X,y); % no parameters needed
+fun_predict = @(model, Xtest) fisherfaces_predict(model, Xtest, 1); % 1-NN
+
+cv0 = LeaveOneOutCV(X,y,fun_fisherface, fun_predict, 1)
+
+%% There's no OOP here. If you want to pass parameters to the validation,
+%% bind them to the function; see the examples.
+
+% Learn Eigenfaces with 100 components
+fun_eigenface = @(X,y) eigenfaces(X,y,100);
+fun_predict = @(model, Xtest) eigenfaces_predict(model, Xtest, 1); % 1-NN
+
+% a Leave-One-Out Cross Validation
+cv0 = LeaveOneOutCV(X,y,fun_eigenface, fun_predict, 1)
+
+% a 10-fold cross validation
+cv1 = KFoldCV(X,y,10,fun_eigenface, fun_predict,1)
+% a 3-fold cross validation
+cv2 = KFoldCV(X,y,3,fun_eigenface, fun_predict,1)
+
+eigenface = eigenfaces(X,y,100);
+% plot the first (at most) 16 eigenfaces
+figure; hold on;
+for i=1:min(16, size(eigenface.W,2))
+ subplot(4,4,i);
+ comp = cvtGray(eigenface.W(:,i), width, height);
+ imshow(comp);
+ title(sprintf("Eigenface #%i", i));
+endfor
+
+%% 2D plot of projection (add the classes you want)
+figure; hold on;
+for i = findclasses(eigenface.y, [1,2,3])
+ text(eigenface.P(1,i), eigenface.P(2,i), num2str(eigenface.y(i)));
+endfor
+
+%% 3D plot of projection (first three classes, add those you want)
+figure; hold on;
+for i = findclasses(eigenface.y, [1,2,3])
+ plot3(eigenface.P(1,i), eigenface.P(2,i), eigenface.P(3,i), 'r.');
+ text(eigenface.P(1,i), eigenface.P(2,i), eigenface.P(3,i), num2str(eigenface.y(i)));
+endfor
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Fisherfaces
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+% Fisherfaces example, like the Python version
+fisherface = @(X,y) fisherfaces(X,y); % no parameters needed
+predict = @(model, Xtest) fisherfaces_predict(model, Xtest, 1); % 1-NN
+% a 10-fold cross validation
+cv1 = KFoldCV(X,y,10,fisherface, predict)
+% a 3-fold cross validation
+cv2 = KFoldCV(X,y,3,fisherface,predict)
+
+% scatter plots of the projections follow below
+
+fisherface = fisherfaces(X,y);
+% plot the first (at most) 16 fisherfaces
+figure; hold on;
+for i=1:min(16, size(fisherface.W,2))
+ subplot(4,4,i);
+ comp = cvtGray(fisherface.W(:,i), width, height);
+ imshow(comp);
+ title(sprintf("Fisherface #%i", i));
+endfor
+
+%{
+figure; hold on;
+x_values = [1:1:width];
+y_values = [1:1:height];
+for i=1:min(16, size(fisherface.W,2))
+ subplot(4,4,i);
+ contourf(x_values, y_values, cvtGray(fisherface.W(:,i),width,height));
+ axis("equal")
+endfor
+%}
+
+%% 2D plot of projection (first three classes)
+figure; hold on;
+for i = findclasses(fisherface.y, [1,2,3])
+  text(fisherface.P(1,i), fisherface.P(2,i), num2str(fisherface.y(i)));
+endfor
+
+%% 3D plot of projection (first three classes)
+figure; hold on;
+for i = findclasses(fisherface.y, [1,2,3])
+  plot3(fisherface.P(1,i), fisherface.P(2,i), fisherface.P(3,i), 'r.');
+  text(fisherface.P(1,i), fisherface.P(2,i), fisherface.P(3,i), num2str(fisherface.y(i)));
+endfor
+
+% maybe a contour plot is useful?
+figure;
+x_values = [1:1:width];
+y_values = [1:1:height];
+contourf(x_values, y_values, cvtGray(fisherface.W(:,13),width,height));
+colorbar;
+axis("equal");
+
@@ -0,0 +1,33 @@
+function model = eigenfaces(X, y, num_components)
+ %% Performs a PCA on X and stores num_components principal components.
+ %%
+ %% Args:
+ %% X [dim x num_data] Input
+ %% y [1 x num_data] Classes
+  %%  num_components [1x1] Number of components to use.
+  %%
+  %% Out:
+  %%  model [struct] Learned model
+  %%    .name [char] Name of this model.
+  %%    .W [dim x num_components] Components identified by PCA.
+  %%    .num_components [1x1] Number of components used in this model.
+  %%    .mu [dim x 1] Mean of the training data.
+  %%    .P [num_components x num_data] Projections of the training data.
+  %%    .y [1 x num_data] Classes of the training data.
+  %%
+  %% Example:
+  %%  eigenfaces(X, y, 100)
+  %%
+ if(nargin < 3)
+ num_components=size(X,2)-1;
+ endif
+  % perform a PCA (the data is centered inside pca)
+  Pca = pca(X, y, num_components);
+
+ % build model
+ model.name = "eigenfaces";
+ model.W = Pca.W;
+ model.num_components = num_components;
+ model.mu = Pca.mu;
+ model.P = model.W'*(X - repmat(Pca.mu, 1, size(X,2)));
+ model.y = y;
+endfunction
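+
+%{
+% minimal usage sketch, assuming X, y were loaded with read_images
+% as in the example script:
+model = eigenfaces(X, y, 100);
+c = eigenfaces_predict(model, Xtest, 1); % classify a query column vector Xtest
+%}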
@@ -0,0 +1,4 @@
+function C = eigenfaces_predict(model, Xtest, k)
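+  %% Predicts the class C of the query column vector Xtest by projecting
+  %% it onto the learned components and running a k-NN search (see knn.m).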
+ Q = model.W' * (Xtest - model.mu);
+ C = knn(model.P, model.y, Q, k);
+endfunction
@@ -0,0 +1,22 @@
+function model = fisherfaces(X, y, num_components)
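+  %% Performs a Fisherfaces analysis (a PCA followed by an LDA, see
+  %% paper [BHK1997]) on X and stores min(c-1, num_components) components.
+  %%
+  %% Args:
+  %%  X [dim x num_data] Input
+  %%  y [1 x num_data] Classes
+  %%  num_components [1x1] Number of components to use (at most c-1).
+  %%
+  %% Out:
+  %%  model [struct] Learned model (same fields as in eigenfaces.m).
+  %%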
+ N = size(X,2);
+ c = max(y);
+
+ % set the num_components
+ if(nargin==2)
+ num_components=c-1;
+ endif
+ num_components = min(c-1,num_components);
+
+ % reduce dim(X) to (N-c) (see paper [BHK1997])
+ Pca = pca(X,y,(N-c));
+ Lda = lda(project(X, Pca.W, Pca.mu), y, num_components);
+
+ % build model
+  model.name = "fisherfaces";
+  model.mu = zeros(size(X,1), 1);
+ model.W = Pca.W*Lda.W;
+ model.P = model.W'*X;
+ model.num_components = Lda.num_components;
+ model.y = y;
+endfunction
@@ -0,0 +1,4 @@
+function C = fisherfaces_predict(model, Xtest, k)
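+  %% Predicts the class C of the query column vector Xtest by projecting
+  %% it onto the learned Fisherfaces and running a k-NN search (see knn.m).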
+ Q = model.W' * Xtest;
+ C = knn(model.P, model.y, Q, k);
+endfunction
@@ -0,0 +1,51 @@
+function c = knn(P, y, Q, k)
+ %% k-nearest neighbor classification.
+ %%
+ %% Args:
+  %%  P: Reference vectors given in columns.
+  %%  y: Classes corresponding to P. (y = {1,2,...,n})
+  %%  Q: Query column vector.
+ %% k: Number of nearest neighbors for prediction.
+ %%
+ %% Returns:
+ %% c: Class identified by the majority of k neighbors.
+ %%
+ %% Example:
+ %% P=[1,21,20,2,4,30;
+ %% 1,21,20,2,4,30]
+ %% y=[1, 3, 3,2,2, 3]
+ %% Q=[1;1]
+ %%
+  %%  knn(P,y,Q,1) % returns 1
+  %%  knn(P,y,Q,3) % returns 2
+  %%  knn(P,y,Q,6) % returns 3
+ %%
+ n = size(P,2);
+ % clip k
+ if (nargin == 3)
+ k=1;
+ elseif (k>n)
+ k=n;
+ endif
+
+ Q = repmat(Q, 1, n);
+ distances = sqrt(sum(power((P-Q),2),1));
+ [distances, idx] = sort(distances);
+ y = y(idx);
+ y = y(1:k);
+ h = histc(y,(1:max(y)));
+ [v,c] = max(h);
+endfunction
+
+%{
+P=[1,21,20,2,4,30;
+ 1,21,20,2,4,30]
+y=[1, 3, 3,2,2, 3]
+Q=[1;1]
+
+knn(P,y,Q,1) % c == 1
+knn(P,y,Q,3) % c == 2
+knn(P,y,Q,6) % c == 3
+%}
+
+
@@ -0,0 +1,60 @@
+function model = lda(X, y, num_components)
+ %% Performs a Linear Discriminant Analysis and returns the
+ %% num_components components sorted descending by their
+ %% eigenvalue.
+ %%
+ %% num_components is bound to the number of classes, hence
+ %% num_components = min(c-1, num_components)
+ %%
+ %% Args:
+  %%    X: Array with observations given in columns.
+  %%    y: Classes corresponding to X.
+ %% num_components: Number of components to store.
+ %%
+ %% Returns:
+ %% model: Represents the learned model.
+ %%
+ %% Model description:
+ %% mu - mean of the model.
+ %% name - "lda"
+ %% W - 1:num_components eigenvectors
+ %%
+ %% Example:
+  %%    lda(X, y, 200)
+ %%
+ dim = size(X,1);
+ c = max(y);
+
+ if(nargin==2)
+    num_components = c - 1;
+ endif
+
+ num_components = min(c-1,num_components);
+
+ meanTotal = mean(X,2);
+
+ Sw = zeros(dim, dim);
+ Sb = zeros(dim, dim);
+ for i=1:c
+ Xi = X(:,find(y==i));
+ meanClass = mean(Xi,2);
+ % center data
+ Xi = Xi - repmat(meanClass, 1, size(Xi,2));
+ % calculate within-class scatter
+ Sw = Sw + Xi*Xi';
+ % calculate between-class scatter
+ Sb = Sb + size(Xi,2)*(meanClass-meanTotal)*(meanClass-meanTotal)';
+ endfor
+
+ % solve the eigenvalue problem
+ [V, D] = eig(Sb,Sw);
+
+ % sort eigenvectors descending by eigenvalue
+ [D,idx] = sort(diag(D),1,'descend');
+ V = V(:,idx);
+
+ % build model
+ model.name = "lda";
+ model.num_components = num_components;
+  model.W = V(:,1:num_components);
+endfunction
@@ -0,0 +1,34 @@
+function model = pca(X, y, num_components)
+ %% Performs a PCA on X and stores num_components principal components.
+ %%
+ %% Args:
+ %% X [dim x num_data] Input
+  %%  y [1 x num_data] Classes (unused, kept for a consistent interface)
+  %%  num_components [1x1] Number of components to use.
+  %%
+  %% Out:
+  %%  model [struct] Learned model
+  %%    .name [char] Name of this model.
+  %%    .W [dim x num_components] Components identified by PCA.
+  %%    .num_components [1x1] Number of components used in this model.
+  %%    .mu [dim x 1] Mean of the training data.
+  %%
+  %% Example:
+  %%  pca(X, y, 100)
+ %%
+ if(nargin < 3)
+ num_components=size(X,2)-1;
+ endif
+ % center data
+ mu = mean(X,2);
+ X = X - repmat(mu, 1, size(X,2));
+ % svd on centered data == pca
+  [E,D,V] = svd(X, 'econ');
+
+ % build model
+ model.name = "pca";
+ model.W = E(:,1:num_components);
+ model.num_components = num_components;
+ model.mu = mu;
+endfunction
@@ -0,0 +1,13 @@
+function c = predict(model, Q, opts)
+  %% Predicts the class of Q by performing a k-NN search on a given
+  %% model, with reference vectors in model.P.
+  %%
+  %% Args:
+  %%    model: Model learned by eigenfaces or fisherfaces.
+  %%    Q: Query column vector.
+  %%    opts:
+  %%      k: k in knn (see knn.m), defaults to 1.
+  %%
+  if(nargin < 3 || ~isfield(opts,"k"))
+    opts.k = 1;
+  endif
+  Q = project(Q, model.W, model.mu);
+ c = knn(model.P, model.y, Q, opts.k);
+endfunction
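+
+%{
+% minimal usage sketch, assuming a model learned by eigenfaces or
+% fisherfaces and a query column vector Q:
+c = predict(model, Q, struct("k", 1));
+%}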
@@ -0,0 +1,15 @@
+function Y = project(X, W, mu)
+  %% Projects X onto W after subtracting the mean mu.
+ %%
+ %% Args:
+ %% X: Array with observations given in columns.
+ %% W: Array representing the transformation matrix.
+  %%    mu: Mean vector subtracted from X before the projection.
+  %%
+ %% Returns:
+ %% Y: Projection of X.
+ %%
+ X = X - repmat(mu, 1, size(X,2));
+ Y = W'*X;
+endfunction
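+
+%{
+% minimal usage sketch, assuming a model learned by eigenfaces or
+% fisherfaces:
+Y = project(X, model.W, model.mu);
+%}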