
2DPCA MATLAB code: MATLAB source code for face recognition with 2DPCA

薛淮晨
2023-12-01

function [eigvectors, eigvalues, meanData, newTrainData, newTestData] = TDPCA(trainData, testData, height, width, numvecs)
%2DPCA        Two Dimensional Principal Component Analysis
%
%        Usage:
%        [eigvectors, eigvalues, meanData, newTrainData, newTestData] = TDPCA(trainData, testData, height, width, numvecs)
%
%        trainData:    Rows of vectors of training data points
%        testData:     Rows of vectors of testing data points
%        height:       height of the image matrix
%        width:        width of the image matrix
%        numvecs:      the needed number of eigenvectors
%
%        meanData:     Mean of all the data
%        newTrainData: The training data after projection (mean removed)
%        newTestData:  The testing data after projection (mean removed)
%        eigvectors:   Each column of this matrix is an eigenvector of the
%                      covariance matrix defined in 2DPCA
%        eigvalues:    Eigenvalues of the covariance matrix
%
%   Reference paper: J. Yang, D. Zhang, A. F. Frangi, and J.-y. Yang.
%                    Two-dimensional PCA: a new approach to appearance-based
%                    face representation and recognition. IEEE Trans. on PAMI, 2004.
%
%   Written by Zhonghua Shen (cnjsnt_s@yahoo.com.cn), 2006.07

% Check arguments
if nargin ~= 5
    error('usage: [eigvectors, eigvalues, meanData, newTrainData, newTestData] = TDPCA(trainData, testData, height, width, numvecs)');
end

[nSam, nFea] = size(trainData);

fprintf(1, 'Computing average matrix...\n');
meanDataVector = mean(trainData);
meanData = reshape(meanDataVector, height, width);
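% (Added note, not in the original post) reshape in MATLAB is column-major, so
% each row of trainData/testData is assumed to be the column-major flattening
% img(:)' of a height-by-width face image; a row-major flattening would be
% reassembled incorrectly by the reshape calls below.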

fprintf(1, 'Calculating matrix differences from avg and 2DPCA covariance matrix L...\n');
L = zeros(width, width);
ddata = zeros(nSam, nFea);
for i = 1:nSam
    ddata(i, :) = trainData(i, :) - meanDataVector;
    dummyMat = reshape(ddata(i, :), height, width);
    L = L + dummyMat' * dummyMat;
end
L = L / nSam;
L = (L + L') / 2;
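% (Added note) L is the image total scatter matrix of 2DPCA from Yang et al.
% (2004):  G_t = (1/M) * sum_i (A_i - Abar)' * (A_i - Abar), with M = nSam and
% A_i the i-th height-by-width training image, so L is width-by-width; the
% symmetrization above only removes numerical asymmetry before eig().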

fprintf(1, 'Calculating eigenvectors of L...\n');
[eigvectors, eigvalues] = eig(L);

fprintf(1, 'Sorting eigenvectors according to eigenvalues...\n');
[eigvectors, eigvalues] = sortem(eigvectors, eigvalues);
eigvalues = diag(eigvalues);
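% (Added note) sortem is not a built-in MATLAB function and is not part of this
% listing; it is assumed to reorder the eigenvector columns by descending
% eigenvalue and to return the eigenvalues still in diagonal-matrix form (see
% the sketch after the listing).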

fprintf(1, 'Normalize Vectors to unit length, kill vectors corr. to tiny evalues...\n');
num_good = 0;
for i = 1:size(eigvectors, 2)
    eigvectors(:, i) = eigvectors(:, i) / norm(eigvectors(:, i));
    if eigvalues(i) < 0.00001
        % Set the vector to the 0 vector; set the value to 0.
        eigvalues(i) = 0;
        eigvectors(:, i) = zeros(size(eigvectors, 1), 1);
    else
        num_good = num_good + 1;
    end
end

if (numvecs > num_good)
    fprintf(1, 'Warning: numvecs is %d; only %d exist.\n', numvecs, num_good);
    numvecs = num_good;
end
eigvectors = eigvectors(:, 1:numvecs);

if nargout == 5
    fprintf(1, 'Feature extraction and calculating new training and testing data...\n');
    newTrainData = zeros(nSam, height * numvecs);
    for i = 1:nSam
        dummyMat = reshape(ddata(i, :), height, width);
        newTrainData(i, :) = reshape(dummyMat * eigvectors, 1, height * numvecs);
    end
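    % (Added note) Each row built above is the flattened 2DPCA feature matrix
    % Y_i = (A_i - meanData) * eigvectors, of size height-by-numvecs, stored as
    % a 1-by-(height*numvecs) row vector.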

    %testData
……………………
………… Due to the length of this post, please download the remaining code from the attachment on 51黑 …………
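The elided block presumably mirrors the training-data loop above for testData: center each test image with the training mean and project it onto the same eigvectors. A minimal sketch under that assumption (not the author's original code; the closing end of the if nargout == 5 block is likewise assumed to sit in the elided part):

    nSamTest = size(testData, 1);
    newTestData = zeros(nSamTest, height * numvecs);
    for i = 1:nSamTest
        dummyMat = reshape(testData(i, :) - meanDataVector, height, width);
        newTestData(i, :) = reshape(dummyMat * eigvectors, 1, height * numvecs);
    end
end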

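sortem is not shipped with MATLAB and is not included in the listing above. Below is a minimal sketch of a helper with the behaviour TDPCA assumes (eigenvector columns reordered by descending eigenvalue, eigenvalues kept as a diagonal matrix so the later diag call still works); this is an assumption, not the original author's helper:

function [vectors, values] = sortem(vectors, values)
% Sort eigenvector columns by descending eigenvalue; keep values diagonal.
[sortedVals, order] = sort(diag(values), 'descend');
vectors = vectors(:, order);
values = diag(sortedVals);
end

Finally, a hedged usage sketch; the image size, variable names, and the knnsearch classifier are illustrative assumptions, not part of the post. Each face image is vectorized column-major into one row of the data matrices before calling TDPCA, and matching is done in the projected feature space:

% trainImgs / testImgs: height x width x N arrays of face images (assumed inputs).
height = 112; width = 92; numvecs = 10;                        % e.g. ORL-sized faces
trainData = reshape(double(trainImgs), height * width, []).';  % rows = img(:)'
testData  = reshape(double(testImgs),  height * width, []).';
[eigvectors, eigvalues, meanData, newTrainData, newTestData] = ...
    TDPCA(trainData, testData, height, width, numvecs);
% Nearest-neighbour matching in the 2DPCA feature space (Statistics Toolbox):
nearestTrain = knnsearch(newTrainData, newTestData);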