-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathkpca.m
83 lines (73 loc) · 2.68 KB
/
kpca.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
function [out, idxtrain, eigvector, eigvalue] = kpca(spectraldata, No_Train, dimension, kernel, parameter)
%
% Feature extraction based on Kernel principal component analysis, KPCA
%
% [out, idxtrain, eigvector, eigvalue] = kpca(spectraldata, No_Train, dimension, kernel, parameter)
%
% Input
%
% spectraldata - input hyperspectral data with 3-D, nrows by ncols by nbands
% No_Train - randomly selected samples for training (<= 5000),
% depending on the memory of your PC
% dimension - the dimension of extracted kernelized PCs (with largest eigen values)
% kernel - kernel function (default: 'Gaussian')
% parameter - parameters for kernel function
% - kernel = 'linear';% linear kernel, no parameter needed
% - kernel = 'Gaussian'; parameter=1; %Gaussian kernel
% - kernel = 'poly'; parameter=[1,3];% third order polynomial
%
%
% Output
%
% out - the extracted kernelized PCs, nrows by ncols by dimension
% idxtrain - linear indices of the randomly selected training pixels
% eigvector - the eigenvectors divided by square root of corresponding eigenvalues
% eigvalue - the first dimension largest eigenvalues
%
%
% Copyright 2009-2011
% Wenzhi Liao
% wliao@telin.ugent.be, http://telin.ugent.be/~wliao/
% 2 Oct 2010
if nargin < 3, error('not enough input'); end
% BUG FIX: the original tested "nargin < 4" and then read the variable
% `kernel` inside that branch -- but when nargin < 4 `kernel` is undefined,
% so any 3-argument call crashed. Default the kernel itself when it is
% absent, and default `parameter` only when the kernel was given without it.
if nargin < 4
    kernel = 'Gaussian';
end
if nargin < 5
    if strncmp(kernel,'Gaussian',1)
        parameter = 1;
    elseif strncmp(kernel,'poly',1)
        parameter = [1, 3];
    else
        % e.g. 'linear': no kernel parameter required; pass an empty
        % placeholder so kernelize_training/kernelize_test can be called.
        parameter = [];
    end
end
[nrows,ncols,nbands] = size(spectraldata);
% Flatten the cube to a (pixels x bands) matrix (MATLAB column-major order).
X0 = reshape(spectraldata,nrows*ncols,nbands);
clear spectraldata
%% sub-sample for training, select No_Train samples randomly
% NOTE: legacy rand('state',...) seeding is kept deliberately so that the
% selected training pixels stay reproducible and identical to earlier runs.
rand('state',4711007);% initialization of rand
if No_Train>nrows*ncols, No_Train = nrows*ncols; end % cannot train on more pixels than exist
idxtrain = randsample(nrows*ncols, No_Train);
X = double(X0(idxtrain,:));
ntrain = size(X,1);
Xtest = X0;
ntest = size(Xtest,1);
clear X0;
%% kernelized training data and centering the kernel matrix
[K, scale, sums] = kernelize_training(kernel, X, parameter);
meanK = mean(K(:));  % grand mean of the training kernel, reused for test centering
meanrowsK = mean(K); % column means of the training kernel, reused for test centering
K = centering(K);
%% select the first `dimension` eigenvectors (largest magnitude eigenvalues)
dimout = ntrain;
if dimout>dimension, dimout=dimension; end % cannot extract more PCs than training samples
[eigvector,eigvalue,flagk] = eigs(K, dimout, 'LM');
if flagk~=0, warning('*** Convergence problems in eigs ***'); end
eigvalue = diag(abs(eigvalue))';
% Scale eigenvectors by 1/sqrt(eigenvalue) so projected components are normalized.
eigvector = sqrt(ntrain-1)*eigvector*diag(1./sqrt(eigvalue));
clear K
out = NaN(ntest,dimout);
%% kernelize the test samples, center kernel and calculate kernel PCs
% Project ncols pixels per iteration to bound memory use; each chunk's test
% kernel is centered with the training-kernel statistics computed above.
for rr=1:nrows
    idx = (rr-1)*ncols+1;
    idx = idx:(idx+ncols-1);
    Xk = kernelize_test(kernel, X, Xtest(idx,:), scale, sums); % ntrain x ncols
    Xk = Xk - repmat(meanrowsK,ncols,1)' - repmat(mean(Xk),ntrain,1) + meanK;
    out(idx,:) = Xk'*eigvector;
end
out = reshape(out,nrows,ncols,dimout);