source: distools/kem.m @ 31

Last change on this file since 31 was 18, checked in by bduin, 13 years ago

clevald, parzenddc, parzend_map and kem added

File size: 6.8 KB
RevLine 
[18]1%KEM Kernel Embedding
2%
3%     W = KEM(D,ALF,P)
4%                 OR
5%     W = KEM(W,ALF)
6%
7% INPUT
8%   D   NxN kernel matrix (dataset)
9%   W   Trained linear embedding into a pseudo-Euclidean space
10%   ALF Parameter determining the dimensionality and the mapping (optional, default: Inf)
11%       (0,1)   - Fraction of the total (absolute value) preserved variance
12%        Inf    - No dimensionality reduction, keeping all dimensions (it's VERY noisy)
13%       'p'     - Projection into a Euclidean space based on positive eigenvalues only
14%       'PARp'  - Projection into a Euclidean space based on the PAR fraction of
15%                 positive eigenvalues; e.g. ALF = '0.9p'
16%       'n'     - Projection into a Euclidean space based on negative eigenvalues only
17%       'PARn'  - Projection into a (negative) Euclidean space based on the PAR fraction
18%                 of negative eigenvalues; e.g. ALF = '0.7n'
19%       'P1pP2n'- Projection into a Euclidean space based on the P1 positive eigenvalues
20%                 and P2 negative eigenvalues; e.g. ALF = '0.7p0.1n', ALF = '7p2n'
21%       1 .. N  - Number of dimensions in total
22%       [P1 P2] - P1 dimensions or preserved fraction of variance in the positive subspace
23%                 and P2 dimensions or preserved fraction of variance in the negative
24%                 subspace; e.g. ALF = [5 10], ALF = [0.9 0.1]
25%   P   Integer between 0 and N specifying which object is mapped at the origin;
26%       0 stands for the mean; (optional, default: 0)
27%
28% OUTPUT
29%   W   Linear embedding into a pseudo-Euclidean space
30%
31% DEFAULT
32%   P   = 0
33%   ALF = INF
34%
35% DESCRIPTION
36% Linear mapping W onto an M-dimensional Pseudo-Euclidean (PE) subspace from a
37% symmetric, square kernel matrix D such that the similarities are preserved.
38% M is determined by ALF. E.g., the subspace is found such that at least a fraction
39% ALF of the total variance is preserved for ALF in (0,1). The resulting X is found
40% by D*W. The signature of the obtained PE space (numbers of positive and negative
41% directions) can be found by PE_SIG(W). The spectrum of the obtained space
42% can be found by PE_SPEC(W).
43%
44% A trained mapping can be reduced further by:   W = KEM(W,ALF)
% The signature of the obtained PE space can be found by PE_SIG(W).
% The spectrum of the obtained space can be found by PE_SPEC(W).
47%
48% SEE ALSO
49% MAPPINGS, DATASETS, PE_EM, AUGPSEM, PCA, PE_PCA, PE_SPEC, GETSIG, SETSIG
50%
51% LITERATURE
52% 1. L. Goldfarb, A unified approach to pattern recognition, Pattern Recognition, vol.17,
53% 575-582, 1984.
54% 2. E. Pekalska, P. Paclik, and R.P.W. Duin, A Generalized Kernel Approach to
55% Dissimilarity-based Classification, Journal of Machine Learning Research,
56% vol.2, no.2, 175-211, 2002.
57
58% Copyright: Elzbieta Pekalska, ela.pekalska@googlemail.com
59% Faculty EWI, Delft University of Technology and
60% School of Computer Science, University of Manchester
61
62% This routine is adapted from PE_EM by R.P.W. Duin.
63
function [W,sig,L,Q] = kem(d,alf,pzero,prec)

% PREC is the precision parameter used for the automatic
% selection (heuristic) of the number of dominant eigenvalues.
% This happens when SELEIGS is called with the parameter 'CUT'.

if nargin < 4, prec = [];  end
if nargin < 3, pzero = []; end
if nargin < 2, alf = [];   end     % fixed: statement separator after the condition was missing

% No data supplied: return an untrained mapping definition.
% (|| instead of | so ISEMPTY(D) is never evaluated when D is undefined.)
if nargin < 1 || isempty(d)
  W = mapping(mfilename,{alf,pzero,prec});
  W = setname(W,'PE embedding');
  return
end

if isempty(prec),  prec = 1e-4; end
if isempty(pzero), pzero = 0;   end
if isempty(alf),   alf = inf;   end

% APPLY A TRAINED MAPPING:  X = D*V, where V = KEM(K,ALF,P)
if (isdataset(d) || isa(d,'double'))
  if ismapping(alf)
    [m,n] = size(d);
    %d     = d.^2; % OK for kernels??? -- NOTE(review): kept from original, to be confirmed

    Q   = getdata(alf,'evec');  % Eigenvectors
    me  = getdata(alf,'mdis');  % Row vector of per-object mean kernel values of the training set
    p   = getdata(alf,'mean');  % p=0 -> the mean of the embedded configuration lies at 0,
                                % otherwise object p lies at the origin
    L   = getdata(alf,'eval');  % Eigenvalues

    % Project the new data depending on p
    % (whether the mean or another object lies at the origin)
    if p == 0
      % H = eye(n) - ones(n,n)/n, the centering matrix
      H = -repmat(1,n,n)/n;
      H(1:n+1:end) = H(1:n+1:end) + 1;
      W = -0.5 * (d - me(ones(m,1),:)) * H * Q * diag(sqrt(abs(L))./L);
    else
      W =  0.5 * (d(:,p) * ones(1,n) + me(ones(m,1),:) - d) * Q * diag(sqrt(abs(L))./L);
    end

    % Store the signature in the USER field of the projected dataset
    if isdataset(W)
      W = setname(W,['Projected ' updname(W.name)]);
      W = setsig(W,getdata(alf,'sig'));
    end
    return
  end
end

% REDUCE A TRAINED MAPPING:  W = KEM(V,ALF)
if ismapping(d)
  data = getdata(d);
  if iscell(data) % convert an old (cell-array based) mapping to the struct format
    ispsem(d);
    dat.evec = data{1};
    dat.mdis = data{2};
    dat.mean = data{3};
    dat.eval = data{4};
    dat.sig  = data{5};
    dat.prec = data{6};
    d = setmapping_file(d,mfilename);
    d = setdata(d,dat);
    if nargin < 2
      W = d;
      return % conversion only
    else
      data = dat;
    end
  end
  Q = data.evec;  % Eigenvectors
  L = data.eval;  % Eigenvalues
  m = size(Q,1);

  % Re-sort by decreasing eigenvalue magnitude, then re-select with the new ALF
  [ll,K] = sort(-abs(L));
  L = L(K);
  Q = Q(:,K);
  [J,sig] = seleigs(L,alf,getdata(d,'prec'));
  data.evec = Q(:,J);        % Selected eigenvectors
  data.eval = L(J);          % Selected eigenvalues

  W = mapping(mfilename,'trained',data,[],m,length(J));
  W = setname(W,'PE embedding');
  return
end

% TRAIN THE MAPPING on an NxN kernel matrix D
[n,m] = size(d);

% Validate the origin-object index: 0 (mean) or an object index 1..N.
% Fixed: the original only rejected PZERO > N, silently accepting
% negative or fractional values that failed later with an indexing error.
if pzero ~= round(pzero) || pzero < 0 || pzero > n
  error('Third parameter should be an integer in the range 0..N.');
end

H = d;
H = 0.5*(H+H');                    % Make sure H is symmetric

[Q,L]  = preig(H);                 % Pseudo-Euclidean eigendecomposition
Q      = real(Q);
l      = diag(real(L));
[lm,Z] = sort(-abs(l));
Q      = Q(:,Z);
l      = l(Z);                     % Eigenvalues sorted by decreasing absolute value

[J,sig]   = seleigs(l,alf,prec);   % J indexes the selected eigenvalues
data.eval = l(J);                  % Eigenvalues
data.evec = Q(:,J);                % Eigenvectors
data.mdis = mean(+d,2)';           % Per-object mean kernel value, used when projecting new data
data.mean = pzero;                 % Which object lies at the origin (0 = mean)
data.sig  = sig;                   % Signature of the obtained PE space
data.prec = prec;

% Determine the mapping depending on pzero
if pzero == 0
  W = mapping(mfilename,'trained',data,[],m,sum(sig));
else
  data.mdis = +d(:,pzero)';        % Kernel column of the origin object instead of the mean
  W = mapping(mfilename,'trained',data,[],m,sum(sig));
end
W = setname(W,'PE embedding');
return
Note: See TracBrowser for help on using the repository browser.