source: distools/sphem.m @ 13

%SPHEM Spherical Embedding
%
%       [W,SIG,L] = SPHEM(D,R,ALF)
%                 OR
%       [W,SIG,L] = SPHEM(W,ALF)
%
% INPUT
%   D    NxN symmetric dissimilarity matrix (dataset)
%   W    Trained linear embedding into a pseudo-Euclidean space
%   R    Radius of the sphere on which the objects are placed (optional, default: 1)
%   ALF  Parameter determining the dimensionality and the mapping (optional, default: Inf)
%        (0,1)   - fraction of the total (absolute value) preserved variance
%         Inf    - no dimensionality reduction, keeping all dimensions (this also keeps noisy dimensions)
%        'p'     - projection into a Euclidean space based on positive eigenvalues only
%        'PARp'  - projection into a Euclidean space based on the PAR fraction of
%                  positive eigenvalues; e.g. ALF = '0.9p'
%        'n'     - projection into a Euclidean space based on negative eigenvalues only
%        'PARn'  - projection into a (negative) Euclidean space based on the PAR fraction
%                  of negative eigenvalues; e.g. ALF = '0.7n'
%        'P1pP2n'- projection into a Euclidean space based on the P1 positive eigenvalues
%                  and P2 negative eigenvalues; e.g. ALF = '0.7p0.1n', ALF = '7p2n'
%        1 .. N  - number of dimensions in total
%        [P1 P2] - P1 dimensions or preserved fraction of variance in the positive subspace
%                  and P2 dimensions or preserved fraction of variance in the negative
%                  subspace; e.g. ALF = [5 10], ALF = [0.9 0.1]
%   PREC Precision used in the eigenvalue selection (optional, default: 1e-4)
%
% OUTPUT
%   W   Linear embedding into a pseudo-Euclidean space
%   SIG Signature of the space
%   L   List of eigenvalues
%
% DESCRIPTION
% Linear mapping W onto an M-dimensional pseudo-Euclidean subspace, derived from a
% symmetric, square dissimilarity matrix D such that the dissimilarities are preserved.
% M is determined by ALF. E.g., for ALF in (0,1) the subspace is found such that at
% least a fraction ALF of the total variance is preserved. The embedded configuration X
% is computed as D*W. The parameter SIG describes the signature of the subspace. L is a
% sorted list of eigenvalues describing the variances in the (pseudo-)Euclidean space.
%
% A trained mapping can be reduced further by:
%   [W,SIG,L] = SPHEM(W,ALF)
%
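% EXAMPLE
% A minimal usage sketch; D is assumed to be a square symmetric dissimilarity
% dataset and DT a matrix of dissimilarities from test objects to the same
% representation objects:
%   W  = sphem(D,1,0.95);    % train: radius 1, preserve 95% of the variance
%   X  = D*W;                % embedded training objects
%   XT = DT*W;               % project test objects into the same space
%   W  = sphem(W,10);        % reduce the trained mapping to 10 dimensions
%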
% SEE ALSO
% MAPPINGS, DATASETS, PSEM, AUGPSEM, PCA
%
% LITERATURE
% 1. L. Goldfarb, A unified approach to pattern recognition, Pattern Recognition, vol. 17, 575-582, 1984.
% 2. E. Pekalska, P. Paclik, and R.P.W. Duin, A Generalized Kernel Approach to Dissimilarity-based
%    Classification, Journal of Machine Learning Research, vol. 2, no. 2, 175-211, 2002.
%

% Copyright: Elzbieta Pekalska, ela.pekalska@googlemail.com
% Faculty EWI, Delft University of Technology and
% School of Computer Science, University of Manchester


function [W,sig,L,Q] = sphem(d,R,alf,prec)
if nargin < 4 | isempty(prec), prec = 1e-4; end
if nargin < 3 | isempty(alf), alf = inf; end
if nargin < 2 | isempty(R), R = 1; end
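% No dissimilarity data supplied: return an untrained mapping that only stores
% the parameters; in PRTools it is trained later by applying it to data (D*W).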
if nargin < 1 | isempty(d),
  W = mapping(mfilename,{R,alf,prec});
  W = setname(W,'Spherical embedding');
  return
end


if (isdataset(d) | isa(d,'double'))
  if ismapping(R)

    % APPLY THE MAPPING
    pars  = getdata(R);
    [m,n] = size(d);

    R = pars{1};  % radius
    Q = pars{2};  % Eigenvectors
    L = pars{3};  % Eigenvalues

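    % Project the new dissimilarities: form the spherical similarities
    % R^2*cos(d/R) and map them onto the selected eigenvectors, scaling each
    % dimension by sign(L)/sqrt(|L|) (written as sqrt(abs(L))./L below).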
    W = R^2*(cos(+d/R)) * Q * diag(sqrt(abs(L))./L);
    if isdataset(d),
      W = dataset(W,getlab(d),'prior',getprior(d));

      % Store signature in the USER field
      W.user = pars{4};    % Signature
      W.name = ['Projected '  updname(d.name)];
    end
    return
  end
end


% REDUCE ALREADY TRAINED MAPPING
if ismapping(d)
  % For the documented call SPHEM(W,ALF) the second input holds ALF
  if nargin == 2, alf = R; end
  pars = getdata(d);
  R = pars{1};  % radius
  Q = pars{2};  % Eigenvectors
  L = pars{3};  % Eigenvalues
  m = size(Q,1);

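  % Re-sort the eigenvalues by decreasing absolute value and re-select them
  % with the new ALF; the stored precision (pars{5}) is reused and the
  % signature is recomputed by SELEIGS.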
  [ll,K] = sort(-abs(L));
  L = L(K);
  Q = Q(:,K);
  [J,sig] = seleigs(L,alf,pars{5});
  Q = Q(:,J);        % Eigenvectors
  L = L(J);          % Eigenvalues

  W = mapping(mfilename,'trained',{R,Q,L,sig,pars{5}},[],m,length(J));
  W = setname(W,'Spherical embedding');
  return
end



% TRAIN THE MAPPING

% Tolerance value used in comparisons
if mean(+d(:)) < 1,
  tol = 1e-12;
else
  tol = 1e-10;
end

if R <= 0,
  error('R should be positive.');
end


[n,m] = size(d);
if ~issym(d,tol),
  prwarning(1,'Matrix should be symmetric. It is made symmetric by averaging.')
  d = 0.5*(d+d');
end

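% Spherical similarity matrix: the objects are placed on a sphere of radius R and
% compared through R^2*cos(d/R); its eigendecomposition yields the embedding, with
% negative eigenvalues giving the pseudo-Euclidean (negative) part of the signature.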
B = R^2*cos(+d/R);
B = 0.5*(B+B');               % Make sure B is symmetric

[Q, L] = preig(B);
Q      = real(Q);
l      = diag(real(L));
[lm,Z] = sort(-abs(l));
Q      = Q(:,Z);
l      = l(Z);                % Eigenvalues are sorted by decreasing absolute value

[J,sig] = seleigs(l,alf,prec);% J is the index of the selected eigenvalues
L = l(J);                     % Eigenvalues
Q = Q(:,J);                   % Eigenvectors

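% Store the trained mapping: radius, selected eigenvectors and eigenvalues,
% signature and precision; the output dimensionality is sum(sig), the number
% of retained dimensions.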
W = mapping(mfilename,'trained',{R,Q,L,sig,prec},[],m,sum(sig));
W = setname(W,'Spherical embedding');
return