source: prextra/lassoc.m @ 109

Last change on this file since 109 was 108, checked in by dtax, 9 years ago

new and improved.

File size: 1.8 KB
RevLine 
%LASSOC
%
%      W = LASSOC(X, LAMBDA)
%
% Train the LASSO classifier on dataset X. LAMBDA is the regularization
% parameter (default: 1). Calling LASSOC without data returns an
% untrained mapping; applying a trained mapping to a dataset returns
% sigmoid-transformed two-class outputs.

function w = lassoc(x, lambda)

% Optional variance scaling of the data before training; disabled by
% default. NOTE(review): the scaling branch below does x.data*wsc,
% multiplying raw data by a mapping object -- looks untested, confirm
% before enabling mustScale.
mustScale = 0;

% Default regularization parameter.
if (nargin < 2)
        %prwarning(3,'Lambda set to one');
        lambda = 1;
end
% Untrained call: return an empty mapping that stores LAMBDA.
% (|| instead of |: short-circuit so isempty(x) is only evaluated
% when x was actually supplied.)
if (nargin < 1) || (isempty(x))
        w = prmapping(mfilename,{lambda});
        w = setname(w,'LASSO classifier');
        return
end

if ~ismapping(lambda)   % train the mapping

    % Unpack the dataset.
    islabtype(x,'crisp');
    %isvaldset(x,1,2); % at least 1 object per class, 2 classes
    [n,k,c] = getsize(x);

    % Only a two-class discriminant is implemented; fail before doing
    % any further work.
    if c ~= 2
        error('Only a two-class classifier is implemented');
    end

    % Is this necessary??
    if mustScale
        wsc = scalem(x,'variance');
        x.data = x.data*wsc;
    end
    % make sure a bias is added:
    x = [x ones(n,1)];

    % Targets are +/-1: 3-2*nlab maps label index 1 -> +1, 2 -> -1.
    % Prefer our own compiled mex implementation when available
    % (exist==3 means a mex-file); otherwise fall back to the lasso
    % from the Statistics toolbox, which takes LAMBDA as a
    % name/value pair.
    if exist('lasso')==3
       beta = -lasso(+x,3-2*getnlab(x),lambda);
    elseif exist('lasso')
       beta = -lasso(+x,3-2*getnlab(x),'Lambda',lambda);
    else
       error('Cannot find the function lasso.m.');
    end

    % now find out how sparse the result is:
    nr = sum(abs(beta)>1.0e-8);

    % and store the results:
    if mustScale
        W.wsc = wsc;
    end

    W.beta = beta; % the ultimate weights
    W.nr = nr;
    % BUGFIX: register the mapping with the original feature count K,
    % not size(x,2): the bias column appended above is internal, and
    % test data arrives with K features, so K+1 would make prtools
    % reject every evaluation call on a size mismatch.
    w = prmapping(mfilename,'trained',W,getlablist(x),k,c);
    w = setname(w,'LASSO classifier (l=%f)',lambda);

else
    % Evaluate the classifier on new data: here LAMBDA holds the
    % trained mapping and X the test dataset.
    W = getdata(lambda);
    n = size(x,1);

    % scaling and linear classifier:
    if mustScale
        x = x*W.wsc;
    end
    % make sure a bias is added (must mirror the training step):
    x = [x ones(n,1)];
    %go:
    out = x*W.beta;

    % and put it nicely in a prtools dataset: sigmoid outputs for the
    % two classes.
    w = setdat(x,sigm([-out out]),lambda);

end

return
Note: See TracBrowser for help on using the repository browser.