source: prextra/dectreec.m @ 106

Last change on this file since 106 was 23, checked in by dtax, 13 years ago

The decision tree and random forest, with compiled code!

File size: 1.1 KB
%DECTREEC Decision tree classifier
%
%          W = DECTREEC(A,N)
%
% Train a decision tree on dataset A, using a random feature subset of
% size N at each split. When N=0 (the default), no feature subsets are
% used. Training grows a full tree (no pruning) by splitting on a
% single feature at a single threshold; the split that optimizes the
% Gini index is chosen.
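%
% Example (a sketch assuming a standard PRTools setup; GENDATB and
% TESTC are PRTools utilities, not defined in this file):
%   a  = gendatb([50 50]);   % banana-shaped two-class dataset
%   w  = dectreec(a);        % full tree, all features per split
%   w2 = dectreec(a,2);      % random subsets of 2 features per split
%   e  = testc(a*w2);        % apparent error of the subset-based tree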
function w = dectreec(a,featsubset)

if nargin<2
	featsubset = 0;
end
if nargin<1 || isempty(a)
	% No data supplied: return an untrained mapping.
	w = mapping(mfilename,{featsubset});
	w = setname(w,'Decision tree');
	return
end

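% Dispatch on the second input: a scalar means train a new tree, a
% trained mapping means apply that tree to the data in A.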
if ~ismapping(featsubset)
	y = getnlab(a);
	opt.K = max(y);
	opt.featsubset = featsubset;
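	% exist(...)==3 means a compiled MEX-file is on the path; use the
	% fast C implementation, otherwise fall back to the Matlab code.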
	if exist('decisiontree')==3
		v = decisiontree(+a,y,opt.K,featsubset);
	else
		v = tree_train(+a,y,opt);
	end
	w = mapping(mfilename,'trained',v,getlablist(a),size(a,2),opt.K);
else
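	% Evaluate: recover the stored tree and classify the data in A.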
	v = getdata(featsubset);
	n = size(a,1);
	if exist('decisiontree')==3
		if ~isa(v,'double')
			error('This tree should have been trained with the C-code');
		end
		out = decisiontree(v,+a);
	else
		if ~isa(v,'cell')
			error('This tree should have been trained with the Matlab code');
		end
		out = tree_eval(v,+a);
	end
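	% Map the predicted class indices to a one-hot (n x K) matrix. The
	% explicit output size is a defensive fix (assuming size(featsubset,2)
	% returns the number of classes stored at training time), so that
	% classes absent from the predictions still get a column.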
	out = accumarray([(1:n)' out],ones(n,1),[n size(featsubset,2)]);

	w = setdat(a,out,featsubset);
end
return