%DECTREEC Train a decision tree classifier
%
%   W = DECTREEC(A,N)
%
% Train a decision tree on dataset A, using random feature subsets of
% size N. When N=0, no feature subsets are used.
%
% Training grows a full tree (no pruning) by repeatedly splitting on a
% single feature with a single threshold; each split is chosen by
% optimising the GINI index.
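%
% EXAMPLE
% A minimal usage sketch (assumes a standard PRTools environment in
% which GENDATB, TESTC and dataset*mapping application are available):
%
%    a = gendatb([50 50]);    % banana-shaped two-class dataset
%    w = dectreec(a);         % full tree, no random feature subsets
%    d = a*w;                 % apply; D holds one-hot class confidences
%    e = testc(d);            % classification error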
function w = dectreec(a,featsubset)

if nargin<2
  featsubset = 0;
end
if nargin<1 || isempty(a)
  % No data supplied: return an untrained mapping
  w = mapping(mfilename,{featsubset});
  w = setname(w,'Decision tree');
  return
end

if ~ismapping(featsubset)
  % Train: convert labels to numeric class indices 1..K
  y = getnlab(a);
  opt.K = max(y);
  opt.featsubset = featsubset;
  if exist('decisiontree')==3   % 3 = compiled MEX implementation present
    v = decisiontree(+a,y,opt.K,featsubset);
  else
    v = tree_train(+a,y,opt);
  end
  w = mapping(mfilename,'trained',v,getlablist(a),size(a,2),opt.K);
else
  % Apply: the trained tree is stored in the mapping data
  v = getdata(featsubset);
  n = size(a,1);
  if exist('decisiontree')==3
    if ~isa(v,'double')
      error('This tree should have been trained with the C-code');
    end
    out = decisiontree(v,+a);
  else
    if ~isa(v,'cell')
      error('This tree should have been trained with the Matlab code');
    end
    out = tree_eval(v,+a);
  end
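  % OUT now holds one predicted class index (1..K) per object; convert
  % it to an N-by-K 0/1 matrix, e.g. out = [2;1] with K=2 -> [0 1; 1 0]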
  out = accumarray([(1:n)' out],ones(n,1));

  w = setdat(a,out,featsubset);
end
return