%DKL2 Kullback-Leibler divergence
%
%  D = DKL2(DATA,PROTO)
%
% INPUT
%   DATA    a dataset
%   PROTO   the dataset with prototypes (representation set)
%
% OUTPUT
%   D       dataset with a distance matrix
%
% DESCRIPTION
% Computes the (symmetrized) Kullback-Leibler divergence between the rows
% of DATA and the prototypes in PROTO (p. 151, Pattern Recognition,
% Theodoridis; p. 176 in the 2nd edition).
%
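% EXAMPLE
% An illustrative call (a sketch: it assumes PRTools is on the path and
% the variable names are made up). Rows are treated as histograms, so
% they should hold nonnegative counts or probability estimates:
%   a = dataset(rand(100,10));   % 100 histograms with 10 bins each
%   p = dataset(rand(5,10));     % 5 prototype histograms
%   d = dkl2(a,p);               % 100 x 5 divergence matrix as a dataset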

% $Id: dkl2.m,v 1.2 2005/03/02 13:39:45 serguei Exp $

function d=dkl2(data,proto)

fprintf(' dkl2:');

if size(data,2)~=size(proto,2)
  error('both datasets must have the same feature sizes');
end
sc=size(data,1); % number of objects in DATA
pc=size(proto,1); % number of prototypes in PROTO
d=zeros(sc,pc);

labdata=getlab(data);
labproto=getlab(proto);
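% keep the dataset objects for reconstruction below and unpack the raw
% matrices: in PRTools, +x converts a dataset to a plain double array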
data2=data; data=+data;
proto2=proto; proto=+proto;

% normalization: we consider a histogram to be a set of probability
% estimates
data=data./repmat(sum(data,2),[1,size(data,2)]);
proto=proto./repmat(sum(proto,2),[1,size(proto,2)]);

step=round(pc/10);
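% for each prototype row q and each data row p, accumulate the
% symmetrized divergence  d(p,q) = sum_k (q_k - p_k)*log(q_k/p_k)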
for i=1:pc
  t=repmat(proto(i,:),[sc,1]);

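  % temporarily disable warnings: log(t./data) below may hit
  % log(0), division by zero, or 0/0 for empty histogram bins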
  w = warning;
  warning('off');
  l = log(t./data);
  warning(w);

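  % bins where either histogram is zero produce non-finite logs;
  % clear them so such bins contribute nothing to the sum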
  l(~isfinite(l)) = 0;
  d(:,i)=sum((t-data).*l,2);

  if mod(i,step)==0, fprintf('.'); end
end

d=setdat(data2,d);
d=setfeatlab(d,labproto);

return