function z = jointEntropy(x, y)
% Compute joint entropy z=H(x,y) of two discrete variables x and y.
% Input:
%   x, y: two integer vector of the same length
% Output:
%   z: joint entropy z=H(x,y) in bits
% Written by Mo Chen ([email protected]).
assert(numel(x) == numel(y));
n = numel(x);
x = reshape(x,1,n);
y = reshape(y,1,n);
% shift labels so the smallest becomes 1 (sparse needs positive indices)
l = min(min(x),min(y));
x = x-l+1;
y = y-l+1;
k = max(max(x),max(y));
idx = 1:n;
% n x k indicator matrices; Mx'*My/n is the empirical joint distribution
Mx = sparse(idx,x,1,n,k,n);
My = sparse(idx,y,1,n,k,n);
Pxy = nonzeros(Mx'*My/n);
z = -dot(Pxy,log2(Pxy));
z = max(0,z);   % guard against tiny negative round-off
function z = nmi(x, y)
% Compute normalized mutual information z=I(x,y)/sqrt(H(x)*H(y)) of two
% discrete variables x and y.
% Input:
%   x, y: two integer vector of the same length
% Output:
%   z: normalized mutual information z=I(x,y)/sqrt(H(x)*H(y))
% Written by Mo Chen ([email protected]).
assert(numel(x) == numel(y));
n = numel(x);
x = reshape(x,1,n);
y = reshape(y,1,n);
% shift labels so the smallest becomes 1 (sparse needs positive indices)
l = min(min(x),min(y));
x = x-l+1;
y = y-l+1;
k = max(max(x),max(y));
idx = 1:n;
Mx = sparse(idx,x,1,n,k,n);
My = sparse(idx,y,1,n,k,n);
% joint and marginal empirical distributions
Pxy = nonzeros(Mx'*My/n);
Hxy = -dot(Pxy,log2(Pxy));
Px = nonzeros(mean(Mx,1));
Py = nonzeros(mean(My,1));
Hx = -dot(Px,log2(Px));
Hy = -dot(Py,log2(Py));
% mutual information I(x,y) = H(x)+H(y)-H(x,y)
MI = Hx+Hy-Hxy;
% normalize; sqrt((MI/Hx)*(MI/Hy)) equals MI/sqrt(Hx*Hy)
z = sqrt((MI/Hx)*(MI/Hy));
z = max(0,z);   % guard against tiny negative round-off
function z = condEntropy (x, y)
% Compute conditional entropy z=H(x|y) of two discrete variables x and y.
% Input:
%   x, y: two integer vector of the same length
% Output:
%   z: conditional entropy z=H(x|y)
% Written by Mo Chen ([email protected]).
assert(numel(x) == numel(y));
n = numel(x);
x = reshape(x,1,n);
y = reshape(y,1,n);
% shift labels so the smallest becomes 1 (sparse needs positive indices)
l = min(min(x),min(y));
x = x-l+1;
y = y-l+1;
k = max(max(x),max(y));
idx = 1:n;
Mx = sparse(idx,x,1,n,k,n);
My = sparse(idx,y,1,n,k,n);
% joint entropy H(x,y) and marginal entropy H(y)
Pxy = nonzeros(Mx'*My/n);
Hxy = -dot(Pxy,log2(Pxy));
Py = nonzeros(mean(My,1));
Hy = -dot(Py,log2(Py));
% conditional entropy H(x|y) = H(x,y) - H(y)
z = Hxy-Hy;
z = max(0,z);   % guard against tiny negative round-off
function y = logSt(X, mu, sigma, v)
% Compute log pdf of a Student's t distribution.
% Input:
%   X: d x n data matrix
%   mu: d x 1 mean
%   sigma: d x d variance (covariance matrix; a scalar is accepted when d=1)
%   v: degree of freedom
% Output:
%   y: 1 x n probability density in logarithm scale y=log p(x)
% Written by Mo Chen ([email protected]).
d = size(X,1);
% Cholesky factorization sigma = R'*R; p~=0 signals a non-PD matrix
[R,p] = chol(sigma);
if p ~= 0
    error('ERROR: sigma is not PD.');
end
X = bsxfun(@minus,X,mu);
Q = R'\X;
q = dot(Q,Q,1);   % squared Mahalanobis distance of each column
% kernel term: -((v+d)/2) * log(1 + q/v)
o = -log(1+q/v)*((v+d)/2);
% log normalization constant; 2*sum(log(diag(R))) = log|sigma|
c = gammaln((v+d)/2)-gammaln(v/2)-(d*log(v*pi)+2*sum(log(diag(R))))/2;
y = c+o;
function z = nvi(x, y)
% Compute normalized variation of information z=(1-I(x,y)/H(x,y)) of two
% discrete variables x and y.
% Input:
%   x, y: two integer vector of the same length
% Output:
%   z: normalized variation of information z=(1-I(x,y)/H(x,y))
% Written by Mo Chen ([email protected]).
assert(numel(x) == numel(y));
n = numel(x);
x = reshape(x,1,n);
y = reshape(y,1,n);
% shift labels so the smallest becomes 1 (sparse needs positive indices)
l = min(min(x),min(y));
x = x-l+1;
y = y-l+1;
k = max(max(x),max(y));
idx = 1:n;
Mx = sparse(idx,x,1,n,k,n);
My = sparse(idx,y,1,n,k,n);
% joint and marginal empirical distributions
Pxy = nonzeros(Mx'*My/n);
Hxy = -dot(Pxy,log2(Pxy));
Px = nonzeros(mean(Mx,1));
Py = nonzeros(mean(My,1));
Hx = -dot(Px,log2(Px));
Hy = -dot(Py,log2(Py));
% 1 - I(x,y)/H(x,y) = 2 - (H(x)+H(y))/H(x,y)
z = 2-(Hx+Hy)/Hxy;
z = max(0,z);   % guard against tiny negative round-off
function y = logWishart(Sigma, W, v)
% Compute log pdf of a Wishart distribution.
% Input:
%   Sigma: d x d covariance matrix (the point at which to evaluate)
%   W: d x d covariance parameter (scale matrix)
%   v: degree of freedom
% Output:
%   y: probability density in logarithm scale y=log p(Sigma)
% Written by Mo Chen ([email protected]).
d = size(Sigma,1);
% log-determinants via Cholesky: log|A| = 2*sum(log(diag(chol(A))))
logdetW = 2*sum(log(diag(chol(W))));
logdetSigma = 2*sum(log(diag(chol(Sigma))));
% log of the multivariate gamma function Gamma_d(v/2)
logMvGamma = d*(d-1)/4*log(pi)+sum(gammaln(0.5*(v+1-(1:d))));
% log normalization constant of the Wishart density
B = -0.5*v*logdetW-0.5*v*d*log(2)-logMvGamma;
y = B+0.5*(v-d-1)*logdetSigma-0.5*trace(W\Sigma);
function y = logVmf(X, mu, kappa)
% Compute log pdf of a von Mises-Fisher distribution.
% Input:
%   X: d x n data matrix (columns assumed unit-norm — TODO confirm with callers)
%   mu: d x k mean directions (columns assumed unit-norm)
%   kappa: 1 x k concentration parameters (kappa > 0)
% Output:
%   y: k x n probability density in logarithm scale y=log p(x)
% Written by Mo Chen ([email protected]).
d = size(X,1);
nu = d/2-1;
% log I_nu(kappa) via the exponentially scaled Bessel function:
% besseli(nu,kappa,1) = besseli(nu,kappa)*exp(-kappa), avoiding overflow
% for large kappa.
logI = log(besseli(nu,kappa,1))+kappa;
% 1 x k log normalization constants
c = nu*log(kappa)-(d/2)*log(2*pi)-logI;
% k x n dot products kappa_j * mu_j' * x_i
q = bsxfun(@times,mu,kappa)'*X;
y = bsxfun(@plus,q,c');
function z = relatEntropy (x, y)
% Compute relative entropy (a.k.a KL divergence) z=KL(p(x)||p(y)) of two
% discrete variables x and y.
% Input:
%   x, y: two integer vector of the same length
% Output:
%   z: relative entropy (a.k.a KL divergence) z=KL(p(x)||p(y));
%      Inf when p(y) lacks support where p(x) > 0
% Written by Mo Chen ([email protected]).
assert(numel(x) == numel(y));
n = numel(x);
x = reshape(x,1,n);
y = reshape(y,1,n);
% shift labels so the smallest becomes 1 (sparse needs positive indices)
l = min(min(x),min(y));
x = x-l+1;
y = y-l+1;
k = max(max(x),max(y));
idx = 1:n;
Mx = sparse(idx,x,1,n,k,n);
My = sparse(idx,y,1,n,k,n);
% empirical distributions over the common support 1..k
Px = full(mean(Mx,1));
Py = full(mean(My,1));
% sum only where Px>0 (0*log 0 = 0 convention); Py=0 there yields Inf
s = Px > 0;
z = dot(Px(s),log2(Px(s))-log2(Py(s)));
z = max(0,z);   % guard against tiny negative round-off
function z = entropy(x)
% Compute entropy z=H(x) of a discrete variable x.
% Input:
%   x: an integer vector
% Output:
%   z: entropy z=H(x) in bits
% Written by Mo Chen ([email protected]).
n = numel(x);
% remap arbitrary integer labels to 1..k
[u,~,x] = unique(x);
k = numel(u);
idx = 1:n;
% n x k indicator matrix: Mx(i,j)=1 iff sample i has label j
Mx = sparse(idx,x,1,n,k,n);
% empirical distribution p(x); nonzeros drops unused labels (0*log 0 = 0)
Px = nonzeros(mean(Mx,1));
Hx = -dot(Px,log2(Px));
z = max(0,Hx);   % guard against tiny negative round-off
function z = mutInfo(x, y)
% Compute mutual information I(x,y) of two discrete variables x and y.
% Input:
%   x, y: two integer vector of the same length
% Output:
%   z: mutual information z=I(x,y)
% Written by Mo Chen ([email protected]).
assert(numel(x) == numel(y));
n = numel(x);
x = reshape(x,1,n);
y = reshape(y,1,n);
% shift labels so the smallest becomes 1 (sparse needs positive indices)
l = min(min(x),min(y));
x = x-l+1;
y = y-l+1;
k = max(max(x),max(y));
idx = 1:n;
Mx = sparse(idx,x,1,n,k,n);
My = sparse(idx,y,1,n,k,n);
% joint and marginal empirical distributions
Pxy = nonzeros(Mx'*My/n);
Hxy = -dot(Pxy,log2(Pxy));
Px = nonzeros(mean(Mx,1));
Py = nonzeros(mean(My,1));
Hx = -dot(Px,log2(Px));
Hy = -dot(Py,log2(Py));
% mutual information I(x,y) = H(x)+H(y)-H(x,y)
z = Hx+Hy-Hxy;
z = max(0,z);   % guard against tiny negative round-off