Commit b196c8ba authored by Sylvain Calinon's avatar Sylvain Calinon

New GMR example added

parent 08ffb757
@@ -37,6 +37,7 @@ end
%% Learning and reproduction
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%model = init_GMM_kmeans(Data, model);
model = init_GMM_timeBased(Data, model);
model = EM_GMM(Data, model);
DataOut = GMR(model, 1:nbData, 1, [2,3]);
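GMR.m itself is not part of this diff. For context, a minimal sketch of Gaussian mixture regression consistent with the call above (conditioning the joint GMM on input dimension 1, the time index, to retrieve output dimensions 2 and 3) is given below; the name GMR_sketch and the inlined Gaussian density are placeholders for illustration, not the library's actual implementation.

function [expData, expSigma] = GMR_sketch(model, DataIn, in, out)
%Hypothetical sketch of Gaussian mixture regression: conditions the joint
%GMM on the input dimensions 'in' to estimate the output dimensions 'out'.
nbData = size(DataIn,2);
nbVarOut = length(out);
expData = zeros(nbVarOut, nbData);
expSigma = zeros(nbVarOut, nbVarOut, nbData);
H = zeros(model.nbStates, nbData);
MuTmp = zeros(nbVarOut, model.nbStates);
for t=1:nbData
  %Activation weights h_i = P(i | x_in), with an inlined Gaussian density
  for i=1:model.nbStates
    DataTmp = DataIn(:,t) - model.Mu(in,i);
    H(i,t) = model.Priors(i) * exp(-0.5*DataTmp'/model.Sigma(in,in,i)*DataTmp) ...
      / sqrt((2*pi)^length(in) * abs(det(model.Sigma(in,in,i))));
  end
  H(:,t) = H(:,t) / sum(H(:,t));
  %Conditional means, weighted by the activations
  for i=1:model.nbStates
    MuTmp(:,i) = model.Mu(out,i) + model.Sigma(out,in,i)/model.Sigma(in,in,i) * (DataIn(:,t)-model.Mu(in,i));
    expData(:,t) = expData(:,t) + H(i,t) * MuTmp(:,i);
  end
  %Conditional covariances (law of total variance over the components)
  for i=1:model.nbStates
    SigmaTmp = model.Sigma(out,out,i) - model.Sigma(out,in,i)/model.Sigma(in,in,i)*model.Sigma(in,out,i);
    expSigma(:,:,t) = expSigma(:,:,t) + H(i,t) * (SigmaTmp + MuTmp(:,i)*MuTmp(:,i)');
  end
  expSigma(:,:,t) = expSigma(:,:,t) - expData(:,t)*expData(:,t)';
end
end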
function [model, GAMMA2, LL] = EM_GMM(Data, model)
% Training of a Gaussian mixture model (GMM) with an expectation-maximization (EM) algorithm.
%
% Author: Sylvain Calinon, 2014
% http://programming-by-demonstration.org/SylvainCalinon
%Parameters of the EM algorithm
nbMinSteps = 5; %Minimum number of iterations allowed
nbMaxSteps = 100; %Maximum number of iterations allowed
maxDiffLL = 1E-4; %Likelihood increase threshold to stop the algorithm
nbData = size(Data,2);
diagRegularizationFactor = 1E-6; %Optional regularization term added to the diagonal of the covariance matrices
for nbIter=1:nbMaxSteps
  fprintf('.');
  %E-step
  [L, GAMMA] = computeGamma(Data, model); %See 'computeGamma' function below
  GAMMA2 = GAMMA ./ repmat(sum(GAMMA,2),1,nbData); %Responsibilities rescaled so that each row sums to one (weights for the M-step)
  %M-step
  for i=1:model.nbStates
    %Update Priors
    model.Priors(i) = sum(GAMMA(i,:)) / nbData;
    %Update Mu
    model.Mu(:,i) = Data * GAMMA2(i,:)';
    %Update Sigma (regularization term is optional)
    DataTmp = Data - repmat(model.Mu(:,i),1,nbData);
    model.Sigma(:,:,i) = DataTmp * diag(GAMMA2(i,:)) * DataTmp' + eye(model.nbVar) * diagRegularizationFactor;
  end
  %Compute average log-likelihood
  LL(nbIter) = sum(log(sum(L,1))) / nbData;
  %Stop the algorithm if EM converged (small change of LL)
  if nbIter>nbMinSteps
    if LL(nbIter)-LL(nbIter-1)<maxDiffLL || nbIter==nbMaxSteps-1
      disp(['EM converged after ' num2str(nbIter) ' iterations.']);
      return;
    end
  end
end
disp(['The maximum number of ' num2str(nbMaxSteps) ' EM iterations has been reached.']);
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [L, GAMMA] = computeGamma(Data, model)
L = zeros(model.nbStates,size(Data,2));
for i=1:model.nbStates
  L(i,:) = model.Priors(i) * gaussPDF(Data, model.Mu(:,i), model.Sigma(:,:,i));
end
GAMMA = L ./ repmat(sum(L,1),model.nbStates,1);
end
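The gaussPDF function called in computeGamma is not included in this diff. A minimal sketch of the multivariate Gaussian likelihood it is expected to return is shown below; the name gaussPDF_sketch and the realmin guard against division by zero are assumptions for illustration.

function prob = gaussPDF_sketch(Data, Mu, Sigma)
%Hypothetical sketch of the multivariate Gaussian likelihood used in
%computeGamma: Data is DxN, Mu is Dx1, Sigma is DxD; returns a 1xN row of
%densities N(Data(:,t) | Mu, Sigma).
[nbVar, nbData] = size(Data);
Data = Data - repmat(Mu, 1, nbData); %Center the datapoints
prob = sum((Sigma\Data) .* Data, 1); %Squared Mahalanobis distance of each datapoint
prob = exp(-0.5*prob) / sqrt((2*pi)^nbVar * abs(det(Sigma)) + realmin);
end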
function model = init_GMM_kmeans(Data, model)
%
% This function initializes the parameters of a Gaussian Mixture Model
% (GMM) by using the k-means clustering algorithm.
%
% Inputs -----------------------------------------------------------------
% o Data: D x N array representing N datapoints of D dimensions.
% o nbStates: Number K of GMM components.
% Outputs ----------------------------------------------------------------
% o Priors: 1 x K array representing the prior probabilities of the
% K GMM components.
% o Mu: D x K array representing the centers of the K GMM components.
% o Sigma: D x D x K array representing the covariance matrices of the
% K GMM components.
% Comments ---------------------------------------------------------------
% o This function relies on a 'kmeansClustering' function (called below)
%   to cluster the data, rather than calling the 'kmeans' function from
%   the MATLAB Statistics Toolbox directly.
%
% Copyright (c) 2006 Sylvain Calinon, LASA Lab, EPFL, CH-1015 Lausanne,
% Switzerland, http://lasa.epfl.ch
[nbVar, nbData] = size(Data);
[Data_id, model.Mu] = kmeansClustering(Data, model.nbStates);
for i=1:model.nbStates
  idtmp = find(Data_id==i);
  model.Priors(i) = length(idtmp);
  %Duplicating the datapoints keeps cov() returning a DxD matrix even when the cluster contains a single datapoint
  model.Sigma(:,:,i) = cov([Data(:,idtmp) Data(:,idtmp)]');
  %Regularization term to avoid numerical instability
  model.Sigma(:,:,i) = model.Sigma(:,:,i) + eye(nbVar)*1E-2;
end
model.Priors = model.Priors ./ sum(model.Priors);
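The kmeansClustering function called above is likewise not part of this diff. A minimal k-means sketch matching the call signature [Data_id, Mu] = kmeansClustering(Data, nbStates) is given below; the name kmeansClustering_sketch, the iteration cap, and the convergence tolerance are assumptions for illustration.

function [idList, Mu] = kmeansClustering_sketch(Data, nbStates)
%Hypothetical sketch of the k-means step used by init_GMM_kmeans:
%Data is DxN; returns a 1xN list of cluster indices and DxK centers Mu.
nbData = size(Data,2);
idTmp = randperm(nbData);
Mu = Data(:, idTmp(1:nbStates)); %Initialize centers on randomly picked datapoints
idList = zeros(1, nbData);
for iter=1:100
  %Assignment step: index of the closest center for each datapoint
  for t=1:nbData
    [~, idList(t)] = min(sum((Mu - repmat(Data(:,t),1,nbStates)).^2, 1));
  end
  %Update step: recompute each center as the mean of its assigned datapoints
  MuOld = Mu;
  for i=1:nbStates
    if any(idList==i)
      Mu(:,i) = mean(Data(:,idList==i), 2);
    end
  end
  if norm(Mu - MuOld, 'fro') < 1E-10
    break; %Centers have converged
  end
end
end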
function model = init_GMM_timeBased(Data, model)
%
% This function initializes the parameters of a Gaussian Mixture Model
% (GMM) by splitting the data into bins of equal duration along the first
% dimension of Data (assumed to be a time or phase variable).
%
% Inputs -----------------------------------------------------------------
% o Data: D x N array representing N datapoints of D dimensions.
% o nbStates: Number K of GMM components.
% Outputs ----------------------------------------------------------------
% o Priors: 1 x K array representing the prior probabilities of the
% K GMM components.
% o Mu: D x K array representing the centers of the K GMM components.
% o Sigma: D x D x K array representing the covariance matrices of the
% K GMM components.
% Comments ---------------------------------------------------------------
% o The first row of Data is used as the time variable: the range of
%   Data(1,:) is split into model.nbStates intervals of equal length, and
%   each GMM component is initialized from the datapoints falling in the
%   corresponding interval.
%
% Copyright (c) 2006 Sylvain Calinon, LASA Lab, EPFL, CH-1015 Lausanne,
% Switzerland, http://lasa.epfl.ch
[nbVar, nbData] = size(Data);
TimingSep = linspace(min(Data(1,:)), max(Data(1,:)), model.nbStates+1);
for i=1:model.nbStates
  if i<model.nbStates
    idtmp = find(Data(1,:)>=TimingSep(i) & Data(1,:)<TimingSep(i+1));
  else
    %Include the datapoints lying exactly at the final time value in the last bin
    idtmp = find(Data(1,:)>=TimingSep(i) & Data(1,:)<=TimingSep(i+1));
  end
  model.Priors(i) = length(idtmp);
  model.Mu(:,i) = mean(Data(:,idtmp),2);
  model.Sigma(:,:,i) = cov(Data(:,idtmp)');
  %Regularization term to avoid numerical instability
  model.Sigma(:,:,i) = model.Sigma(:,:,i) + eye(nbVar)*1E-2;
end
model.Priors = model.Priors / sum(model.Priors);
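As a quick sanity check, the time-based initialization can be exercised on a small synthetic dataset whose first row is the time index, mirroring the demo hunk at the top of this commit; the toy values below are illustrative assumptions, not part of the commit.

%Toy usage sketch (values are assumptions for illustration)
model.nbStates = 3;
model.nbVar = 3; %Dimension of a datapoint (time + 2 outputs), required by EM_GMM
t = 1:200;
Data = [t; cos(t*1E-1); sin(t*1E-1)]; %First row is time, as assumed by init_GMM_timeBased
model = init_GMM_timeBased(Data, model);
model = EM_GMM(Data, model);
DataOut = GMR(model, t, 1, [2,3]); %Retrieve dimensions 2-3 conditioned on the time input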