Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
L
lab-speaker-recognition
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Analytics
Analytics
CI / CD
Repository
Value Stream
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
biometric-resources
lab-speaker-recognition
Commits
a6280e55
Commit
a6280e55
authored
Oct 13, 2018
by
Sushil Bhattacharjee
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
updates to lab exercise
parent
35dea031
Pipeline
#24286
failed with stage
in 3 seconds
Changes
3
Pipelines
1
Expand all
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
249 additions
and
133 deletions
+249
-133
notebooks/model.04_gmm1d.ipynb
notebooks/model.04_gmm1d.ipynb
+101
-132
notebooks/plots.py
notebooks/plots.py
+71
-0
notebooks/speaker_lib.py
notebooks/speaker_lib.py
+77
-1
No files found.
notebooks/model.04_gmm1d.ipynb
View file @
a6280e55
This diff is collapsed.
Click to expand it.
notebooks/plots.py
View file @
a6280e55
...
...
@@ -17,6 +17,13 @@ import bob.measure
import
bob.measure.load
import
os
import
math
import
bob.bio.gmm
import
numpy
from
matplotlib
import
pyplot
as
plt
from
matplotlib
import
mlab
from
sklearn.mixture
import
GaussianMixture
as
GMM2
def
plot_with_subplots
(
image_a
,
image_b
):
"""
...
...
@@ -241,3 +248,67 @@ def plot_multidet(file_names, labels, base_path="./scores"):
pyplot
.
legend
()
pyplot
.
show
()
def plot_one_gaussian(dataset, model_mean=0.0, model_variance=1.0):
    """Plot a histogram of ``dataset`` overlaid with a single-Gaussian pdf.

    Parameters
    ----------
    dataset : array-like
        1-D collection of samples to histogram.
    model_mean : float
        Mean of the Gaussian model to overlay (default 0.0).
    model_variance : float
        Variance of the Gaussian model to overlay (default 1.0).
    """
    fig = plt.figure(figsize=(12, 6))
    fig.subplots_adjust(left=0.12, right=0.97, bottom=0.21, top=0.9, wspace=0.5)

    # plot distribution and model:
    ax = fig.add_subplot(111)
    # 'density' replaces the 'normed' keyword, which was removed in matplotlib 3.x.
    ax.hist(dataset, 30, density=True, histtype='stepfilled', alpha=0.4,
            label='Distribution')
    ax.set_xlabel('$x$')
    ax.set_ylabel('$p(x)$')

    mu = model_mean
    var = model_variance
    sig = math.sqrt(var)
    # Evaluate the model pdf over +/- 3 standard deviations around the mean.
    x = numpy.linspace(mu - 3 * sig, mu + 3 * sig)
    # Gaussian pdf computed directly: matplotlib.mlab.normpdf was removed
    # from matplotlib (3.3), so we no longer rely on it.
    pdf = numpy.exp(-0.5 * ((x - mu) / sig) ** 2) / (sig * math.sqrt(2.0 * math.pi))
    ax.plot(x, pdf, label="Gaussian model")
    ax.legend(loc=1)
    # suppress x-ticks.
    ax.tick_params(axis='x', which='both', bottom=False, top=False, labelbottom=False)
def plot_mixture(ds, n_gaussians):
    """Fit a Gaussian mixture model to 1-D data ``ds`` and plot it.

    A histogram of the data is overlaid with the fitted mixture density
    (solid line) and, when there is more than one component, the individual
    responsibility-weighted component densities (dashed lines).

    Parameters
    ----------
    ds : array-like, shape (n_samples, 1)
        Data to fit; sklearn's GaussianMixture expects a 2-D array.
    n_gaussians : int
        Number of mixture components; must be >= 1.
    """
    if n_gaussians < 1:
        # Validate BEFORE fitting: sklearn raises on a non-positive component
        # count, and the original error path referenced an undefined name
        # (max_gmm_components), which produced a NameError instead of the
        # intended message.
        print('Number of Gaussians should be at least 1.')
        return

    my_model = GMM2(n_gaussians).fit(ds)

    # Evaluate the fitted mixture on a fixed grid.
    x = numpy.linspace(-6, 6, 1000)
    logprob = my_model.score_samples(x.reshape((-1, 1)))
    responsibilities = my_model.predict_proba(x.reshape((-1, 1)))
    pdf = numpy.exp(logprob)
    # Per-component curves: total density weighted by each component's
    # responsibility at every grid point.
    pdf_individual = responsibilities * pdf[:, numpy.newaxis]

    fig = plt.figure(figsize=(12, 6))
    fig.subplots_adjust(left=0.12, right=0.97, bottom=0.21, top=0.9, wspace=0.5)
    ax = fig.add_subplot(111)
    # 'density' replaces the 'normed' keyword removed in matplotlib 3.x.
    ax.hist(ds, 30, density=True, histtype='stepfilled', alpha=0.4)
    ax.plot(x, pdf, '-k', label="GMM")
    ax.plot(x, pdf_individual, '--k')
    if n_gaussians > 1:
        # Re-plot one component with a label: a hack to get a single
        # 'Components' entry in the legend.
        ax.plot(x, pdf_individual[:, 0], '--k', label="Components")
    ax.legend(loc=1)
    ax.text(0.04, 0.96, str(n_gaussians) + " Gaussians",
            ha='left', va='top', transform=ax.transAxes)
    ax.set_xlabel('$x$')
    ax.set_ylabel('$p(x)$')
def plot_1gaussian_model(ds):
    """Convenience wrapper: plot ``ds`` fitted with a one-component mixture."""
    plot_mixture(ds, 1)
notebooks/speaker_lib.py
View file @
a6280e55
from
pathlib
import
Path
import
math
import
bob.bio.gmm
import
numpy
from
matplotlib
import
pyplot
as
plt
from
matplotlib
import
mlab
from
sklearn.mixture
import
GaussianMixture
as
GMM2
# we assume that the file is in the 4-column format devised for bob-score-files.
# we assume that the file is in the 4-column format devised for bob-score-files.
#function to load a 4-col. score-file and extract the genuine and ZEI-scores
def
load_scores
(
score_filename
):
my_file
=
Path
(
score_filename
)
assert
my_file
.
is_file
(),
"File %s does not exist. Quitting."
%
score_filename
...
...
@@ -16,3 +21,74 @@ def load_scores(score_filename):
zei_scores
=
[
float
(
line
.
split
()[
3
])
for
line
in
x
if
line
.
split
()[
0
]
!=
line
.
split
()[
1
]]
return
numpy
.
array
(
zei_scores
),
numpy
.
array
(
gen_scores
)
# function to compute log-likelihood of gaussian-model.
def calc_loglikelihood(ds, mean, var):
    """Return the log-likelihood of samples ``ds`` under a 1-D Gaussian.

    Parameters
    ----------
    ds : numpy.ndarray
        Samples whose likelihood is evaluated.
    mean : float
        Mean of the Gaussian model.
    var : float
        Variance (not standard deviation) of the Gaussian model; must be > 0.

    Returns
    -------
    float
        sum over samples of log N(x | mean, var)
        = sum(-0.5 * (log(2*pi*var) + (x - mean)**2 / var)).

    Notes
    -----
    The original implementation subtracted the squared-residual term
    (log(var) - r**2/var) instead of adding it, so the "likelihood"
    *increased* with the residual, and it omitted the log(2*pi)
    normalization constant. Both defects are fixed here.
    """
    residual = ds - mean
    return numpy.sum(-0.5 * (numpy.log(2.0 * math.pi * var)
                             + (residual * residual) / var))
def plot_one_gaussian(dataset, model_mean=0.0, model_variance=1.0):
    """Plot a histogram of ``dataset`` overlaid with a single-Gaussian pdf.

    Parameters
    ----------
    dataset : array-like
        1-D collection of samples to histogram.
    model_mean : float
        Mean of the Gaussian model to overlay (default 0.0).
    model_variance : float
        Variance of the Gaussian model to overlay (default 1.0).
    """
    fig = plt.figure(figsize=(12, 6))
    fig.subplots_adjust(left=0.12, right=0.97, bottom=0.21, top=0.9, wspace=0.5)

    # plot distribution and model:
    ax = fig.add_subplot(111)
    # 'density' replaces the 'normed' keyword, which was removed in matplotlib 3.x.
    ax.hist(dataset, 30, density=True, histtype='stepfilled', alpha=0.4,
            label='Distribution')
    ax.set_xlabel('$x$')
    ax.set_ylabel('$p(x)$')

    mu = model_mean
    var = model_variance
    sig = math.sqrt(var)
    # Evaluate the model pdf over +/- 3 standard deviations around the mean.
    x = numpy.linspace(mu - 3 * sig, mu + 3 * sig)
    # Gaussian pdf computed directly: matplotlib.mlab.normpdf was removed
    # from matplotlib (3.3), so we no longer rely on it.
    pdf = numpy.exp(-0.5 * ((x - mu) / sig) ** 2) / (sig * math.sqrt(2.0 * math.pi))
    ax.plot(x, pdf, label="Gaussian model")
    ax.legend(loc=1)
    # suppress x-ticks.
    ax.tick_params(axis='x', which='both', bottom=False, top=False, labelbottom=False)
def plot_mixture(ds, n_gaussians):
    """Fit a Gaussian mixture model to 1-D data ``ds`` and plot it.

    A histogram of the data is overlaid with the fitted mixture density
    (solid line) and, when there is more than one component, the individual
    responsibility-weighted component densities (dashed lines).

    Parameters
    ----------
    ds : array-like, shape (n_samples, 1)
        Data to fit; sklearn's GaussianMixture expects a 2-D array.
    n_gaussians : int
        Number of mixture components; must be >= 1.
    """
    if n_gaussians < 1:
        # Validate BEFORE fitting: sklearn raises on a non-positive component
        # count, and the original error path referenced an undefined name
        # (max_gmm_components), which produced a NameError instead of the
        # intended message.
        print('Number of Gaussians should be at least 1.')
        return

    my_model = GMM2(n_gaussians).fit(ds)

    # Evaluate the fitted mixture on a fixed grid.
    x = numpy.linspace(-6, 6, 1000)
    logprob = my_model.score_samples(x.reshape((-1, 1)))
    responsibilities = my_model.predict_proba(x.reshape((-1, 1)))
    pdf = numpy.exp(logprob)
    # Per-component curves: total density weighted by each component's
    # responsibility at every grid point.
    pdf_individual = responsibilities * pdf[:, numpy.newaxis]

    fig = plt.figure(figsize=(12, 6))
    fig.subplots_adjust(left=0.12, right=0.97, bottom=0.21, top=0.9, wspace=0.5)
    ax = fig.add_subplot(111)
    # 'density' replaces the 'normed' keyword removed in matplotlib 3.x.
    ax.hist(ds, 30, density=True, histtype='stepfilled', alpha=0.4)
    ax.plot(x, pdf, '-k', label="GMM")
    ax.plot(x, pdf_individual, '--k')
    if n_gaussians > 1:
        # Re-plot one component with a label: a hack to get a single
        # 'Components' entry in the legend.
        ax.plot(x, pdf_individual[:, 0], '--k', label="Components")
    ax.legend(loc=1)
    ax.text(0.04, 0.96, str(n_gaussians) + " Gaussians",
            ha='left', va='top', transform=ax.transAxes)
    ax.set_xlabel('$x$')
    ax.set_ylabel('$p(x)$')
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment