Commit 7cd81e26, authored 7 years ago by Theophile GENTILHOMME

Change variable name from criter to criterion

Parent: bbfa677e
1 merge request: !146 Add 4-5-col files related functionalities and add click commands

Showing 2 changed files with 11 additions and 11 deletions:

bob/bio/base/script/commands.py: 7 additions, 7 deletions
bob/bio/base/script/figure.py: 4 additions, 4 deletions

bob/bio/base/script/commands.py (+7 −7)

@@ -59,7 +59,7 @@ def metrics(ctx, scores, evaluation, **kargs):
         $ bob bio metrics {dev,eval}-scores1 {dev,eval}-scores2
     """
-    if 'criter' in ctx.meta and ctx.meta['criter'] == 'rr':
+    if 'criterion' in ctx.meta and ctx.meta['criterion'] == 'rr':
         process = bio_figure.Metrics(ctx, scores, evaluation, load.cmc)
     else:
         process = bio_figure.Metrics(ctx, scores, evaluation, load.split)

@@ -365,25 +365,25 @@ def evaluate(ctx, scores, evaluation, **kwargs):
     # first time erase if existing file
     ctx.meta['open_mode'] = 'w'
     click.echo("Computing metrics with EER%s..." % log_str)
-    ctx.meta['criter'] = 'eer'  # no criterion passed to evaluate
+    ctx.meta['criterion'] = 'eer'  # no criterion passed to evaluate
     ctx.invoke(metrics, scores=scores, evaluation=evaluation)
     # other times, appends the content
     ctx.meta['open_mode'] = 'a'
     click.echo("Computing metrics with HTER%s..." % log_str)
-    ctx.meta['criter'] = 'hter'  # no criterion passed in evaluate
+    ctx.meta['criterion'] = 'hter'  # no criterion passed in evaluate
     ctx.invoke(metrics, scores=scores, evaluation=evaluation)
     if 'far_value' in ctx.meta and ctx.meta['far_value'] is not None:
         click.echo("Computing metrics with FAR=%f%s..." % \
                    (ctx.meta['far_value'], log_str))
-        ctx.meta['criter'] = 'far'  # no criterion passed in evaluate
+        ctx.meta['criterion'] = 'far'  # no criterion passed in evaluate
         ctx.invoke(metrics, scores=scores, evaluation=evaluation)
     click.echo("Computing minDCF%s..." % log_str)
-    ctx.meta['criter'] = 'mindcf'  # no criterion passed in evaluate
+    ctx.meta['criterion'] = 'mindcf'  # no criterion passed in evaluate
     ctx.invoke(metrics, scores=scores, evaluation=evaluation)
     click.echo("Computing Cllr and minCllr%s..." % log_str)
-    ctx.meta['criter'] = 'cllr'  # no criterion passed in evaluate
+    ctx.meta['criterion'] = 'cllr'  # no criterion passed in evaluate
     ctx.invoke(metrics, scores=scores, evaluation=evaluation)
     # avoid closing pdf file before all figures are plotted

@@ -407,7 +407,7 @@ def evaluate(ctx, scores, evaluation, **kwargs):
     # the last one closes the file
     ctx.meta['closef'] = True
     click.echo("Generating score histograms in %s..." % ctx.meta['output'])
-    ctx.meta['criter'] = 'hter'  # no criterion passed in evaluate
+    ctx.meta['criterion'] = 'hter'  # no criterion passed in evaluate
     ctx.forward(hist)
     click.echo("Evaluate successfully completed!")
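
The `evaluate` command above relies on click's shared `ctx.meta` dictionary to hand the chosen criterion down to the `metrics` command it invokes, which is why the key has to be spelled identically on both ends. The following standalone sketch reproduces that hand-off with toy commands; the command names, loop values, and messages are illustrative, not the actual `bob bio` CLI.

# Self-contained sketch of the ctx.meta hand-off used above, assuming only
# plain click; command names and messages are illustrative, not the real
# `bob bio` CLI.
import click


@click.group()
def cli():
    """Toy CLI that mimics how `evaluate` chains into `metrics`."""


@cli.command()
@click.pass_context
def metrics(ctx):
    """Read the criterion that the calling command stored in ctx.meta."""
    # ctx.meta is shared by all contexts of the same application, so a value
    # written by the invoking command is visible here.
    criterion = ctx.meta.get('criterion', 'eer')
    click.echo("computing metrics for criterion: %s" % criterion)


@cli.command()
@click.pass_context
def evaluate(ctx):
    """Set a criterion, then re-use the metrics command for each one."""
    for criterion in ('eer', 'hter'):
        ctx.meta['criterion'] = criterion  # the key the reader must match
        ctx.invoke(metrics)


if __name__ == '__main__':
    cli()

Running `python sketch.py evaluate` prints the metrics line twice, once for 'eer' and once for 'hter', mirroring how `evaluate` above drives `metrics` once per criterion.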

bob/bio/base/script/figure.py (+4 −4)

@@ -107,7 +107,7 @@ class Dir(measure_figure.PlotBase):
 class Metrics(measure_figure.Metrics):
     ''' Compute metrics from score files'''
     def init_process(self):
-        if self._criter == 'rr':
+        if self._criterion == 'rr':
             self._thres = [None] * self.n_systems if self._thres is None else \
                 self._thres

@@ -117,7 +117,7 @@ class Metrics(measure_figure.Metrics):
         headers = ['' or title, 'Development %s' % input_names[0]]
         if self._eval and input_scores[1] is not None:
             headers.append('eval % s' % input_names[1])
-        if self._criter == 'rr':
+        if self._criterion == 'rr':
             rr = bob.measure.recognition_rate(input_scores[0], self._thres[idx])
             dev_rr = "%.1f%%" % (100 * rr)
             raws = [['RR', dev_rr]]

@@ -128,7 +128,7 @@ class Metrics(measure_figure.Metrics):
             click.echo(tabulate(raws, headers, self._tablefmt),
                        file=self.log_file)
-        elif self._criter == 'mindcf':
+        elif self._criterion == 'mindcf':
             if 'cost' in self._ctx.meta:
                 cost = 0.99 if 'cost' not in self._ctx.meta else \
                     self._ctx.meta['cost']

@@ -170,7 +170,7 @@ class Metrics(measure_figure.Metrics):
             click.echo(tabulate(raws, headers, self._tablefmt),
                        file=self.log_file)
-        elif self._criter == 'cllr':
+        elif self._criterion == 'cllr':
             cllr = bob.measure.calibration.cllr(input_scores[0][0],
                                                 input_scores[0][1])
             min_cllr = bob.measure.calibration.min_cllr(
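
On the reading side, the `Metrics` figure class above branches on `self._criterion`, an attribute that is presumably populated from the same `ctx.meta` entry that `commands.py` writes; the actual wiring lives in bob.measure's `measure_figure.PlotBase`, which is not part of this diff. The hypothetical, simplified consumer below only illustrates why both files must agree on the key name; the class name, constructor, and default value are assumptions for illustration.

# Hypothetical consumer-side sketch: NOT the real measure_figure.PlotBase.
# It only shows that the reader must use the same meta key that commands.py
# writes; class name, constructor, and default are assumptions.
from types import SimpleNamespace


class MetricsSketch:
    def __init__(self, ctx):
        # commands.py stores the value under ctx.meta['criterion']; reading
        # any other key (e.g. the old 'criter') would miss it and fall back
        # to the default.
        self._criterion = ctx.meta.get('criterion', 'eer')

    def init_process(self):
        if self._criterion == 'rr':
            return "rank-1 recognition rate requested"
        return "threshold criterion: %s" % self._criterion


if __name__ == '__main__':
    fake_ctx = SimpleNamespace(meta={'criterion': 'rr'})
    print(MetricsSketch(fake_ctx).init_process())

The real class additionally receives the scores, the evaluation flag, and a load function, as the bio_figure.Metrics(...) calls in the commands.py diff show.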