bob / bob.bio.face

Commit d93b03ff, authored Jun 10, 2020 by Tiago de Freitas Pereira

Reorganized baselines

Parent: 2f04b3e6
Pipeline #40427: failed in 13 minutes and 28 seconds
Changes: 4 · Pipelines: 1
bob/bio/face/config/baseline/facenet.py (new file, mode 0 → 100644)
import bob.bio.face
from sklearn.pipeline import make_pipeline
from bob.bio.base.wrappers import wrap_sample_preprocessor
from bob.pipelines import wrap
from bob.bio.face.embeddings import FaceNetSanderberg
from bob.bio.face.helpers import face_crop_solver

# This is the size of the image that this model expects
CROPPED_IMAGE_HEIGHT = 160
CROPPED_IMAGE_WIDTH = 160

cropped_image_size = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)
color_channel = "rgb"

if "database" in locals():
    annotation_type = database.annotation_type
    fixed_positions = database.fixed_positions
else:
    annotation_type = "eyes-center"
    fixed_positions = None

#### SOLVING THE FACE CROPPER TO BE USED
if annotation_type == "bounding-box":
    transform_extra_arguments = (("annotations", "annotations"),)
    TOP_LEFT_POS = (0, 0)
    BOTTOM_RIGHT_POS = (CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH)

    # Detects the face and crops it without eye detection
    face_cropper = face_crop_solver(
        cropped_image_size,
        color_channel=color_channel,
        cropped_positions={"topleft": TOP_LEFT_POS, "bottomright": BOTTOM_RIGHT_POS},
        fixed_positions=fixed_positions,
    )

elif annotation_type == "eyes-center":
    transform_extra_arguments = (("annotations", "annotations"),)
    # eye positions for frontal images
    RIGHT_EYE_POS = (46, 53)
    LEFT_EYE_POS = (46, 107)

    # Crops the face using the annotated eye positions
    face_cropper = face_crop_solver(
        cropped_image_size,
        color_channel=color_channel,
        cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS},
        fixed_positions=fixed_positions,
    )

else:
    transform_extra_arguments = None
    # DEFAULT TO FACE SIMPLE RESIZE
    face_cropper = face_crop_solver(cropped_image_size)

embedding = FaceNetSanderberg()

transformer = make_pipeline(
    wrap(
        ["sample"],
        face_cropper,
        transform_extra_arguments=transform_extra_arguments,
    ),
    wrap(["sample"], embedding),
)
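The `"database" in locals()` check above lets this baseline be chain-loaded after a database configuration file, in which case it inherits the database's `annotation_type` and `fixed_positions`; loaded on its own, it falls back to the eyes-center crop. Below is a minimal usage sketch that mirrors the new `test_facenet_baseline` test at the end of this commit; the import path of `load` is an assumption (the test calls it without showing where it comes from), and the fake image and annotations are copied from `get_fake_sample`.

# Usage sketch (assumption: `load` comes from bob.extension.config, as in the test below)
import numpy as np
import pkg_resources
from bob.extension.config import load
from bob.pipelines import Sample

# Resolve the baseline configuration file shipped with the package
config_name = pkg_resources.resource_filename(
    "bob.bio.face", "config/baseline/facenet.py"
)
# No database config is loaded first, so the eyes-center branch is taken
transformer = load([config_name]).transformer

# Fake RGB image with eye annotations, as in get_fake_sample() below
fake_sample = Sample(
    np.random.rand(3, 400, 400),
    key="1",
    annotations={"leye": (115, 267), "reye": (115, 132)},
)
embedded = transformer.transform([fake_sample])[0]
print(embedded.data.size)  # 128, per the updated assertion in the test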
bob/bio/face/config/transformers/__init__.py (deleted, mode 100644 → 0)
bob/bio/face/config/transformers/eyes_crop/facenet.py (deleted, mode 100644 → 0)
import bob.bio.face
from sklearn.pipeline import make_pipeline
from bob.bio.base.wrappers import wrap_sample_preprocessor
from bob.pipelines import wrap
from bob.bio.face.transformers import FaceNetSanderberg

# This is the size of the image that this model expects
CROPPED_IMAGE_HEIGHT = 160
CROPPED_IMAGE_WIDTH = 160

# eye positions for frontal images
RIGHT_EYE_POS = (46, 53)
LEFT_EYE_POS = (46, 107)

legacy_face_cropper = bob.bio.face.preprocessor.FaceCrop(
    cropped_image_size=(CROPPED_IMAGE_HEIGHT, CROPPED_IMAGE_WIDTH),
    cropped_positions={"leye": LEFT_EYE_POS, "reye": RIGHT_EYE_POS},
    color_channel="rgb",
)

embedding = FaceNetSanderberg()

transformer = make_pipeline(
    wrap_sample_preprocessor(
        legacy_face_cropper,
        transform_extra_arguments=(("annotations", "annotations"),),
    ),
    wrap(["sample"], embedding),
)
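For comparison, the reorganized baseline above builds its cropper through `face_crop_solver` and wraps both stages with the generic `wrap(["sample"], ...)` helper, while this deleted config relied on the legacy `wrap_sample_preprocessor`. A rough sketch of the same eyes-center pipeline written in the new style, under the assumption that a bare `FaceCrop` instance can be sample-wrapped the same way as the object returned by `face_crop_solver`:

# Sketch only: combines pieces of the old and new configs in this commit
import bob.bio.face
from sklearn.pipeline import make_pipeline
from bob.pipelines import wrap
from bob.bio.face.embeddings import FaceNetSanderberg

# Same crop geometry as the deleted legacy config above
cropper = bob.bio.face.preprocessor.FaceCrop(
    cropped_image_size=(160, 160),
    cropped_positions={"leye": (46, 107), "reye": (46, 53)},
    color_channel="rgb",
)

transformer = make_pipeline(
    # Forward each sample's annotations to the cropper's transform()
    wrap(
        ["sample"],
        cropper,
        transform_extra_arguments=(("annotations", "annotations"),),
    ),
    wrap(["sample"], FaceNetSanderberg()),
)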
bob/bio/face/test/test_pipelines.py → bob/bio/face/test/test_baseline.py (renamed)
@@ -6,24 +6,23 @@ from bob.pipelines import Sample, SampleSet
 def get_fake_sample(
     face_size=(160, 160), eyes={"leye": (46, 107), "reye": (46, 53)}
 ):
-    data = np.random.rand(3, 400, 400)
-    annotations = {"leye": (115, 267), "reye": (115, 132)}
     data = np.random.rand(3, 400, 400)
     annotations = {"leye": (115, 267), "reye": (115, 132)}
     return Sample(data, key="1", annotations=annotations)


-def test_facenet_pipeline():
-    config_name = pkg_resources.resource_filename(
-        'bob.bio.face', 'config/transformers/eyes_crop/facenet.py'
-    )
+def test_facenet_baseline():
+    config_name = pkg_resources.resource_filename(
+        "bob.bio.face", "config/baseline/facenet.py"
+    )
     transformer = load([config_name]).transformer
-    #import ipdb; ipdb.set_trace()
     fake_sample = get_fake_sample()
-    #transformed_sample = transformer.transform([fake_sample])[0].data
+    # transformed_sample = transformer.transform([fake_sample])[0].data
+    #import ipdb; ipdb.set_trace()
     transformed_sample = transformer.transform([fake_sample])[0]
-    assert transformed_sample.data.size == 160
+    transformed_data = transformed_sample.data
+    assert transformed_sample.data.size == 128
-    pass
\ No newline at end of file
+    pass