lbp_64.py
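"""LBP-based face presentation attack detection (PAD) pipeline configuration.

Faces are cropped to 64x64 grayscale images and a uniform LBP histogram is
extracted from every frame of the video.
"""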
import bob.pipelines as mario
from bob.bio.face.helpers import face_crop_solver
from bob.bio.video.transformer import VideoWrapper
from bob.pad.face.extractor import LBPHistogram

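# The ``database`` object may have been set by a previously loaded
# configuration file; if so, reuse its annotation type and fixed positions.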
database = globals().get("database")
if database is not None:
    annotation_type = database.annotation_type
    fixed_positions = database.fixed_positions
else:
    annotation_type = None
    fixed_positions = None

# Preprocessor #
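# Crop faces to 64x64 grayscale images, locating the face with the
# database annotations (or the fixed positions, when provided).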
cropper = face_crop_solver(
    cropped_image_size=64,
    cropped_positions=annotation_type,
    color_channel="gray",
    fixed_positions=fixed_positions,
)
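# Apply the face cropper to every frame of each video.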
preprocessor = VideoWrapper(cropper)
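# Make the preprocessor sample-aware and forward each sample's annotations
# to the cropper.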
preprocessor = mario.wrap(
    ["sample"],
    preprocessor,
    transform_extra_arguments=(("annotations", "annotations"),),
)

# Extractor #
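# Extract a uniform LBP histogram (radius 1, 8 neighbors) from every frame.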
extractor = VideoWrapper(
    LBPHistogram(
        lbp_type="uniform",
        elbp_type="regular",
        radius=1,
        neighbors=8,
        circular=False,
        dtype=None,
    )
)
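# Make the extractor sample-aware as well.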
extractor = mario.wrap(["sample"], extractor)