# lbp_64.py

import bob.pipelines as mario
from bob.bio.face.helpers import face_crop_solver
from bob.bio.video.transformer import VideoWrapper
from bob.pad.face.extractor import LBPHistogram

# A ``database`` object may be injected into this configuration's namespace by
# the pipeline runner; use it to derive annotation information when available.
database = globals().get("database")
if database is not None:
    annotation_type = database.annotation_type
    fixed_positions = database.fixed_positions
else:
    annotation_type = None
    fixed_positions = None

# Preprocessor #
cropper = face_crop_solver(
    cropped_image_size=64, cropped_positions=annotation_type, color_channel="gray"
)
# Apply the face cropper to every frame of each video sample
preprocessor = VideoWrapper(cropper)
# Wrap the preprocessor so it operates on bob.pipelines Sample objects and
# receives each sample's ``annotations`` attribute as an extra transform argument
preprocessor = mario.wrap(
    ["sample"],
    preprocessor,
    transform_extra_arguments=(("annotations", "annotations"),),
)

# Extractor #
# Extract a uniform-LBP histogram from every frame of each video sample
extractor = VideoWrapper(
    LBPHistogram(
        lbp_type="uniform",
        elbp_type="regular",
        radius=1,
        neighbors=8,
        circular=False,
        dtype=None,
    )
)
# Wrap the extractor so it also operates on Sample objects
extractor = mario.wrap(["sample"], extractor)
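
# ---------------------------------------------------------------------------
# Usage sketch (illustrative assumption, not part of the original file): both
# sample-wrapped transformers follow the scikit-learn estimator API, so they
# can be chained into a single pipeline. How ``samples`` are built and how the
# pipeline is executed depends on the bob.pad runner in use.
#
#   from sklearn.pipeline import Pipeline
#
#   pipeline = Pipeline(
#       [("preprocessor", preprocessor), ("extractor", extractor)]
#   )
#   # ``samples`` would be bob.pipelines Sample objects carrying the video
#   # data and an ``annotations`` attribute (assumed here for illustration).
#   features = pipeline.transform(samples)
# ---------------------------------------------------------------------------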