Changed 112x112 crop
@@ -5,8 +5,10 @@ from bob.pipelines import wrap
@@ -30,7 +32,10 @@ def embedding_transformer_default_cropping(cropped_image_size, annotation_type):
return [embedding_transformer_default_cropping(cropped_image_size, item) for item in annotation_type]
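The context line above shows that embedding_transformer_default_cropping accepts a list of annotation types and recurses over it, returning one cropping configuration per entry. A minimal sketch of that dispatch pattern (the function name is kept, but the eye positions are illustrative placeholders, not the values used in bob.bio.face):

# Sketch only: illustrates the list-dispatch pattern from the context line above.
def embedding_transformer_default_cropping(cropped_image_size, annotation_type):
    if isinstance(annotation_type, list):
        # One cropping configuration per annotation type in the list.
        return [
            embedding_transformer_default_cropping(cropped_image_size, item)
            for item in annotation_type
        ]

    cropped_height, cropped_width = cropped_image_size
    if annotation_type == "eyes-center":
        # Illustrative positions: both eyes on one row, symmetric around the center.
        return {
            "leye": (cropped_height // 3, cropped_width // 4 * 3),
            "reye": (cropped_height // 3, cropped_width // 4),
        }
    # Unsupported annotation types fall through to None (see the warning below).
    return None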
@@ -78,7 +83,9 @@ def embedding_transformer_default_cropping(cropped_image_size, annotation_type):
logger.warning(f"Annotation type {annotation_type} is not supported. Input images will be fully scaled.")
@@ -107,7 +114,10 @@ def legacy_default_cropping(cropped_image_size, annotation_type):
@@ -137,7 +147,9 @@ def legacy_default_cropping(cropped_image_size, annotation_type):
logger.warning(f"Annotation type {annotation_type} is not supported. Input images will be fully scaled.")
@@ -167,7 +179,9 @@ def embedding_transformer(
None if (cropped_positions is None or fixed_positions is not None) else (("annotations", "annotations"),)
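This ternary decides whether the sample's annotations are forwarded to the wrapped cropper: they are passed along only when the crop is annotation-driven, i.e. cropped_positions is set and no fixed_positions override it. A hedged sketch of how that value is typically combined with bob.pipelines.wrap (the helper name is hypothetical; wrap comes from the import in the first hunk):

from bob.pipelines import wrap

def wrap_cropper(face_cropper, cropped_positions=None, fixed_positions=None):
    # Forward annotations only when cropping actually depends on them.
    transform_extra_arguments = (
        None
        if (cropped_positions is None or fixed_positions is not None)
        else (("annotations", "annotations"),)
    )
    return wrap(
        ["sample"],
        face_cropper,
        transform_extra_arguments=transform_extra_arguments,
    )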
@@ -221,7 +235,7 @@ def embedding_transformer_112x112(
@@ -316,7 +330,9 @@ def crop_80x64(annotation_type, fixed_positions=None, color_channel="gray"):
None if (cropped_positions is None or fixed_positions is not None) else (("annotations", "annotations"),)