From 9812c7cdb3538a5ce4650ef69b897345e54c7d89 Mon Sep 17 00:00:00 2001
From: uraid
Date: Mon, 30 Aug 2021 18:00:31 +0300
Subject: [PATCH 1/2] Fix face recognition of rotated images

---
 photonix/classifiers/face/model.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/photonix/classifiers/face/model.py b/photonix/classifiers/face/model.py
index 816bc02d..6fec8538 100644
--- a/photonix/classifiers/face/model.py
+++ b/photonix/classifiers/face/model.py
@@ -17,6 +17,7 @@
 from photonix.classifiers.face.deepface.commons.distance import findEuclideanDistance
 from photonix.classifiers.face.deepface.DeepFace import build_model
 from photonix.photos.utils.redis import redis_connection
+from photonix.photos.utils.metadata import PhotoMetadata
 
 
 GRAPH_FILE = os.path.join('face', 'mtcnn_weights.npy')
@@ -72,6 +73,14 @@ def load_graph(self, graph_file):
     def predict(self, image_file, min_score=0.99):
         # Detects face bounding boxes
         image = Image.open(image_file)
+
+        # Perform rotations if declared in metadata
+        metadata = PhotoMetadata(image_file)
+        if metadata.get('Orientation') in ['Rotate 90 CW', 'Rotate 270 CCW']:
+            image = image.rotate(-90, expand=True)
+        elif metadata.get('Orientation') in ['Rotate 90 CCW', 'Rotate 270 CW']:
+            image = image.rotate(90, expand=True)
+
         image = np.asarray(image)
         results = self.graph['mtcnn'].detect_faces(image)
         return list(filter(lambda f: f['confidence'] > min_score, results))


From b9b64385b5362fc6d23aecc6283ff4eef77541ba Mon Sep 17 00:00:00 2001
From: uraid
Date: Mon, 30 Aug 2021 18:29:36 +0300
Subject: [PATCH 2/2] Fix object recognition of rotated images

---
 photonix/classifiers/object/model.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/photonix/classifiers/object/model.py b/photonix/classifiers/object/model.py
index a77b99c6..f875beb1 100644
--- a/photonix/classifiers/object/model.py
+++ b/photonix/classifiers/object/model.py
@@ -11,6 +11,7 @@
 from photonix.classifiers.object.utils import label_map_util
 from photonix.classifiers.base_model import BaseModel
 from photonix.photos.utils.redis import redis_connection
+from photonix.photos.utils.metadata import PhotoMetadata
 
 
 GRAPH_FILE = os.path.join('object', 'ssd_mobilenet_v2_oid_v4_2018_12_12_frozen_inference_graph.pb')
@@ -115,6 +116,14 @@ def format_output(self, output_dict, min_score):
 
     def predict(self, image_file, min_score=0.1):
         image = Image.open(image_file)
+
+        # Perform rotations if declared in metadata
+        metadata = PhotoMetadata(image_file)
+        if metadata.get('Orientation') in ['Rotate 90 CW', 'Rotate 270 CCW']:
+            image = image.rotate(-90, expand=True)
+        elif metadata.get('Orientation') in ['Rotate 90 CCW', 'Rotate 270 CW']:
+            image = image.rotate(90, expand=True)
+
         # the array based representation of the image will be used later in order to prepare the
         # result image with boxes and labels on it.
         image_np = self.load_image_into_numpy_array(image)
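
Both patches insert the same orientation-handling block into their respective predict() methods. A minimal sketch of how that shared logic could be factored into one helper is below; the function name apply_exif_orientation and its placement are hypothetical (not part of these patches), while PhotoMetadata, the 'Orientation' string values, and Image.rotate(..., expand=True) are taken directly from the added lines above.

from photonix.photos.utils.metadata import PhotoMetadata


def apply_exif_orientation(image, image_file):
    """Rotate an opened PIL image to match the EXIF Orientation tag, if one is set."""
    orientation = PhotoMetadata(image_file).get('Orientation')
    if orientation in ['Rotate 90 CW', 'Rotate 270 CCW']:
        # EXIF "90 CW" corresponds to a -90 degree rotation in PIL
        image = image.rotate(-90, expand=True)
    elif orientation in ['Rotate 90 CCW', 'Rotate 270 CW']:
        image = image.rotate(90, expand=True)
    return image


# Usage inside either predict() method (Image is already imported there):
# image = Image.open(image_file)
# image = apply_exif_orientation(image, image_file)

Calling a single helper from both classifiers keeps the orientation strings and rotation directions defined in one place, so a future fix (for example, handling a 180-degree orientation) would not need to be duplicated.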