
Add images exporting support to the Facenet example and fix logits, input shape and lfw.get_paths errors.
BrunoLopezGarcia committed Aug 2, 2019
1 parent 4a8520f commit 08f6fb9cf2a7f199467d5ed60179fc3ae9140458
@@ -8,12 +8,21 @@ In order to run this example you need to do the following:
1. Install the Facenet library, download and align the LFW faces and download a pretrained Facenet model. You can do that following the next Facenet tutorial: https://github.com/davidsandberg/facenet/wiki/Validate-on-LFW
(Running the validate_on_lfw script is not necessary). The datasets and the models folders must be in the folder of this example.

-2. Change the following line in the facenet_fgsm.py script with the name of the .pb file of the model you downloaded:
+2. Install Pillow: https://pillow.readthedocs.io/en/stable/

+3. Change the following line in the facenet_fgsm.py script with the name of the .pb file of the model you downloaded:
```
model_path = "models/facenet/20170512-110547/20170512-110547.pb"
model_path = "models/facenet/20180402-114759/20180402-114759.pb"
```

-3. Run the script:
+4. Run the script:
```
python facenet_fgsm.py
```

+5. The dimensions of the Facenet models are sometimes changed by their developer, so if the script gives you an error about the input shape dimensions, change this line to use the correct number (one way to check the dimension is shown in the sketch after this list):
+```
+self.victim_embedding_input = tf.placeholder(
+    tf.float32,
+    shape=(None, 512))
+```
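If you are not sure which embedding dimension your downloaded model produces (step 5 above), you can inspect the frozen graph directly. The following is a minimal sketch, not part of the commit, assuming a TensorFlow 1.x environment and that the frozen Facenet model exposes the standard `embeddings:0` output tensor:
```
import tensorflow as tf

model_path = "models/facenet/20180402-114759/20180402-114759.pb"  # adjust to your model

# Parse the frozen graph and look up the static shape of the embedding tensor
with tf.gfile.GFile(model_path, 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())

with tf.Graph().as_default() as graph:
    tf.import_graph_def(graph_def, name='')
    embeddings = graph.get_tensor_by_name("embeddings:0")
    print(embeddings.shape)  # e.g. (?, 512) for 20180402-114759, (?, 128) for older releases
```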
@@ -9,7 +9,7 @@


class InceptionResnetV1Model(Model):
-    model_path = "models/facenet/20170512-110547/20170512-110547.pb"
+    model_path = "models/facenet/20180402-114759/20180402-114759.pb"

    def __init__(self):
        super(InceptionResnetV1Model, self).__init__(scope='model')
@@ -25,7 +25,7 @@ def convert_to_classifier(self):
        # Create victim_embedding placeholder
        self.victim_embedding_input = tf.placeholder(
            tf.float32,
-            shape=(None, 128))
+            shape=(None, 512))

        # Squared Euclidean Distance between embeddings
        distance = tf.reduce_sum(
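For intuition: convert_to_classifier in effect recasts impersonation as a two-class problem, comparing the Facenet embedding of the (possibly adversarial) input face against the victim's embedding with a squared Euclidean distance, where a small distance means "same person". Below is a standalone NumPy illustration of that idea, using an arbitrary example threshold rather than whatever value the script itself uses:
```
import numpy as np

def same_person_score(embedding, victim_embedding, threshold=1.0):
    # Squared Euclidean distance between two Facenet embeddings
    distance = np.sum(np.square(embedding - victim_embedding))
    # Crude two-class output: [p(same person), p(different person)]
    return np.array([1.0, 0.0]) if distance < threshold else np.array([0.0, 1.0])
```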
@@ -46,7 +46,7 @@ def convert_to_classifier(self):
        self.layer_names = []
        self.layers = []
        self.layers.append(self.softmax_output)
-        self.layer_names.append('probs')
+        self.layer_names.append('logits')

    def fprop(self, x, set_ref=False):
        return dict(zip(self.layer_names, self.layers))
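Renaming the layer from 'probs' to 'logits' matters because CleverHans attacks look the output tensor up by name in the dictionary returned by fprop. The snippet below is only an illustration of that lookup convention, not the CleverHans source: if nothing is registered under the key 'logits', the attack has no output to differentiate.
```
def get_logits(model, x):
    # Illustrative lookup: fetch the tensor that fprop() registered as 'logits'
    outputs = model.fprop(x)
    if 'logits' not in outputs:
        raise NotImplementedError("model exposes no layer named 'logits'")
    return outputs['logits']
```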
@@ -60,7 +60,8 @@ def fprop(self, x, set_ref=False):
model.convert_to_classifier()

# Load pairs of faces and their labels in one-hot encoding
-faces1, faces2, labels = set_loader.load_testset(1000)
+size = 100
+faces1, faces2, labels = set_loader.load_testset(size)

# Create victims' embeddings using Facenet itself
graph = tf.get_default_graph()
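The comment above says the victims' embeddings are computed with Facenet itself, but the lines doing so fall outside this hunk. As a rough, hypothetical sketch of that step only: the tensor names and the session argument are assumptions about the standard Facenet frozen graph, not taken from this commit.
```
def compute_victim_embeddings(sess, graph, faces):
    # Hypothetical helper: run the victim faces through the loaded Facenet graph
    faces_input = graph.get_tensor_by_name("input:0")
    embeddings = graph.get_tensor_by_name("embeddings:0")
    phase_train = graph.get_tensor_by_name("phase_train:0")
    return sess.run(embeddings,
                    feed_dict={faces_input: faces, phase_train: False})
```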
@@ -131,3 +132,6 @@ def fprop(self, x, set_ref=False):
      + 'different people faces (impersonation): '
      + str(accuracy * 100)
      + '%')

+# Save images to folder
+set_loader.save_images(adv, faces1, faces2, size)
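After the run, the save_images helper added to set_loader.py (shown below) writes PNGs to images/adversarial/, images/noise/, images/faces1/ and images/faces2/. A quick way to eyeball a result with Pillow; this usage line is an illustration, not part of the commit:
```
from PIL import Image

# Open the first adversarial face written by set_loader.save_images()
Image.open('images/adversarial/0.png').show()
```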
@@ -1,19 +1,42 @@
+import shutil
+import os

import lfw
import facenet

import numpy as np

+from PIL import Image


pairs_path = "datasets/lfw/pairs.txt"
testset_path = "datasets/lfw/lfw_mtcnnpy_160"
-file_extension = 'png'
image_size = 160


+def save_images(adv, faces1, faces2, size):
+    save_images_to_folder(adv, size, 'images/adversarial/')
+    save_images_to_folder(0.5 + (adv - faces1), size, 'images/noise/')
+    save_images_to_folder(faces1, size, 'images/faces1/')
+    save_images_to_folder(faces2, size, 'images/faces2/')


+def save_images_to_folder(images, size, path):
+    if os.path.isdir(path):
+        shutil.rmtree(path)
+    os.makedirs(path)

+    for index in range(images.shape[0]):
+        if index < size:
+            image_array = (np.reshape(images[index], (160, 160, 3))
+                           * 255).astype(np.uint8)
+            Image.fromarray(image_array, 'RGB').save(path + str(index) + '.png')


def load_testset(size):
    # Load images paths and labels
    pairs = lfw.read_pairs(pairs_path)
-    paths, labels = lfw.get_paths(testset_path, pairs, file_extension)
+    paths, labels = lfw.get_paths(testset_path, pairs)

    # Random choice
    permutation = np.random.choice(len(labels), size, replace=False)
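load_testset is truncated by the hunk, but the visible line already shows how the subset is drawn: np.random.choice with replace=False picks `size` distinct pair indices for the subset. A tiny standalone illustration of that sampling step, with toy arrays made up for the example:
```
import numpy as np

labels = np.array([1, 0, 1, 1, 0, 0])  # toy labels, one per face pair
size = 3
permutation = np.random.choice(len(labels), size, replace=False)
print(permutation)          # e.g. [4 0 2], three distinct indices
print(labels[permutation])  # labels of the sampled pairs
```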
