Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixing several issues with library being out of date #1240

Open
wants to merge 8 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -92,3 +92,4 @@ ENV/

# PyCharm project setting
.idea
.DS_Store
171 changes: 100 additions & 71 deletions contributed/batch_represent.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,6 @@
#!/usr/bin/env python
# coding=utf-8

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

"""
Allows you to generate embeddings from a directory of images in the format:

Expand Down Expand Up @@ -41,7 +37,7 @@
The concept is inspired by Brandon Amos' github.com/cmusatyalab/openface/blob/master/batch-represent/batch-represent.lua
"""

#----------------------------------------------------
# ----------------------------------------------------
# MIT License
#
# Copyright (c) 2017 Rakshak Talwar
Expand All @@ -63,86 +59,119 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#----------------------------------------------------
# ----------------------------------------------------

import os
import sys
import argparse
import importlib
import os
import sys
import time

sys.path.insert(1, "../src")
import facenet
import numpy as np
from sklearn.datasets import load_files
import tensorflow as tf
from six.moves import xrange
from sklearn.datasets import load_files

import facenet


def main(args):
    """Generate face embeddings for every image under ``args.data_dir``.

    Loads a trained facenet model, runs a batched forward pass over all
    images discovered by ``sklearn.datasets.load_files`` and writes two
    numpy arrays into ``args.output_dir``:

    * ``gallery.npy``    -- integer class label for each image
    * ``signatures.npy`` -- the corresponding embedding vectors

    Args:
        args: parsed command-line namespace (see ``parse_arguments``):
            data_dir, output_dir, trained_model_dir, batch_size.
    """
    with tf.Graph().as_default():
        with tf.Session() as sess:
            # Create the output directory if it doesn't exist yet.
            output_dir = os.path.expanduser(args.output_dir)
            if not os.path.isdir(output_dir):
                os.makedirs(output_dir)

            # Load the trained model checkpoint into the default graph.
            print("Loading trained model...\n")
            meta_file, ckpt_file = facenet.get_model_filenames(
                os.path.expanduser(args.trained_model_dir)
            )
            facenet.load_model(args.trained_model_dir, meta_file, ckpt_file)

            # Collect all image paths and their class labels
            # (load_content=False: we only need filenames, not file bytes).
            print("Finding image paths and targets...\n")
            data = load_files(args.data_dir, load_content=False, shuffle=False)
            labels_array = data["target"]
            paths = data["filenames"]

            # Look up the model's input/output tensors by their graph names.
            images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
            embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
            phase_train_placeholder = tf.get_default_graph().get_tensor_by_name(
                "phase_train:0"
            )

            image_size = images_placeholder.get_shape()[1]
            embedding_size = embeddings.get_shape()[1]

            # Run the forward pass in batches to bound memory usage.
            print("Generating embeddings from images...\n")
            start_time = time.time()
            batch_size = args.batch_size
            nrof_images = len(paths)
            nrof_batches = int(np.ceil(1.0 * nrof_images / batch_size))
            emb_array = np.zeros((nrof_images, embedding_size))
            for i in range(nrof_batches):
                start_index = i * batch_size
                # The final batch may be smaller than batch_size.
                end_index = min((i + 1) * batch_size, nrof_images)
                paths_batch = paths[start_index:end_index]
                images = facenet.load_data(
                    paths_batch,
                    do_random_crop=False,
                    do_random_flip=False,
                    image_size=image_size,
                    do_prewhiten=True,
                )
                # phase_train=False: run the network in inference mode.
                feed_dict = {images_placeholder: images, phase_train_placeholder: False}
                emb_array[start_index:end_index, :] = sess.run(
                    embeddings, feed_dict=feed_dict
                )

            time_avg_forward_pass = (time.time() - start_time) / float(nrof_images)
            print(
                "Forward pass took avg of %.3f[seconds/image] for %d images\n"
                % (time_avg_forward_pass, nrof_images)
            )

            print("Finally saving embeddings and gallery to: %s" % (output_dir))
            # Save the gallery (labels) and embeddings (signatures) to disk.
            np.save(os.path.join(output_dir, "gallery.npy"), labels_array)
            np.save(os.path.join(output_dir, "signatures.npy"), emb_array)

def parse_arguments(argv):
    """Parse command-line arguments for batch embedding generation.

    Args:
        argv: list of argument strings, typically ``sys.argv[1:]``.

    Returns:
        argparse.Namespace with ``data_dir``, ``output_dir``,
        ``trained_model_dir`` and ``batch_size`` attributes.
    """
    parser = argparse.ArgumentParser(
        description="Batch-represent face embeddings from a given data directory"
    )
    parser.add_argument(
        "-d",
        "--data_dir",
        type=str,
        help="directory of images with structure as seen at the top of this file.",
    )
    parser.add_argument(
        "-o",
        "--output_dir",
        type=str,
        help="directory containing aligned face patches with file structure as seen at the top of this file.",
    )
    parser.add_argument(
        "--trained_model_dir",
        type=str,
        help="Load a trained model before training starts.",
    )
    parser.add_argument(
        "--batch_size",
        type=int,
        help="Number of images to process in a batch.",
        default=50,
    )

    return parser.parse_args(argv)


if __name__ == "__main__":
    # Script entry point: parse CLI args and run the embedding pass once.
    main(parse_arguments(sys.argv[1:]))
Loading