import os
import re
import time

import dlib
import numpy as np
from flask import Blueprint, request, jsonify
from PIL import Image  # todo: try to remove PIL dependency

from app.processors import face_recognition
from app.processors import face_detector
from app.processors.faiss import load_faiss_databases
from app.models.sql_factory import load_sql_datasets, list_datasets, get_dataset, get_table
from app.utils.im_utils import pil2np

sanitize_re = re.compile(r'[\W]+')
valid_exts = ['.gif', '.jpg', '.jpeg', '.png']

api = Blueprint('api', __name__)
faiss_datasets = load_faiss_databases()


@api.route('/')
def index():
    return jsonify({'datasets': list_datasets()})


@api.route('/dataset/<name>')
def show(name):
    dataset = get_dataset(name)
    if dataset:
        return jsonify(dataset.describe())
    else:
        return jsonify({'status': 404})


@api.route('/dataset/<name>/face/', methods=['POST'])
def upload(name):
    start = time.time()

    dataset = get_dataset(name)
    if name not in faiss_datasets:
        return jsonify({'error': 'invalid dataset'})
    faiss_dataset = faiss_datasets[name]

    file = request.files['query_img']
    fn = file.filename
    if fn.endswith('blob'):
        fn = 'filename.jpg'
    basename, ext = os.path.splitext(fn)
    print("got {}, type {}".format(basename, ext))
    if ext.lower() not in valid_exts:
        return jsonify({'error': 'not an image'})

    im = Image.open(file.stream).convert('RGB')
    im_np = pil2np(im)

    # Face detection
    detector = face_detector.DetectorDLIBHOG()
    # get the largest detection as a BBox object
    bboxes = detector.detect(im_np, largest=True)
    if not bboxes:
        return jsonify({'error': 'no face detected'})
    bbox = bboxes[0]
    dim = im_np.shape[:2][::-1]
    bbox = bbox.to_dim(dim)  # convert back to real dimensions

    # face recognition: embed the face as a feature vector
    recognition = face_recognition.RecognitionDLIB(gpu=-1)
    vec = recognition.vec(im_np, bbox)
    # print(vec)
    query = np.array([vec]).astype('float32')

    # query FAISS for the 10 nearest neighbors
    distances, indexes = faiss_dataset.search(query, 10)
    if len(indexes) == 0:
        print("weird, no results!")
        return jsonify({'results': []})

    # get the results for this single query...
    distances = distances[0]
    indexes = indexes[0]
    if len(indexes) == 0:
        print("no results!")
        return jsonify({'results': []})

    # FAISS indexes are zero-based; SQL identity ids start at 1
    ids = [int(i) + 1 for i in indexes]
    lookup = {}
    for _d, _i in zip(distances, indexes):
        lookup[_i + 1] = _d
    print(distances)
    print(indexes)

    # with the result we have an ID:
    # query the sql dataset for the UUID etc here
    query = {
        'timing': time.time() - start,
    }
    results = [dataset.get_identity(id) for id in ids]
    print(results)

    return jsonify({
        'query': query,
        'results': results,
        'distances': distances.tolist(),
        'indexes': indexes.tolist(),
    })


@api.route('/dataset/<name>/name', methods=['GET'])
def name_lookup(name):
    start = time.time()
    dataset = get_dataset(name)

    # we have a query from the request query string...
    # use this to do a like* query on the identities_meta table
    query = {
        'timing': time.time() - start,
    }
    results = []
    print(results)

    return jsonify({
        'query': query,
        'results': results,
    })
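

# A minimal client-side sketch of how the face-search endpoint above could be
# exercised. Illustrative only: the host/port, the dataset name 'example', the
# image path, and the availability of the `requests` library are assumptions,
# not part of this module.
if __name__ == '__main__':
    import requests

    # post an image file under the field name the endpoint expects ('query_img')
    with open('query_face.jpg', 'rb') as fp:
        resp = requests.post(
            'http://localhost:5000/dataset/example/face/',
            files={'query_img': ('query_face.jpg', fp, 'image/jpeg')},
        )
    print(resp.json())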