summary refs log tree commit diff
path: root/megapixels/app/server
diff options
context:
space:
mode:
author    Jules Laplace <julescarbon@gmail.com>  2018-12-17 00:35:19 +0100
committer Jules Laplace <julescarbon@gmail.com>  2018-12-17 00:35:19 +0100
commit4cf8581655c34698f8869bb364b6d436b881d17a (patch)
tree449d6c5a8cd5a3f5bbd277e67f66a734ae0f51c8 /megapixels/app/server
parent0bbaef7c889f2bf17cdf7e4584a6946085d0a7eb (diff)
returning results...!
Diffstat (limited to 'megapixels/app/server')
-rw-r--r--  megapixels/app/server/api.py           53
-rw-r--r--  megapixels/app/server/json_encoder.py  17
2 files changed, 61 insertions, 9 deletions
diff --git a/megapixels/app/server/api.py b/megapixels/app/server/api.py
index cf8241bd..36563910 100644
--- a/megapixels/app/server/api.py
+++ b/megapixels/app/server/api.py
@@ -2,18 +2,23 @@ import os
import re
import time
import dlib
+import numpy as np
from flask import Blueprint, request, jsonify
from PIL import Image # todo: try to remove PIL dependency
from app.processors import face_recognition
from app.processors import face_detector
-from app.models.sql_factory import list_datasets, get_dataset, get_table
+from app.processors.faiss import load_faiss_databases
+from app.models.sql_factory import load_sql_datasets, list_datasets, get_dataset, get_table
+from app.utils.im_utils import pil2np
sanitize_re = re.compile('[\W]+')
valid_exts = ['.gif', '.jpg', '.jpeg', '.png']
api = Blueprint('api', __name__)
+faiss_datasets = load_faiss_databases()
+
@api.route('/')
def index():
return jsonify({ 'datasets': list_datasets() })
@@ -26,10 +31,15 @@ def show(name):
else:
return jsonify({ 'status': 404 })
-@api.route('/dataset/<name>/face', methods=['POST'])
+@api.route('/dataset/<name>/face/', methods=['POST'])
def upload(name):
start = time.time()
dataset = get_dataset(name)
+ if name not in faiss_datasets:
+ return jsonify({
+ 'error': 'invalid dataset'
+ })
+ faiss_dataset = faiss_datasets[name]
file = request.files['query_img']
fn = file.filename
if fn.endswith('blob'):
@@ -40,22 +50,46 @@ def upload(name):
if ext.lower() not in valid_exts:
return jsonify({ 'error': 'not an image' })
- img = Image.open(file.stream).convert('RGB')
+ im = Image.open(file.stream).convert('RGB')
+ im_np = pil2np(im)
# Face detection
detector = face_detector.DetectorDLIBHOG()
# get detection as BBox object
- bboxes = detector.detect(im, largest=True)
+ bboxes = detector.detect(im_np, largest=True)
bbox = bboxes[0]
- dim = im.shape[:2][::-1]
+ dim = im_np.shape[:2][::-1]
bbox = bbox.to_dim(dim) # convert back to real dimensions
# face recognition/vector
recognition = face_recognition.RecognitionDLIB(gpu=-1)
+ vec = recognition.vec(im_np, bbox)
+
+ # print(vec)
+ query = np.array([ vec ]).astype('float32')
+
+ # query FAISS!
+ distances, indexes = faiss_dataset.search(query, 5)
+
+ if len(indexes) == 0:
+ print("weird, no results!")
+ return []
+
+ # get the results for this single query...
+ distances = distances[0]
+ indexes = indexes[0]
- # print(vec.shape)
- # results = db.search(vec, limit=limit)
+ if len(indexes) == 0:
+ print("no results!")
+ return []
+
+ lookup = {}
+ for _d, _i in zip(distances, indexes):
+ lookup[_i+1] = _d
+
+ print(distances)
+ print(indexes)
# with the result we have an ID
# query the sql dataset for the UUID etc here
@@ -63,12 +97,13 @@ def upload(name):
query = {
'timing': time.time() - start,
}
- results = []
+ results = [ dataset.get_identity(index) for index in indexes ]
print(results)
return jsonify({
- 'query': query,
'results': results,
+ # 'distances': distances.tolist(),
+ # 'indexes': indexes.tolist(),
})
@api.route('/dataset/<name>/name', methods=['GET'])
diff --git a/megapixels/app/server/json_encoder.py b/megapixels/app/server/json_encoder.py
new file mode 100644
index 00000000..89af578a
--- /dev/null
+++ b/megapixels/app/server/json_encoder.py
@@ -0,0 +1,17 @@
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from flask import json
+
+class AlchemyEncoder(json.JSONEncoder):
+ def default(self, o):
+ if isinstance(o.__class__, DeclarativeMeta):
+ data = {}
+ fields = o.__json__() if hasattr(o, '__json__') else dir(o)
+ for field in [f for f in fields if not f.startswith('_') and f not in ['metadata', 'query', 'query_class']]:
+ value = o.__getattribute__(field)
+ try:
+ json.dumps(value)
+ data[field] = value
+ except TypeError:
+ data[field] = None
+ return data
+ return json.JSONEncoder.default(self, o)