summaryrefslogtreecommitdiff
path: root/megapixels/app/server
diff options
context:
space:
mode:
authorAdam Harvey <adam@ahprojects.com>2018-12-23 01:37:03 +0100
committerAdam Harvey <adam@ahprojects.com>2018-12-23 01:37:03 +0100
commit4452e02e8b04f3476273574a875bb60cfbb4568b (patch)
tree3ffa44f9621b736250a8b94da14a187dc785c2fe /megapixels/app/server
parent2a65f7a157bd4bace970cef73529867b0e0a374d (diff)
parent5340bee951c18910fd764241945f1f136b5a22b4 (diff)
.
Diffstat (limited to 'megapixels/app/server')
-rw-r--r--megapixels/app/server/api.py152
-rw-r--r--megapixels/app/server/create.py49
-rw-r--r--megapixels/app/server/json_encoder.py17
l---------megapixels/app/server/static1
4 files changed, 219 insertions, 0 deletions
diff --git a/megapixels/app/server/api.py b/megapixels/app/server/api.py
new file mode 100644
index 00000000..35862837
--- /dev/null
+++ b/megapixels/app/server/api.py
@@ -0,0 +1,152 @@
+import os
+import re
+import time
+import dlib
+import numpy as np
+from flask import Blueprint, request, jsonify
+from PIL import Image # todo: try to remove PIL dependency
+
+from app.processors import face_recognition
+from app.processors import face_detector
+from app.processors.faiss import load_faiss_databases
+from app.models.sql_factory import load_sql_datasets, list_datasets, get_dataset, get_table
+from app.utils.im_utils import pil2np
+
# Strips runs of non-word characters from user-supplied strings.
# NOTE(review): unused within this module — possibly imported elsewhere; confirm before removing.
sanitize_re = re.compile('[\W]+')
# Upload extensions accepted by the /face endpoint (checked lowercase).
valid_exts = ['.gif', '.jpg', '.jpeg', '.png']

# Maximum number of nearest neighbors requested from FAISS per query.
LIMIT = 9
# Maximum FAISS distance for a neighbor to count as a match.
THRESHOLD = 0.3

# All routes in this module are mounted under this blueprint
# (registered with url_prefix='/api' in create_app).
api = Blueprint('api', __name__)

# Loaded once at import time: maps dataset name -> FAISS index.
faiss_datasets = load_faiss_databases()
+
@api.route('/')
def index():
    """Top-level API endpoint: enumerate the available datasets and their fields."""
    payload = {'datasets': list_datasets()}
    return jsonify(payload)
+
+
@api.route('/dataset/<dataset_name>')
def show(dataset_name):
    """Describe a single dataset, or return a 404 payload when it is unknown."""
    dataset = get_dataset(dataset_name)
    if not dataset:
        return jsonify({'status': 404})
    return jsonify(dataset.describe())
+
+
@api.route('/dataset/<dataset_name>/face', methods=['POST'])
def upload(dataset_name):
    """Query an uploaded face image against FAISS and return matching identities.

    Expects a multipart POST with the image in the ``query_img`` field.
    Returns JSON with ``query`` metadata (timing, bbox), matched identity
    ``results``, and their thresholded ``distances`` — or an ``error`` key
    describing the failure.
    """
    start = time.time()

    dataset = get_dataset(dataset_name)
    # Guard both lookups: a FAISS index without its SQL dataset (or vice
    # versa) would otherwise crash later at dataset.get_identity().
    if dataset is None or dataset_name not in faiss_datasets:
        return jsonify({'error': 'invalid dataset'})
    faiss_dataset = faiss_datasets[dataset_name]

    # .get() instead of indexing: a missing field must not raise KeyError.
    upload_file = request.files.get('query_img')
    if upload_file is None:
        return jsonify({'error': 'not an image'})
    fn = upload_file.filename
    # Browsers submitting canvas/camera captures often name the part 'blob'.
    if fn.endswith('blob'):
        fn = 'filename.jpg'

    _, ext = os.path.splitext(fn)
    if ext.lower() not in valid_exts:
        return jsonify({'error': 'not an image'})

    im = Image.open(upload_file.stream).convert('RGB')
    im_np = pil2np(im)

    # Face detection; largest=True keeps only the biggest face in the frame.
    detector = face_detector.DetectorDLIBHOG()
    bboxes = detector.detect(im_np, largest=True)
    if not bboxes or not bboxes[0]:
        return jsonify({'error': 'bbox'})

    # Convert the bbox back to pixel coordinates; shape[:2][::-1] is (w, h).
    dim = im_np.shape[:2][::-1]
    bbox = bboxes[0].to_dim(dim)
    if not bbox:
        return jsonify({'error': 'bbox'})

    # Extract the 128-D face descriptor (gpu=-1 forces CPU).
    recognition = face_recognition.RecognitionDLIB(gpu=-1)
    vec = recognition.vec(im_np, bbox)
    query_vec = np.array([vec]).astype('float32')

    # Nearest-neighbor search; one query row in, one result row out.
    distances, indexes = faiss_dataset.search(query_vec, LIMIT)
    if len(indexes) == 0 or len(indexes[0]) == 0:
        return jsonify({'error': 'nomatch'})

    dists = []
    ids = []
    for dist, idx in zip(distances[0], indexes[0]):
        if dist <= THRESHOLD:
            dists.append(round(float(dist), 2))
            # FAISS rows are 0-based; identity ids are offset by 1
            # (presumably 1-based SQL primary keys — TODO confirm).
            ids.append(idx + 1)

    results = [dataset.get_identity(int(i)) for i in ids]

    query = {
        'timing': round(time.time() - start, 3),
        'bbox': str(bbox),
    }
    return jsonify({
        'query': query,
        'results': results,
        'distances': dists,
    })
+
+
@api.route('/dataset/<dataset_name>/name', methods=['GET', 'POST'])
def name_lookup(dataset_name):
    """Prefix-search identity names within a dataset.

    Reads the search term from the ``q`` query-string parameter.
    NOTE(review): POST bodies are ignored — ``q`` comes from request.args
    only; confirm whether POST support is actually needed.
    """
    start = time.time()
    dataset = get_dataset(dataset_name)
    # Fix: previously an unknown dataset_name raised AttributeError on None.
    if dataset is None:
        return jsonify({'error': 'invalid dataset'})

    q = request.args.get('q')
    # SQL LIKE prefix match; missing/empty q yields no results.
    results = dataset.search_name(q + '%') if q else None

    query = {
        'q': q,
        # rounded for consistency with the /face endpoint
        'timing': round(time.time() - start, 3),
    }
    return jsonify({
        'query': query,
        'results': results,
    })
diff --git a/megapixels/app/server/create.py b/megapixels/app/server/create.py
new file mode 100644
index 00000000..4b1333b9
--- /dev/null
+++ b/megapixels/app/server/create.py
@@ -0,0 +1,49 @@
+from flask import Flask, Blueprint, jsonify, send_from_directory
+from flask_sqlalchemy import SQLAlchemy
+from app.models.sql_factory import connection_url, load_sql_datasets
+
+from app.server.api import api
+
# Single shared SQLAlchemy handle; bound to the app inside create_app().
db = SQLAlchemy()
+
def create_app(script_info=None):
    """Application factory: build and configure the Flask app.

    Wires up SQLAlchemy, loads the SQL-backed dataset models, registers
    the JSON API blueprint under /api, and serves the static site from
    the ``static`` symlink.
    """
    app = Flask(__name__, static_folder='static', static_url_path='')
    app.config['SQLALCHEMY_DATABASE_URI'] = connection_url
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

    db.init_app(app)
    # Called for its side effect of registering dataset models on db.Model;
    # the return value was previously bound to an unused local.
    load_sql_datasets(replace=False, base_model=db.Model)

    app.register_blueprint(api, url_prefix='/api')
    # Catch-all static route; must not shadow the blueprint prefix above.
    app.add_url_rule('/<path:file_relative_path_to_root>', 'serve_page',
                     serve_page, methods=['GET'])

    @app.route('/', methods=['GET'])
    def index():
        # Front page built by the site generator.
        return app.send_static_file('index.html')

    @app.shell_context_processor
    def shell_context():
        # Convenience objects available in `flask shell`.
        return {'app': app, 'db': db}

    @app.route("/site-map")
    def site_map():
        """Debug aid: list every registered endpoint name."""
        links = [rule.endpoint for rule in app.url_map.iter_rules()]
        return jsonify(links)

    return app
+
def serve_page(file_relative_path_to_root):
    """Serve a static file, mapping directory paths ("/path/") to their index.html.

    Fix: use endswith('/') instead of indexing [-1], which raised
    IndexError on an empty path.
    """
    if file_relative_path_to_root.endswith('/'):
        file_relative_path_to_root += 'index.html'
    return send_from_directory("static", file_relative_path_to_root)
diff --git a/megapixels/app/server/json_encoder.py b/megapixels/app/server/json_encoder.py
new file mode 100644
index 00000000..89af578a
--- /dev/null
+++ b/megapixels/app/server/json_encoder.py
@@ -0,0 +1,17 @@
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from flask import json
+
class AlchemyEncoder(json.JSONEncoder):
    """JSON encoder for SQLAlchemy declarative model instances.

    Field names come from the model's __json__() list when defined,
    otherwise from dir(); private names and SQLAlchemy bookkeeping
    attributes are skipped, and unserializable values become null.
    """

    def default(self, o):
        if not isinstance(o.__class__, DeclarativeMeta):
            # Not a SQLAlchemy model: defer to the stock encoder.
            return json.JSONEncoder.default(self, o)

        candidates = o.__json__() if hasattr(o, '__json__') else dir(o)
        skipped = ('metadata', 'query', 'query_class')
        data = {}
        for name in candidates:
            if name.startswith('_') or name in skipped:
                continue
            value = getattr(o, name)
            try:
                # Probe serializability before committing the value.
                json.dumps(value)
            except TypeError:
                data[name] = None
            else:
                data[name] = value
        return data
diff --git a/megapixels/app/server/static b/megapixels/app/server/static
new file mode 120000
index 00000000..1dc7a639
--- /dev/null
+++ b/megapixels/app/server/static
@@ -0,0 +1 @@
+../../../site/public \ No newline at end of file