From 1c82f7ec6a603978322e16470547731e92e947c6 Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Mon, 17 Dec 2018 20:15:41 +0100
Subject: adding verbiage and timing

---
 megapixels/app/models/sql_factory.py |  2 +-
 megapixels/app/server/api.py         | 62 +++++++++++++++++++++---------------
 megapixels/app/site/parser.py        |  2 +-
 3 files changed, 39 insertions(+), 27 deletions(-)

(limited to 'megapixels')

diff --git a/megapixels/app/models/sql_factory.py b/megapixels/app/models/sql_factory.py
index 9a44941b..02b722df 100644
--- a/megapixels/app/models/sql_factory.py
+++ b/megapixels/app/models/sql_factory.py
@@ -98,7 +98,7 @@ class SqlDataset:
       return None
     session = Session()
     # for obj in session.query(table).filter_by(id=id):
-    print(table)
+    # print(table)
     obj = session.query(table).filter(table.id == id).first()
     session.close()
     return obj.toJSON()
diff --git a/megapixels/app/server/api.py b/megapixels/app/server/api.py
index bc60118c..b3447eb1 100644
--- a/megapixels/app/server/api.py
+++ b/megapixels/app/server/api.py
@@ -15,24 +15,32 @@ from app.utils.im_utils import pil2np
 sanitize_re = re.compile('[\W]+')
 valid_exts = ['.gif', '.jpg', '.jpeg', '.png']
 
+LIMIT = 9
+THRESHOLD = 0.3
+
 api = Blueprint('api', __name__)
 faiss_datasets = load_faiss_databases()
 
 @api.route('/')
 def index():
+  """List the datasets and their fields"""
   return jsonify({ 'datasets': list_datasets() })
 
+
 @api.route('/dataset/<name>')
 def show(name):
+  """Show the data that a dataset will return"""
   dataset = get_dataset(name)
   if dataset:
     return jsonify(dataset.describe())
   else:
     return jsonify({ 'status': 404 })
 
+
 @api.route('/dataset/<name>/face/', methods=['POST'])
 def upload(name):
+  """Query an image against FAISS and return the matching identities"""
   start = time.time()
   dataset = get_dataset(name)
   if name not in faiss_datasets:
@@ -52,31 +60,39 @@ def upload(name):
   im = Image.open(file.stream).convert('RGB')
   im_np = pil2np(im)
-
+  # Face detection
   detector = face_detector.DetectorDLIBHOG()
   # get detection as BBox object
   bboxes = detector.detect(im_np, largest=True)
-  if not len(bboxes):
+  if not bboxes or not len(bboxes):
     return jsonify({
      'error': 'bbox'
     })
 
   bbox = bboxes[0]
+  if not bbox:
+    return jsonify({
+      'error': 'bbox'
+    })
+
   dim = im_np.shape[:2][::-1]
   bbox = bbox.to_dim(dim) # convert back to real dimensions
+  print("got bbox")
+  if not bbox:
+    return jsonify({
+      'error': 'bbox'
+    })
 
-  # face recognition/vector
+  # extract 128-D vector
   recognition = face_recognition.RecognitionDLIB(gpu=-1)
   vec = recognition.vec(im_np, bbox)
-
-  # print(vec)
   query = np.array([ vec ]).astype('float32')
 
-  # query FAISS!
-  distances, indexes = faiss_dataset.search(query, 10)
+  # query FAISS
+  distances, indexes = faiss_dataset.search(query, LIMIT)
 
-  if len(indexes) == 0:
+  if len(indexes) == 0 or len(indexes[0]) == 0:
     return jsonify({
      'error': 'nomatch'
     })
 
@@ -85,36 +101,32 @@ def upload(name):
   distances = distances[0]
   indexes = indexes[0]
 
-  if len(indexes) == 0:
-    return jsonify({
-      'error': 'nomatch'
-    })
-
-  lookup = {}
-  ids = [i+1 for i in indexes]
+  dists = []
+  ids = []
   for _d, _i in zip(distances, indexes):
-    lookup[_i+1] = _d
+    if _d <= THRESHOLD:
+      dists.append(round(float(_d), 2))
+      ids.append(_i+1)
 
-  print(distances)
-  print(indexes)
+  results = [ dataset.get_identity(_i) for _i in ids ]
 
-  # with the result we have an ID
-  # query the sql dataset for the UUID etc here
+  print(distances)
+  print(ids)
 
   query = {
-    'timing': time.time() - start,
+    'timing': round(time.time() - start, 3),
   }
 
-  results = [ dataset.get_identity(id) for id in ids ]
-  print(results)
   return jsonify({
+    'query': query,
     'results': results,
-    'distances': distances.tolist(),
-    'indexes': indexes.tolist(),
+    'distances': dists,
   })
 
+
 @api.route('/dataset/<dataset>/name', methods=['GET'])
 def name_lookup(dataset):
+  """Find a name in the dataset"""
   start = time.time()
   dataset = get_dataset(name)
diff --git a/megapixels/app/site/parser.py b/megapixels/app/site/parser.py
index ecfae0cb..b3d3a8c2 100644
--- a/megapixels/app/site/parser.py
+++ b/megapixels/app/site/parser.py
@@ -64,7 +64,7 @@ def format_applet(section, s3_path):
   else:
     command = payload[0]
     opt = None
-    if command == 'python':
+    if command == 'python' or command == 'javascript' or command == 'code':
       return format_section([ section ], s3_path)
     applet['command'] = command
-- 
cgit v1.2.3-70-g09d2
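
Note on the FAISS query path above: the endpoint searches the index for the LIMIT nearest neighbours of the uploaded face's 128-D embedding, then keeps only matches whose distance is at or below THRESHOLD before looking up identities in SQL. The following is a minimal, self-contained sketch of that search-and-filter pattern; the flat L2 index type, the random placeholder vectors, and the 0-based-row to 1-based-id shift are illustrative assumptions, not code from the megapixels repository.

    import faiss
    import numpy as np

    LIMIT = 9        # number of nearest neighbours to request
    THRESHOLD = 0.3  # keep only matches at or below this distance

    d = 128                                       # dlib face embeddings are 128-D
    index = faiss.IndexFlatL2(d)                  # exact L2 search, no training step (assumed index type)
    index.add(np.random.rand(1000, d).astype('float32'))  # placeholder database vectors

    vec = np.random.rand(d)                       # stand-in for recognition.vec(im_np, bbox)
    query = np.array([ vec ]).astype('float32')   # FAISS expects a 2-D float32 array

    distances, indexes = index.search(query, LIMIT)  # both returned arrays have shape (1, LIMIT)

    dists, ids = [], []
    for _d, _i in zip(distances[0], indexes[0]):
      if _d <= THRESHOLD:
        dists.append(round(float(_d), 2))
        ids.append(int(_i) + 1)                   # FAISS rows are 0-based; SQL ids start at 1

    print(dists, ids)

Filtering by distance rather than returning every hit is what lets the endpoint come back empty for faces that are not in the dataset, instead of always reporting the nine nearest (possibly unrelated) identities.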