Diffstat (limited to 'megapixels/app/server/api.py')
-rw-r--r--   megapixels/app/server/api.py   105
1 file changed, 62 insertions, 43 deletions
diff --git a/megapixels/app/server/api.py b/megapixels/app/server/api.py
index bc60118c..35862837 100644
--- a/megapixels/app/server/api.py
+++ b/megapixels/app/server/api.py
@@ -15,68 +15,84 @@ from app.utils.im_utils import pil2np
 sanitize_re = re.compile('[\W]+')
 valid_exts = ['.gif', '.jpg', '.jpeg', '.png']
+LIMIT = 9
+THRESHOLD = 0.3
+
 api = Blueprint('api', __name__)
 faiss_datasets = load_faiss_databases()
 
 @api.route('/')
 def index():
+    """List the datasets and their fields"""
     return jsonify({
         'datasets': list_datasets()
     })
 
-@api.route('/dataset/<name>')
-def show(name):
-    dataset = get_dataset(name)
+
+@api.route('/dataset/<dataset_name>')
+def show(dataset_name):
+    """Show the data that a dataset will return"""
+    dataset = get_dataset(dataset_name)
     if dataset:
         return jsonify(dataset.describe())
     else:
         return jsonify({
             'status': 404
         })
 
-@api.route('/dataset/<name>/face/', methods=['POST'])
-def upload(name):
+
+@api.route('/dataset/<dataset_name>/face', methods=['POST'])
+def upload(dataset_name):
+    """Query an image against FAISS and return the matching identities"""
     start = time.time()
-    dataset = get_dataset(name)
-    if name not in faiss_datasets:
+    dataset = get_dataset(dataset_name)
+    if dataset_name not in faiss_datasets:
         return jsonify({
             'error': 'invalid dataset'
         })
-    faiss_dataset = faiss_datasets[name]
+    faiss_dataset = faiss_datasets[dataset_name]
 
     file = request.files['query_img']
     fn = file.filename
    if fn.endswith('blob'):
         fn = 'filename.jpg'
     basename, ext = os.path.splitext(fn)
-    print("got {}, type {}".format(basename, ext))
+    # print("got {}, type {}".format(basename, ext))
     if ext.lower() not in valid_exts:
         return jsonify({
             'error': 'not an image'
         })
 
     im = Image.open(file.stream).convert('RGB')
     im_np = pil2np(im)
-
+
+    # Face detection
     detector = face_detector.DetectorDLIBHOG()
     # get detection as BBox object
     bboxes = detector.detect(im_np, largest=True)
-    if not len(bboxes):
+    if not bboxes or not len(bboxes):
         return jsonify({
             'error': 'bbox'
         })
 
     bbox = bboxes[0]
+    if not bbox:
+        return jsonify({
+            'error': 'bbox'
+        })
+
     dim = im_np.shape[:2][::-1]
     bbox = bbox.to_dim(dim)  # convert back to real dimensions
+    # print("got bbox")
+    if not bbox:
+        return jsonify({
+            'error': 'bbox'
+        })
 
-    # face recognition/vector
+    # extract 128-D vector
     recognition = face_recognition.RecognitionDLIB(gpu=-1)
     vec = recognition.vec(im_np, bbox)
-
-    # print(vec)
     query = np.array([ vec ]).astype('float32')
 
-    # query FAISS!
-    distances, indexes = faiss_dataset.search(query, 10)
+    # query FAISS
+    distances, indexes = faiss_dataset.search(query, LIMIT)
 
-    if len(indexes) == 0:
+    if len(indexes) == 0 or len(indexes[0]) == 0:
         return jsonify({
             'error': 'nomatch'
         })
@@ -85,48 +101,51 @@ def upload(name):
 
     distances = distances[0]
     indexes = indexes[0]
 
-    if len(indexes) == 0:
-        return jsonify({
-            'error': 'nomatch'
-        })
-
-    lookup = {}
-    ids = [i+1 for i in indexes]
+    dists = []
+    ids = []
     for _d, _i in zip(distances, indexes):
-        lookup[_i+1] = _d
+        if _d <= THRESHOLD:
+            dists.append(round(float(_d), 2))
+            ids.append(_i+1)
+
+    results = [ dataset.get_identity(int(_i)) for _i in ids ]
 
-    print(distances)
-    print(indexes)
+    # print(distances)
+    # print(ids)
 
-    # with the result we have an ID
-    # query the sql dataset for the UUID etc here
+    # 'bbox': str(bboxes[0]),
+    # 'bbox_dim': str(bbox),
+    # print(bboxes[0])
+    # print(bbox)
     query = {
-        'timing': time.time() - start,
+        'timing': round(time.time() - start, 3),
+        'bbox': str(bbox),
     }
 
-    results = [ dataset.get_identity(id) for id in ids ]
-
-    print(results)
+    # print(results)
 
     return jsonify({
+        'query': query,
         'results': results,
-        'distances': distances.tolist(),
-        'indexes': indexes.tolist(),
+        'distances': dists,
     })
 
-@api.route('/dataset/<name>/name', methods=['GET'])
-def name_lookup(dataset):
+
+@api.route('/dataset/<dataset_name>/name', methods=['GET','POST'])
+def name_lookup(dataset_name):
+    """Find a name in the dataset"""
     start = time.time()
-    dataset = get_dataset(name)
+    dataset = get_dataset(dataset_name)
 
-    # we have a query from the request query string...
-    # use this to do a like* query on the identities_meta table
+    q = request.args.get('q')
+    # print(q)
     query = {
+        'q': q,
        'timing': time.time() - start,
     }
 
-    results = []
-
-    print(results)
+    results = dataset.search_name(q + '%') if q else None
+
+    # print(results)
 
     return jsonify({
         'query': query,
         'results': results,
     })
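Note: the retrieval logic this commit settles on is the standard FAISS pattern: search(query, k) returns parallel arrays of distances and row indexes, which are then filtered against a distance cutoff. Below is a minimal standalone sketch of that pattern; only LIMIT, THRESHOLD, the 1-based id shift, and the 128-D dlib descriptor size come from the code above, while the index type and the random vectors are illustrative assumptions (the app's real index is built elsewhere by load_faiss_databases()).

import numpy as np
import faiss

LIMIT = 9        # top-k neighbours to fetch, as in the patch
THRESHOLD = 0.3  # maximum distance to count as a match, as in the patch

d = 128                                            # dlib face descriptors are 128-D
rng = np.random.default_rng(0)
db_vecs = rng.random((1000, d), dtype=np.float32)  # stand-in for the dataset's vectors

index = faiss.IndexFlatL2(d)   # exact L2 index; the app's actual index type isn't shown here
index.add(db_vecs)

query = db_vecs[:1] + 0.001    # stand-in for the vector from RecognitionDLIB
distances, indexes = index.search(query, LIMIT)

# Same filtering the patch adds: keep only hits within THRESHOLD,
# shifting FAISS's 0-based row ids to the 1-based ids the DB uses.
dists, ids = [], []
for _d, _i in zip(distances[0], indexes[0]):
    if _d <= THRESHOLD:
        dists.append(round(float(_d), 2))
        ids.append(int(_i) + 1)

print(ids, dists)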

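For reference, sketched client-side calls against the two reworked endpoints. The host, port, and dataset name are assumptions for illustration; the routes, the 'query_img' multipart field, the 'q' parameter, and the response keys come from the handlers in the diff.

import requests

BASE = 'http://localhost:5000'   # assumed host/port; the blueprint's mount prefix may differ
DATASET = 'example'              # hypothetical dataset name

# Face search: the image must be sent as multipart field 'query_img'
with open('face.jpg', 'rb') as f:
    r = requests.post(f'{BASE}/dataset/{DATASET}/face', files={'query_img': f})
print(r.json())  # {'query': {'timing': ..., 'bbox': ...}, 'results': [...], 'distances': [...]}

# Name lookup: 'q' is treated as a prefix (the server appends '%' for a LIKE query)
r = requests.get(f'{BASE}/dataset/{DATASET}/name', params={'q': 'smith'})
print(r.json())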