summaryrefslogtreecommitdiff
path: root/megapixels/app/models
diff options
context:
space:
mode:
Diffstat (limited to 'megapixels/app/models')
-rw-r--r--megapixels/app/models/bbox.py40
-rw-r--r--megapixels/app/models/data_store.py3
-rw-r--r--megapixels/app/models/dataset.py107
3 files changed, 112 insertions, 38 deletions
diff --git a/megapixels/app/models/bbox.py b/megapixels/app/models/bbox.py
index 40874691..608aaaf8 100644
--- a/megapixels/app/models/bbox.py
+++ b/megapixels/app/models/bbox.py
@@ -1,4 +1,5 @@
import math
+import random
from dlib import rectangle as dlib_rectangle
import numpy as np
@@ -127,9 +128,39 @@ class BBox:
d = int(math.sqrt(math.pow(dcx, 2) + math.pow(dcy, 2)))
return d
+
# -----------------------------------------------------------------
# Modify
+ def jitter(self, amt):
+ '''Jitters BBox in x,y,w,h values. Used for face feature extraction
+ :param amt: (float) percentage of BBox for maximum translation
+ :returns (BBox)
+ '''
+ w = self._width + (self._width * random.uniform(-amt, amt))
+ h = self._height + (self._height * random.uniform(-amt, amt))
+ cx = self._cx + (self._cx * random.uniform(-amt, amt))
+ cy = self._cy + (self._cy * random.uniform(-amt, amt))
+ x1, y1 = np.clip((cx - w/2, cy - h/2), 0.0, 1.0)
+ x2, y2 = np.clip((cx + w/2, cy + h/2), 0.0, 1.0)
+ return BBox(x1, y1, x2, y2)
+
  def expand(self, per):
    """Expands BBox outward by a percentage of its own width/height.
    :param per: (float) percentage to expand 0.0 - 1.0
    :returns (BBox) expanded copy, clamped to normalized [0, 1] bounds
    """
    # grow each side by per * dimension (left/top move negative, right/bottom positive)
    dw, dh = [(self._width * per), (self._height * per)]
    r = list(np.array(self._rect) + np.array([-dw, -dh, dw, dh]))
    # threshold expanded rectangle to stay inside the normalized image
    r[0] = max(r[0], 0.0)
    r[1] = max(r[1], 0.0)
    r[2] = min(r[2], 1.0)
    r[3] = min(r[3], 1.0)
    return BBox(*r)
+
def expand_dim(self, amt, bounds):
"""Expands BBox within dim
:param box: (tuple) left, top, right, bottom
@@ -170,7 +201,7 @@ class BBox:
# print(adj)
r = np.add(np.array(r), adj)
- return BBox(*r)
+    return BBox(*r) # updates all BBox values
# -----------------------------------------------------------------
@@ -221,6 +252,13 @@ class BBox:
# Create from
@classmethod
+ def from_xywh_norm(cls, x, y, w, h):
+ """Converts w, y, w, h to normalized BBox
+ :returns BBox
+ """
+ return cls(x, y, x + w, y + h)
+
+ @classmethod
def from_xyxy_dim(cls, x1, y1, x2, y2, dim):
"""Converts x1, y1, w, h to BBox and normalizes
:returns BBox
diff --git a/megapixels/app/models/data_store.py b/megapixels/app/models/data_store.py
index 626c9da4..a8d6916f 100644
--- a/megapixels/app/models/data_store.py
+++ b/megapixels/app/models/data_store.py
@@ -24,6 +24,9 @@ class DataStore:
  def metadata_dir(self):
    # root directory for metadata CSVs (single-arg join is a passthrough)
    return join(self.dir_metadata)
  def media_dir(self):
    # root directory for media files (single-arg join is a passthrough)
    return join(self.dir_media)
+
  def media_images_original(self):
    # directory holding the original (unprocessed) media images
    return join(self.dir_media, 'original')
diff --git a/megapixels/app/models/dataset.py b/megapixels/app/models/dataset.py
index eb0109a7..88986873 100644
--- a/megapixels/app/models/dataset.py
+++ b/megapixels/app/models/dataset.py
@@ -32,7 +32,7 @@ class Dataset:
self.data_store = DataStore(opt_data_store, self._dataset_type)
self.data_store_s3 = DataStoreS3(self._dataset_type)
- def load_face_vectors(self):
+ def _load_face_vectors(self):
metadata_type = types.Metadata.FACE_VECTOR
fp_csv = self.data_store.metadata(metadata_type)
self.log.info(f'loading: {fp_csv}')
@@ -44,22 +44,24 @@ class Dataset:
self.log.info(f'build face vector dict: {len(self._face_vectors)}')
# remove the face vector column, it can be several GB of memory
self._metadata[metadata_type].drop('vec', axis=1, inplace=True)
+ #n_dims = len(self._metadata[metadata_type].keys()) - 2
+ #drop_keys = [f'd{i}' for i in range(1,n_dims+1)]
+ #self._metadata[metadata_type].drop(drop_keys, axis=1, inplace=True)
else:
self.log.error(f'File not found: {fp_csv}. Exiting.')
sys.exit()
- def load_records(self):
  def _load_file_records(self):
    """Loads the file-record CSV into self._metadata, indexed by 'index'.

    Exits the process if the CSV does not exist.
    """
    metadata_type = types.Metadata.FILE_RECORD
    fp_csv = self.data_store.metadata(metadata_type)
    self.log.info(f'loading: {fp_csv}')
    if Path(fp_csv).is_file():
      # explicit dtypes skip pandas per-column type inference on the CSV
      self._metadata[metadata_type] = pd.read_csv(fp_csv, dtype=cfg.FILE_RECORD_DTYPES).set_index('index')
    else:
      self.log.error(f'File not found: {fp_csv}. Exiting.')
      sys.exit()
- def load_identities(self):
- metadata_type = types.Metadata.IDENTITY
+ def _load_metadata(self, metadata_type):
fp_csv = self.data_store.metadata(metadata_type)
self.log.info(f'loading: {fp_csv}')
if Path(fp_csv).is_file():
@@ -67,6 +69,14 @@ class Dataset:
else:
self.log.error(f'File not found: {fp_csv}. Exiting.')
sys.exit()
+
+ def load_metadata(self, metadata_type):
+ if metadata_type == types.Metadata.FILE_RECORD:
+ self._load_file_records()
+ elif metadata_type == types.Metadata.FACE_VECTOR:
+ self._load_face_vectors()
+ else:
+ self._load_metadata(metadata_type)
def metadata(self, opt_metadata_type):
return self._metadata.get(opt_metadata_type, None)
@@ -79,11 +89,11 @@ class Dataset:
# get identity meta
df_identity = self._metadata[types.Metadata.IDENTITY]
# future datasets can have multiple identities per images
- ds_identities = df_identity.iloc[identity_index]
+ #ds_identities = df_identity.iloc[identity_index]
# get filepath and S3 url
fp_im = self.data_store.face(ds_record.subdir, ds_record.fn, ds_record.ext)
s3_url = self.data_store_s3.face(ds_record.uuid)
- image_record = ImageRecord(ds_record, fp_im, s3_url, ds_identities=ds_identities)
+ image_record = ImageRecord(ds_record, fp_im, s3_url)
return image_record
def vector_to_record(self, record_index):
@@ -142,33 +152,61 @@ class Dataset:
# find most similar feature vectors indexes
#match_idxs = self.similar(query_vec, n_results, threshold)
sim_scores = np.linalg.norm(np.array([query_vec]) - np.array(self._face_vectors), axis=1)
- match_idxs = np.argpartition(sim_scores, n_results)[:n_results]
+ match_idxs = np.argpartition(sim_scores, range(n_results))[:n_results]
+
+ df_record = self._metadata[types.Metadata.FILE_RECORD]
+ df_vector = self._metadata[types.Metadata.FACE_VECTOR]
+ df_roi = self._metadata[types.Metadata.FACE_ROI]
+ if types.Metadata.IDENTITY in self._metadata.keys():
+ df_identity = self._metadata[types.Metadata.IDENTITY]
+ else:
+ df_identity = None
+
+ identities = []
for match_idx in match_idxs:
# get the corresponding face vector row
roi_index = self._face_vector_roi_idxs[match_idx]
- df_record = self._metadata[types.Metadata.FILE_RECORD]
- ds_record = df_record.iloc[roi_index]
+ ds_roi = df_roi.iloc[roi_index]
+ record_idx = int(ds_roi.record_index)
+ ds_record = df_record.iloc[record_idx]
+
self.log.debug(f'find match index: {match_idx}, --> roi_index: {roi_index}')
fp_im = self.data_store.face(ds_record.subdir, ds_record.fn, ds_record.ext)
s3_url = self.data_store_s3.face(ds_record.uuid)
- image_record = ImageRecord(ds_record, fp_im, s3_url)
- #roi_index = self._face_vector_roi_idxs[match_idx]
- #image_record = self.roi_idx_to_record(roi_index)
+ identities = []
+
+      bbox_norm = BBox.from_xywh_norm(ds_roi.x, ds_roi.y, ds_roi.w, ds_roi.h)
+
+ if types.Metadata.IDENTITY in self._metadata.keys():
+ ds_id = df_identity.loc[df_identity['identity_key'] == ds_record.identity_key].iloc[0]
+ identity = Identity(record_idx,
+ name_display=ds_id.name_display,
+ description=ds_id.description,
+ gender=ds_id.gender,
+ roi_index=roi_index,
+ identity_key=ds_id.identity_key,
+ num_images=ds_id.num_images)
+ else:
+ identity = None
+ image_record = ImageRecord(ds_record, fp_im, s3_url, bbox_norm, identity=identity)
image_records.append(image_record)
return image_records
# ----------------------------------------------------------------------
# utilities
- def df_vecs_to_dict(self, df):
+ def df_vecs_to_dict(self, df_vec):
# convert the DataFrame CSV to float list of vecs
- return [list(map(float,x.vec.split(','))) for x in df.itertuples()]
+ # n_dims = len(df_vec.keys()) - 2 # number of columns with 'd1, d2,...d256'
+ #return [[df[f'd{i}'] for i in range(1,n_dims+1)] for df_idx, df in df_vec.iterrows()]
+ # return [[df[f'd{i}'] for i in range(1,n_dims+1)] for df_idx, df in df_vec.iterrows()]
+ return [list(map(float, x.vec.split(','))) for x in df_vec.itertuples()]
def df_vec_roi_idxs_to_dict(self, df):
# convert the DataFrame CSV to float list of vecs
#return [x.roi_index for x in df.itertuples()]
- return [x.roi_index for x in df.itertuples()]
+ return [int(x.roi_index) for i,x in df.iterrows()]
def similar(self, query_vec, n_results):
'''Finds most similar N indices of query face vector
@@ -184,23 +222,20 @@ class Dataset:
class ImageRecord:
- def __init__(self, ds_record, fp, url, ds_rois=None, ds_identities=None):
+ def __init__(self, ds_record, fp, url, bbox_norm, identity=None):
# maybe more other meta will go there
self.image_index = ds_record.index
self.sha256 = ds_record.sha256
self.uuid = ds_record.uuid
self.filepath = fp
+ self.width = ds_record.width
+ self.height = ds_record.height
self.url = url
- self._identities = []
+ self.bbox = bbox_norm
+ self.identity = identity
# image records contain ROIs
# ROIs are linked to identities
- #self._identities = [Identity(x) for x in ds_identities]
-
- @property
- def identity(self, index):
- return self._identity
-
def summarize(self):
'''Summarizes data for debugging'''
log = Logger.getLogger()
@@ -208,22 +243,20 @@ class ImageRecord:
log.info(f'sha256: {self.sha256}')
log.info(f'UUID: {self.uuid}')
log.info(f'S3 url: {self.url}')
- for identity in self._identities:
- log.info(f'fullname: {identity.fullname}')
- log.info(f'description: {identity.description}')
- log.info(f'gender: {identity.gender}')
- log.info(f'images: {identity.n_images}')
+ if self.identity:
+ log.info(f'name: {self.identity.name_display}')
+ log.info(f'description: {self.identity.description}')
+ log.info(f'gender: {self.identity.gender}')
+ log.info(f'images: {self.identity.num_images}')
class Identity:
  """Identity metadata for a detected face (display name, gender, image count)."""

  def __init__(self, idx, identity_key=None, name_display=None, num_images=None,
               description=None, gender=None, roi_index=None):
    """
    :param idx: (int) record index this identity was matched from
    :param identity_key: (str) key linking the identity to file records
    :param name_display: (str) human-readable display name
    :param num_images: (int) number of images of this identity in the dataset
    :param description: (str) free-text description
    :param gender: (str) gender label
    :param roi_index: (int) face ROI index this identity corresponds to
    """
    self.index = idx
    # fix: identity_key was accepted but silently discarded, even though
    # callers pass identity_key=ds_id.identity_key
    self.identity_key = identity_key
    self.name_display = name_display
    self.description = description
    self.gender = gender
    self.roi_index = roi_index
    self.num_images = num_images