Diffstat (limited to 'megapixels/app/settings')
-rw-r--r--  megapixels/app/settings/app_cfg.py  |   7
-rw-r--r--  megapixels/app/settings/paths.py    | 163
-rw-r--r--  megapixels/app/settings/types.py    |  12
3 files changed, 181 insertions(+), 1 deletion(-)
diff --git a/megapixels/app/settings/app_cfg.py b/megapixels/app/settings/app_cfg.py
index 4c540231..7406caad 100644
--- a/megapixels/app/settings/app_cfg.py
+++ b/megapixels/app/settings/app_cfg.py
@@ -8,6 +8,10 @@ import cv2 as cv
from app.settings import types
from app.utils import click_utils
+# -----------------------------------------------------------------------------
+# Metadata type names
+# -----------------------------------------------------------------------------
+
# -----------------------------------------------------------------------------
# Enum lists used for custom Click Params
@@ -16,6 +20,8 @@ from app.utils import click_utils
FaceDetectNetVar = click_utils.ParamVar(types.FaceDetectNet)
HaarCascadeVar = click_utils.ParamVar(types.HaarCascade)
LogLevelVar = click_utils.ParamVar(types.LogLevel)
+MetadataVar = click_utils.ParamVar(types.Metadata)
+DatasetVar = click_utils.ParamVar(types.Dataset)
# # data_store
DATA_STORE = '/data_store_hdd/'
@@ -23,6 +29,7 @@ DATA_STORE_NAS = '/data_store_nas/'
DATA_STORE_HDD = '/data_store_hdd/'
DATA_STORE_SSD = '/data_store_ssd/'
DIR_DATASETS = join(DATA_STORE,'datasets')
+DIR_DATSET_NAS = join(DIR_DATASETS, 'people')
DIR_APPS = join(DATA_STORE,'apps')
DIR_APP = join(DIR_APPS,'megapixels')
DIR_MODELS = join(DIR_APP,'models')
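
The two new ParamVar objects expose the Metadata and Dataset enums as custom Click parameter types. A minimal usage sketch, assuming click_utils.ParamVar behaves like the existing ParamVar entries (accepts an enum member name on the command line and returns the matching member); the command name and option flags below are illustrative, not part of this commit:

# Hypothetical sketch, not part of this commit: wiring the new enum params
# into a Click command. Assumes app_cfg.MetadataVar / DatasetVar convert a
# CLI string such as "identities" or "lfw" into the corresponding enum member.
import click

from app.settings import app_cfg as cfg
from app.settings import types

@click.command()
@click.option('-m', '--metadata', 'opt_metadata', type=cfg.MetadataVar,
              help='Metadata type (e.g. identities, poses, rois)')
@click.option('-d', '--dataset', 'opt_dataset', type=cfg.DatasetVar,
              help='Dataset (e.g. lfw, vgg_face2)')
def cli(opt_metadata, opt_dataset):
  """Illustrative command using the new Click enum params"""
  print(opt_metadata, opt_dataset)
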
diff --git a/megapixels/app/settings/paths.py b/megapixels/app/settings/paths.py
new file mode 100644
index 00000000..bc1333ba
--- /dev/null
+++ b/megapixels/app/settings/paths.py
@@ -0,0 +1,163 @@
+import os
+from os.path import join
+import logging
+
+from vframe.settings import vframe_cfg as vcfg
+from vframe.settings import types
+
+class Paths:
+
+  # class properties
+  MAPPINGS_DATE = vcfg.SUGARCUBE_DATES[0]
+  DIR_APP_VFRAME = 'apps/vframe/'
+  DIR_APP_SA = 'apps/syrianarchive'
+  DIR_MODELS_VFRAME = join(DIR_APP_VFRAME, 'models')
+  DIR_DARKNET = join(DIR_MODELS_VFRAME, 'darknet/pjreddie')
+  DIR_DARKNET_VFRAME = join(DIR_MODELS_VFRAME, 'darknet/vframe')
+  DIR_MEDIA = join(DIR_APP_SA, 'media')
+  DIR_METADATA = join(DIR_APP_SA, 'metadata')
+  DIR_RECORDS = join(DIR_APP_SA, 'records')
+  DIR_REPORTS = join(DIR_APP_SA, 'reports')
+
+
+  def __init__(self):
+    pass
+
+  @classmethod
+  def DataStorePath(cls, data_store=types.DataStore.HDD):
+    return '/data_store_{}'.format(data_store.name.lower())
+
+  # -------------------------------------------------------------------------------
+  # Darknet Paths
+
+  @classmethod
+  def darknet_classes(cls, data_store=types.DataStore.HDD, opt_net=types.DetectorNet.COCO):
+    if opt_net == types.DetectorNet.COCO:
+      fp = join(cls.DIR_DARKNET, 'data', 'coco.names')
+    elif opt_net == types.DetectorNet.COCO_SPP:
+      fp = join(cls.DIR_DARKNET, 'data', 'coco.names')
+    elif opt_net == types.DetectorNet.VOC:
+      fp = join(cls.DIR_DARKNET, 'data', 'voc.names')
+    elif opt_net == types.DetectorNet.OPENIMAGES:
+      fp = join(cls.DIR_DARKNET, 'data', 'openimages.names')
+    elif opt_net == types.DetectorNet.SUBMUNITION:
+      fp = join(cls.DIR_DARKNET_VFRAME, 'munitions_09b', 'classes.txt')
+    return join(cls.DataStorePath(data_store), fp)
+
+  @classmethod
+  def darknet_data(cls, data_store=types.DataStore.HDD, opt_net=types.DetectorNet.COCO, as_bytes=True):
+    if opt_net == types.DetectorNet.COCO:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'coco.data')
+    elif opt_net == types.DetectorNet.COCO_SPP:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'coco.data')
+    elif opt_net == types.DetectorNet.VOC:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'voc.data')
+    elif opt_net == types.DetectorNet.OPENIMAGES:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'openimages.data')
+    elif opt_net == types.DetectorNet.SUBMUNITION:
+      fp = join(cls.DIR_DARKNET_VFRAME, 'munitions_09b', 'meta.data')
+    fp = join(cls.DataStorePath(data_store), fp)
+    if as_bytes:
+      return bytes(fp, encoding="utf-8")
+    else:
+      return fp
+
+
+  @classmethod
+  def darknet_cfg(cls, data_store=types.DataStore.HDD, opt_net=types.DetectorNet.COCO, as_bytes=True):
+    if opt_net == types.DetectorNet.COCO:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'yolov3.cfg')
+    elif opt_net == types.DetectorNet.COCO_SPP:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'yolov3-spp.cfg')
+    elif opt_net == types.DetectorNet.VOC:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'yolov3-voc.cfg')
+    elif opt_net == types.DetectorNet.OPENIMAGES:
+      fp = join(cls.DIR_DARKNET, 'cfg', 'yolov3-openimages.cfg')
+    elif opt_net == types.DetectorNet.SUBMUNITION:
+      fp = join(cls.DIR_DARKNET_VFRAME, 'munitions_09b', 'yolov3.cfg')
+    fp = join(cls.DataStorePath(data_store), fp)
+    if as_bytes:
+      return bytes(fp, encoding="utf-8")
+    else:
+      return fp
+
+  @classmethod
+  def darknet_weights(cls, data_store=types.DataStore.HDD, opt_net=types.DetectorNet.COCO, as_bytes=True):
+    if opt_net == types.DetectorNet.COCO:
+      fp = join(cls.DIR_DARKNET, 'weights', 'yolov3.weights')
+    elif opt_net == types.DetectorNet.COCO_SPP:
+      fp = join(cls.DIR_DARKNET, 'weights', 'yolov3-spp.weights')
+    elif opt_net == types.DetectorNet.VOC:
+      fp = join(cls.DIR_DARKNET, 'weights', 'yolov3-voc.weights')
+    elif opt_net == types.DetectorNet.OPENIMAGES:
+      fp = join(cls.DIR_DARKNET, 'weights', 'yolov3-openimages.weights')
+    elif opt_net == types.DetectorNet.SUBMUNITION:
+      fp = join(cls.DIR_DARKNET_VFRAME, 'munitions_09b/weights', 'yolov3_40000.weights')
+    fp = join(cls.DataStorePath(data_store), fp)
+    if as_bytes:
+      return bytes(fp, encoding="utf-8")
+    else:
+      return fp
+
+  # -------------------------------------------------------------------------------
+  # Metadata Paths
+
+  @classmethod
+  def mapping_index(cls, opt_date, data_store=types.DataStore.HDD, verified=types.Verified.VERIFIED,
+      file_format=types.FileExt.PKL):
+    """Returns the filepath to a mapping file. Mapping files contain the original Sugarcube mapping data"""
+    fname = 'index.pkl' if file_format == types.FileExt.PKL else 'index.json'
+    # data_store = 'data_store_{}'.format(data_store.name.lower())
+    date_str = opt_date.name.lower()
+    fp = join(cls.DataStorePath(data_store), cls.DIR_METADATA, 'mapping', date_str, verified.name.lower(), fname)
+    return fp
+
+  @classmethod
+  def media_record_index(cls, data_store=types.DataStore.HDD, verified=types.Verified.VERIFIED,
+      file_format=types.FileExt.PKL):
+    """Returns the filepath to a media record index built from the original Sugarcube mapping data"""
+    fname = 'index.pkl' if file_format == types.FileExt.PKL else 'index.json'
+    metadata_type = types.Metadata.MEDIA_RECORD.name.lower()
+    fp = join(cls.DataStorePath(data_store), cls.DIR_METADATA, metadata_type, verified.name.lower(), fname)
+    return fp
+
+  @classmethod
+  def metadata_index(cls, metadata_type, data_store=types.DataStore.HDD,
+      verified=types.Verified.VERIFIED, file_format=types.FileExt.PKL):
+    """Uses the enum key to get the folder name and construct the filepath"""
+    fname = 'index.pkl' if file_format == types.FileExt.PKL else 'index.json'
+    fp = join(cls.DataStorePath(data_store), cls.DIR_METADATA, metadata_type.name.lower(),
+      verified.name.lower(), fname)
+    return fp
+
+  @classmethod
+  def metadata_dir(cls, metadata_type, data_store=types.DataStore.HDD, verified=types.Verified.VERIFIED):
+    """Uses the enum key to get the folder name and construct the filepath"""
+    fp = join(cls.DataStorePath(data_store), cls.DIR_METADATA, metadata_type.name.lower(),
+      verified.name.lower())
+    return fp
+
+  @classmethod
+  def metadata_tree_dir(cls, metadata_type, data_store=types.DataStore.HDD):
+    """Uses the enum key to get the folder name and construct the filepath"""
+    fp = join(cls.DataStorePath(data_store), cls.DIR_METADATA, metadata_type.name.lower())
+    return fp
+
+  @classmethod
+  def media_dir(cls, media_type, data_store=types.DataStore.HDD, verified=types.Verified.VERIFIED):
+    """Returns the directory path to a media directory"""
+    fp = join(cls.DataStorePath(data_store), cls.DIR_MEDIA, media_type.name.lower(), verified.name.lower())
+    return fp
+
+  # @classmethod
+  # def keyframe(cls, dir_media, idx, image_size=types.ImageSize.MEDIUM):
+  #   """Returns path to keyframe image using supplied cls.media directory"""
+  #   idx = str(idx).zfill(vcfg.ZERO_PADDING)
+  #   size_label = vcfg.IMAGE_SIZE_LABELS[image_size]
+  #   fp = join(dir_media, sha256_tree, sha256, idx, size_label, 'index.jpg')
+  #   return fp
+
+  @classmethod
+  def dnn(cls):
+    """Returns configurations for available DNNs"""
+    pass
\ No newline at end of file
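
For reference, a rough usage sketch of the new Darknet path helpers. It assumes the DetectorNet and DataStore enums in vframe.settings.types and loads the resolved files with OpenCV's dnn module (cv2 is already imported in app_cfg.py); the option values shown are illustrative, not part of the commit:

# Hypothetical sketch, not part of this commit: resolving Darknet files for the
# submunition detector and loading them with OpenCV's readNetFromDarknet().
import cv2 as cv

from app.settings.paths import Paths
from vframe.settings import types

fp_cfg = Paths.darknet_cfg(data_store=types.DataStore.HDD,
                           opt_net=types.DetectorNet.SUBMUNITION, as_bytes=False)
fp_weights = Paths.darknet_weights(data_store=types.DataStore.HDD,
                                   opt_net=types.DetectorNet.SUBMUNITION, as_bytes=False)
fp_classes = Paths.darknet_classes(data_store=types.DataStore.HDD,
                                   opt_net=types.DetectorNet.SUBMUNITION)

# OpenCV expects plain strings, hence as_bytes=False; the byte-string variants
# are presumably for the original Darknet Python bindings.
net = cv.dnn.readNetFromDarknet(fp_cfg, fp_weights)
with open(fp_classes) as f:
  class_names = [line.strip() for line in f if line.strip()]
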
diff --git a/megapixels/app/settings/types.py b/megapixels/app/settings/types.py
index e9107803..7157436d 100644
--- a/megapixels/app/settings/types.py
+++ b/megapixels/app/settings/types.py
@@ -7,7 +7,6 @@ def find_type(name, enum_type):
  return None
-
class FaceDetectNet(Enum):
"""Scene text detector networks"""
HAAR, DLIB_CNN, DLIB_HOG, CVDNN, MTCNN = range(5)
@@ -31,3 +30,14 @@ class HaarCascade(Enum):
class LogLevel(Enum):
"""Loger vebosity"""
DEBUG, INFO, WARN, ERROR, CRITICAL = range(5)
+
+
+# ---------------------------------------------------------------------
+# Metadata types
+# --------------------------------------------------------------------
+
+class Metadata(Enum):
+  IDENTITIES, POSES, ROIS, FILE_META, SHAS, UUIDS, FACE_VECTORS = range(7)
+
+class Dataset(Enum):
+  LFW, VGG_FACE2 = range(2)
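
The new enums plug into find_type() at the top of types.py and into the lowercased folder-name lookups in paths.py. A small sketch, assuming find_type() matches member names case-insensitively (its body is outside this hunk) and that paths.py resolves the same Metadata enum as app.settings.types:

# Hypothetical sketch, not part of this commit: resolving CLI strings to the
# new enum members and using one to build a metadata index path.
from app.settings import types
from app.settings.paths import Paths

metadata_type = types.find_type('identities', types.Metadata)  # -> Metadata.IDENTITIES (assumed)
dataset = types.find_type('lfw', types.Dataset)                # -> Dataset.LFW (assumed)

# Enum member names are lowercased to form directory names, e.g.
# /data_store_hdd/apps/syrianarchive/metadata/identities/verified/index.pkl
fp_index = Paths.metadata_index(types.Metadata.IDENTITIES)
print(fp_index)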