Diffstat (limited to 'megapixels/app/utils')
-rw-r--r--  megapixels/app/utils/__init__.py       0
-rw-r--r--  megapixels/app/utils/click_utils.py   62
-rw-r--r--  megapixels/app/utils/file_utils.py   400
-rw-r--r--  megapixels/app/utils/im_utils.py     506
-rw-r--r--  megapixels/app/utils/logger_utils.py  68
5 files changed, 1036 insertions, 0 deletions
diff --git a/megapixels/app/utils/__init__.py b/megapixels/app/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/megapixels/app/utils/__init__.py
diff --git a/megapixels/app/utils/click_utils.py b/megapixels/app/utils/click_utils.py
new file mode 100644
index 00000000..dc00f58c
--- /dev/null
+++ b/megapixels/app/utils/click_utils.py
@@ -0,0 +1,62 @@
+"""
+Custom Click parameter types
+"""
+import click
+
+from app.settings import app_cfg as cfg
+from app.settings import types
+
+
+# --------------------------------------------------------
+# Click command helpers
+# --------------------------------------------------------
+def enum_to_names(enum_type):
+ return {x.name.lower(): x for x in enum_type}
+
+def show_help(enum_type):
+ names = enum_to_names(enum_type)
+  return 'Options: "{}"'.format(', '.join(names.keys()))
+
+def get_default(opt):
+ return opt.name.lower()
+
+
+# --------------------------------------------------------
+# Custom Click parameter class
+# --------------------------------------------------------
+
+
+class ParamVar(click.ParamType):
+
+ name = 'default_type'
+
+ def __init__(self, param_type):
+ self.ops = {x.name.lower(): x for x in param_type}
+
+  def convert(self, value, param, ctx):
+    """Converts a (str) repr to its Enum member"""
+    try:
+      return self.ops[value.lower()]
+    except KeyError:
+      self.fail('{} is not a valid option'.format(value), param, ctx)
+
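+# A usage sketch (hedged): wiring ParamVar into a Click option. `types.LogLevel`
+# and the option names are illustrative stand-ins for an Enum defined in
+# app.settings.types, not part of this module.
+#
+#   @click.command()
+#   @click.option('--level', 'opt_level', type=ParamVar(types.LogLevel),
+#     default=get_default(types.LogLevel.INFO), help=show_help(types.LogLevel))
+#   def cli(opt_level):
+#     print(opt_level)  # the Enum member itself, not the string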
diff --git a/megapixels/app/utils/file_utils.py b/megapixels/app/utils/file_utils.py
new file mode 100644
index 00000000..773667b1
--- /dev/null
+++ b/megapixels/app/utils/file_utils.py
@@ -0,0 +1,400 @@
+"""
+File utilities
+"""
+import sys
+import os
+from os.path import join
+import stat
+
+from glob import glob
+from pprint import pprint
+import shutil
+import distutils
+import pathlib
+from pathlib import Path
+import json
+import csv
+import pickle
+import threading
+from queue import Queue
+import time
+import logging
+import itertools
+import collections
+
+import hashlib
+import pymediainfo
+import click
+from tqdm import tqdm
+import cv2 as cv
+from PIL import Image
+import imutils
+
+from app.settings import app_cfg as cfg
+from app.settings import types
+
+log = logging.getLogger(cfg.LOGGER_NAME)
+
+
+# ------------------------------------------
+# File I/O read/write little helpers
+# ------------------------------------------
+
+def glob_multi(dir_in, exts):
+  """Globs a directory for multiple file extensions
+  :param dir_in: (str) directory to search
+  :param exts: (list) of extensions without dot (e.g. ['jpg', 'png'])
+  :returns: (list) of filepaths
+  """
+  files = []
+  for e in exts:
+    files.extend(glob(join(dir_in, '*.{}'.format(e))))
+  return files
+
+
+def zpad(x, zeros=cfg.ZERO_PADDING):
+ return str(x).zfill(zeros)
+
+def get_ext(fpp, lower=True):
+ """Retuns the file extension w/o dot
+ :param fpp: (Pathlib.path) filepath
+ :param lower: (bool) force lowercase
+ :returns: (str) file extension (ie 'jpg')
+ """
+ fpp = ensure_posixpath(fpp)
+ ext = fpp.suffix.replace('.', '')
+ return ext.lower() if lower else ext
+
+
+def convert(fp_in, fp_out):
+ """Converts between JSON and Pickle formats
+ Pickle files are about 30-40% smaller filesize
+ """
+  if get_ext(fp_in) == get_ext(fp_out):
+    log.error('Input {} and output {} have the same format. Nothing to convert.'.format(fp_in, fp_out))
+    return
+
+  lazywrite(lazyload(fp_in), fp_out)
+
+
+def load_csv(fp_in, as_list=True):
+  """Loads a CSV and returns its rows
+  :param fp_in: string filepath to CSV
+  :param as_list: (bool) read all rows into a list
+  :returns: CSV rows as a list (or csv.DictReader if as_list=False)
+  """
+  if not Path(fp_in).exists():
+    log.error('file does not exist: {}'.format(fp_in))
+    return []
+  log.info('loading: {}'.format(fp_in))
+  with open(fp_in, 'r') as fp:
+    items = csv.DictReader(fp)
+    if as_list:
+      items = [x for x in items]
+      log.info('returning {:,} items'.format(len(items)))
+    return items
+
+
+def lazywrite(data, fp_out, sort_keys=True):
+ """Writes JSON or Pickle data"""
+ ext = get_ext(fp_out)
+ if ext == 'json':
+ return write_json(data, fp_out, sort_keys=sort_keys)
+ elif ext == 'pkl':
+ return write_pickle(data, fp_out)
+ else:
+ raise NotImplementedError('[!] {} is not yet supported. Use .pkl or .json'.format(ext))
+
+
+def lazyload(fp_in, ordered=True):
+ """Loads JSON or Pickle serialized data"""
+ if not Path(fp_in).exists():
+ log.error('file does not exist: {}'.format(fp_in))
+ return {}
+ ext = get_ext(fp_in)
+ if ext == 'json':
+ items = load_json(fp_in)
+ elif ext == 'pkl':
+ items = load_pickle(fp_in)
+ else:
+ raise NotImplementedError('[!] {} is not yet supported. Use .pkl or .json'.format(ext))
+
+ if ordered:
+ return collections.OrderedDict(sorted(items.items(), key=lambda t: t[0]))
+ else:
+ return items
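+
+# Usage sketch: round-tripping a dict through lazywrite/lazyload; the
+# filepath is illustrative and the extension selects the serializer.
+#
+#   records = {'9f86d081': {'width': 640, 'height': 480}}
+#   lazywrite(records, '/tmp/records.pkl')
+#   records2 = lazyload('/tmp/records.pkl')  # OrderedDict sorted by key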
+
+
+def load_text(fp_in):
+ with open(fp_in, 'rt') as fp:
+ lines = fp.read().rstrip('\n').split('\n')
+ return lines
+
+def load_json(fp_in):
+ """Loads JSON and returns items
+ :param fp_in: (str) filepath
+ :returns: data from JSON
+ """
+ if not Path(fp_in).exists():
+ log.error('file does not exist: {}'.format(fp_in))
+ return {}
+ with open(str(fp_in), 'r') as fp:
+ data = json.load(fp)
+ return data
+
+
+def load_pickle(fp_in):
+ """Loads Pickle and returns items
+ :param fp_in: (str) filepath
+  :returns: data from Pickle
+ """
+ if not Path(fp_in).exists():
+ log.error('file does not exist: {}'.format(fp_in))
+ return {}
+ with open(str(fp_in), 'rb') as fp:
+ data = pickle.load(fp)
+ return data
+
+
+def order_items(records):
+ """Orders records by ASC SHA256"""
+ return collections.OrderedDict(sorted(records.items(), key=lambda t: t[0]))
+
+def write_text(data, fp_out, ensure_path=True):
+ if not data:
+ log.error('no data')
+ return
+
+ if ensure_path:
+ mkdirs(fp_out)
+ with open(fp_out, 'w') as fp:
+ if type(data) == list:
+ fp.write('\n'.join(data))
+ else:
+ fp.write(data)
+
+
+def write_pickle(data, fp_out, ensure_path=True):
+ """
+ """
+ if ensure_path:
+ mkdirs(fp_out) # mkdir
+ with open(fp_out, 'wb') as fp:
+ pickle.dump(data, fp)
+
+
+def write_json(data, fp_out, minify=True, ensure_path=True, sort_keys=True):
+ """
+ """
+ if ensure_path:
+ mkdirs(fp_out)
+ with open(fp_out, 'w') as fp:
+ if minify:
+ json.dump(data, fp, separators=(',',':'), sort_keys=sort_keys)
+ else:
+ json.dump(data, fp, indent=2, sort_keys=sort_keys)
+
+def write_csv(data, fp_out, header=None):
+  """Writes data to a CSV file
+  :param data: (dict) of key/value rows
+  :param header: (list) of column fieldnames
+  """
+  with open(fp_out, 'w') as fp:
+    writer = csv.writer(fp)
+    if header:
+      writer.writerow(header)
+    if type(data) is dict:
+      for k, v in data.items():
+        writer.writerow([k, v])
+
+
+def write_serialized_items(items, fp_out, ensure_path=True, minify=True, sort_keys=True):
+ """Writes serialized data
+ :param items: (dict) a sha256 dict of MappingItems
+ :param serialize: (bool) serialize the data
+ :param ensure_path: ensure the parent directories exist
+ :param minify: reduces JSON file size
+ """
+ log.info('Writing serialized data...')
+ fpp_out = ensure_posixpath(fp_out)
+ serialized_items = {k: v.serialize() for k, v in tqdm(items.items()) }
+ # write data
+ ext = get_ext(fpp_out)
+ if ext == 'json':
+ write_json(serialized_items, fp_out, ensure_path=ensure_path, minify=minify, sort_keys=sort_keys)
+ elif ext == 'pkl':
+ write_pickle(serialized_items, fp_out)
+ else:
+ raise NotImplementedError('[!] {} is not yet supported. Use .pkl or .json'.format(ext))
+ log.info('Wrote {:,} items to {}'.format(len(items), fp_out))
+
+
+def write_modeled_data(data, fp_out, ensure_path=False):
+  """Writes modeled data to a Pickle file"""
+ fpp_out = ensure_posixpath(fp_out)
+ if ensure_path:
+ mkdirs(fpp_out)
+ ext = get_ext(fpp_out)
+ if ext == 'pkl':
+ write_pickle(data, str(fp_out))
+ else:
+    raise NotImplementedError('[!] {} is not yet supported. Use .pkl'.format(ext))
+
+
+# ---------------------------------------------------------------------
+# Filepath utilities
+# ---------------------------------------------------------------------
+
+def ensure_posixpath(fp):
+ """Ensures filepath is pathlib.Path
+ :param fp: a (str, LazyFile, PosixPath)
+ :returns: a PosixPath filepath object
+ """
+ if type(fp) == str:
+ fpp = Path(fp)
+ elif type(fp) == click.utils.LazyFile:
+ fpp = Path(fp.name)
+ elif type(fp) == pathlib.PosixPath:
+ fpp = fp
+ else:
+ raise TypeError('{} is not a valid filepath type'.format(type(fp)))
+ return fpp
+
+
+def mkdirs(fp):
+ """Ensure parent directories exist for a filepath
+ :param fp: string, Path, or click.File
+ """
+ fpp = ensure_posixpath(fp)
+ fpp = fpp.parent if fpp.suffix else fpp
+ fpp.mkdir(parents=True, exist_ok=True)
+
+
+def ext_media_format(ext):
+  """Converts a file extension into an Enum MediaType
+  :param ext: (str) file extension
+  """
+ for media_format, exts in cfg.VALID_MEDIA_EXTS.items():
+ if ext in exts:
+ return media_format
+ raise ValueError('{} is not a valid option'.format(ext))
+
+
+def sha256(fp_in, block_size=65536):
+  """Generates SHA256 hash for a file
+  :param fp_in: (str) filepath
+  :param block_size: (int) byte size of block
+  :returns: (str) hash
+  """
+  hasher = hashlib.sha256()
+  with open(fp_in, 'rb') as fp:
+    for block in iter(lambda: fp.read(block_size), b''):
+      hasher.update(block)
+  return hasher.hexdigest()
+
+
+def sha256_tree(sha256):
+ """Split hash into branches with tree-depth for faster file indexing
+ :param sha256: str of a sha256 hash
+  :returns: str with sha256 tree with '/' delimiter
+ """
+ branch_size = cfg.HASH_BRANCH_SIZE
+ tree_size = cfg.HASH_TREE_DEPTH * branch_size
+ sha256_tree = [sha256[i:(i+branch_size)] for i in range(0, tree_size, branch_size)]
+ return '/'.join(sha256_tree)
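+
+# Example (assuming cfg.HASH_BRANCH_SIZE = 3 and cfg.HASH_TREE_DEPTH = 3):
+#   sha256_tree('9f86d081884c7d65...')  ->  '9f8/6d0/818'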
+
+
+def migrate(fmaps, threads=1, action='copy', force=False):
+ """Copy/move/symlink files form src to dst directory
+ :param fmaps: (dict) with 'src' and 'dst' filepaths
+ :param threads: (int) number of threads
+ :param action: (str) copy/move/symlink
+ :param force: (bool) force overwrite existing files
+ """
+ num_items = len(fmaps)
+
+ def copytree(src, dst, symlinks = False, ignore = None):
+ # ozxyqk: https://stackoverflow.com/questions/22588225/how-do-you-merge-two-directories-or-move-with-replace-from-the-windows-command
+ if not os.path.exists(dst):
+ mkdirs(dst)
+ # os.makedirs(dst)
+ shutil.copystat(src, dst)
+ lst = os.listdir(src)
+ if ignore:
+ excl = ignore(src, lst)
+ lst = [x for x in lst if x not in excl]
+ for item in lst:
+ s = os.path.join(src, item)
+ d = os.path.join(dst, item)
+ if symlinks and os.path.islink(s):
+ if os.path.exists(d):
+ os.remove(d)
+ os.symlink(os.readlink(s), d)
+ try:
+ st = os.lstat(s)
+ mode = stat.S_IMODE(st.st_mode)
+ os.lchmod(d, mode)
+ except:
+ pass # lchmod not available
+ elif os.path.isdir(s):
+ copytree(s, d, symlinks, ignore)
+ else:
+ shutil.copy(s, d)
+
+ assert(action in ['copy','move','symlink'])
+
+ if threads > 1:
+ # threaded
+ task_queue = Queue()
+ print_lock = threading.Lock()
+
+ def migrate_action(fmap):
+ data_local = threading.local()
+ data_local.src, data_local.dst = (fmap['src'], fmap['dst'])
+ data_local.src_path = Path(data_local.src)
+ data_local.dst_path = Path(data_local.dst)
+
+ if force or not data_local.dst_path.exists():
+ if action == 'copy':
+ shutil.copy(data_local.src, data_local.dst)
+ elif action == 'move':
+ shutil.move(data_local.src, data_local.dst)
+        elif action == 'symlink':
+          if force and data_local.dst_path.exists():
+            data_local.dst_path.unlink()
+          data_local.dst_path.symlink_to(data_local.src)
+
+ def process_queue(num_items):
+ # TODO: progress bar
+ while True:
+ fmap = task_queue.get()
+ migrate_action(fmap)
+ log.info('migrate: {:.2f} {:,}/{:,}'.format(
+ (task_queue.qsize() / num_items)*100, task_queue.qsize(), num_items))
+ task_queue.task_done()
+
+ # avoid race conditions by creating dir structure here
+ log.info('create directory structure')
+ for fmap in tqdm(fmaps):
+ mkdirs(fmap['dst'])
+
+ # init threads
+ for i in range(threads):
+ t = threading.Thread(target=process_queue, args=(num_items,))
+ t.daemon = True
+ t.start()
+
+ # process threads
+ start = time.time()
+ for fmap in fmaps:
+ task_queue.put(fmap)
+
+ task_queue.join()
+
+ else:
+ # non-threaded
+ for fmap in tqdm(fmaps):
+ mkdirs(fmap['dst'])
+ if action == 'copy':
+ shutil.copy(fmap['src'], fmap['dst'])
+ elif action == 'move':
+ shutil.move(fmap['src'], fmap['dst'])
+      elif action == 'symlink':
+        if force and Path(fmap['dst']).exists():
+          Path(fmap['dst']).unlink()
+        Path(fmap['dst']).symlink_to(fmap['src'])
+ return
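+
+# Usage sketch: fmaps is a list of {'src': ..., 'dst': ...} filepath dicts;
+# the paths below are illustrative only.
+#
+#   fmaps = [{'src': '/data/raw/a.jpg', 'dst': '/data/backup/a.jpg'}]
+#   migrate(fmaps, threads=4, action='copy')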
+
diff --git a/megapixels/app/utils/im_utils.py b/megapixels/app/utils/im_utils.py
new file mode 100644
index 00000000..a0f23cd2
--- /dev/null
+++ b/megapixels/app/utils/im_utils.py
@@ -0,0 +1,506 @@
+import sys
+import os
+from os.path import join
+import cv2 as cv
+import imagehash
+from PIL import Image, ImageDraw, ImageFilter, ImageOps
+from skimage.filters.rank import entropy
+from skimage.morphology import disk
+from skimage import feature
+# import matplotlib.pyplot as plt
+import imutils
+import time
+import numpy as np
+import torch
+import torch.nn as nn
+import torchvision.models as models
+import torchvision.transforms as transforms
+from torch.autograd import Variable
+from sklearn.metrics.pairwise import cosine_similarity
+import datetime
+
+
+def compute_features(fe, frames, phashes, phash_thresh=1):
+  """
+  Get vector embeddings using a FeatureExtractor
+  :param fe: FeatureExtractor class
+  :param frames: list of frame images as numpy.ndarray
+  :param phashes: list of perceptual hashes, one per frame
+  :param phash_thresh: perceptual hash distance threshold
+  :returns: list of feature vectors
+  """
+  vals = []
+  for i, im in enumerate(frames):
+    if i == 0 or (phashes[i] - phashes[i-1]) > phash_thresh:
+      vals.append(fe.extract(im))
+    else:
+      vals.append(vals[i-1])
+  return vals
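+
+# Usage sketch: `fe` stands for any object exposing an .extract(im) -> vector
+# method (e.g. a CNN feature extractor defined elsewhere); it is not part of
+# this module.
+#
+#   phashes = [compute_phash(f) for f in frames]
+#   feats = compute_features(fe, frames, phashes, phash_thresh=1)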
+
+
+def ensure_pil(im, bgr2rgb=False):
+  """Ensure image is Pillow format
+  :param im: image in numpy or PIL.Image format
+  :returns: image in Pillow RGB format
+  """
+  if isinstance(im, Image.Image):
+    return im
+  if bgr2rgb:
+    im = cv.cvtColor(im, cv.COLOR_BGR2RGB)
+  return Image.fromarray(im.astype('uint8'), 'RGB')
+
+def ensure_np(im):
+ """Ensure image is Numpy.ndarry format
+ :param im: image in numpy or PIL.Image format
+ :returns: image in Numpy uint8 format
+ """
+ if type(im) == np.ndarray:
+ return im
+ return np.asarray(im, np.uint8)
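+
+# Example: round-tripping between OpenCV (BGR ndarray) and Pillow; the
+# filepath is illustrative.
+#
+#   im_bgr = cv.imread('frame.jpg')
+#   im_pil = ensure_pil(im_bgr, bgr2rgb=True)  # PIL RGB image
+#   im_np = ensure_np(im_pil)                  # back to uint8 ndarray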
+
+
+def resize(im, width=0, height=0):
+  """Resize image using imutils; pass 0 (or None) for an edge to derive it from the other
+  :param im: a Numpy.ndarray image
+  :param width: (int) target width, 0 to infer from height
+  :param height: (int) target height, 0 to infer from width
+  """
+  w = width or 0
+  h = height or 0
+  if w == 0 and h == 0:
+    return im
+  elif w > 0 and h > 0:
+    return imutils.resize(im, width=w, height=h)
+  elif w > 0:
+    return imutils.resize(im, width=w)
+  elif h > 0:
+    return imutils.resize(im, height=h)
+  return im
+
+def filter_pixellate(im,num_cells):
+ """Pixellate image by downsample then upsample
+ :param im: PIL.Image
+ :returns: PIL.Image
+ """
+ w,h = im.size
+ im = im.resize((num_cells,num_cells), Image.NEAREST)
+ im = im.resize((w,h), Image.NEAREST)
+ return im
+
+# Plot images inline using Matplotlib
+# def pltimg(im,title=None,mode='rgb',figsize=(8,12),dpi=160,output=None):
+# plt.figure(figsize=figsize)
+# plt.xticks([]),plt.yticks([])
+# if title is not None:
+# plt.title(title)
+# if mode.lower() == 'bgr':
+# im = cv.cvtColor(im,cv.COLOR_BGR2RGB)
+
+# f = plt.gcf()
+# if mode.lower() =='grey' or mode.lower() == 'gray':
+# plt.imshow(im,cmap='gray')
+# else:
+# plt.imshow(im)
+# plt.show()
+# plt.draw()
+# if output is not None:
+# bbox_inches='tight'
+# ext=osp.splitext(output)[1].replace('.','')
+# f.savefig(output,dpi=dpi,format=ext)
+# print('Image saved to: {}'.format(output))
+
+
+
+# Utilities for analyzing frames
+
+def compute_gray(im):
+ im = cv.cvtColor(im,cv.COLOR_BGR2GRAY)
+ n_vals = float(im.shape[0] * im.shape[1])
+ avg = np.sum(im[:]) / n_vals
+ return avg
+
+def compute_rgb(im):
+ im = cv.cvtColor(im,cv.COLOR_BGR2RGB)
+ n_vals = float(im.shape[0] * im.shape[1])
+ avg_r = np.sum(im[:,:,0]) / n_vals
+ avg_g = np.sum(im[:,:,1]) / n_vals
+ avg_b = np.sum(im[:,:,2]) / n_vals
+ avg_rgb = np.sum(im[:,:,:]) / (n_vals * 3.0)
+  return avg_r, avg_g, avg_b, avg_rgb
+
+def compute_hsv(im):
+ im = cv.cvtColor(im,cv.COLOR_BGR2HSV)
+ n_vals = float(im.shape[0] * im.shape[1])
+  avg_h = np.sum(im[:,:,0]) / n_vals
+  avg_s = np.sum(im[:,:,1]) / n_vals
+  avg_v = np.sum(im[:,:,2]) / n_vals
+  avg_hsv = np.sum(im[:,:,:]) / (n_vals * 3.0)
+ return avg_h, avg_s, avg_v, avg_hsv
+
+def pys_dhash(im, hashSize=8):
+ # resize the input image, adding a single column (width) so we
+ # can compute the horizontal gradient
+ resized = cv.resize(im, (hashSize + 1, hashSize))
+ # compute the (relative) horizontal gradient between adjacent
+ # column pixels
+ diff = resized[:, 1:] > resized[:, :-1]
+ # convert the difference image to a hash
+ return sum([2 ** i for (i, v) in enumerate(diff.flatten()) if v])
+
+
+############################################
+# ImageHash
+# pip install imagehash
+############################################
+
+
+def compute_ahash(im):
+ """Compute average hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+  return imagehash.average_hash(ensure_pil(im))
+
+def compute_phash(im):
+ """Compute perceptual hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+ return imagehash.phash(ensure_pil(im))
+
+def compute_dhash(im):
+ """Compute difference hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+ return imagehash.dhash(ensure_pil(im))
+
+def compute_whash(im):
+ """Compute wavelet hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+ return imagehash.whash(ensure_pil(im))
+
+def compute_whash_b64(im):
+  """Compute wavelet hash (db4 mode) using ImageHash library
+  :param im: Numpy.ndarray
+  :returns: Imagehash.ImageHash
+  """
+  return imagehash.whash(ensure_pil(im), mode='db4')
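+
+# Usage sketch: ImageHash objects subtract to a Hamming distance, which is
+# what compute_features() above uses to skip near-duplicate frames.
+#
+#   h1 = compute_phash(frame_a)  # frame_a, frame_b: image ndarrays
+#   h2 = compute_phash(frame_b)
+#   is_near_dup = (h1 - h2) <= 1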
+
+
+############################################
+# Pillow
+############################################
+
+def sharpen(im):
+ """Sharpen image using PIL.ImageFilter
+ param: im: PIL.Image
+ returns: PIL.Image
+ """
+ im = ensure_pil(im)
+  im = im.filter(ImageFilter.SHARPEN)
+ return ensure_np(im)
+
+def fit_image(im,targ_size):
+ """Force fit image by cropping
+ param: im: PIL.Image
+ param: targ_size: a tuple of target (width, height)
+ returns: PIL.Image
+ """
+ im_pil = ensure_pil(im)
+ frame_pil = ImageOps.fit(im_pil, targ_size,
+ method=Image.BICUBIC, centering=(0.5, 0.5))
+ return ensure_np(frame_pil)
+
+
+############################################
+# scikit-image
+############################################
+
+def compute_entropy(im):
+ # im is grayscale numpy
+ return entropy(im, disk(10))
+
+############################################
+# OpenCV
+############################################
+
+def bgr2gray(im):
+ """Wrapper for cv2.cvtColor transform
+ :param im: Numpy.ndarray (BGR)
+ :returns: Numpy.ndarray (Gray)
+ """
+ return cv.cvtColor(im,cv.COLOR_BGR2GRAY)
+
+def gray2bgr(im):
+ """Wrapper for cv2.cvtColor transform
+ :param im: Numpy.ndarray (Gray)
+ :returns: Numpy.ndarray (BGR)
+ """
+ return cv.cvtColor(im,cv.COLOR_GRAY2BGR)
+
+def bgr2rgb(im):
+ """Wrapper for cv2.cvtColor transform
+ :param im: Numpy.ndarray (BGR)
+ :returns: Numpy.ndarray (RGB)
+ """
+ return cv.cvtColor(im,cv.COLOR_BGR2RGB)
+
+def compute_laplacian(im):
+ # below 100 is usually blurry
+ return cv.Laplacian(im, cv.CV_64F).var()
+
+
+# http://radjkarl.github.io/imgProcessor/index.html#
+
+def modifiedLaplacian(img):
+ ''''LAPM' algorithm (Nayar89)'''
+ M = np.array([-1, 2, -1])
+ G = cv.getGaussianKernel(ksize=3, sigma=-1)
+ Lx = cv.sepFilter2D(src=img, ddepth=cv.CV_64F, kernelX=M, kernelY=G)
+ Ly = cv.sepFilter2D(src=img, ddepth=cv.CV_64F, kernelX=G, kernelY=M)
+ FM = np.abs(Lx) + np.abs(Ly)
+ return cv.mean(FM)[0]
+
+def varianceOfLaplacian(img):
+ ''''LAPV' algorithm (Pech2000)'''
+    lap = cv.Laplacian(img, ddepth=cv.CV_64F)
+ stdev = cv.meanStdDev(lap)[1]
+ s = stdev[0]**2
+ return s[0]
+
+def tenengrad(img, ksize=3):
+ ''''TENG' algorithm (Krotkov86)'''
+ Gx = cv.Sobel(img, ddepth=cv.CV_64F, dx=1, dy=0, ksize=ksize)
+ Gy = cv.Sobel(img, ddepth=cv.CV_64F, dx=0, dy=1, ksize=ksize)
+ FM = Gx**2 + Gy**2
+ return cv.mean(FM)[0]
+
+def normalizedGraylevelVariance(img):
+ ''''GLVN' algorithm (Santos97)'''
+ mean, stdev = cv.meanStdDev(img)
+ s = stdev[0]**2 / mean[0]
+ return s[0]
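+
+# Usage sketch: all four focus metrics take a single-channel image and score
+# higher for sharper frames; useful thresholds are data-dependent.
+#
+#   gray = bgr2gray(im)
+#   scores = {'lapm': modifiedLaplacian(gray), 'lapv': varianceOfLaplacian(gray),
+#             'teng': tenengrad(gray), 'glvn': normalizedGraylevelVariance(gray)}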
+
+def compute_if_blank(im, width=100, sigma=0, thresh_canny=.1, thresh_mean=4, mask=None):
+  """im is a grayscale Numpy.ndarray; returns 1 if the frame appears blank, else 0"""
+ if mask is not None:
+ im_canny = feature.canny(im,sigma=sigma,mask=mask)
+ total = len(np.where(mask > 0)[0])
+ else:
+ im_canny = feature.canny(im,sigma=sigma)
+ total = (im.shape[0]*im.shape[1])
+ n_white = len(np.where(im_canny > 0)[0])
+ per = n_white/total
+ if np.mean(im) < thresh_mean or per < thresh_canny:
+ return 1
+ else:
+ return 0
+
+
+def print_timing(t,n):
+ t = time.time()-t
+ print('Elapsed time: {:.2f}'.format(t))
+ print('FPS: {:.2f}'.format(n/t))
+
+def vid2frames(fpath, limit=5000, width=None, idxs=None):
+  """Convert a video file into a list of frames
+  :param fpath: filepath to the video file
+  :param limit: maximum number of frames to read
+  :param width: if set, resize frames to this width
+  :param idxs: the indices of frames to keep (rest are skipped)
+  :returns: list of Numpy.ndarray frames
+  """
+  frames = []
+  cap = cv.VideoCapture(fpath)
+  if not cap.isOpened():
+    print('[-] Error. Could not read video file: {}'.format(fpath))
+    cap.release()
+    return frames
+
+  if idxs is not None:
+    # read sampled frames by seeking to each frame index
+    for idx in idxs:
+      cap.set(cv.CAP_PROP_POS_FRAMES, idx)
+      res, frame = cap.read()
+      if not res:
+        continue
+      if width is not None:
+        frame = imutils.resize(frame, width=width)
+      frames.append(frame)
+ else:
+    while len(frames) < limit:
+ res, frame = cap.read()
+ if not res:
+ break
+ if width is not None:
+ frame = imutils.resize(frame, width=width)
+ frames.append(frame)
+
+ cap.release()
+ return frames
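+
+# Usage sketch (filepath illustrative): sample every 30th frame at 320px wide,
+# then tile the samples with montage() below.
+#
+#   frames = vid2frames('/data/clip.mp4', width=320, idxs=range(0, 900, 30))
+#   grid = montage(frames, ncols=6, width=160)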
+
+def convolve_filter(vals, filters=[1]):
+  """Smooths a 1D signal by repeated windowed averaging
+  :param vals: array of values
+  :param filters: list of window half-sizes, applied in sequence
+  """
+ for k in filters:
+ vals_tmp = np.zeros_like(vals)
+ t = len(vals_tmp)
+ for i,v in enumerate(vals):
+ sum_vals = vals[max(0,i-k):min(t-1,i+k)]
+ vals_tmp[i] = np.mean(sum_vals)
+ vals = vals_tmp.copy()
+ return vals
+
+def cosine_delta(v1,v2):
+ return 1.0 - cosine_similarity(v1.reshape((1, -1)), v2.reshape((1, -1)))[0][0]
+
+
+
+def compute_edges(vals):
+  """Find edges in a binary signal; returns (rising indices, falling indices)"""
+  edges = np.zeros_like(vals)
+  for i in range(1, len(vals)):
+    delta = vals[i] - vals[i-1]
+    if delta == 1:
+      edges[i] = 1  # rising edge 0 --> 1
+    elif delta == -1:
+      edges[i] = 2  # falling edge 1 --> 0
+  # get indices for rising/falling edges
+  rising = np.where(np.array(edges) == 1)[0]
+  falling = np.where(np.array(edges) == 2)[0]
+  return rising, falling
+
+
+############################################
+# Point, Rect
+############################################
+
+class Point(object):
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+class Rect(object):
+ def __init__(self, p1, p2):
+ '''Store the top, bottom, left and right values for points
+ p1 and p2 are the (corners) in either order
+ '''
+ self.left = min(p1.x, p2.x)
+ self.right = max(p1.x, p2.x)
+ self.top = min(p1.y, p2.y)
+ self.bottom = max(p1.y, p2.y)
+
+def overlap(r1, r2):
+ '''Overlapping rectangles overlap both horizontally & vertically
+ '''
+ return range_overlap(r1.left, r1.right, r2.left, r2.right) and \
+ range_overlap(r1.top, r1.bottom, r2.top, r2.bottom)
+
+def range_overlap(a_min, a_max, b_min, b_max):
+ '''Neither range is completely greater than the other
+ '''
+ return (a_min <= b_max) and (b_min <= a_max)
+
+def merge_rects(r1,r2):
+ p1 = Point(min(r1.left,r2.left),min(r1.top,r2.top))
+ p2 = Point(max(r1.right,r2.right),max(r1.bottom,r2.bottom))
+ return Rect(p1,p2)
+
+def is_overlapping(r1,r2):
+ """r1,r2 as [x1,y1,x2,y2] list"""
+ r1x = Rect(Point(r1[0],r1[1]),Point(r1[2],r1[3]))
+ r2x = Rect(Point(r2[0],r2[1]),Point(r2[2],r2[3]))
+ return overlap(r1x,r2x)
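+
+# Example: two overlapping boxes in [x1,y1,x2,y2] form merge into one.
+#   is_overlapping([0,0,10,10], [5,5,20,20])  # True
+#   r = merge_rects(Rect(Point(0,0), Point(10,10)), Rect(Point(5,5), Point(20,20)))
+#   (r.left, r.top, r.right, r.bottom)        # (0, 0, 20, 20)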
+
+def get_rects_merged(rects, bounds, expand=0):
+  """Merge overlapping rects, optionally expanding by a margin first
+  :param rects: list of rects in [x1,y1,x2,y2] format
+  :param bounds: (width, height) clamp for expanded rects
+  :param expand: (int) pixels to expand each rect before merging
+  """
+ rects_expanded = []
+ bx,by = bounds
+ # expand
+ for x1,y1,x2,y2 in rects:
+ x1 = max(0,x1-expand)
+ y1 = max(0,y1-expand)
+ x2 = min(bx,x2+expand)
+ y2 = min(by,y2+expand)
+ rects_expanded.append(Rect(Point(x1,y1),Point(x2,y2)))
+
+ rects_merged = []
+ for i,r in enumerate(rects_expanded):
+ found = False
+ for j,rm in enumerate(rects_merged):
+ if overlap(r,rm):
+ rects_merged[j] = merge_rects(r,rm) #expand
+ found = True
+ if not found:
+ rects_merged.append(r)
+ # convert back to [x1,y1,x2,y2] format
+ rects_merged = [(r.left,r.top,r.right,r.bottom) for r in rects_merged]
+ # contract
+ rects_contracted = []
+ for x1,y1,x2,y2 in rects_merged:
+ x1 = min(bx,x1+expand)
+ y1 = min(by,y1+expand)
+ x2 = max(0,x2-expand)
+ y2 = max(0,y2-expand)
+ rects_contracted.append((x1,y1,x2,y2))
+
+ return rects_contracted
+
+
+############################################
+# Image display
+############################################
+
+
+def montage(frames, ncols=4, nrows=None, width=None):
+  """Convert list of frames into a grid montage
+  :param frames: list of frames as Numpy.ndarray
+  :param ncols: number of columns
+  :param nrows: if set, pad with blank frames to fill ncols*nrows cells
+  :param width: resize images to this width before adding to grid
+  :returns: Numpy.ndarray grid of all images
+  """
+
+ # expand image size if not enough frames
+ if nrows is not None and len(frames) < ncols * nrows:
+ blank = np.zeros_like(frames[0])
+ n = ncols * nrows - len(frames)
+ for i in range(n): frames.append(blank)
+
+ rows = []
+ for i,im in enumerate(frames):
+ if width is not None:
+ im = imutils.resize(im,width=width)
+ h,w = im.shape[:2]
+ if i % ncols == 0:
+ if i > 0:
+ rows.append(ims)
+ ims = []
+ ims.append(im)
+ if len(ims) > 0:
+ for j in range(ncols-len(ims)):
+ ims.append(np.zeros_like(im))
+ rows.append(ims)
+ row_ims = []
+ for row in rows:
+ row_im = np.hstack(np.array(row))
+ row_ims.append(row_im)
+ contact_sheet = np.vstack(np.array(row_ims))
+ return contact_sheet
diff --git a/megapixels/app/utils/logger_utils.py b/megapixels/app/utils/logger_utils.py
new file mode 100644
index 00000000..d4f962eb
--- /dev/null
+++ b/megapixels/app/utils/logger_utils.py
@@ -0,0 +1,68 @@
+"""
+Logger instantiator for use with Click utility scripts
+"""
+import sys
+import os
+import logging
+
+import colorlog
+
+from app.settings import app_cfg as cfg
+
+
+class Logger:
+
+ logger_name = 'app'
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def create(verbosity=4, logfile=None):
+ """Configures a logger from click params
+ :param verbosity: (int) between 0 and 5
+ :param logfile: (str) path to logfile
+ :returns: logging root object
+ """
+
+ loglevel = (5 - (max(0, min(verbosity, 5)))) * 10 # where logging.DEBUG = 10
+ date_format = '%Y-%m-%d %H:%M:%S'
+ if 'colorlog' in sys.modules and os.isatty(2):
+ cformat = '%(log_color)s' + cfg.LOGFILE_FORMAT
+ f = colorlog.ColoredFormatter(cformat, date_format,
+ log_colors = { 'DEBUG' : 'yellow', 'INFO' : 'white',
+ 'WARNING' : 'bold_yellow', 'ERROR': 'bold_red',
+ 'CRITICAL': 'bold_red' })
+ else:
+ f = logging.Formatter(cfg.LOGFILE_FORMAT, date_format)
+
+ logger = logging.getLogger(cfg.LOGGER_NAME)
+ logger.setLevel(loglevel)
+
+ if logfile:
+ # create file handler which logs even debug messages
+ fh = logging.FileHandler(logfile)
+ fh.setLevel(loglevel)
+ logger.addHandler(fh)
+
+ # add colored handler
+ ch = logging.StreamHandler()
+ ch.setFormatter(f)
+ logger.addHandler(ch)
+
+ if verbosity == 0:
+ logger.disabled = True
+
+ # test
+ # logger.debug('Hello Debug')
+ # logger.info('Hello Info')
+ # logger.warn('Hello Warn')
+ # logger.error('Hello Error')
+ # logger.critical('Hello Critical')
+
+ return logger
+
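+  # Usage sketch, typically from a Click entry point (logfile path illustrative):
+  #   log = Logger.create(verbosity=4, logfile='/tmp/app.log')
+  #   log.info('ready')
+  #   # elsewhere in the app:
+  #   log = Logger.getLogger()
+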
+ @staticmethod
+ def getLogger():
+    return logging.getLogger(cfg.LOGGER_NAME)
\ No newline at end of file