author	Jules Laplace <julescarbon@gmail.com>	2020-06-23 23:18:07 +0200
committer	Jules Laplace <julescarbon@gmail.com>	2020-06-23 23:18:07 +0200
commit	3cf70771cb45cc16ec33ffe44e7a1a4799d8f395 (patch)
tree	55f0edb53141d5f043b486d722f507bfd94abdea /animism-align/cli/app
parent	014816dc724c1be60b7dd28d4e608c89b4ed451c (diff)
adding web app base
Diffstat (limited to 'animism-align/cli/app')
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/collection_controller.cpython-37.pyc  bin 0 -> 2897 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/crud_controller.cpython-37.pyc  bin 0 -> 3968 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/detection_controller.cpython-37.pyc  bin 0 -> 4526 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/detection_index_controller.cpython-37.pyc  bin 0 -> 1848 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/feature_index_controller.cpython-37.pyc  bin 0 -> 1544 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/graph_controller.cpython-37.pyc  bin 0 -> 1621 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/media_controller.cpython-37.pyc  bin 0 -> 3740 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/media_import_controller.cpython-37.pyc  bin 0 -> 2687 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/modelzoo_controller.cpython-37.pyc  bin 0 -> 2019 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/page_controller.cpython-37.pyc  bin 0 -> 2773 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/search_controller.cpython-37.pyc  bin 0 -> 5588 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/socket_controller.cpython-37.pyc  bin 0 -> 825 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/task_controller.cpython-37.pyc  bin 0 -> 2378 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/tile_controller.cpython-37.pyc  bin 0 -> 1473 bytes
-rw-r--r--  animism-align/cli/app/controllers/__pycache__/upload_controller.cpython-37.pyc  bin 0 -> 3543 bytes
-rw-r--r--  animism-align/cli/app/controllers/crud_controller.py  156
-rw-r--r--  animism-align/cli/app/controllers/timestamp_controller.py  20
-rw-r--r--  animism-align/cli/app/controllers/upload_controller.py  142
-rw-r--r--  animism-align/cli/app/server/__pycache__/decorators.cpython-37.pyc  bin 0 -> 4476 bytes
-rw-r--r--  animism-align/cli/app/server/__pycache__/helpers.cpython-37.pyc  bin 0 -> 1339 bytes
-rw-r--r--  animism-align/cli/app/server/__pycache__/socket.cpython-37.pyc  bin 0 -> 3602 bytes
-rw-r--r--  animism-align/cli/app/server/__pycache__/web.cpython-37.pyc  bin 0 -> 2555 bytes
-rw-r--r--  animism-align/cli/app/server/decorators.py  127
-rw-r--r--  animism-align/cli/app/server/helpers.py  78
-rw-r--r--  animism-align/cli/app/server/representations.py  12
-rw-r--r--  animism-align/cli/app/server/web.py  59
-rw-r--r--  animism-align/cli/app/settings/__pycache__/app_cfg.cpython-37.pyc  bin 0 -> 1531 bytes
-rw-r--r--  animism-align/cli/app/settings/app_cfg.py  90
-rw-r--r--  animism-align/cli/app/sql/__pycache__/common.cpython-37.pyc  bin 0 -> 852 bytes
-rw-r--r--  animism-align/cli/app/sql/__pycache__/env.cpython-37.pyc  bin 0 -> 1643 bytes
-rw-r--r--  animism-align/cli/app/sql/common.py  35
-rw-r--r--  animism-align/cli/app/sql/env.py  69
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/collection.cpython-37.pyc  bin 0 -> 2784 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/collection_media.cpython-37.pyc  bin 0 -> 1836 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/detection_type.cpython-37.pyc  bin 0 -> 2221 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/detection_type_import.cpython-37.pyc  bin 0 -> 1450 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/feature_type.cpython-37.pyc  bin 0 -> 2297 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/feature_type_import.cpython-37.pyc  bin 0 -> 1424 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/graph.cpython-37.pyc  bin 0 -> 2315 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/media.cpython-37.pyc  bin 0 -> 3072 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/media_detection.cpython-37.pyc  bin 0 -> 2048 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/media_import.cpython-37.pyc  bin 0 -> 1880 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/page.cpython-37.pyc  bin 0 -> 2792 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/tile.cpython-37.pyc  bin 0 -> 2037 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/timestamp.cpython-37.pyc  bin 0 -> 1852 bytes
-rw-r--r--  animism-align/cli/app/sql/models/__pycache__/upload.cpython-37.pyc  bin 0 -> 1991 bytes
-rw-r--r--  animism-align/cli/app/sql/models/timestamp.py  37
-rw-r--r--  animism-align/cli/app/sql/models/upload.py  44
-rw-r--r--  animism-align/cli/app/sql/script.py.mako  25
-rw-r--r--  animism-align/cli/app/sql/versions/202006231946_create_database.py  47
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006011922_creating_database.cpython-37.pyc  bin 0 -> 1362 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006011923_adding_pages_table.cpython-37.pyc  bin 0 -> 1485 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006011925_adding_tiles_table.cpython-37.pyc  bin 0 -> 1153 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006011937_adding_pages.cpython-37.pyc  bin 0 -> 1109 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006011943_adding_database.cpython-37.pyc  bin 0 -> 1721 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006011944_adding_uploads.cpython-37.pyc  bin 0 -> 1094 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006021608_creating_database.cpython-37.pyc  bin 0 -> 2019 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006041801_add_sort_order_to_tiles.cpython-37.pyc  bin 0 -> 827 bytes
-rw-r--r--  animism-align/cli/app/sql/versions/__pycache__/202006231946_create_database.cpython-37.pyc  bin 0 -> 1326 bytes
-rw-r--r--  animism-align/cli/app/utils/__pycache__/click_factory.cpython-37.pyc  bin 0 -> 4943 bytes
-rw-r--r--  animism-align/cli/app/utils/__pycache__/click_utils.cpython-37.pyc  bin 0 -> 1614 bytes
-rw-r--r--  animism-align/cli/app/utils/__pycache__/file_utils.cpython-37.pyc  bin 0 -> 15412 bytes
-rw-r--r--  animism-align/cli/app/utils/__pycache__/log_utils.cpython-37.pyc  bin 0 -> 1862 bytes
-rw-r--r--  animism-align/cli/app/utils/click_factory.py  145
-rw-r--r--  animism-align/cli/app/utils/click_utils.py  40
-rw-r--r--  animism-align/cli/app/utils/display_utils.py  28
-rw-r--r--  animism-align/cli/app/utils/draw_utils.py  166
-rw-r--r--  animism-align/cli/app/utils/file_utils.py  519
-rw-r--r--  animism-align/cli/app/utils/im_utils.py  579
-rw-r--r--  animism-align/cli/app/utils/log_utils.py  69
-rw-r--r--  animism-align/cli/app/utils/process_utils.py  60
-rw-r--r--  animism-align/cli/app/utils/s3_utils.py  114
-rw-r--r--  animism-align/cli/app/utils/video_utils.py  23
73 files changed, 2684 insertions, 0 deletions
diff --git a/animism-align/cli/app/controllers/__pycache__/collection_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/collection_controller.cpython-37.pyc
new file mode 100644
index 0000000..eb3ea33
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/collection_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/crud_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/crud_controller.cpython-37.pyc
new file mode 100644
index 0000000..7f7d98d
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/crud_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/detection_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/detection_controller.cpython-37.pyc
new file mode 100644
index 0000000..8868de1
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/detection_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/detection_index_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/detection_index_controller.cpython-37.pyc
new file mode 100644
index 0000000..b369829
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/detection_index_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/feature_index_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/feature_index_controller.cpython-37.pyc
new file mode 100644
index 0000000..769ca33
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/feature_index_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/graph_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/graph_controller.cpython-37.pyc
new file mode 100644
index 0000000..cccd73b
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/graph_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/media_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/media_controller.cpython-37.pyc
new file mode 100644
index 0000000..e6dbaee
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/media_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/media_import_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/media_import_controller.cpython-37.pyc
new file mode 100644
index 0000000..30497c6
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/media_import_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/modelzoo_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/modelzoo_controller.cpython-37.pyc
new file mode 100644
index 0000000..3d493fc
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/modelzoo_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/page_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/page_controller.cpython-37.pyc
new file mode 100644
index 0000000..b540b38
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/page_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/search_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/search_controller.cpython-37.pyc
new file mode 100644
index 0000000..9cf91ff
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/search_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/socket_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/socket_controller.cpython-37.pyc
new file mode 100644
index 0000000..56fce90
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/socket_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/task_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/task_controller.cpython-37.pyc
new file mode 100644
index 0000000..6db38eb
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/task_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/tile_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/tile_controller.cpython-37.pyc
new file mode 100644
index 0000000..2449ca3
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/tile_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/__pycache__/upload_controller.cpython-37.pyc b/animism-align/cli/app/controllers/__pycache__/upload_controller.cpython-37.pyc
new file mode 100644
index 0000000..ed9da15
--- /dev/null
+++ b/animism-align/cli/app/controllers/__pycache__/upload_controller.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/controllers/crud_controller.py b/animism-align/cli/app/controllers/crud_controller.py
new file mode 100644
index 0000000..595825d
--- /dev/null
+++ b/animism-align/cli/app/controllers/crud_controller.py
@@ -0,0 +1,156 @@
+from flask import request, jsonify
+from flask_classful import FlaskView, route
+from werkzeug.datastructures import MultiDict
+
+from app.sql.common import db, Session
+from app.server.helpers import parse_search_args, parse_sort_args
+
+class CrudView(FlaskView):
+ # to subclass CrudView, specify the model + form:
+ # model = Collection
+ # form = CollectionForm
+ index_all = True
+  excluded_methods = ['where', 'on_index', 'on_show', 'on_create', 'on_update', 'on_destroy']
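+  # flask_classful would otherwise route these public helper methods as endpoints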
+ default_sort = "id"
+ default_order = "asc"
+
+ # implement these methods:
+ def where(self, query, args):
+ return query
+ def on_index(self, session, data):
+ return data
+ def on_show(self, session, data):
+ return data
+ def on_create(self, session, form, item):
+ pass
+ def on_update(self, session, form, item):
+ pass
+ def on_destroy(self, session, item):
+ pass
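+
+  # example subclass (hypothetical sketch, not part of this commit):
+  #   class CollectionView(CrudView):
+  #     model = Collection
+  #     form = CollectionForm
+  #     def where(self, query, args):
+  #       return query.filter(Collection.name == args.get('name'))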
+
+ def index(self):
+ """
+ List all {model}s
+ """
+ session = Session()
+ if self.index_all:
+ items = session.query(self.model).all()
+ else:
+ offset, limit = parse_search_args(request.args)
+ sort, order, order_by, order_by_id = parse_sort_args(request.args, self.model, self.default_sort, self.default_order)
+ query = session.query(self.model)
+ query = self.where(query, request.args)
+ if order_by_id is not None:
+ query = query.order_by(order_by, order_by_id)
+ else:
+ query = query.order_by(order_by)
+ items = query.offset(offset).limit(limit).all()
+
+ res = self.on_index(session, {
+ 'status': 'ok',
+ 'res': [ item.toJSON() for item in items ],
+ })
+ session.close()
+ return jsonify(res)
+
+ def get(self, id: int):
+ """
+ Fetch a single {model}.
+ """
+ session = Session()
+ item = session.query(self.model).get(id)
+ if not item:
+ session.close()
+ return jsonify({
+ 'status': 'error',
+ 'error': 'item not found'
+ })
+ result = self.on_show(session, {
+ 'status': 'ok',
+ 'res': item.toFullJSON() if hasattr(item, 'toFullJSON') else item.toJSON(),
+ })
+ session.close()
+ return jsonify(result)
+
+ def post(self):
+ """
+ Create a new {model}.
+
+ * JSON params: {jsonparams}
+ """
+ session = Session()
+ raw_form = MultiDict(request.json) if request.json is not None else request.form
+ form = self.form(raw_form)
+ if form.validate():
+ item = self.model()
+ form.populate_obj(item)
+ self.on_create(session, raw_form, item)
+ session.add(item)
+ session.commit()
+ res = {
+ 'status': 'ok',
+ 'res': item.toJSON(),
+ }
+ else:
+ res = {
+        'status': 'error',
+ 'errors': form.errors,
+ }
+ session.close()
+ return jsonify(res)
+
+ def put(self, id: int):
+ """
+ Update a {model}.
+
+ * JSON params: {jsonparams}
+ """
+ session = Session()
+ item = session.query(self.model).get(id)
+ if item:
+ raw_form = MultiDict(request.json) if request.json is not None else request.form
+ form = self.form(raw_form, obj=item)
+ # print(item.toJSON())
+ if form.validate():
+ form.populate_obj(item)
+ self.on_update(session, raw_form, item)
+ session.add(item)
+ session.commit()
+ res = {
+ 'status': 'ok',
+ 'res': item.toJSON(),
+ }
+ else:
+ res = {
+ 'status': 'error',
+ 'error': form.errors,
+ }
+ else:
+ res = {
+ 'status': 'error',
+ 'error': 'not found',
+ }
+ session.close()
+ return jsonify(res)
+
+ def delete(self, id: int):
+ """
+ Delete a {model}.
+ """
+ session = Session()
+ item = session.query(self.model).get(id)
+ if item:
+ self.on_destroy(session, item)
+ session.delete(item)
+ session.commit()
+ res = {
+ 'status': 'ok',
+ 'id': id,
+ }
+ else:
+ res = {
+ 'status': 'error',
+ 'error': 'not found',
+ }
+ session.close()
+ return jsonify(res)
diff --git a/animism-align/cli/app/controllers/timestamp_controller.py b/animism-align/cli/app/controllers/timestamp_controller.py
new file mode 100644
index 0000000..d4cef82
--- /dev/null
+++ b/animism-align/cli/app/controllers/timestamp_controller.py
@@ -0,0 +1,20 @@
+from flask import request, jsonify, redirect
+from flask_classful import route
+from werkzeug.datastructures import MultiDict
+
+from app.sql.common import db, Session
+from app.sql.models.timestamp import Timestamp, TimestampForm
+from app.sql.models.page import Page
+from app.sql.models.tile import Tile
+from app.controllers.crud_controller import CrudView
+
+class TimestampView(CrudView):
+ model = Timestamp
+ form = TimestampForm
+ default_sort = "start_ts"
+
+ def on_create(self, session, form, item):
+ item.settings = form['settings']
+
+ def on_update(self, session, form, item):
+ item.settings = form['settings']
diff --git a/animism-align/cli/app/controllers/upload_controller.py b/animism-align/cli/app/controllers/upload_controller.py
new file mode 100644
index 0000000..86f9f29
--- /dev/null
+++ b/animism-align/cli/app/controllers/upload_controller.py
@@ -0,0 +1,142 @@
+from flask import request, jsonify
+from flask_classful import FlaskView, route
+from werkzeug.datastructures import MultiDict
+from werkzeug.utils import secure_filename
+import os
+import numpy as np
+from PIL import Image
+
+from app.settings import app_cfg
+from app.sql.common import db, Session
+from app.sql.models.upload import Upload
+from app.utils.file_utils import sha256_stream, sha256_tree, VALID_IMAGE_EXTS
+from app.server.decorators import APIError
+
+class UploadView(FlaskView):
+ def index(self):
+ """
+ List all uploaded files.
+
+ * Query string params: offset, limit, sort (id, date), order (asc, desc)
+ """
+ session = Session()
+ uploads = session.query(Upload).all()
+ response = {
+ 'status': 'ok',
+ 'res': [ upload.toJSON() for upload in uploads ],
+ }
+ session.close()
+ return jsonify(response)
+
+ def get(self, id):
+ """
+ Fetch a single upload.
+ """
+ session = Session()
+ upload = session.query(Upload).get(id)
+ response = {
+ 'status': 'ok',
+ 'res': upload.toJSON(),
+ }
+ session.close()
+ return jsonify(response)
+
+ def post(self):
+ """
+ Upload a new file.
+
+ * JSON params: username
+ """
+
+    username = request.form.get('username')
+    if not username:
+      raise APIError('No username specified')
+
+ param_name = 'image'
+ if param_name not in request.files:
+ raise APIError('No file uploaded')
+
+ file = request.files[param_name]
+
+ # get sha256
+ sha256 = sha256_stream(file)
+ _, ext = os.path.splitext(file.filename)
+ if ext == '.jpeg':
+ ext = '.jpg'
+
+ # TODO: here check sha256
+ # upload = Upload.query.get(id)
+
+ if ext[1:] not in VALID_IMAGE_EXTS:
+ return jsonify({ 'status': 'error', 'error': 'Not a valid image' })
+
+    # read the raw file bytes as uint8 (np.fromstring is deprecated)
+    file.seek(0)
+    nparr = np.frombuffer(file.read(), np.uint8)
+
+    # decode the image to verify it parses; Image.fromarray would treat
+    # the raw byte stream as pixel data rather than decoding the format
+    try:
+      file.seek(0)
+      im = Image.open(file)
+      im.verify()
+    except Exception:
+      return jsonify({ 'status': 'error', 'error': 'Image parse error' })
+
+ session = Session()
+ upload = session.query(Upload).filter_by(sha256=sha256).first()
+ if upload is not None:
+ print("Already uploaded image")
+ response = {
+ 'status': 'ok',
+ 'notes': 'Image already uploaded',
+ 'res': upload.toJSON(),
+ }
+ session.close()
+ return jsonify(response)
+
+ uploaded_im_fn = secure_filename(sha256 + ext)
+ uploaded_im_abspath = os.path.join(app_cfg.DIR_UPLOADS, sha256_tree(sha256))
+ uploaded_im_fullpath = os.path.join(uploaded_im_abspath, uploaded_im_fn)
+
+ os.makedirs(uploaded_im_abspath, exist_ok=True)
+ nparr.tofile(uploaded_im_fullpath)
+
+    # fn is a non-nullable column on Upload, so record the original filename too
+    upload = Upload(username=username, sha256=sha256, fn=secure_filename(file.filename), ext=ext)
+ session.add(upload)
+ session.commit()
+ response = {
+ 'status': 'ok',
+ 'res': upload.toJSON(),
+ }
+ session.close()
+ return jsonify(response)
+
+ def delete(self, id):
+ """
+ Delete an uploaded file.
+ """
+ session = Session()
+ upload = session.query(Upload).get(id)
+ if not upload:
+ session.close()
+ return jsonify({
+ 'status': 'error',
+ 'error': 'not found',
+ })
+
+ sha256 = upload.sha256
+
+ uploaded_im_fn = secure_filename(sha256 + upload.ext)
+ uploaded_im_abspath = os.path.join(app_cfg.DIR_UPLOADS, sha256_tree(sha256))
+ uploaded_im_fullpath = os.path.join(uploaded_im_abspath, uploaded_im_fn)
+ if os.path.exists(uploaded_im_fullpath):
+ print("Removing " + uploaded_im_fullpath)
+ os.remove(uploaded_im_fullpath)
+
+ session.delete(upload)
+ session.commit()
+ response = {
+ 'status': 'ok',
+ 'id': id,
+ }
+ session.close()
+ return jsonify(response)
diff --git a/animism-align/cli/app/server/__pycache__/decorators.cpython-37.pyc b/animism-align/cli/app/server/__pycache__/decorators.cpython-37.pyc
new file mode 100644
index 0000000..712f13a
--- /dev/null
+++ b/animism-align/cli/app/server/__pycache__/decorators.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/server/__pycache__/helpers.cpython-37.pyc b/animism-align/cli/app/server/__pycache__/helpers.cpython-37.pyc
new file mode 100644
index 0000000..0b37fd7
--- /dev/null
+++ b/animism-align/cli/app/server/__pycache__/helpers.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/server/__pycache__/socket.cpython-37.pyc b/animism-align/cli/app/server/__pycache__/socket.cpython-37.pyc
new file mode 100644
index 0000000..15a5126
--- /dev/null
+++ b/animism-align/cli/app/server/__pycache__/socket.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/server/__pycache__/web.cpython-37.pyc b/animism-align/cli/app/server/__pycache__/web.cpython-37.pyc
new file mode 100644
index 0000000..7a43dff
--- /dev/null
+++ b/animism-align/cli/app/server/__pycache__/web.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/server/decorators.py b/animism-align/cli/app/server/decorators.py
new file mode 100644
index 0000000..2e6f9dd
--- /dev/null
+++ b/animism-align/cli/app/server/decorators.py
@@ -0,0 +1,127 @@
+"""
+These decorator functions wrap APIs for the simple search server.
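+
+A route would typically stack them, outermost first. A hypothetical sketch
+(not part of this commit; api_query must wrap get_offset_and_limit, since
+it seeds kwargs['query']):
+
+    @as_json
+    @api_query
+    @get_offset_and_limit
+    @db_session
+    def search(query=None, session=None):
+        return []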
+"""
+
+import os
+from time import time
+from datetime import datetime
+import numpy as np
+from PIL import Image
+from flask import request, jsonify
+from werkzeug.utils import secure_filename
+
+from app.sql.common import Session
+
+DEFAULT_LIMIT = 30
+
+def api_query(f):
+ """Wrap basic API queries with timing"""
+ def wrap_api_query(*args, **kwargs):
+ start = time()
+ query = {}
+ kwargs['query'] = query
+ results = f(*args, **kwargs)
+ query['timing'] = round(time() - start, 2)
+    if 'crop' in query:
+      if query['crop'] is None:
+        del query['crop']
+      else:
+        crop = query['crop']
+        query['crop'] = {
+          'x': crop[0],
+          'y': crop[1],
+          'w': crop[2],
+          'h': crop[3],
+        }
+    return { 'query': query, 'res': results }
+ wrap_api_query.__name__ = f.__name__
+ return wrap_api_query
+
+def db_session(f):
+ """Wrap API queries in a database session object which gets committed"""
+  def wrap_db_session(*args, **kwargs):
+    session = Session()
+    try:
+      kwargs['session'] = session
+      result = f(*args, **kwargs)
+      session.commit()
+      return result
+    except:
+      session.rollback()
+      raise
+    finally:
+      session.close()
+ wrap_db_session.__name__ = f.__name__
+ return wrap_db_session
+
+def get_offset_and_limit(f):
+ """Normalize offset/limit query string params"""
+ def wrap_offset(*args, **kwargs):
+ kwargs['query']['offset'] = request.args.get('offset', default=0, type=int)
+ kwargs['query']['limit'] = request.args.get('limit', default=DEFAULT_LIMIT, type=int)
+ x = float(request.args.get('x', default=0.0, type=float))
+ y = float(request.args.get('y', default=0.0, type=float))
+ w = float(request.args.get('w', default=0.0, type=float))
+ h = float(request.args.get('h', default=0.0, type=float))
+ if w != 0.0 and h != 0.0:
+ kwargs['query']['crop'] = (x, y, w, h,)
+ else:
+ kwargs['query']['crop'] = None
+ return f(*args, **kwargs)
+ wrap_offset.__name__ = f.__name__
+ return wrap_offset
+
+def store_uploaded_image(param_name, store=False, uploaded_im_path='static/data/uploaded'):
+  """Retrieve an uploaded image and prepare it for processing. Optionally store it to disk."""
+ def decorator(f):
+ def wrap_uploaded_image(*args, **kwargs):
+ if param_name not in request.files:
+ raise APIError('No file uploaded')
+
+ file = request.files[param_name]
+
+      # read the raw file bytes as uint8 (np.fromstring is deprecated)
+      nparr = np.frombuffer(file.read(), np.uint8)
+
+      # decode the image; Image.fromarray would treat the raw bytes as pixels
+      file.seek(0)
+      im = Image.open(file)
+      kwargs['im'] = im
+
+ if store:
+ uploaded_im_fn = secure_filename(datetime.now().isoformat() + "_" + file.filename)
+ uploaded_im_abspath = os.path.join(uploaded_im_path, uploaded_im_fn)
+ uploaded_im_remote_path = os.path.join('/', uploaded_im_path, uploaded_im_fn)
+ nparr.tofile(uploaded_im_abspath)
+ kwargs['query']['url'] = uploaded_im_remote_path
+ return f(*args, **kwargs)
+ wrap_uploaded_image.__name__ = f.__name__
+ return wrap_uploaded_image
+ return decorator
+
+def as_json(f):
+ """Output an API query as JSON"""
+ def wrap_jsonify(*args, **kwargs):
+ return jsonify(f(*args, **kwargs))
+ wrap_jsonify.__name__ = f.__name__
+ return wrap_jsonify
+
+def exception_handler(f):
+ """Handle exceptions caused by the API"""
+ def wrapper(*args, **kwargs):
+ try:
+ return f(*args, **kwargs)
+ except Exception as e:
+ return {
+ "error": True,
+        "message": getattr(e, 'message', str(e)),
+        "query": kwargs.get('query'),
+ }
+ wrapper.__name__ = f.__name__
+ return wrapper
+
+class APIError(Exception):
+ def __init__(self, message):
+ self.message = message
+ def __str__(self):
+ return repr(self.message)
diff --git a/animism-align/cli/app/server/helpers.py b/animism-align/cli/app/server/helpers.py
new file mode 100644
index 0000000..107a7fa
--- /dev/null
+++ b/animism-align/cli/app/server/helpers.py
@@ -0,0 +1,78 @@
+from sqlalchemy import and_
+from app.settings import app_cfg
+import datetime
+import dateutil
+
+def parse_search_args(args):
+ try:
+ limit = min(int(args.get('limit', default=app_cfg.DEFAULT_LIMIT)), app_cfg.MAX_LIMIT)
+ except Exception as e:
+ limit = app_cfg.DEFAULT_LIMIT
+ try:
+ offset = int(args.get('offset', default=0))
+ except Exception as e:
+ offset = 0
+ return offset, limit
+
+def parse_sort_args(args, table=None, default_sort="id", default_order="asc"):
+ try:
+ sort = args.get('sort', default=default_sort)
+  except Exception as e:
+    sort = default_sort
+  try:
+    order = args.get('order', default=default_order)
+  except Exception as e:
+    order = default_order
+ if table is not None:
+ column = getattr(table, sort)
+ order_by = getattr(column, order)()
+ if sort != 'id':
+ order_by_id = getattr(table.id, order)()
+ else:
+ order_by_id = None
+ else:
+ order_by = None
+ order_by_id = None
+ return sort, order, order_by, order_by_id
+
+def parse_crop_arg(args):
+ try:
+ x = float(args.get('x'))
+ y = float(args.get('y'))
+ w = float(args.get('w'))
+ h = float(args.get('h'))
+ crop = (x, y, w, h,)
+ except Exception as e:
+ crop = None
+ return crop
+
+# def parse_media_args(args):
+# criteria = []
+# criteria_kwargs = {}
+# try:
+# import_id = int(args.get('import_id'))
+# criteria.append(Media.import_id == import_id)
+# criteria_kwargs['import_id_1'] = import_id
+# except:
+# pass
+# try:
+# start_date = str(args.get('start_date'))
+# start_date = dateutil.parser.parse(start_date)
+# start_date = start_date.replace(tzinfo=datetime.timezone.utc)
+# criteria.append(Media.created_at >= start_date)
+# criteria_kwargs['created_at_1'] = start_date
+# except:
+# pass
+# try:
+# end_date = str(args.get('end_date'))
+# end_date = dateutil.parser.parse(end_date)
+# end_date = end_date.replace(tzinfo=datetime.timezone.utc)
+# criteria.append(Media.created_at <= end_date)
+# criteria_kwargs['created_at_2'] = end_date
+# except:
+# pass
+# if len(criteria) > 1:
+# return criteria, criteria_kwargs
+# elif len(criteria) == 1:
+# return criteria, criteria_kwargs
+# return None, {}
diff --git a/animism-align/cli/app/server/representations.py b/animism-align/cli/app/server/representations.py
new file mode 100644
index 0000000..76d9ed4
--- /dev/null
+++ b/animism-align/cli/app/server/representations.py
@@ -0,0 +1,12 @@
+import simplejson as json
+from flask import make_response
+
+def output_json(data, code, headers=None):
+ content_type = 'application/json'
+ dumped = json.dumps(data)
+ if headers:
+ headers.update({'Content-Type': content_type})
+ else:
+ headers = {'Content-Type': content_type}
+ response = make_response(dumped, code, headers)
+ return response
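+
+# presumably registered via flask_classful's representations hook, e.g.
+# (hypothetical): SomeView.representations = {'application/json': output_json}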
diff --git a/animism-align/cli/app/server/web.py b/animism-align/cli/app/server/web.py
new file mode 100644
index 0000000..c3a812a
--- /dev/null
+++ b/animism-align/cli/app/server/web.py
@@ -0,0 +1,59 @@
+import os
+import logging
+import logging.handlers
+
+logger = logging.getLogger("")
+logger.setLevel(logging.DEBUG)
+handler = logging.handlers.RotatingFileHandler("flask.log",
+ maxBytes=3000000, backupCount=2)
+formatter = logging.Formatter(
+ '[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s')
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+logging.getLogger().addHandler(logging.StreamHandler())
+
+from flask import Flask, Blueprint, send_from_directory, request
+from app.sql.common import db, connection_url
+
+from app.settings import app_cfg
+from app.controllers.upload_controller import UploadView
+
+def create_app(script_info=None):
+ """
+ functional pattern for creating the flask app
+ """
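+  # assumed invocation (not part of this commit), using Flask's app-factory CLI:
+  #   FLASK_APP="app.server.web:create_app" flask run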
+ logging.debug("Starting Flask app...")
+
+ app = Flask(__name__, static_folder=app_cfg.DIR_STATIC, static_url_path='/static')
+ app.config['SQLALCHEMY_DATABASE_URI'] = connection_url
+ app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+ app.config['SERVER_NAME'] = app_cfg.SERVER_NAME
+ app.url_map.strict_slashes = False
+
+ db.init_app(app)
+
+ UploadView.register(app, route_prefix='/api/v1/')
+
+ index_html = 'index.html'
+
+ @app.errorhandler(404)
+ def page_not_found(e):
+ return app.send_static_file(index_html), 200
+ # path = os.path.join(os.path.dirname(__file__), './static/index.html')
+ # with open(path, "r") as f:
+ # return f.read(), 200
+
+ @app.route('/', methods=['GET'])
+ def index():
+ return app.send_static_file('index.html')
+
+ @app.route('/favicon.ico')
+ def favicon():
+    return send_from_directory(os.path.join(app.root_path, 'static/img/'),
+      'favicon.ico', mimetype='image/vnd.microsoft.icon')
+
+ @app.shell_context_processor
+ def shell_context():
+ return { 'app': app, 'db': db }
+
+ return app
diff --git a/animism-align/cli/app/settings/__pycache__/app_cfg.cpython-37.pyc b/animism-align/cli/app/settings/__pycache__/app_cfg.cpython-37.pyc
new file mode 100644
index 0000000..f153ad1
--- /dev/null
+++ b/animism-align/cli/app/settings/__pycache__/app_cfg.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/settings/app_cfg.py b/animism-align/cli/app/settings/app_cfg.py
new file mode 100644
index 0000000..0a33b50
--- /dev/null
+++ b/animism-align/cli/app/settings/app_cfg.py
@@ -0,0 +1,90 @@
+import os
+from os.path import join
+import collections
+import logging
+
+from dotenv import load_dotenv
+import yaml
+
+# from app.models import types
+from pathlib import Path
+
+import codecs
+codecs.register(lambda name: codecs.lookup('utf8') if name == 'utf8mb4' else None)
+
+LOG = logging.getLogger('swimmer')
+
+# -----------------------------------------------------------------------------
+# .env config for keys
+# -----------------------------------------------------------------------------
+# Project directory
+SELF_CWD = os.path.dirname(os.path.realpath(__file__)) # this file
+DIR_PROJECT_ROOT = str(Path(SELF_CWD).parent.parent.parent)
+
+# source .env vars
+fp_env = join(DIR_PROJECT_ROOT, '.env')
+load_dotenv(dotenv_path=fp_env)
+# VFRAME_PRODUCTION = os.getenv('VFRAME_ENV') == 'production'
+
+# -----------------------------------------------------------------------------
+# Click config
+# -----------------------------------------------------------------------------
+
+CLICK_GROUPS = {
+ 'peaks': 'commands/peaks',
+ 'db': '',
+ 'flask': '',
+}
+
+
+# -----------------------------------------------------------------------------
+# File I/O
+# -----------------------------------------------------------------------------
+
+SELF_CWD = os.path.dirname(os.path.realpath(__file__)) # Script CWD
+DIR_APP = str(Path(SELF_CWD).parent.parent.parent)
+
+DIR_DATA_STORE = join(DIR_APP, 'data_store')
+
+DIR_DATABASE = join(DIR_DATA_STORE, 'db')
+DIR_UPLOADS = join(DIR_DATA_STORE, 'uploads')
+DIR_EXPORTS = join(DIR_DATA_STORE, 'exports')
+
+DIR_DOCS = join(DIR_APP, 'docs')
+
+URL_DATA = '/static/data/'
+URL_MEDIA = join(URL_DATA, 'media')
+URL_UPLOADS = join(URL_DATA, 'uploads')
+URL_EXPORTS = join(URL_DATA, 'exports')
+
+if 'cli' in os.getcwd():
+ DIR_STATIC = os.path.abspath('../static')
+else:
+ DIR_STATIC = os.path.abspath('static')
+
+HASH_TREE_DEPTH = 3 # for sha256 subdirs
+HASH_BRANCH_SIZE = 3 # for sha256 subdirs
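+# e.g., assuming sha256_tree splits the leading hash characters:
+#   "abcdef012..." -> "abc/def/012/" (HASH_TREE_DEPTH dirs of HASH_BRANCH_SIZE chars)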
+
+
+# -----------------------------------------------------------------------------
+# S3 storage
+# -----------------------------------------------------------------------------
+
+# os.getenv returns None when unset, so no try/except is needed
+S3_HTTP_BASE_URL = os.getenv("S3_HTTP_BASE_URL")
+
+# -----------------------------------------------------------------------------
+# Exports
+# -----------------------------------------------------------------------------
+
+SERVER_NAME = os.getenv('SERVER_NAME') or '0.0.0.0:5000'
+HTTP_EXTERNAL_HOST = os.getenv('HTTP_EXTERNAL_HOST') or f"http://{SERVER_NAME}"
+
+# -----------------------------------------------------------------------------
+# Unicode symbols for logger
+# -----------------------------------------------------------------------------
+
+UCODE_OK = u"\u2714" # check ok
+UCODE_NOK = u'\u2718' # x no ok
\ No newline at end of file
diff --git a/animism-align/cli/app/sql/__pycache__/common.cpython-37.pyc b/animism-align/cli/app/sql/__pycache__/common.cpython-37.pyc
new file mode 100644
index 0000000..fe626e5
--- /dev/null
+++ b/animism-align/cli/app/sql/__pycache__/common.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/__pycache__/env.cpython-37.pyc b/animism-align/cli/app/sql/__pycache__/env.cpython-37.pyc
new file mode 100644
index 0000000..b8ea29b
--- /dev/null
+++ b/animism-align/cli/app/sql/__pycache__/env.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/common.py b/animism-align/cli/app/sql/common.py
new file mode 100644
index 0000000..6f3586b
--- /dev/null
+++ b/animism-align/cli/app/sql/common.py
@@ -0,0 +1,35 @@
+import os
+import glob
+import time
+
+# import mysql.connector
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+
+from flask_sqlalchemy import SQLAlchemy
+
+from app.settings import app_cfg
+
+# connection_url = "mysql+mysqlconnector://{}:{}@{}/{}?charset=utf8mb4".format(
+# os.getenv("DB_USER"),
+# os.getenv("DB_PASS"),
+# os.getenv("DB_HOST"),
+# os.getenv("DB_NAME")
+# )
+
+os.makedirs(app_cfg.DIR_DATABASE, exist_ok=True)
+
+connection_url = "sqlite:///{}".format(os.path.join(app_cfg.DIR_DATABASE, 'animism.sqlite3'))
+
+engine = create_engine(connection_url, encoding="utf-8", pool_recycle=3600)
+
+Session = sessionmaker(bind=engine)
+Base = declarative_base()
+Base.metadata.bind = engine
+
+db = SQLAlchemy()
+
+# include the models in reverse dependency order, so relationships work
+from app.sql.models.timestamp import Timestamp
+from app.sql.models.upload import Upload
diff --git a/animism-align/cli/app/sql/env.py b/animism-align/cli/app/sql/env.py
new file mode 100644
index 0000000..839a30b
--- /dev/null
+++ b/animism-align/cli/app/sql/env.py
@@ -0,0 +1,69 @@
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+from app.sql.common import db, engine, connection_url, Base
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+config.set_main_option("script_location", "./app/sql")
+config.set_main_option("sqlalchemy.url", connection_url)
+
+target_metadata = Base.metadata
+
+# include the models in reverse dependency order, so relationships work
+from app.sql.models.timestamp import Timestamp
+from app.sql.models.upload import Upload
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ # connectable = engine_from_config(
+ # config.get_section(config.config_ini_section),
+ # prefix="sqlalchemy.",
+ # poolclass=pool.NullPool,
+ # )
+
+ with engine.connect() as connection:
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/animism-align/cli/app/sql/models/__pycache__/collection.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/collection.cpython-37.pyc
new file mode 100644
index 0000000..927259d
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/collection.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/collection_media.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/collection_media.cpython-37.pyc
new file mode 100644
index 0000000..cacd652
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/collection_media.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/detection_type.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/detection_type.cpython-37.pyc
new file mode 100644
index 0000000..195474b
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/detection_type.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/detection_type_import.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/detection_type_import.cpython-37.pyc
new file mode 100644
index 0000000..df4e41e
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/detection_type_import.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/feature_type.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/feature_type.cpython-37.pyc
new file mode 100644
index 0000000..12b566b
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/feature_type.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/feature_type_import.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/feature_type_import.cpython-37.pyc
new file mode 100644
index 0000000..ff1c5c1
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/feature_type_import.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/graph.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/graph.cpython-37.pyc
new file mode 100644
index 0000000..f803c6b
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/graph.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/media.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/media.cpython-37.pyc
new file mode 100644
index 0000000..8c75482
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/media.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/media_detection.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/media_detection.cpython-37.pyc
new file mode 100644
index 0000000..5579e49
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/media_detection.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/media_import.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/media_import.cpython-37.pyc
new file mode 100644
index 0000000..8ba8af4
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/media_import.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/page.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/page.cpython-37.pyc
new file mode 100644
index 0000000..3907746
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/page.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/tile.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/tile.cpython-37.pyc
new file mode 100644
index 0000000..743b301
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/tile.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/timestamp.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/timestamp.cpython-37.pyc
new file mode 100644
index 0000000..8337acf
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/timestamp.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/__pycache__/upload.cpython-37.pyc b/animism-align/cli/app/sql/models/__pycache__/upload.cpython-37.pyc
new file mode 100644
index 0000000..39fb71b
--- /dev/null
+++ b/animism-align/cli/app/sql/models/__pycache__/upload.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/models/timestamp.py b/animism-align/cli/app/sql/models/timestamp.py
new file mode 100644
index 0000000..60e9007
--- /dev/null
+++ b/animism-align/cli/app/sql/models/timestamp.py
@@ -0,0 +1,37 @@
+from sqlalchemy import create_engine, Table, Column, Text, String, Integer, Float, DateTime, JSON, ForeignKey
+from sqlalchemy.orm import relationship
+import sqlalchemy.sql.functions as func
+from sqlalchemy_utc import UtcDateTime, utcnow
+from wtforms_alchemy import ModelForm
+
+from app.sql.common import db, Base, Session
+# from app.sql.models.page import Page
+
+from app.settings import app_cfg
+
+class Timestamp(Base):
+ """Table for storing references to graphs"""
+ __tablename__ = 'graph'
+ id = Column(Integer, primary_key=True)
+ type = Column(String(16, convert_unicode=True), nullable=False)
+ start_ts = Column(Float, nullable=False)
+ end_ts = Column(Float, nullable=True)
+ sentence = Column(Text(convert_unicode=True), nullable=True)
+ settings = Column(JSON, default={}, nullable=True)
+
+ def toJSON(self):
+ return {
+ 'id': self.id,
+ 'type': self.type,
+ 'start_ts': self.start_ts,
+ 'end_ts': self.end_ts,
+      'sentence': self.sentence,
+ 'settings': self.settings,
+ }
+
+class TimestampForm(ModelForm):
+ class Meta:
+ model = Timestamp
+ exclude = ['settings']
+ def get_session():
+ return Session()
diff --git a/animism-align/cli/app/sql/models/upload.py b/animism-align/cli/app/sql/models/upload.py
new file mode 100644
index 0000000..5863b07
--- /dev/null
+++ b/animism-align/cli/app/sql/models/upload.py
@@ -0,0 +1,44 @@
+from sqlalchemy import create_engine, Table, Column, String, Integer, DateTime
+import sqlalchemy.sql.functions as func
+from sqlalchemy_utc import UtcDateTime, utcnow
+from wtforms_alchemy import ModelForm
+
+from app.sql.common import db, Base, Session
+
+from app.utils.file_utils import sha256_tree
+from app.settings import app_cfg
+
+from os.path import join
+
+class Upload(Base):
+ """Table for storing references to various media"""
+ __tablename__ = 'upload'
+ id = Column(Integer, primary_key=True)
+ sha256 = Column(String(256), nullable=False)
+ fn = Column(String(256), nullable=False)
+ ext = Column(String(4, convert_unicode=True), nullable=False)
+ username = Column(String(16, convert_unicode=True), nullable=False)
+ created_at = Column(UtcDateTime(), default=utcnow())
+
+ def toJSON(self):
+ return {
+ 'id': self.id,
+ 'sha256': self.sha256,
+ 'fn': self.fn,
+ 'ext': self.ext,
+ 'username': self.username,
+ 'url': self.url(),
+ 'created_at': self.created_at,
+ }
+
+  def filename(self):
+    # on disk the file is stored as sha256 + ext (see upload_controller)
+    return "{}{}".format(self.sha256, self.ext)
+
+ def filepath(self):
+ return join(app_cfg.DIR_UPLOADS, sha256_tree(self.sha256))
+
+ def fullpath(self):
+ return join(self.filepath(), self.filename())
+
+ def url(self):
+ return join(app_cfg.URL_UPLOADS, sha256_tree(self.sha256), self.filename())
diff --git a/animism-align/cli/app/sql/script.py.mako b/animism-align/cli/app/sql/script.py.mako
new file mode 100644
index 0000000..c4b86b8
--- /dev/null
+++ b/animism-align/cli/app/sql/script.py.mako
@@ -0,0 +1,25 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utc
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/animism-align/cli/app/sql/versions/202006231946_create_database.py b/animism-align/cli/app/sql/versions/202006231946_create_database.py
new file mode 100644
index 0000000..b74f9b3
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/202006231946_create_database.py
@@ -0,0 +1,47 @@
+"""create database
+
+Revision ID: e45386a1498b
+Revises:
+Create Date: 2020-06-23 19:46:05.161996
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utc
+
+
+# revision identifiers, used by Alembic.
+revision = 'e45386a1498b'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('graph',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('type', sa.String(length=16, _expect_unicode=True), nullable=False),
+ sa.Column('start_ts', sa.Float(), nullable=False),
+ sa.Column('end_ts', sa.Float(), nullable=True),
+ sa.Column('sentence', sa.Text(_expect_unicode=True), nullable=True),
+ sa.Column('settings', sa.JSON(), nullable=True),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('upload',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('sha256', sa.String(length=256), nullable=False),
+ sa.Column('fn', sa.String(length=256), nullable=False),
+ sa.Column('ext', sa.String(length=4, _expect_unicode=True), nullable=False),
+ sa.Column('username', sa.String(length=16, _expect_unicode=True), nullable=False),
+ sa.Column('created_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=True),
+ sa.PrimaryKeyConstraint('id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('upload')
+ op.drop_table('graph')
+ # ### end Alembic commands ###
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006011922_creating_database.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006011922_creating_database.cpython-37.pyc
new file mode 100644
index 0000000..a1adb31
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006011922_creating_database.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006011923_adding_pages_table.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006011923_adding_pages_table.cpython-37.pyc
new file mode 100644
index 0000000..beeeb71
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006011923_adding_pages_table.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006011925_adding_tiles_table.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006011925_adding_tiles_table.cpython-37.pyc
new file mode 100644
index 0000000..8d16725
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006011925_adding_tiles_table.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006011937_adding_pages.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006011937_adding_pages.cpython-37.pyc
new file mode 100644
index 0000000..35b459c
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006011937_adding_pages.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006011943_adding_database.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006011943_adding_database.cpython-37.pyc
new file mode 100644
index 0000000..adf3587
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006011943_adding_database.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006011944_adding_uploads.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006011944_adding_uploads.cpython-37.pyc
new file mode 100644
index 0000000..159dc66
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006011944_adding_uploads.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006021608_creating_database.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006021608_creating_database.cpython-37.pyc
new file mode 100644
index 0000000..c5df7ca
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006021608_creating_database.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006041801_add_sort_order_to_tiles.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006041801_add_sort_order_to_tiles.cpython-37.pyc
new file mode 100644
index 0000000..67e35cf
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006041801_add_sort_order_to_tiles.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/sql/versions/__pycache__/202006231946_create_database.cpython-37.pyc b/animism-align/cli/app/sql/versions/__pycache__/202006231946_create_database.cpython-37.pyc
new file mode 100644
index 0000000..dd49abc
--- /dev/null
+++ b/animism-align/cli/app/sql/versions/__pycache__/202006231946_create_database.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/utils/__pycache__/click_factory.cpython-37.pyc b/animism-align/cli/app/utils/__pycache__/click_factory.cpython-37.pyc
new file mode 100644
index 0000000..e44ccff
--- /dev/null
+++ b/animism-align/cli/app/utils/__pycache__/click_factory.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/utils/__pycache__/click_utils.cpython-37.pyc b/animism-align/cli/app/utils/__pycache__/click_utils.cpython-37.pyc
new file mode 100644
index 0000000..8aa694e
--- /dev/null
+++ b/animism-align/cli/app/utils/__pycache__/click_utils.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/utils/__pycache__/file_utils.cpython-37.pyc b/animism-align/cli/app/utils/__pycache__/file_utils.cpython-37.pyc
new file mode 100644
index 0000000..55b8f6a
--- /dev/null
+++ b/animism-align/cli/app/utils/__pycache__/file_utils.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/utils/__pycache__/log_utils.cpython-37.pyc b/animism-align/cli/app/utils/__pycache__/log_utils.cpython-37.pyc
new file mode 100644
index 0000000..2aaddb6
--- /dev/null
+++ b/animism-align/cli/app/utils/__pycache__/log_utils.cpython-37.pyc
Binary files differ
diff --git a/animism-align/cli/app/utils/click_factory.py b/animism-align/cli/app/utils/click_factory.py
new file mode 100644
index 0000000..40b0c82
--- /dev/null
+++ b/animism-align/cli/app/utils/click_factory.py
@@ -0,0 +1,145 @@
+"""
+Click processor factory
+- Inspired by and uses code from @wiretapped's HTSLAM codebase
+- In particular the very useful plugin-style command loader (list_commands/get_command)
+"""
+
+import os
+import sys
+from os.path import join
+from pathlib import Path
+from functools import update_wrapper, wraps
+import itertools
+from glob import glob
+import importlib
+import logging
+
+import click
+from app.settings import app_cfg as cfg
+
+
+# --------------------------------------------------------
+# Click Group Class
+# --------------------------------------------------------
+
+# set global variable during parent class create
+dir_plugins = None # set in create
+
+class ClickComplex:
+ """Wrapper generator for custom Click CLI's based on LR's coroutine"""
+
+ def __init__(self):
+ pass
+
+
+ class CustomGroup(click.Group):
+ #global dir_plugins # from CliGenerator init
+
+ # lists commands in plugin directory
+ def list_commands(self, ctx):
+ global dir_plugins # from CliGenerator init
+ rv = list(self.commands.keys())
+ fp_cmds = [Path(x) for x in Path(dir_plugins).iterdir() \
+ if str(x).endswith('.py') \
+ and '__init__' not in str(x)]
+ for fp_cmd in fp_cmds:
+ try:
+          assert fp_cmd.name not in rv, "[-] Error: {} can't exist in both cli.py and {}".format(fp_cmd.name, dir_plugins)
+ except Exception as ex:
+ logging.getLogger('app').error('{}'.format(ex))
+ rv.append(fp_cmd.stem)
+ rv.sort()
+ return rv
+
+ # Complex version: gets commands in directory and in this file
+ # Based on code from @wiretapped + HTSLAM
+ def get_command(self, ctx, cmd_name):
+ global dir_plugins
+ if cmd_name in self.commands:
+ return self.commands[cmd_name]
+ ns = {}
+ fpp_cmd = Path(dir_plugins, cmd_name + '.py')
+ fp_cmd = fpp_cmd.as_posix()
+ if not fpp_cmd.exists():
+ sys.exit('[-] {} file does not exist'.format(fpp_cmd))
+ code = compile(fpp_cmd.read_bytes(), fp_cmd, 'exec')
+ try:
+ eval(code, ns, ns)
+ except Exception as ex:
+ logging.getLogger('swimmer').error('exception: {}'.format(ex))
+ @click.command()
+ def _fail():
+ raise Exception('while loading {}'.format(fpp_cmd.name))
+ _fail.short_help = repr(ex)
+ _fail.help = repr(ex)
+ return _fail
+ if 'cli' not in ns:
+ sys.exit('[-] Error: {} does not contain a cli function'.format(fp_cmd))
+ return ns['cli']
+
+ @classmethod
+ def create(self, dir_plugins_local):
+ global dir_plugins
+ dir_plugins = dir_plugins_local
+ return self.CustomGroup
+
+
+
+class ClickSimple:
+ """Wrapper generator for custom Click CLI's"""
+
+ def __init__(self):
+ pass
+
+
+ class CustomGroup(click.Group):
+ #global dir_plugins # from CliGenerator init
+
+ # lists commands in plugin directory
+ def list_commands(self, ctx):
+ global dir_plugins # from CliGenerator init
+ rv = list(self.commands.keys())
+ fp_cmds = [Path(x) for x in Path(dir_plugins).iterdir() \
+ if str(x).endswith('.py') \
+ and '__init__' not in str(x)]
+ for fp_cmd in fp_cmds:
+        assert fp_cmd.name not in rv, "[-] Error: {} can't exist in both cli.py and {}".format(fp_cmd.name, dir_plugins)
+ rv.append(fp_cmd.stem)
+ rv.sort()
+ return rv
+
+    # Simple version: gets commands in the plugin directory and in this file
+    # from HTSLAM
+ def get_command(self, ctx, cmd_name):
+ global dir_plugins # from CliGenerator init
+ if cmd_name in self.commands:
+ return self.commands[cmd_name]
+ ns = {}
+ fpp_cmd = Path(dir_plugins, cmd_name + '.py')
+ fp_cmd = fpp_cmd.as_posix()
+ if not fpp_cmd.exists():
+ sys.exit('[-] {} file does not exist'.format(fpp_cmd))
+ code = compile(fpp_cmd.read_bytes(), fp_cmd, 'exec')
+ try:
+ eval(code, ns, ns)
+ except Exception as ex:
+ logging.getLogger('swimmer').error('exception: {}'.format(ex))
+ @click.command()
+ def _fail():
+ raise Exception('while loading {}'.format(fpp_cmd.name))
+ _fail.short_help = repr(ex)
+ _fail.help = repr(ex)
+ return _fail
+ if 'cli' not in ns:
+ sys.exit('[-] Error: {} does not contain a cli function'.format(fp_cmd))
+ return ns['cli']
+
+ @classmethod
+ def create(self, dir_plugins_local):
+ global dir_plugins
+ dir_plugins = dir_plugins_local
+ return self.CustomGroup
diff --git a/animism-align/cli/app/utils/click_utils.py b/animism-align/cli/app/utils/click_utils.py
new file mode 100644
index 0000000..83e4f75
--- /dev/null
+++ b/animism-align/cli/app/utils/click_utils.py
@@ -0,0 +1,40 @@
+"""
+Custom Click parameter types
+"""
+import click
+
+from app.settings import app_cfg
+from app.models import types
+
+# --------------------------------------------------------
+# Click command helpers
+# --------------------------------------------------------
+
+def enum_to_names(enum_type):
+ return {x.name.lower(): x for x in enum_type}
+
+def show_help(enum_type):
+ names = enum_to_names(enum_type)
+ return 'Options: "{}"'.format(', '.join(list(names.keys())))
+
+def get_default(opt):
+ return opt.name.lower()
+
+
+# --------------------------------------------------------
+# Custom Click parameter class
+# --------------------------------------------------------
+
+class ParamVar(click.ParamType):
+
+ name = 'default_type'
+
+ def __init__(self, param_type):
+ self.opts = {x.name.lower(): x for x in param_type}
+
+ def convert(self, value, param, ctx):
+    """Converts a (str) value to its Enum member"""
+    try:
+      return self.opts[value.lower()]
+    except KeyError:
+      self.fail('{} is not a valid option'.format(value), param, ctx)
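+
+# Usage sketch (hypothetical enum; assumes app.models.types defines e.g. LogLevel):
+#
+#   @click.option('--level', 'opt_level',
+#     type=ParamVar(types.LogLevel),
+#     default=get_default(types.LogLevel.INFO),
+#     help=show_help(types.LogLevel))
+#   def cli(opt_level):
+#     ...  # opt_level arrives as the Enum member, not a string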
diff --git a/animism-align/cli/app/utils/display_utils.py b/animism-align/cli/app/utils/display_utils.py
new file mode 100644
index 0000000..7bc1782
--- /dev/null
+++ b/animism-align/cli/app/utils/display_utils.py
@@ -0,0 +1,28 @@
+import sys
+import logging
+
+import cv2 as cv
+
+
+
+log = logging.getLogger('swimmer')
+
+def handle_keyboard(delay_amt=1):
+ '''Used with cv.imshow('title', image) to wait for keyboard press
+ '''
+ while True:
+ k = cv.waitKey(delay_amt) & 0xFF
+    if k == 27 or k == ord('q'):  # 27 = ESC
+      cv.destroyAllWindows()
+      sys.exit()
+    elif k == 32 or k == 83:  # 32 = space, 83 = right arrow
+ break
+ elif k != 255:
+ log.debug(f'k: {k}')
+
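+# Usage sketch, assuming a typical OpenCV preview loop:
+#
+#   for im in frames:
+#     cv.imshow('preview', im)
+#     handle_keyboard()  # ESC/q exits, space or right arrow advances
+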
+def handle_keyboard_video(delay_amt=1):
+  key = cv.waitKey(delay_amt) & 0xFF
+ # if the `q` key was pressed, break from the loop
+ if key == ord("q"):
+ cv.destroyAllWindows()
+ sys.exit()
diff --git a/animism-align/cli/app/utils/draw_utils.py b/animism-align/cli/app/utils/draw_utils.py
new file mode 100644
index 0000000..8ae47fe
--- /dev/null
+++ b/animism-align/cli/app/utils/draw_utils.py
@@ -0,0 +1,166 @@
+import sys
+from math import sqrt
+import logging
+
+import numpy as np
+import cv2 as cv
+
+log = logging.getLogger('swimmer')
+
+
+# ---------------------------------------------------------------------------
+# 3D landmark drawing utilities
+# ---------------------------------------------------------------------------
+
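+# Each index marks the last point of a facial contour in the standard 68-point
+# layout (jaw 0-16, right brow 17-21, left brow 22-26, nose bridge 27-30,
+# nose base 31-35, right eye 36-41, left eye 42-47, mouth 48-67), so
+# plot_keypoints skips the connecting line at contour boundaries.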
+end_list = np.array([17, 22, 27, 42, 48, 31, 36, 68], dtype=np.int32) - 1
+
+def plot_keypoints(im, kpts):
+  '''Draw the 68 facial key points
+  :param im: the input image
+  :param kpts: (68, 2) or (68, 3) array of keypoints
+  '''
+ im = im.copy()
+ kpts = np.round(kpts).astype(np.int32)
+ for i in range(kpts.shape[0]):
+ st = kpts[i, :2]
+ im = cv.circle(im, (st[0], st[1]), 1, (0, 0, 255), 2)
+ if i in end_list:
+ continue
+ ed = kpts[i + 1, :2]
+ im = cv.line(im, (st[0], st[1]), (ed[0], ed[1]), (255, 255, 255), 1)
+ return im
+
+
+def calc_hypotenuse(pts):
+ bbox = [min(pts[0, :]), min(pts[1, :]), max(pts[0, :]), max(pts[1, :])]
+ center = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
+ radius = max(bbox[2] - bbox[0], bbox[3] - bbox[1]) / 2
+ bbox = [center[0] - radius, center[1] - radius, center[0] + radius, center[1] + radius]
+ llength = sqrt((bbox[2] - bbox[0]) ** 2 + (bbox[3] - bbox[1]) ** 2)
+ return llength / 3
+
+def build_camera_box(rear_size=90):
+ point_3d = []
+ rear_depth = 0
+ point_3d.append((-rear_size, -rear_size, rear_depth))
+ point_3d.append((-rear_size, rear_size, rear_depth))
+ point_3d.append((rear_size, rear_size, rear_depth))
+ point_3d.append((rear_size, -rear_size, rear_depth))
+ point_3d.append((-rear_size, -rear_size, rear_depth))
+
+ front_size = int(4 / 3 * rear_size)
+ front_depth = int(4 / 3 * rear_size)
+ point_3d.append((-front_size, -front_size, front_depth))
+ point_3d.append((-front_size, front_size, front_depth))
+ point_3d.append((front_size, front_size, front_depth))
+ point_3d.append((front_size, -front_size, front_depth))
+ point_3d.append((-front_size, -front_size, front_depth))
+  point_3d = np.array(point_3d, dtype=np.float64).reshape(-1, 3)
+
+ return point_3d
+
+
+def plot_pose_box(im, Ps, pts68s, color=(40, 255, 0), line_width=2):
+ '''Draw a 3D box as annotation of pose.
+ ref: https://github.com/yinguobing/head-pose-estimation/blob/master/pose_estimator.py
+  :param im: the input image
+  :param Ps: (3, 4) affine camera matrix, or a list of them
+  :param pts68s: (2, 68) or (3, 68) keypoints, or a list of them
+ '''
+ im_draw = im.copy()
+ if not isinstance(pts68s, list):
+ pts68s = [pts68s]
+
+ if not isinstance(Ps, list):
+ Ps = [Ps]
+
+ for i in range(len(pts68s)):
+ pts68 = pts68s[i]
+ llength = calc_hypotenuse(pts68)
+ point_3d = build_camera_box(llength)
+ P = Ps[i]
+
+ # Map to 2d im points
+ point_3d_homo = np.hstack((point_3d, np.ones([point_3d.shape[0], 1]))) # n x 4
+ point_2d = point_3d_homo.dot(P.T)[:, :2]
+
+ point_2d[:, 1] = - point_2d[:, 1]
+ point_2d[:, :2] = point_2d[:, :2] - np.mean(point_2d[:4, :2], 0) + np.mean(pts68[:2, :27], 1)
+ point_2d = np.int32(point_2d.reshape(-1, 2))
+
+ # Draw all the lines
+ cv.polylines(im_draw, [point_2d], True, color, line_width, cv.LINE_AA)
+ cv.line(im_draw, tuple(point_2d[1]), tuple(point_2d[6]), color, line_width, cv.LINE_AA)
+ cv.line(im_draw, tuple(point_2d[2]), tuple(point_2d[7]), color, line_width, cv.LINE_AA)
+ cv.line(im_draw, tuple(point_2d[3]), tuple(point_2d[8]), color, line_width, cv.LINE_AA)
+
+ return im_draw
+
+
+
+# ---------------------------------------------------------------------------
+#
+# OpenCV drawing functions
+#
+# ---------------------------------------------------------------------------
+
+pose_types = {'pitch': (0,0,255), 'roll': (255,0,0), 'yaw': (0,255,0)}
+
+def draw_landmarks2D(im, points_norm, radius=3, color=(0,255,0)):
+ '''Draws facial landmarks, either 5pt or 68pt
+ '''
+ im_dst = im.copy()
+ dim = im.shape[:2][::-1]
+ for x,y in points_norm:
+ pt = (int(x*dim[0]), int(y*dim[1]))
+ cv.circle(im_dst, pt, radius, color, -1, cv.LINE_AA)
+ return im_dst
+
+def draw_landmarks3D(im, points, radius=3, color=(0,255,0)):
+ '''Draws 3D facial landmarks
+ '''
+ im_dst = im.copy()
+  for x,y,z in points:
+    cv.circle(im_dst, (int(x), int(y)), radius, color, -1, cv.LINE_AA)
+ return im_dst
+
+def draw_bbox(im, bbox_norm, color=(0,255,0), stroke_weight=2):
+ '''Draws BBox onto cv image
+ '''
+ im_dst = im.copy()
+ bbox_dim = bbox_norm.to_bbox_dim(im.shape[:2][::-1])
+ cv.rectangle(im_dst, bbox_dim.p1.xy, bbox_dim.p2.xy, color, stroke_weight, cv.LINE_AA)
+ return im_dst
+
+def draw_pose(im, pt_nose, image_pts):
+ '''Draws 3-axis pose over image
+ TODO: normalize point data
+ '''
+ im_dst = im.copy()
+ log.debug(f'pt_nose: {pt_nose}')
+ log.debug(f'image_pts pitch: {image_pts["pitch"]}')
+ cv.line(im_dst, pt_nose, tuple(image_pts['pitch']), pose_types['pitch'], 3)
+ cv.line(im_dst, pt_nose, tuple(image_pts['yaw']), pose_types['yaw'], 3)
+ cv.line(im_dst, pt_nose, tuple(image_pts['roll']), pose_types['roll'], 3)
+ return im_dst
+
+def draw_text(im, pt_norm, text, size=1.0, color=(0,255,0)):
+  '''Draws text over image at a normalized (x, y) point
+ '''
+ im_dst = im.copy()
+ dim = im.shape[:2][::-1]
+ pt = tuple(map(int, (pt_norm[0]*dim[0], pt_norm[1]*dim[1])))
+ cv.putText(im_dst, text, pt, cv.FONT_HERSHEY_SIMPLEX, size, color, thickness=1, lineType=cv.LINE_AA)
+ return im_dst
+
+def draw_degrees(im, pose_data, color=(0,255,0)):
+ '''Draws degrees as text over image
+ '''
+ im_dst = im.copy()
+ for i, pose_type in enumerate(pose_types.items()):
+ k, clr = pose_type
+ v = pose_data[k]
+ t = '{}: {:.2f}'.format(k, v)
+ origin = (10, 30 + (25 * i))
+    cv.putText(im_dst, t, origin, cv.FONT_HERSHEY_SIMPLEX, 0.5, clr, thickness=2, lineType=cv.LINE_AA)
+ return im_dst \ No newline at end of file
diff --git a/animism-align/cli/app/utils/file_utils.py b/animism-align/cli/app/utils/file_utils.py
new file mode 100644
index 0000000..7f1f417
--- /dev/null
+++ b/animism-align/cli/app/utils/file_utils.py
@@ -0,0 +1,519 @@
+"""
+File utility scripts
+- load and save files in csv, json, yaml, pkl formats
+- shutil helper scripts to move, copy, symlink files
+"""
+
+import sys
+import os
+from os.path import join
+import stat
+
+from glob import glob
+from pprint import pprint
+import shutil
+import distutils
+import pathlib
+from pathlib import Path
+import json
+import csv
+import pickle
+import threading
+from queue import Queue
+import time
+import logging
+import itertools
+import collections
+
+import yaml
+import hashlib
+import click
+from tqdm import tqdm
+
+
+
+# -----------------------------------------------------------------------------
+# Filesystem settings
+# -----------------------------------------------------------------------------
+
+ZERO_PADDING = 6 # padding for enumerated image filenames
+HASH_TREE_DEPTH = 2
+HASH_BRANCH_SIZE = 2
+VALID_IMAGE_EXTS = ['jpg', 'jpeg', 'png']
+VALID_VIDEO_EXTS = ['mp4', 'mov']
+
+# ------------------------------------------
+# Logger: use local app logging
+# ------------------------------------------
+
+log = logging.getLogger('swimmer')
+
+
+# ------------------------------------------
+# File I/O read/write little helpers
+# ------------------------------------------
+
+def glob_exts(dir_in, exts, recursive=False):
+ """Globs folders for multiple extensions
+ :param dir_in: input directory
+ :param exts: extensions as jpg, png, mp4
+ :param recursive: use recursive globbing
+ """
+ if not (type(exts) == list or type(exts) == tuple):
+ exts = [exts]
+ files = []
+ for ext in exts:
+ if recursive:
+ fp_glob = join(dir_in, '**/*.{}'.format(ext))
+ log.info(f'glob {fp_glob}')
+ files += glob(fp_glob, recursive=True)
+ else:
+ fp_glob = join(dir_in, '*.{}'.format(ext))
+ files += glob(fp_glob)
+ return files
+
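+# Usage sketch: collect all images under a directory tree (hypothetical path)
+#
+#   fps_im = glob_exts('data/frames', VALID_IMAGE_EXTS, recursive=True)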
+
+def zpad(x, zeros=ZERO_PADDING):
+ return str(x).zfill(zeros)
+
+def get_ext(fpp, lower=True):
+  """Returns the file extension w/o dot
+ :param fpp: (Pathlib.path) filepath
+ :param lower: (bool) force lowercase
+ :returns: (str) file extension (ie 'jpg')
+ """
+ fpp = ensure_posixpath(fpp)
+ ext = fpp.suffix.replace('.', '')
+ return ext.lower() if lower else ext
+
+
+def convert(fp_in, fp_out):
+ """Converts between JSON and Pickle formats
+ Pickle files are about 30-40% smaller filesize
+ """
+  if get_ext(fp_in) == get_ext(fp_out):
+    log.error('Input ({}) and output ({}) have the same format; nothing to convert'.format(fp_in, fp_out))
+    return
+  lazywrite(lazyload(fp_in), fp_out)
+
+
+def load_csv(fp_in, as_list=True):
+  """Loads CSV and returns list of items
+  :param fp_in: string filepath to CSV
+  :returns: list of all CSV data
+  """
+  if not Path(fp_in).exists():
+    log.error('not found: {}'.format(fp_in))
+    return []
+  log.info('loading: {}'.format(fp_in))
+ with open(fp_in, 'r') as fp:
+ items = csv.DictReader(fp)
+ if as_list:
+ items = [x for x in items]
+ log.info('returning {:,} items'.format(len(items)))
+ return items
+
+def unfussy_csv_reader(reader):
+ """Loads a CSV while ignoring possible data errors
+ :param reader: Special reader for load_csv_safe which ignores CSV parse errors
+ """
+ while True:
+ try:
+ yield next(reader)
+ except StopIteration:
+ return
+    except csv.Error as ex:
+      # log the parse error and skip the malformed row
+      log.warning('csv parse error: {}'.format(ex))
+      continue
+
+def load_csv_safe(fp_in, keys=True, create=False):
+ """Loads a CSV while ignoring possible data errors
+  :param fp_in: string filepath to CSV file
+  :param keys: boolean; set to False if the first line is not a header row
+  :param create: boolean; set to True to return empty keys/values if the CSV does not exist
+ """
+ try:
+ with open(fp_in, 'r', newline='', encoding='utf-8') as f:
+ # reader = csv.reader( (line.replace('\0','') for line in f) )
+ reader = csv.reader(f)
+ lines = list(unfussy_csv_reader(reader))
+ if keys:
+ keys = lines[0]
+ lines = lines[1:]
+ return keys, lines
+ return lines
+  except (OSError, IndexError):
+    if create:
+      if keys:
+        return [], []
+      return []
+ raise
+
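+# Usage sketch: tolerate malformed rows in a hand-edited CSV (hypothetical path)
+#
+#   keys, rows = load_csv_safe('data/labels.csv', create=True)
+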
+def load_recipe(fp_in):
+ """Loads a JSON file as an object with properties accessible with dot syntax
+ :param fp_in: string filepath to JSON file
+ """
+  with open(fp_in) as fh:
+ return json.load(fh, object_hook=lambda d: collections.namedtuple('X', d.keys())(*d.values()))
+
+
+def lazywrite(data, fp_out, sort_keys=True):
+ """Writes JSON or Pickle data"""
+ ext = get_ext(fp_out)
+ if ext == 'json':
+ return write_json(data, fp_out, sort_keys=sort_keys)
+ elif ext == 'pkl':
+ return write_pickle(data, fp_out)
+ else:
+ raise NotImplementedError('[!] {} is not yet supported. Use .pkl or .json'.format(ext))
+
+
+def lazyload(fp_in, ordered=True):
+ """Loads JSON or Pickle serialized data"""
+ if not Path(fp_in).exists():
+ log.error('file does not exist: {}'.format(fp_in))
+ return {}
+ ext = get_ext(fp_in)
+ if ext == 'json':
+ items = load_json(fp_in)
+ elif ext == 'pkl':
+ items = load_pickle(fp_in)
+ else:
+ raise NotImplementedError('[!] {} is not yet supported. Use .pkl or .json'.format(ext))
+
+ if ordered:
+ return collections.OrderedDict(sorted(items.items(), key=lambda t: t[0]))
+ else:
+ return items
+
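+# Usage sketch: the lazy pair dispatches on extension (hypothetical paths)
+#
+#   items = lazyload('data/index.json')  # -> OrderedDict sorted by key
+#   lazywrite(items, 'data/index.pkl')   # same data, smaller on disk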
+
+def load_yaml(fp_in):
+ """Loads YAML file and returns (dict)
+ :param fp_in: (str) filepath
+ """
+ with open(fp_in, 'r') as fp:
+ cfg = yaml.load(fp, Loader=yaml.Loader)
+ return cfg
+
+def load_text(fp_in):
+ """Load a text file into an array
+ :param fp_in: (str) filepath
+ """
+ with open(fp_in, 'rt') as fp:
+ lines = fp.read().rstrip('\n').split('\n')
+ return lines
+
+def load_line_lookup(fp_in):
+ """Load a text file into a lookup of lines to line numbers
+ :param fp_in: (str) filepath
+ """
+ with open(fp_in, 'rt') as fp:
+ lines = fp.read().rstrip('\n').split('\n')
+ return { line: i for i, line in enumerate(lines) }
+
+def load_json(fp_in):
+ """Loads JSON and returns items
+ :param fp_in: (str) filepath
+ :returns: data from JSON
+ """
+ if not Path(fp_in).exists():
+ log.error('file does not exist: {}'.format(fp_in))
+ return {}
+ with open(str(fp_in), 'r') as fp:
+ data = json.load(fp)
+ return data
+
+
+def load_pickle(fp_in):
+ """Loads Pickle and returns items
+ :param fp_in: (str) filepath
+ :returns: data from JSON
+ """
+ if not Path(fp_in).exists():
+ log.error('file does not exist: {}'.format(fp_in))
+ return {}
+ with open(str(fp_in), 'rb') as fp:
+ data = pickle.load(fp)
+ return data
+
+
+def order_items(records):
+ """Orders records by ASC SHA256"""
+ return collections.OrderedDict(sorted(records.items(), key=lambda t: t[0]))
+
+def write_text(data, fp_out, ensure_path=True):
+ if not data:
+ log.error('no data')
+ return
+
+ if ensure_path:
+ mkdirs(fp_out)
+ with open(fp_out, 'w') as fp:
+ if type(data) == list:
+ fp.write('\n'.join(data))
+ else:
+ fp.write(data)
+
+
+def write_pickle(data, fp_out, ensure_path=True):
+ """
+ """
+ if ensure_path:
+ mkdirs(fp_out) # mkdir
+ with open(fp_out, 'wb') as fp:
+ pickle.dump(data, fp)
+
+
+def write_json(data, fp_out, minify=True, ensure_path=True, sort_keys=True, verbose=False):
+ """
+ """
+ if ensure_path:
+ mkdirs(fp_out)
+ with open(fp_out, 'w') as fp:
+ if minify:
+ json.dump(data, fp, separators=(',',':'), sort_keys=sort_keys)
+ else:
+ json.dump(data, fp, indent=2, sort_keys=sort_keys)
+ if verbose:
+ log.info('Wrote JSON: {}'.format(fp_out))
+
+def write_csv(data, fp_out, header=None):
+  """Writes a dict to CSV as key,value rows with an optional header row"""
+  with open(fp_out, 'w') as fp:
+    writer = csv.writer(fp)
+    if header:
+      writer.writerow(header)
+    if type(data) is dict:
+      for k, v in data.items():
+        writer.writerow([k, v])
+
+def write_serialized_items(items, fp_out, ensure_path=True, minify=True, sort_keys=True):
+ """Writes serialized data
+ :param items: (dict) a sha256 dict of MappingItems
+  :param fp_out: (str) output filepath (.json or .pkl)
+ :param ensure_path: ensure the parent directories exist
+ :param minify: reduces JSON file size
+ """
+ log.info('Writing serialized data...')
+ fpp_out = ensure_posixpath(fp_out)
+ serialized_items = {k: v.serialize() for k, v in tqdm(items.items()) }
+ # write data
+ ext = get_ext(fpp_out)
+ if ext == 'json':
+ write_json(serialized_items, fp_out, ensure_path=ensure_path, minify=minify, sort_keys=sort_keys)
+ elif ext == 'pkl':
+ write_pickle(serialized_items, fp_out)
+ else:
+ raise NotImplementedError('[!] {} is not yet supported. Use .pkl or .json'.format(ext))
+ log.info('Wrote {:,} items to {}'.format(len(items), fp_out))
+
+
+def write_modeled_data(data, fp_out, ensure_path=False):
+ """
+ """
+ fpp_out = ensure_posixpath(fp_out)
+ if ensure_path:
+ mkdirs(fpp_out)
+ ext = get_ext(fpp_out)
+ if ext == 'pkl':
+ write_pickle(data, str(fp_out))
+ else:
+    raise NotImplementedError('[!] {} is not yet supported. Use .pkl'.format(ext))
+
+
+# ---------------------------------------------------------------------
+# Filepath utilities
+# ---------------------------------------------------------------------
+
+def ensure_posixpath(fp):
+ """Ensures filepath is pathlib.Path
+ :param fp: a (str, LazyFile, PosixPath)
+ :returns: a PosixPath filepath object
+ """
+ if type(fp) == str:
+ fpp = Path(fp)
+ elif type(fp) == click.utils.LazyFile:
+ fpp = Path(fp.name)
+ elif type(fp) == pathlib.PosixPath:
+ fpp = fp
+ else:
+ raise TypeError('{} is not a valid filepath type'.format(type(fp)))
+ return fpp
+
+
+def mkdirs(fp):
+ """Ensure parent directories exist for a filepath
+ :param fp: string, Path, or click.File
+ """
+ fpp = ensure_posixpath(fp)
+ fpp = fpp.parent if fpp.suffix else fpp
+ fpp.mkdir(parents=True, exist_ok=True)
+
+def ensure_dir(fp):
+ if not Path(fp).is_dir():
+ mkdirs(fp)
+
+
+def ext_media_format(ext):
+  """Converts a file extension into its media format
+  :param ext: str file extension (without dot)
+  """
+  # NOTE: assumed mapping; VALID_MEDIA_EXTS was referenced but never defined in this module
+  valid_media_exts = {'image': VALID_IMAGE_EXTS, 'video': VALID_VIDEO_EXTS}
+  for media_format, exts in valid_media_exts.items():
+    if ext in exts:
+      return media_format
+  raise ValueError('{} is not a valid option'.format(ext))
+
+
+def sha256(fp_in, block_size=65536):
+ """Generates SHA256 hash for a file
+ :param fp_in: (str) filepath
+ :param block_size: (int) byte size of block
+ :returns: (str) hash
+ """
+ sha256 = hashlib.sha256()
+ with open(fp_in, 'rb') as fp:
+ for block in iter(lambda: fp.read(block_size), b''):
+ sha256.update(block)
+ return sha256.hexdigest()
+
+def sha256_stream(stream, block_size=65536):
+ """Generates SHA256 hash for a file stream (from Flask)
+ :param fp_in: (FileStream) stream object
+ :param block_size: (int) byte size of block
+ :returns: (str) hash
+ """
+ sha256 = hashlib.sha256()
+ for block in iter(lambda: stream.read(block_size), b''):
+ sha256.update(block)
+ return sha256.hexdigest()
+
+def sha256_tree(sha256):
+ """Split hash into branches with tree-depth for faster file indexing
+ :param sha256: str of a sha256 hash
+  :returns: str of the sha256 tree with '/' delimiter
+ """
+ branch_size = HASH_BRANCH_SIZE
+ tree_size = HASH_TREE_DEPTH * branch_size
+ sha256_tree = [sha256[i:(i+branch_size)] for i in range(0, tree_size, branch_size)]
+ return '/'.join(sha256_tree)
+
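+# Example: with HASH_TREE_DEPTH=2 and HASH_BRANCH_SIZE=2, a hash 'deadbeef...'
+# yields 'de/ad', so files shard into <root>/de/ad/deadbeef.../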
+
+def migrate(fmaps, threads=1, action='copy', force=False):
+  """Copy/move/symlink files from src to dst directory
+ :param fmaps: (dict) with 'src' and 'dst' filepaths
+ :param threads: (int) number of threads
+ :param action: (str) copy/move/symlink
+ :param force: (bool) force overwrite existing files
+ """
+  num_items = len(fmaps)
+
+ def copytree(src, dst, symlinks = False, ignore = None):
+ # ozxyqk: https://stackoverflow.com/questions/22588225/how-do-you-merge-two-directories-or-move-with-replace-from-the-windows-command
+ if not os.path.exists(dst):
+ mkdirs(dst)
+ # os.makedirs(dst)
+ shutil.copystat(src, dst)
+ lst = os.listdir(src)
+ if ignore:
+ excl = ignore(src, lst)
+ lst = [x for x in lst if x not in excl]
+ for item in lst:
+ s = os.path.join(src, item)
+ d = os.path.join(dst, item)
+ if symlinks and os.path.islink(s):
+ if os.path.exists(d):
+ os.remove(d)
+ os.symlink(os.readlink(s), d)
+ try:
+ st = os.lstat(s)
+ mode = stat.S_IMODE(st.st_mode)
+ os.lchmod(d, mode)
+ except:
+ pass # lchmod not available
+ elif os.path.isdir(s):
+ copytree(s, d, symlinks, ignore)
+ else:
+ shutil.copy(s, d)
+
+ assert(action in ['copy','move','symlink'])
+
+ if threads > 1:
+ # threaded
+ task_queue = Queue()
+ print_lock = threading.Lock()
+
+ def migrate_action(fmap):
+ data_local = threading.local()
+ data_local.src, data_local.dst = (fmap['src'], fmap['dst'])
+ data_local.src_path = Path(data_local.src)
+ data_local.dst_path = Path(data_local.dst)
+
+ if force or not data_local.dst_path.exists():
+ if action == 'copy':
+ shutil.copy(data_local.src, data_local.dst)
+ #if data_local.src_path.is_dir():
+ # copytree(data_local.src, data_local.dst)
+ #else:
+ elif action == 'move':
+ shutil.move(data_local.src, data_local.dst)
+        elif action == 'symlink':
+          if force and data_local.dst_path.exists():
+            data_local.dst_path.unlink()
+          # the symlink lives at dst and points back to src
+          data_local.dst_path.symlink_to(data_local.src)
+
+ def process_queue(num_items):
+ # TODO: progress bar
+ while True:
+ fmap = task_queue.get()
+ migrate_action(fmap)
+ log.info('migrate: {:.2f} {:,}/{:,}'.format(
+ (task_queue.qsize() / num_items)*100, task_queue.qsize(), num_items))
+ task_queue.task_done()
+
+ # avoid race conditions by creating dir structure here
+ log.info('create directory structure')
+ for fmap in tqdm(fmaps):
+ mkdirs(fmap['dst'])
+
+ # init threads
+ for i in range(threads):
+ t = threading.Thread(target=process_queue, args=(num_items,))
+ t.daemon = True
+ t.start()
+
+ # process threads
+ start = time.time()
+ for fmap in fmaps:
+ task_queue.put(fmap)
+
+ task_queue.join()
+
+ else:
+ # non-threaded
+ for fmap in tqdm(fmaps):
+ mkdirs(fmap['dst'])
+ if action == 'copy':
+ shutil.copy(fmap['src'], fmap['dst'])
+ elif action == 'move':
+ shutil.move(fmap['src'], fmap['dst'])
+      elif action == 'symlink':
+        if force and Path(fmap['dst']).exists():
+          Path(fmap['dst']).unlink()
+        Path(fmap['dst']).symlink_to(fmap['src'])
+ return
+
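+# Usage sketch (hypothetical paths):
+#
+#   fmaps = [{'src': 'incoming/a.jpg', 'dst': 'media/de/ad/a.jpg'}]
+#   migrate(fmaps, threads=4, action='copy')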
diff --git a/animism-align/cli/app/utils/im_utils.py b/animism-align/cli/app/utils/im_utils.py
new file mode 100644
index 0000000..839c268
--- /dev/null
+++ b/animism-align/cli/app/utils/im_utils.py
@@ -0,0 +1,579 @@
+import sys
+import os
+from os.path import join
+import cv2 as cv
+import imagehash
+from PIL import Image, ImageDraw, ImageFilter, ImageOps
+from skimage.filters.rank import entropy
+from skimage.morphology import disk
+from skimage import feature
+# import matplotlib.pyplot as plt
+import imutils
+import time
+import numpy as np
+import struct
+from sklearn.metrics.pairwise import cosine_similarity
+import datetime
+
+def ensure_pil(im):
+ """Ensure image is Pillow format"""
+ try:
+ im.verify()
+ return im
+ except:
+ return Image.fromarray(im.astype('uint8'), 'RGB')
+
+def ensure_np(im):
+ """Ensure image is numpy array"""
+ if type(im) == np.ndarray:
+ return im
+ return np.asarray(im, np.uint8)
+
+def np2pil(im, swap=True):
+ """Ensure image is Pillow format
+ :param im: image in numpy or PIL.Image format
+ :returns: image in Pillow RGB format
+ """
+ try:
+ im.verify()
+ return im
+ except:
+ if swap:
+ im = bgr2rgb(im)
+ return Image.fromarray(im.astype('uint8'), 'RGB')
+
+def pil2np(im, swap=True):
+ """Ensure image is Numpy.ndarry format
+ :param im: image in numpy or PIL.Image format
+ :returns: image in Numpy uint8 format
+ """
+ if type(im) == np.ndarray:
+ return im
+ im = np.asarray(im, np.uint8)
+ if swap:
+ im = rgb2bgr(im)
+ return im
+
+def num_channels(im):
+ '''Returns number of channels in numpy.ndarray image'''
+ if len(im.shape) > 2:
+ return im.shape[2]
+ else:
+ return 1
+
+def is_grayscale(im, threshold=5):
+ """Returns True if image is grayscale
+ :param im: (numpy.array) image
+ :return (bool) of if image is grayscale"""
+ b = im[:,:,0]
+ g = im[:,:,1]
+ mean = np.mean(np.abs(g - b))
+ return mean < threshold
+
+
+def compute_features(fe,frames,phashes,phash_thresh=1):
+ """
+  Get vector embeddings using a FeatureExtractor
+  :param fe: FeatureExtractor instance
+  :param frames: list of frame images as numpy.ndarray
+  :param phashes: list of perceptual hashes, one per frame
+  :param phash_thresh: minimum phash distance before recomputing features
+  :returns: list of feature vectors
+  """
+  vals = []
+ for i,im in enumerate(frames):
+ if i == 0 or (phashes[i] - phashes[i-1]) > phash_thresh:
+ vals.append(fe.extract(im))
+ else:
+ vals.append(vals[i-1])
+ return vals
+
+
+
+def resize(im, width=0, height=0):
+  """Resize image using imutils; pass 0 or None for an edge to let the other edge govern
+  :param im: a Numpy.ndarray image
+  :param width: target max width in px (0 = unconstrained)
+  :param height: target max height in px (0 = unconstrained)
+  """
+  # TODO change to cv.resize and add algorithm choices
+  w = width or 0
+  h = height or 0
+  if w == 0 and h == 0:
+    return im
+  elif w > 0 and h > 0:
+    ws = im.shape[1] / w
+    hs = im.shape[0] / h
+    if ws > hs:
+      return imutils.resize(im, width=w)
+    else:
+      return imutils.resize(im, height=h)
+  elif w > 0 and h == 0:
+    return imutils.resize(im, width=w)
+  elif w == 0 and h > 0:
+    return imutils.resize(im, height=h)
+  else:
+    return im
+
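+# Usage sketch: fit within 1280x720, whichever edge is the tighter constraint
+#
+#   im_small = resize(im, width=1280, height=720)
+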
+def filter_pixellate(im,num_cells):
+ """Pixellate image by downsample then upsample
+ :param im: PIL.Image
+ :returns: PIL.Image
+ """
+ w,h = im.size
+ im = im.resize((num_cells,num_cells), Image.NEAREST)
+ im = im.resize((w,h), Image.NEAREST)
+ return im
+
+# Plot images inline using Matplotlib
+# def pltimg(im,title=None,mode='rgb',figsize=(8,12),dpi=160,output=None):
+# plt.figure(figsize=figsize)
+# plt.xticks([]),plt.yticks([])
+# if title is not None:
+# plt.title(title)
+# if mode.lower() == 'bgr':
+# im = cv.cvtColor(im,cv.COLOR_BGR2RGB)
+
+# f = plt.gcf()
+# if mode.lower() =='grey' or mode.lower() == 'gray':
+# plt.imshow(im,cmap='gray')
+# else:
+# plt.imshow(im)
+# plt.show()
+# plt.draw()
+# if output is not None:
+# bbox_inches='tight'
+# ext=osp.splitext(output)[1].replace('.','')
+# f.savefig(output,dpi=dpi,format=ext)
+# print('Image saved to: {}'.format(output))
+
+
+
+# Utilities for analyzing frames
+
+# def compute_gray(im):
+# im = cv.cvtColor(im,cv.COLOR_BGR2GRAY)
+# n_vals = float(im.shape[0] * im.shape[1])
+# avg = np.sum(im[:]) / n_vals
+# return avg
+
+# def compute_rgb(im):
+# im = cv.cvtColor(im,cv.COLOR_BGR2RGB)
+# n_vals = float(im.shape[0] * im.shape[1])
+# avg_r = np.sum(im[:,:,0]) / n_vals
+# avg_g = np.sum(im[:,:,1]) / n_vals
+# avg_b = np.sum(im[:,:,2]) / n_vals
+# avg_rgb = np.sum(im[:,:,:]) / (n_vals * 3.0)
+# return avg_r, avg_b, avg_g, avg_rgb
+
+# def compute_hsv(im):
+# im = cv.cvtColor(im,cv.COLOR_BGR2HSV)
+# n_vals = float(im.shape[0] * im.shape[1])
+# avg_h = np.sum(frame[:,:,0]) / n_vals
+# avg_s = np.sum(frame[:,:,1]) / n_vals
+# avg_v = np.sum(frame[:,:,2]) / n_vals
+# avg_hsv = np.sum(frame[:,:,:]) / (n_vals * 3.0)
+# return avg_h, avg_s, avg_v, avg_hsv
+
+# def pys_dhash(im, hashSize=8):
+# # resize the input image, adding a single column (width) so we
+# # can compute the horizontal gradient
+# resized = cv.resize(im, (hashSize + 1, hashSize))
+# # compute the (relative) horizontal gradient between adjacent
+# # column pixels
+# diff = resized[:, 1:] > resized[:, :-1]
+# # convert the difference image to a hash
+# return sum([2 ** i for (i, v) in enumerate(diff.flatten()) if v])
+
+
+############################################
+# ImageHash
+# pip install imagehash
+############################################
+
+def compute_ahash(im):
+ """Compute average hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+  return imagehash.average_hash(ensure_pil(im))
+
+def compute_phash(im):
+ """Compute perceptual hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+ return imagehash.phash(ensure_pil(im))
+
+def phash2int(phash):
+ """Compute perceptual hash using ImageHash library and convert to binary
+ :param phash: Imagehash.ImageHash
+ :returns: binary-encoded bigint
+ """
+ phash.hash[-1] = False
+ phash_as_bigint = struct.unpack('Q', np.packbits(phash.hash))[0]
+ return phash_as_bigint
+
+def compute_phash_int(im):
+ """Compute perceptual hash using ImageHash library and convert to binary
+ :param im: Numpy.ndarray
+ :returns: binary-encoded bigint
+ """
+ return phash2int(compute_phash(im))
+
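+# Usage sketch: the integer form suits storage/lookup (e.g. a SQL BIGINT column)
+#
+#   phash_id = compute_phash_int(frame)  # frame: numpy.ndarray image
+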
+def compute_dhash(im):
+ """Compute difference hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+ return imagehash.dhash(ensure_pil(im))
+
+def compute_whash(im):
+ """Compute wavelet hash using ImageHash library
+ :param im: Numpy.ndarray
+ :returns: Imagehash.ImageHash
+ """
+ return imagehash.whash(ensure_pil(im))
+
+def compute_whash_b64(im):
+  """Compute wavelet hash (db4 mode) using ImageHash library
+  :param im: Numpy.ndarray
+  :returns: Imagehash.ImageHash
+  """
+  return imagehash.whash(ensure_pil(im), mode='db4')
+
+
+############################################
+# Pillow
+############################################
+
+def sharpen(im):
+ """Sharpen image using PIL.ImageFilter
+ param: im: PIL.Image
+ returns: PIL.Image
+ """
+ im = ensure_pil(im)
+  im = im.filter(ImageFilter.SHARPEN)  # filter() returns a new image
+ return ensure_np(im)
+
+def fit_image(im,targ_size):
+ """Force fit image by cropping
+ param: im: PIL.Image
+ param: targ_size: a tuple of target (width, height)
+ returns: PIL.Image
+ """
+ im_pil = ensure_pil(im)
+ frame_pil = ImageOps.fit(im_pil, targ_size,
+ method=Image.BICUBIC, centering=(0.5, 0.5))
+ return ensure_np(frame_pil)
+
+
+
+############################################
+# scikit-learn
+############################################
+
+def compute_entropy(im):
+ # im is grayscale numpy
+ return entropy(im, disk(10))
+
+############################################
+# OpenCV
+############################################
+
+def bgr2gray(im):
+ """Wrapper for cv2.cvtColor transform
+ :param im: Numpy.ndarray (BGR)
+ :returns: Numpy.ndarray (Gray)
+ """
+ return cv.cvtColor(im,cv.COLOR_BGR2GRAY)
+
+def gray2bgr(im):
+  """Wrapper for cv2.cvtColor transform
+  :param im: Numpy.ndarray (Gray)
+  :returns: Numpy.ndarray (BGR)
+  """
+  return cv.cvtColor(im,cv.COLOR_GRAY2BGR)
+
+def bgr2rgb(im):
+  """Wrapper for cv2.cvtColor transform
+  :param im: Numpy.ndarray (BGR)
+  :returns: Numpy.ndarray (RGB)
+  """
+  return cv.cvtColor(im,cv.COLOR_BGR2RGB)
+
+def rgb2bgr(im):
+  """Wrapper for cv2.cvtColor transform (pil2np above depends on this)
+  :param im: Numpy.ndarray (RGB)
+  :returns: Numpy.ndarray (BGR)
+  """
+  return cv.cvtColor(im,cv.COLOR_RGB2BGR)
+
+def compute_laplacian(im):
+ # below 100 is usually blurry
+ return cv.Laplacian(im, cv.CV_64F).var()
+
+
+# # http://radjkarl.github.io/imgProcessor/index.html#
+
+# def modifiedLaplacian(img):
+# ''''LAPM' algorithm (Nayar89)'''
+# M = np.array([-1, 2, -1])
+# G = cv.getGaussianKernel(ksize=3, sigma=-1)
+# Lx = cv.sepFilter2D(src=img, ddepth=cv.CV_64F, kernelX=M, kernelY=G)
+# Ly = cv.sepFilter2D(src=img, ddepth=cv.CV_64F, kernelX=G, kernelY=M)
+# FM = np.abs(Lx) + np.abs(Ly)
+# return cv.mean(FM)[0]
+
+# def varianceOfLaplacian(img):
+# ''''LAPV' algorithm (Pech2000)'''
+# lap = cv.Laplacian(img, ddepth=-1)#cv.cv.CV_64F)
+# stdev = cv.meanStdDev(lap)[1]
+# s = stdev[0]**2
+# return s[0]
+
+# def tenengrad(img, ksize=3):
+# ''''TENG' algorithm (Krotkov86)'''
+# Gx = cv.Sobel(img, ddepth=cv.CV_64F, dx=1, dy=0, ksize=ksize)
+# Gy = cv.Sobel(img, ddepth=cv.CV_64F, dx=0, dy=1, ksize=ksize)
+# FM = Gx**2 + Gy**2
+# return cv.mean(FM)[0]
+
+# def normalizedGraylevelVariance(img):
+# ''''GLVN' algorithm (Santos97)'''
+# mean, stdev = cv.meanStdDev(img)
+# s = stdev[0]**2 / mean[0]
+# return s[0]
+
+def compute_if_blank(im,width=100,sigma=0,thresh_canny=.1,thresh_mean=4,mask=None):
+  # im is grayscale np
+ #im = imutils.resize(im,width=width)
+ #mask = imutils.resize(mask,width=width)
+ if mask is not None:
+ im_canny = feature.canny(im,sigma=sigma,mask=mask)
+ total = len(np.where(mask > 0)[0])
+ else:
+ im_canny = feature.canny(im,sigma=sigma)
+ total = (im.shape[0]*im.shape[1])
+ n_white = len(np.where(im_canny > 0)[0])
+ per = n_white/total
+ if np.mean(im) < thresh_mean or per < thresh_canny:
+ return 1
+ else:
+ return 0
+
+
+def print_timing(t,n):
+ t = time.time()-t
+ print('Elapsed time: {:.2f}'.format(t))
+ print('FPS: {:.2f}'.format(n/t))
+
+# def vid2frames(fpath, limit=5000, width=None, idxs=None):
+# """Convert a video file into list of frames
+# :param fpath: filepath to the video file
+# :param limit: maximum number of frames to read
+# :param fpath: the indices of frames to keep (rest are skipped)
+# :returns: (fps, number of frames, list of Numpy.ndarray frames)
+# """
+# frames = []
+# try:
+# cap = cv.VideoCapture(fpath)
+# except:
+# print('[-] Error. Could not read video file: {}'.format(fpath))
+# try:
+# cap.release()
+# except:
+# pass
+# return frames
+
+# fps = cap.get(cv.CAP_PROP_FPS)
+# nframes = int(cap.get(cv.CAP_PROP_FRAME_COUNT))
+
+# if idxs is not None:
+# # read sample indices by seeking to frame index
+# for idx in idxs:
+# cap.set(cv.CAP_PROP_POS_FRAMES, idx)
+# res, frame = cap.read()
+# if width is not None:
+# frame = imutils.resize(frame, width=width)
+# frames.append(frame)
+# else:
+# while(True and len(frames) < limit):
+# res, frame = cap.read()
+# if not res:
+# break
+# if width is not None:
+# frame = imutils.resize(frame, width=width)
+# frames.append(frame)
+
+# cap.release()
+# del cap
+# #return fps,nframes,frames
+# return frames
+
+def convolve_filter(vals,filters=[1]):
+ for k in filters:
+ vals_tmp = np.zeros_like(vals)
+ t = len(vals_tmp)
+ for i,v in enumerate(vals):
+ sum_vals = vals[max(0,i-k):min(t-1,i+k)]
+ vals_tmp[i] = np.mean(sum_vals)
+ vals = vals_tmp.copy()
+ return vals
+
+def cosine_delta(v1,v2):
+ return 1.0 - cosine_similarity(v1.reshape((1, -1)), v2.reshape((1, -1)))[0][0]
+
+
+
+def compute_edges(vals):
+  # find edges (1 = rising, 2 = falling)
+  edges = np.zeros_like(vals)
+  # start at 1 so vals[i-1] never wraps around to the last element
+  for i in range(1, len(vals) - 1):
+    delta = vals[i] - vals[i-1]
+    if delta == -1:
+      edges[i] = 1 # rising edge 0 --> 1
+    elif delta == 1:
+      edges[i+1] = 2 # falling edge 1 --> 0
+ # get index for rise fall
+ rising = np.where(np.array(edges) == 1)[0]
+ falling = np.where(np.array(edges) == 2)[0]
+ return rising, falling
+
+
+############################################
+# Point, Rect
+############################################
+
+class Point(object):
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+class Rect(object):
+ def __init__(self, p1, p2):
+ '''Store the top, bottom, left and right values for points
+ p1 and p2 are the (corners) in either order
+ '''
+ self.left = min(p1.x, p2.x)
+ self.right = max(p1.x, p2.x)
+ self.top = min(p1.y, p2.y)
+ self.bottom = max(p1.y, p2.y)
+
+def overlap(r1, r2):
+ '''Overlapping rectangles overlap both horizontally & vertically
+ '''
+ return range_overlap(r1.left, r1.right, r2.left, r2.right) and \
+ range_overlap(r1.top, r1.bottom, r2.top, r2.bottom)
+
+def range_overlap(a_min, a_max, b_min, b_max):
+ '''Neither range is completely greater than the other
+ '''
+ return (a_min <= b_max) and (b_min <= a_max)
+
+def merge_rects(r1,r2):
+ p1 = Point(min(r1.left,r2.left),min(r1.top,r2.top))
+ p2 = Point(max(r1.right,r2.right),max(r1.bottom,r2.bottom))
+ return Rect(p1,p2)
+
+def is_overlapping(r1,r2):
+ """r1,r2 as [x1,y1,x2,y2] list"""
+ r1x = Rect(Point(r1[0],r1[1]),Point(r1[2],r1[3]))
+ r2x = Rect(Point(r2[0],r2[1]),Point(r2[2],r2[3]))
+ return overlap(r1x,r2x)
+
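+# Example: boxes given as [x1,y1,x2,y2]
+#
+#   is_overlapping([0, 0, 10, 10], [5, 5, 15, 15])    # True
+#   is_overlapping([0, 0, 10, 10], [20, 20, 30, 30])  # False
+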
+def get_rects_merged(rects,bounds,expand=0):
+ """rects: list of points in [x1,y1,x2,y2] format"""
+ rects_expanded = []
+ bx,by = bounds
+ # expand
+ for x1,y1,x2,y2 in rects:
+ x1 = max(0,x1-expand)
+ y1 = max(0,y1-expand)
+ x2 = min(bx,x2+expand)
+ y2 = min(by,y2+expand)
+ rects_expanded.append(Rect(Point(x1,y1),Point(x2,y2)))
+
+ #rects_expanded = [Rect(Point(x1,y1),Point(x2,y2)) for x1,y1,x2,y2 in rects_expanded]
+ rects_merged = []
+ for i,r in enumerate(rects_expanded):
+ found = False
+ for j,rm in enumerate(rects_merged):
+ if overlap(r,rm):
+ rects_merged[j] = merge_rects(r,rm) #expand
+ found = True
+ if not found:
+ rects_merged.append(r)
+ # convert back to [x1,y1,x2,y2] format
+ rects_merged = [(r.left,r.top,r.right,r.bottom) for r in rects_merged]
+ # contract
+ rects_contracted = []
+ for x1,y1,x2,y2 in rects_merged:
+ x1 = min(bx,x1+expand)
+ y1 = min(by,y1+expand)
+ x2 = max(0,x2-expand)
+ y2 = max(0,y2-expand)
+ rects_contracted.append((x1,y1,x2,y2))
+
+ return rects_contracted
+
+
+############################################
+# Image display
+############################################
+
+
+def montage(frames,ncols=4,nrows=None,width=None):
+ """Convert list of frames into a grid montage
+ param: frames: list of frames as Numpy.ndarray
+ param: ncols: number of columns
+ param: width: resize images to this width before adding to grid
+ returns: Numpy.ndarray grid of all images
+ """
+
+ # expand image size if not enough frames
+ if nrows is not None and len(frames) < ncols * nrows:
+ blank = np.zeros_like(frames[0])
+ n = ncols * nrows - len(frames)
+ for i in range(n): frames.append(blank)
+
+ rows = []
+ for i,im in enumerate(frames):
+ if width is not None:
+ im = imutils.resize(im,width=width)
+ h,w = im.shape[:2]
+ if i % ncols == 0:
+ if i > 0:
+ rows.append(ims)
+ ims = []
+ ims.append(im)
+ if len(ims) > 0:
+ for j in range(ncols-len(ims)):
+ ims.append(np.zeros_like(im))
+ rows.append(ims)
+ row_ims = []
+ for row in rows:
+ row_im = np.hstack(np.array(row))
+ row_ims.append(row_im)
+ contact_sheet = np.vstack(np.array(row_ims))
+ return contact_sheet
diff --git a/animism-align/cli/app/utils/log_utils.py b/animism-align/cli/app/utils/log_utils.py
new file mode 100644
index 0000000..1e0f22f
--- /dev/null
+++ b/animism-align/cli/app/utils/log_utils.py
@@ -0,0 +1,69 @@
+"""
+Logger instantiator for use with Click utility scripts
+"""
+import sys
+import os
+import logging
+
+import colorlog
+
+class Logger:
+
+ LOGGER_NAME = 'swimmer'
+ LOGFILE_FORMAT = "%(log_color)s%(levelname)-8s%(reset)s %(cyan)s%(filename)s:%(lineno)s:%(bold_cyan)s%(funcName)s() %(reset)s%(message)s"
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def create(verbosity=4, logfile=None):
+ """Configures a logger from click params
+ :param verbosity: (int) between 0 and 5
+ :param logfile: (str) path to logfile
+ :returns: logging root object
+ """
+
+ loglevel = (5 - (max(0, min(verbosity, 5)))) * 10 # where logging.DEBUG = 10
+ date_format = '%Y-%m-%d %H:%M:%S'
+ if 'colorlog' in sys.modules and os.isatty(2):
+      # LOGFILE_FORMAT already begins with %(log_color)s
+      cformat = Logger.LOGFILE_FORMAT
+ f = colorlog.ColoredFormatter(cformat, date_format,
+ log_colors = { 'DEBUG' : 'yellow', 'INFO' : 'white',
+ 'WARNING' : 'bold_yellow', 'ERROR': 'bold_red',
+ 'CRITICAL': 'bold_red' })
+ else:
+      # strip colorlog tokens; a plain Formatter would choke on %(log_color)s
+      plain_format = "%(levelname)-8s %(filename)s:%(lineno)s:%(funcName)s() %(message)s"
+      f = logging.Formatter(plain_format, date_format)
+
+ logger = logging.getLogger(Logger.LOGGER_NAME)
+ logger.setLevel(loglevel)
+
+ # remove existing handlers
+ for handler in logger.handlers:
+ logger.removeHandler(handler)
+
+ if logfile:
+ # create file handler which logs even debug messages
+ fh = logging.FileHandler(logfile)
+ fh.setLevel(loglevel)
+ logger.addHandler(fh)
+
+ # add colored handler
+ ch = logging.StreamHandler()
+ ch.setFormatter(f)
+ logger.addHandler(ch)
+
+ if verbosity == 0:
+ logger.disabled = True
+
+ # test
+ # logger.debug('Hello Debug')
+ # logger.info('Hello Info')
+ # logger.warn('Hello Warn')
+ # logger.error('Hello Error')
+ # logger.critical('Hello Critical')
+
+ return logger
+
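+  # Usage sketch, e.g. from a Click entrypoint:
+  #
+  #   log = Logger.create(verbosity=4, logfile='swimmer.log')
+  #   log.info('ready')
+  #   log = Logger.getLogger()  # same named logger from anywhere else
+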
+ @staticmethod
+ def getLogger():
+ return logging.getLogger(Logger.LOGGER_NAME) \ No newline at end of file
diff --git a/animism-align/cli/app/utils/process_utils.py b/animism-align/cli/app/utils/process_utils.py
new file mode 100644
index 0000000..7f243ae
--- /dev/null
+++ b/animism-align/cli/app/utils/process_utils.py
@@ -0,0 +1,60 @@
+import os
+import pathos.pools as pp
+from tqdm import tqdm
+from concurrent.futures import ProcessPoolExecutor, as_completed
+
+def parallel_process(array, function, n_jobs=16, use_kwargs=False, front_num=3):
+ """
+ A parallel version of the map function with a progress bar.
+
+ Args:
+ array (array-like): An array to iterate over.
+ function (function): A python function to apply to the elements of array
+ n_jobs (int, default=16): The number of cores to use
+ use_kwargs (boolean, default=False): Whether to consider the elements of array as dictionaries of
+ keyword arguments to function
+ front_num (int, default=3): The number of iterations to run serially before kicking off the parallel job.
+ Useful for catching bugs
+ Returns:
+ [function(array[0]), function(array[1]), ...]
+ """
+ #We run the first few iterations serially to catch bugs
+ if front_num > 0:
+ front = [function(**a) if use_kwargs else function(a) for a in array[:front_num]]
+ #If we set n_jobs to 1, just run a list comprehension. This is useful for benchmarking and debugging.
+ if n_jobs==1:
+ return front + [function(**a) if use_kwargs else function(a) for a in tqdm(array[front_num:])]
+ #Assemble the workers
+ with ProcessPoolExecutor(max_workers=n_jobs) as pool:
+ #Pass the elements of array into function
+ if use_kwargs:
+ futures = [pool.submit(function, **a) for a in array[front_num:]]
+ else:
+ futures = [pool.submit(function, a) for a in array[front_num:]]
+ kwargs = {
+ 'total': len(futures),
+ 'unit': 'it',
+ 'unit_scale': True,
+ 'leave': True
+ }
+ #Print out the progress as tasks complete
+ for f in tqdm(as_completed(futures), **kwargs):
+ pass
+ out = []
+ #Get the results from the futures.
+ for i, future in tqdm(enumerate(futures)):
+ try:
+ out.append(future.result())
+ except Exception as e:
+ out.append(e)
+ return front + out
+
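+# Usage sketch (hypothetical worker function):
+#
+#   results = parallel_process(filepaths, process_file, n_jobs=8)
+#   # results[i] is process_file's return value, or the Exception it raised
+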
+def parallelize(rows, func):
+ print("Processing {} items".format(len(rows)))
+ if hasattr(os, 'sched_getaffinity'):
+ processCount = len(os.sched_getaffinity(0))
+ else:
+ processCount = 4
+ print('processes {}'.format(processCount))
+ with pp.ProcessPool(processes=processCount) as pool:
+ pool.map(func, rows) \ No newline at end of file
diff --git a/animism-align/cli/app/utils/s3_utils.py b/animism-align/cli/app/utils/s3_utils.py
new file mode 100644
index 0000000..d8cff79
--- /dev/null
+++ b/animism-align/cli/app/utils/s3_utils.py
@@ -0,0 +1,114 @@
+import os
+from os.path import join
+from dataclasses import dataclass
+from glob import glob
+from pathlib import Path
+import logging
+
+import dacite # dataclass helper util
+import boto3
+
+
+@dataclass
+class S3Config:
+ S3_BUCKET: str
+ S3_KEY: str
+ S3_SECRET: str
+ S3_ENDPOINT: str
+ S3_REGION: str
+
+
+class RemoteStorageS3:
+
+ def __init__(self):
+
+ self.log = logging.getLogger('swimmer')
+
+ self.s3_cfg = dacite.from_dict(data_class=S3Config, data=os.environ)
+
+ self.session = boto3.session.Session()
+
+ self.s3_client = self.session.client(
+ service_name='s3',
+ aws_access_key_id=self.s3_cfg.S3_KEY,
+ aws_secret_access_key=self.s3_cfg.S3_SECRET,
+ endpoint_url=self.s3_cfg.S3_ENDPOINT,
+ region_name=self.s3_cfg.S3_REGION,
+ )
+
+
+ def list_dir(self, fp_dir_remote):
+    '''List remote objects under the given prefix
+    '''
+
+ obj_list_remote = self.s3_client.list_objects(
+ Bucket=self.s3_cfg.S3_BUCKET,
+ Prefix=fp_dir_remote)
+
+
+ for obj in obj_list_remote.get('Contents', []):
+ s3_fn = obj['Key']
+ self.log.debug(s3_fn)
+
+
+ def sync_dir(self, fp_dir_local, fp_dir_remote):
+ '''Sync local directory to remote directory
+ '''
+
+ # get list of local files
+ fps_local = glob(join(fp_dir_local, '*'))
+ fp_local_lkup = {}
+ for fp in fps_local:
+ fp_local_lkup[Path(fp).name] = fp
+
+ # get list of remote files
+ obj_list_remote = self.s3_client.list_objects(Bucket=self.s3_cfg.S3_BUCKET, Prefix=fp_dir_remote)
+    # check if remote files exist locally
+ if 'Contents' in obj_list_remote:
+ for obj in obj_list_remote['Contents']:
+ s3_fn = obj['Key']
+ fn_remote = Path(s3_fn).name
+ if fn_remote in fp_local_lkup.keys():
+ # remove from queue
+ # compare timestamps
+ fp_local = fp_local_lkup[fn_remote]
+ del fp_local_lkup[fn_remote]
+ if obj['LastModified'].timestamp() < os.path.getmtime(fp_local):
+ self.log.debug("Update s3 with newer local file: {}".format(s3_fn))
+ self.s3_client.upload_file(
+ fp_local,
+ self.s3_cfg.S3_BUCKET,
+ s3_fn,
+ ExtraArgs={'ACL': 'public-read' })
+ else:
+ self.log.debug(f'Skipping same file: {s3_fn}')
+ else:
+ self.log.debug(f'Orphan remote file: {s3_fn}')
+ self.log.debug("s3 delete {}".format(s3_fn))
+ response = self.s3_client.delete_object(
+ Bucket=self.s3_cfg.S3_BUCKET,
+ Key=s3_fn,
+ )
+ else:
+ self.log.debug(f'No "Contents" in {obj_list_remote.keys()}')
+
+ # put the remaining files to S3
+ for fn_local, fp_local in fp_local_lkup.items():
+ s3_fn = join(fp_dir_remote, fn_local)
+ self.log.debug("s3 create {}".format(s3_fn))
+ self.s3_client.upload_file(
+ fp_local,
+        self.s3_cfg.S3_BUCKET,
+ s3_fn,
+ ExtraArgs={ 'ACL': 'public-read' })
+
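+  # Usage sketch (hypothetical paths; credentials come from the S3_* env vars):
+  #
+  #   s3 = RemoteStorageS3()
+  #   s3.sync_dir('data/media', 'media')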
+
+ def sync_file(self, fp_local, fp_remote):
+    '''Sync local file to remote file
+    '''
+    self.log.warning('Not yet implemented')
+
+
+ #def make_s3_path(s3_dir, metadata_path):
+ # return "{}/{}/{}{}".format(os.getenv('S3_ENDPOINT'), os.getenv('S3_BUCKET'), s3_dir, metadata_path)
diff --git a/animism-align/cli/app/utils/video_utils.py b/animism-align/cli/app/utils/video_utils.py
new file mode 100644
index 0000000..992f0f8
--- /dev/null
+++ b/animism-align/cli/app/utils/video_utils.py
@@ -0,0 +1,23 @@
+
+from pymediainfo import MediaInfo
+
+def mediainfo(fp_in):
+ """Returns abbreviated video/audio metadata for video files
+ :param fp_in: filepath"""
+
+ result = {}
+ media_info_raw = MediaInfo.parse(fp_in).to_data()
+
+ for d in media_info_raw['tracks']:
+ if d['track_type'] == 'Video':
+ result = {
+ 'codec_cc': d['codec_cc'],
+ 'duration': int(d['duration']),
+ 'display_aspect_ratio': float(d['display_aspect_ratio']),
+ 'width': int(d['width']),
+ 'height': int(d['height']),
+ 'frame_rate': float(d['frame_rate']),
+ 'frame_count': int(d['frame_count']),
+ }
+
+ return result \ No newline at end of file