summaryrefslogtreecommitdiff
path: root/cli
diff options
context:
space:
mode:
authorlens <lens@neural.garden>2021-03-23 21:10:11 +0000
committerlens <lens@neural.garden>2021-03-23 21:10:11 +0000
commitcc1d0c52e104245f9f1c0d77eb24a5a33800be38 (patch)
tree02d8483dfe47803525b926a43c582dcfbf61c5db /cli
parent81c673f058fda04b96baae7b2302f876479bc0a9 (diff)
parent7a3ec205e001e4c071a67ecc5c375612fa72afdc (diff)
Merge branch 'master' of asdf.us:swimmer
Diffstat (limited to 'cli')
-rw-r--r--cli/app/controllers/graph_controller.py11
-rw-r--r--cli/app/controllers/upload_controller.py75
-rw-r--r--cli/app/server/demo.py48
-rw-r--r--cli/app/server/web.py2
-rw-r--r--cli/app/settings/app_cfg.py15
-rw-r--r--cli/app/site/export.py154
-rw-r--r--cli/app/sql/common.py23
-rw-r--r--cli/app/sql/env.py2
-rw-r--r--cli/app/sql/models/graph.py3
-rw-r--r--cli/app/sql/models/page.py6
-rw-r--r--cli/app/sql/models/tile.py2
-rw-r--r--cli/app/sql/models/upload.py19
-rw-r--r--cli/app/sql/versions/202103161637_make_uploads_like_on_animism.py31
-rw-r--r--cli/app/sql/versions/202103161645_add_foreign_key_constraint.py29
-rw-r--r--cli/app/sql/versions/202103201916_remove_foreign_key_constraint_from_.py29
-rwxr-xr-xcli/cli.py7
-rw-r--r--cli/commands/admin/migrate_to_mysql.py120
-rw-r--r--cli/commands/site/export.py127
-rw-r--r--cli/commands/site/populate.py79
19 files changed, 596 insertions, 186 deletions
diff --git a/cli/app/controllers/graph_controller.py b/cli/app/controllers/graph_controller.py
index 7efda73..fcca50a 100644
--- a/cli/app/controllers/graph_controller.py
+++ b/cli/app/controllers/graph_controller.py
@@ -7,6 +7,7 @@ from app.sql.models.graph import Graph, GraphForm
from app.sql.models.page import Page
from app.sql.models.tile import Tile
from app.controllers.crud_controller import CrudView
+from app.site.export import export_site
class GraphView(CrudView):
model = Graph
@@ -20,7 +21,7 @@ class GraphView(CrudView):
@route('/name/<graph_path>', methods=['GET'])
def get_name(self, graph_path: str):
"""
- Fetch a single {model}.
+ Fetch a single graph.
"""
session = Session()
item = session.query(self.model).filter(self.model.path == graph_path).first()
@@ -36,3 +37,11 @@ class GraphView(CrudView):
}
session.close()
return jsonify(result)
+
+ @route('/export/<graph_path>', methods=['GET'])
+ def export(self, graph_path: str):
+ export_site(opt_graph_path=graph_path)
+ result = {
+ 'status': 'ok',
+ }
+ return jsonify(result)
diff --git a/cli/app/controllers/upload_controller.py b/cli/app/controllers/upload_controller.py
index 86f9f29..94a7fd1 100644
--- a/cli/app/controllers/upload_controller.py
+++ b/cli/app/controllers/upload_controller.py
@@ -15,18 +15,22 @@ from app.server.decorators import APIError
class UploadView(FlaskView):
def index(self):
"""
- List all uploaded files.
-
- * Query string params: offset, limit, sort (id, date), order (asc, desc)
+ List all uploads
"""
session = Session()
- uploads = session.query(Upload).all()
- response = {
+ query = session.query(Upload)
+        graph_id = request.args.get('graph_id', default=None)
+ if graph_id is not None:
+ query = query.filter(Upload.graph_id == int(graph_id))
+
+ items = query.all()
+
+ res = {
'status': 'ok',
- 'res': [ upload.toJSON() for upload in uploads ],
+ 'res': [ item.toJSON() for item in items ],
}
session.close()
- return jsonify(response)
+ return jsonify(res)
def get(self, id):
"""
@@ -50,14 +54,31 @@ class UploadView(FlaskView):
try:
username = request.form.get('username')
+ # print(username)
except:
raise APIError('No username specified')
- param_name = 'image'
- if param_name not in request.files:
- raise APIError('No file uploaded')
+ try:
+ tag = request.form.get('tag')
+ # print(tag)
+ except:
+ raise APIError('No tag specified')
- file = request.files[param_name]
+ try:
+ graph_id = request.form.get('graph_id')
+ # print(graph_id)
+ except:
+ raise APIError('No graph_id specified')
+
+ if 'image' in request.files:
+ file = request.files['image']
+ # print(fn)
+ elif 'file' in request.files:
+ file = request.files['file']
+ # print(request.form.get('__image_filename'))
+ # print(fn)
+ else:
+ raise APIError('No file uploaded')
# get sha256
sha256 = sha256_stream(file)
@@ -65,42 +86,34 @@ class UploadView(FlaskView):
if ext == '.jpeg':
ext = '.jpg'
- # TODO: here check sha256
- # upload = Upload.query.get(id)
-
- if ext[1:] not in VALID_IMAGE_EXTS:
- return jsonify({ 'status': 'error', 'error': 'Not a valid image' })
+ ext = ext[1:]
- # convert string of image data to uint8
file.seek(0)
- nparr = np.fromstring(file.read(), np.uint8)
- # decode image
- try:
- im = Image.fromarray(nparr)
- except:
- return jsonify({ 'status': 'error', 'error': 'Image parse error' })
+ uploaded_im_fn = secure_filename(file.filename)
+ uploaded_im_abspath = os.path.join(app_cfg.DIR_UPLOADS, str(graph_id), tag)
+ uploaded_im_fullpath = os.path.join(uploaded_im_abspath, uploaded_im_fn)
session = Session()
upload = session.query(Upload).filter_by(sha256=sha256).first()
if upload is not None:
- print("Already uploaded image")
+ print("Already uploaded file")
+ if not os.path.exists(uploaded_im_fullpath):
+ # if we got in some weird state where the record wasnt deleted....
+ os.makedirs(uploaded_im_abspath, exist_ok=True)
+ file.save(uploaded_im_fullpath)
response = {
'status': 'ok',
- 'notes': 'Image already uploaded',
+ 'notes': 'File already uploaded',
'res': upload.toJSON(),
}
session.close()
return jsonify(response)
- uploaded_im_fn = secure_filename(sha256 + ext)
- uploaded_im_abspath = os.path.join(app_cfg.DIR_UPLOADS, sha256_tree(sha256))
- uploaded_im_fullpath = os.path.join(uploaded_im_abspath, uploaded_im_fn)
-
os.makedirs(uploaded_im_abspath, exist_ok=True)
- nparr.tofile(uploaded_im_fullpath)
+ file.save(uploaded_im_fullpath)
- upload = Upload(username=username, sha256=sha256, ext=ext)
+ upload = Upload(username=username, tag=tag, fn=uploaded_im_fn, sha256=sha256, ext=ext, graph_id=graph_id)
session.add(upload)
session.commit()
response = {
diff --git a/cli/app/server/demo.py b/cli/app/server/demo.py
new file mode 100644
index 0000000..847f95b
--- /dev/null
+++ b/cli/app/server/demo.py
@@ -0,0 +1,48 @@
+import os
+import logging
+import logging.handlers
+
+logger = logging.getLogger("")
+logger.setLevel(logging.DEBUG)
+handler = logging.handlers.RotatingFileHandler("flask.log",
+ maxBytes=3000000, backupCount=2)
+formatter = logging.Formatter(
+ '[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s')
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+logging.getLogger().addHandler(logging.StreamHandler())
+
+from flask import Flask, send_from_directory, request
+
+from app.settings import app_cfg
+
+def create_demo_app(script_info=None):
+ """
+ functional pattern for creating the flask app
+ """
+ logging.debug("Starting Swimmer demo server...")
+ app = Flask(__name__, static_folder=app_cfg.DIR_EXPORTS, static_url_path='/')
+ app.config['SERVER_NAME'] = app_cfg.DEMO_SERVER_NAME
+ app.url_map.strict_slashes = False
+
+ @app.errorhandler(404)
+ def not_found(error):
+ path, fn = os.path.split(request.path)
+ path = path[1:]
+ dir_path = os.path.join(app_cfg.DIR_EXPORTS, path)
+ if os.path.isfile(os.path.join(dir_path, fn)):
+ return send_from_directory(dir_path, fn)
+ if os.path.isfile(os.path.join(dir_path, fn, 'index.html')):
+ return send_from_directory(os.path.join(dir_path, fn), 'index.html')
+ return "404", 404
+
+ @app.route('/')
+ def serve_index():
+ return "Swimmer demo", 200
+
+ @app.route('/favicon.ico')
+ def favicon():
+ return send_from_directory(os.path.join(app_cfg.DIR_STATIC, 'img'),
+ 'favicon.ico', mimetype='image/vnd.microsoft.icon')
+
+ return app
diff --git a/cli/app/server/web.py b/cli/app/server/web.py
index 1a3b064..5eb172c 100644
--- a/cli/app/server/web.py
+++ b/cli/app/server/web.py
@@ -52,7 +52,7 @@ def create_app(script_info=None):
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static/img/'),
- 'favicon.ico',mimetype='image/vnd.microsoft.icon')
+ 'favicon.ico', mimetype='image/vnd.microsoft.icon')
@app.shell_context_processor
def shell_context():
diff --git a/cli/app/settings/app_cfg.py b/cli/app/settings/app_cfg.py
index 5fc4982..4aa4bee 100644
--- a/cli/app/settings/app_cfg.py
+++ b/cli/app/settings/app_cfg.py
@@ -15,9 +15,10 @@ codecs.register(lambda name: codecs.lookup('utf8') if name == 'utf8mb4' else Non
LOG = logging.getLogger('swimmer')
# -----------------------------------------------------------------------------
-# .env config for keys
+# .env config
# -----------------------------------------------------------------------------
# Project directory
+
SELF_CWD = os.path.dirname(os.path.realpath(__file__)) # this file
DIR_PROJECT_ROOT = str(Path(SELF_CWD).parent.parent.parent)
@@ -31,13 +32,13 @@ load_dotenv(dotenv_path=fp_env)
# -----------------------------------------------------------------------------
CLICK_GROUPS = {
- # 'process': 'commands/process',
'site': 'commands/site',
+ 'admin': 'commands/admin',
'db': '',
'flask': '',
+ 'demo': '',
}
-
# -----------------------------------------------------------------------------
# File I/O
# -----------------------------------------------------------------------------
@@ -63,6 +64,13 @@ DIR_STATIC = join(DIR_APP, 'static')
HASH_TREE_DEPTH = 3 # for sha256 subdirs
HASH_BRANCH_SIZE = 3 # for sha256 subdirs
+DIR_PUBLIC_EXPORTS = os.getenv('DIR_PUBLIC_EXPORTS') or DIR_EXPORTS
+
+# -----------------------------------------------------------------------------
+# Database
+# -----------------------------------------------------------------------------
+
+USE_SQLITE = os.getenv("USE_SQLITE") == "True"
# -----------------------------------------------------------------------------
# S3 storage
@@ -78,6 +86,7 @@ except Exception as e:
# -----------------------------------------------------------------------------
SERVER_NAME = os.getenv('SERVER_NAME') or '0.0.0.0:5000'
+DEMO_SERVER_NAME = os.getenv('DEMO_SERVER_NAME') or '0.0.0.0:3000'
HTTP_EXTERNAL_HOST = os.getenv('HTTP_EXTERNAL_HOST') or f"http://{SERVER_NAME}"
# -----------------------------------------------------------------------------
diff --git a/cli/app/site/export.py b/cli/app/site/export.py
new file mode 100644
index 0000000..aa74165
--- /dev/null
+++ b/cli/app/site/export.py
@@ -0,0 +1,154 @@
+import click
+
+from app.settings import app_cfg
+from app.utils.file_utils import load_text, write_json, write_text
+from os.path import join
+import os
+
+from app.sql.common import db, Session, Graph, Page, Tile
+from distutils.dir_util import copy_tree
+
+def export_site(opt_graph_path, opt_output_dir=app_cfg.DIR_EXPORTS, opt_build_js=False):
+ """Export a graph"""
+
+ # ------------------------------------------------
+ # generate HTML for index and all pages
+
+ session = Session()
+ graph = session.query(Graph).filter(Graph.path == opt_graph_path).first()
+ if graph is None:
+ print(f"Not a graph: {opt_graph_path}")
+ return
+
+ # build everything here
+ graph_dir = os.path.abspath(join(opt_output_dir, graph.path))
+
+ # load site index
+ index_html = load_text(join(app_cfg.DIR_STATIC, 'site.html'), split=False)
+ index_html = index_html.replace('SITE_PATH', '/' + graph.path)
+
+ # write site JSON data
+ site_data = { 'graph': sanitize_graph(graph.toSiteJSON()) }
+ write_json(site_data, join(graph_dir, 'index.json'), default=str, minify=False)
+
+ # import custom css
+ site_css = load_text(join(app_cfg.DIR_STATIC, 'site.css'), split=False)
+ site_css = site_css.replace('SITE_PATH', '/' + graph.path)
+ write_text(site_css, join(graph_dir, 'site.css'))
+ copy_tree(join(app_cfg.DIR_STATIC, 'fonts'), join(graph_dir, 'static/fonts'))
+ copy_tree(join(app_cfg.DIR_STATIC, 'img'), join(graph_dir, 'static/img'))
+
+ # write index file, redirects to homepage
+ home_page = site_data['graph']['home_page']
+ if home_page is None:
+ print("Homepage not set! Shift-click a page on the graph to make it the homepage.")
+ session.close()
+ return
+ write_text(f'<meta http-equiv="refresh" content="0; url={home_page}">', join(graph_dir, 'index.html'))
+
+ index_path = ""
+ for page in graph.pages:
+ page_path = f'{graph.path}/{page.path}'
+ if page.id == graph.home_page_id:
+ index_path = page_path
+ print(f'/{page_path} [index]')
+ else:
+ print(f'/{page_path}')
+ write_index(graph, page, index_html, join(graph_dir, page.path, 'index.html'))
+
+ if opt_build_js or not os.path.exists(f"{graph_dir}/bundle.js"):
+ build_javascript(graph_dir)
+
+ session.close()
+ print("Site export complete!")
+ print(f"Graph exported to: {graph_dir}")
+
+def build_javascript(graph_dir):
+ print("Building javascript...")
+ print(f'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js --config ./webpack.config.site.js -o {graph_dir}/bundle.js')
+ os.chdir(app_cfg.DIR_PROJECT_ROOT)
+ os.system(f'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js --config ./webpack.config.site.js -o {graph_dir}/bundle.js')
+
+def write_index(graph, page, index_html, fp_out):
+ if page is None:
+ page_title = graph.title
+ else:
+ page_title = page.title
+ index_html = index_html.replace('BUNDLE_PATH', join('/', graph.path, 'bundle.js'))
+ index_html = index_html.replace('PAGE_TITLE', page_title)
+ write_text(index_html, fp_out)
+
+def sanitize_graph(graph):
+ page_path_lookup = {}
+ page_lookup = {}
+ for page in graph['pages']:
+ page_path = join('/', graph['path'], page['path'])
+ if page_path in page_path_lookup:
+ print(f"/!\\ WARNING! Duplicate found of {page_path}")
+ else:
+ page_path_lookup[page['id']] = page_path
+ for page in graph['pages']:
+ sanitize_page(page)
+ for tile in page['tiles']:
+ if tile['target_page_id']:
+ if tile['target_page_id'] == -1:
+ tile['href'] = tile['settings']['external_link_url']
+ elif tile['target_page_id'] == -2:
+ tile['href'] = '__open_popup'
+ elif tile['target_page_id'] == -3:
+ tile['href'] = '__close_popup'
+ elif tile['target_page_id'] > 0:
+ tile['href'] = page_path_lookup[tile['target_page_id']]
+ if 'url' in tile['settings'] and tile['settings']['url'].startswith('/static'):
+ tile['settings']['url'] = '/' + graph['path'] + tile['settings']['url']
+ if len(tile['settings'].get('appear_after', "")):
+ tile['settings']['appear_after'] = timestampToSeconds(tile['settings']['appear_after']) or 0
+ sanitize_tile(tile)
+ page_path = page_path_lookup[page['id']]
+ page_lookup[page_path] = page
+ for upload in graph['uploads']:
+ sanitize_upload(upload)
+ if upload['url'].startswith('/static'):
+ upload['url'] = '/' + graph['path'] + upload['url']
+ # print(page_lookup['/asdf/testttt'])
+ graph['pages'] = page_lookup
+ graph['home_page'] = page_path_lookup[graph['home_page_id']]
+ return graph
+
+def sanitize_upload(data):
+ if 'created_at' in data:
+ del data['created_at']
+ if 'username' in data:
+ del data['username']
+ if 'graph_id' in data:
+ del data['graph_id']
+
+def sanitize_page(data):
+ if 'created_at' in data:
+ del data['created_at']
+ if 'updated_at' in data:
+ del data['updated_at']
+ if 'graph_id' in data:
+ del data['graph_id']
+
+def sanitize_tile(data):
+ if 'created_at' in data:
+ del data['created_at']
+ if 'updated_at' in data:
+ del data['updated_at']
+ if 'username' in data:
+ del data['username']
+ if 'graph_id' in data:
+ del data['graph_id']
+ if 'page_id' in data:
+ del data['page_id']
+ if 'target_page_id' in data:
+ del data['target_page_id']
+
+def timestampToSeconds(time_str):
+ time_str_parts = list(map(float, time_str.strip().split(":")))
+ if len(time_str_parts) == 3:
+ return (time_str_parts[0] * 60 + time_str_parts[1]) * 60 + time_str_parts[2]
+ if len(time_str_parts) == 2:
+ return time_str_parts[0] * 60 + time_str_parts[1]
+ return time_str_parts[0]
diff --git a/cli/app/sql/common.py b/cli/app/sql/common.py
index c8bd557..8e1d2b3 100644
--- a/cli/app/sql/common.py
+++ b/cli/app/sql/common.py
@@ -2,7 +2,6 @@ import os
import glob
import time
-# import mysql.connector
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
@@ -11,16 +10,16 @@ from flask_sqlalchemy import SQLAlchemy
from app.settings import app_cfg
-# connection_url = "mysql+mysqlconnector://{}:{}@{}/{}?charset=utf8mb4".format(
-# os.getenv("DB_USER"),
-# os.getenv("DB_PASS"),
-# os.getenv("DB_HOST"),
-# os.getenv("DB_NAME")
-# )
-
-os.makedirs(app_cfg.DIR_DATABASE, exist_ok=True)
-
-connection_url = "sqlite:///{}".format(os.path.join(app_cfg.DIR_DATABASE, 'swimmer.sqlite3'))
+if app_cfg.USE_SQLITE:
+ os.makedirs(app_cfg.DIR_DATABASE, exist_ok=True)
+ connection_url = "sqlite:///{}".format(os.path.join(app_cfg.DIR_DATABASE, 'swimmer.sqlite3'))
+else:
+ connection_url = "mysql+pymysql://{}:{}@{}/{}?charset=utf8mb4".format(
+ os.getenv("DB_USER"),
+ os.getenv("DB_PASS"),
+ os.getenv("DB_HOST"),
+ os.getenv("DB_NAME")
+ )
engine = create_engine(connection_url, encoding="utf-8", pool_recycle=3600)
@@ -31,7 +30,7 @@ Base.metadata.bind = engine
db = SQLAlchemy()
# include the models in reverse dependency order, so relationships work
+from app.sql.models.upload import Upload
from app.sql.models.tile import Tile
from app.sql.models.page import Page
from app.sql.models.graph import Graph
-from app.sql.models.upload import Upload
diff --git a/cli/app/sql/env.py b/cli/app/sql/env.py
index 7753565..3e015b5 100644
--- a/cli/app/sql/env.py
+++ b/cli/app/sql/env.py
@@ -14,10 +14,10 @@ config.set_main_option("sqlalchemy.url", connection_url)
target_metadata = Base.metadata
# include the models in reverse dependency order, so relationships work
+from app.sql.models.upload import Upload
from app.sql.models.tile import Tile
from app.sql.models.page import Page
from app.sql.models.graph import Graph
-from app.sql.models.upload import Upload
def run_migrations_offline():
"""Run migrations in 'offline' mode.
diff --git a/cli/app/sql/models/graph.py b/cli/app/sql/models/graph.py
index 8e068a0..08f4d3c 100644
--- a/cli/app/sql/models/graph.py
+++ b/cli/app/sql/models/graph.py
@@ -23,6 +23,7 @@ class Graph(Base):
updated_at = Column(UtcDateTime(), onupdate=utcnow())
pages = relationship('Page', lazy='dynamic')
+ uploads = relationship('Upload', lazy='dynamic')
def toJSON(self):
return {
@@ -40,11 +41,13 @@ class Graph(Base):
def toFullJSON(self):
data = self.toJSON()
data['pages'] = [ page.toLinkJSON() for page in self.pages ]
+ data['uploads'] = [ upload.toJSON() for upload in self.uploads ]
return data
def toSiteJSON(self):
data = self.toJSON()
data['pages'] = [ page.toFullJSON() for page in self.pages ]
+ data['uploads'] = [ upload.toJSON() for upload in self.uploads ]
return data
class GraphForm(ModelForm):
diff --git a/cli/app/sql/models/page.py b/cli/app/sql/models/page.py
index 2f7065b..35efa39 100644
--- a/cli/app/sql/models/page.py
+++ b/cli/app/sql/models/page.py
@@ -1,11 +1,11 @@
from sqlalchemy import create_engine, Table, Column, Text, String, Integer, DateTime, JSON, ForeignKey
-from sqlalchemy.orm import relationship
+from sqlalchemy.orm import relationship, foreign, remote
import sqlalchemy.sql.functions as func
from sqlalchemy_utc import UtcDateTime, utcnow
from wtforms_alchemy import ModelForm
from app.sql.common import db, Base, Session
-# from app.sql.models.graph import Graph
+from app.sql.models.tile import Tile
from app.settings import app_cfg
@@ -23,7 +23,7 @@ class Page(Base):
updated_at = Column(UtcDateTime(), onupdate=utcnow())
tiles = relationship("Tile", foreign_keys="Tile.page_id", lazy='dynamic', order_by="asc(Tile.sort_order)")
- backlinks = relationship("Tile", foreign_keys="Tile.target_page_id", lazy='dynamic')
+ backlinks = relationship("Tile", primaryjoin=id == foreign(Tile.target_page_id), lazy='dynamic')
def toJSON(self):
return {
diff --git a/cli/app/sql/models/tile.py b/cli/app/sql/models/tile.py
index 3f6ce31..ed4a5f8 100644
--- a/cli/app/sql/models/tile.py
+++ b/cli/app/sql/models/tile.py
@@ -18,7 +18,7 @@ class Tile(Base):
id = Column(Integer, primary_key=True)
graph_id = Column(Integer, ForeignKey('graph.id'), nullable=True)
page_id = Column(Integer, ForeignKey('page.id'), nullable=True)
- target_page_id = Column(Integer, ForeignKey('page.id'), nullable=True)
+ target_page_id = Column(Integer, nullable=True)
type = Column(String(16, convert_unicode=True), nullable=False)
sort_order = Column(Integer, default=0)
settings = Column(JSON, default={}, nullable=True)
diff --git a/cli/app/sql/models/upload.py b/cli/app/sql/models/upload.py
index 5863b07..d9307ff 100644
--- a/cli/app/sql/models/upload.py
+++ b/cli/app/sql/models/upload.py
@@ -1,4 +1,4 @@
-from sqlalchemy import create_engine, Table, Column, String, Integer, DateTime
+from sqlalchemy import create_engine, Table, Column, ForeignKey, String, Integer, DateTime
import sqlalchemy.sql.functions as func
from sqlalchemy_utc import UtcDateTime, utcnow
from wtforms_alchemy import ModelForm
@@ -14,31 +14,28 @@ class Upload(Base):
"""Table for storing references to various media"""
__tablename__ = 'upload'
id = Column(Integer, primary_key=True)
+ graph_id = Column(Integer, ForeignKey('graph.id'), nullable=True)
sha256 = Column(String(256), nullable=False)
fn = Column(String(256), nullable=False)
ext = Column(String(4, convert_unicode=True), nullable=False)
+ tag = Column(String(64, convert_unicode=True), nullable=True)
username = Column(String(16, convert_unicode=True), nullable=False)
created_at = Column(UtcDateTime(), default=utcnow())
def toJSON(self):
return {
'id': self.id,
+ 'graph_id': self.graph_id,
'sha256': self.sha256,
'fn': self.fn,
'ext': self.ext,
+ 'tag': self.tag,
'username': self.username,
'url': self.url(),
'created_at': self.created_at,
}
- def filename(self):
- return "{}{}".format(self.fn)
-
- def filepath(self):
- return join(app_cfg.DIR_UPLOADS, sha256_tree(self.sha256))
-
- def fullpath(self):
- return join(self.filepath(), self.filename())
-
def url(self):
- return join(app_cfg.URL_UPLOADS, sha256_tree(self.sha256), self.filename())
+ if self.tag:
+ return join('/static/uploads', str(self.graph_id), self.tag, self.fn)
+ return join('/static/uploads', str(self.graph_id), self.fn)
diff --git a/cli/app/sql/versions/202103161637_make_uploads_like_on_animism.py b/cli/app/sql/versions/202103161637_make_uploads_like_on_animism.py
new file mode 100644
index 0000000..18bf0bc
--- /dev/null
+++ b/cli/app/sql/versions/202103161637_make_uploads_like_on_animism.py
@@ -0,0 +1,31 @@
+"""make uploads like on animism
+
+Revision ID: 645f315e651d
+Revises: d929da3e398b
+Create Date: 2021-03-16 16:37:08.985792
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utc
+
+
+# revision identifiers, used by Alembic.
+revision = '645f315e651d'
+down_revision = 'd929da3e398b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('upload', sa.Column('graph_id', sa.Integer(), nullable=True))
+ op.add_column('upload', sa.Column('tag', sa.String(length=64, _expect_unicode=True), nullable=True))
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('upload', 'tag')
+ op.drop_column('upload', 'graph_id')
+ # ### end Alembic commands ###
diff --git a/cli/app/sql/versions/202103161645_add_foreign_key_constraint.py b/cli/app/sql/versions/202103161645_add_foreign_key_constraint.py
new file mode 100644
index 0000000..673f9e4
--- /dev/null
+++ b/cli/app/sql/versions/202103161645_add_foreign_key_constraint.py
@@ -0,0 +1,29 @@
+"""add foreign key constraint
+
+Revision ID: 3f7df6bf63b8
+Revises: 645f315e651d
+Create Date: 2021-03-16 16:45:39.455892
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utc
+
+
+# revision identifiers, used by Alembic.
+revision = '3f7df6bf63b8'
+down_revision = '645f315e651d'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_foreign_key(None, 'upload', 'graph', ['graph_id'], ['id'])
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint(None, 'upload', type_='foreignkey')
+ # ### end Alembic commands ###
diff --git a/cli/app/sql/versions/202103201916_remove_foreign_key_constraint_from_.py b/cli/app/sql/versions/202103201916_remove_foreign_key_constraint_from_.py
new file mode 100644
index 0000000..ed19feb
--- /dev/null
+++ b/cli/app/sql/versions/202103201916_remove_foreign_key_constraint_from_.py
@@ -0,0 +1,29 @@
+"""remove foreign key constraint from target_page_id
+
+Revision ID: 9b687880918d
+Revises: 3f7df6bf63b8
+Create Date: 2021-03-20 19:16:21.582373
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utc
+
+
+# revision identifiers, used by Alembic.
+revision = '9b687880918d'
+down_revision = '3f7df6bf63b8'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint('tile_ibfk_3', 'tile', type_='foreignkey')
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_foreign_key('tile_ibfk_3', 'tile', 'page', ['target_page_id'], ['id'])
+ # ### end Alembic commands ###
diff --git a/cli/cli.py b/cli/cli.py
index 2158398..3534c43 100755
--- a/cli/cli.py
+++ b/cli/cli.py
@@ -29,6 +29,13 @@ if __name__ == '__main__':
cli = FlaskGroup(create_app=create_app)
+ elif args.group == 'demo':
+
+ from flask.cli import FlaskGroup
+ from app.server.demo import create_demo_app
+
+ cli = FlaskGroup(create_app=create_demo_app)
+
elif args.group == 'db':
import re
diff --git a/cli/commands/admin/migrate_to_mysql.py b/cli/commands/admin/migrate_to_mysql.py
new file mode 100644
index 0000000..0ad9929
--- /dev/null
+++ b/cli/commands/admin/migrate_to_mysql.py
@@ -0,0 +1,120 @@
+import click
+import os
+import glob
+import time
+
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+
+from flask_sqlalchemy import SQLAlchemy
+
+from app.settings import app_cfg
+
+@click.command('migrate_to_mysql')
+@click.pass_context
+def cli(ctx):
+ """
+ - Create connections to both databases
+ - For each table, for each row, insert from one to the other
+ """
+ mysql_session, mysql_base = make_mysql_base()
+ sqlite_session, sqlite_base = make_sqlite3_base()
+ mysql_classes = make_classes(mysql_base)
+ sqlite_classes = make_classes(sqlite_base)
+
+ for mysql_class, sqlite_class in zip(mysql_classes, sqlite_classes):
+ sqlite_objs = sqlite_session.query(sqlite_class).order_by(sqlite_class.id).all()
+ for sqlite_obj in sqlite_objs:
+ mysql_obj = mysql_class()
+ for column in sqlite_class.__table__.columns:
+ table_name, column_name = str(column).split(".")
+ # print(f"{table_name} => {column_name}")
+ # if column_name != 'id':
+ setattr(mysql_obj, column_name, getattr(sqlite_obj, column_name))
+ mysql_session.add(mysql_obj)
+ mysql_session.commit()
+
+def make_mysql_base():
+ """Make a Mysql connection"""
+ connection_url = "mysql+pymysql://{}:{}@{}/{}?charset=utf8mb4".format(
+ os.getenv("DB_USER"),
+ os.getenv("DB_PASS"),
+ os.getenv("DB_HOST"),
+ os.getenv("DB_NAME")
+ )
+ return make_base(connection_url)
+
+def make_sqlite3_base():
+ """Make a SQLite3 connection"""
+ connection_url = "sqlite:///{}".format(os.path.join(app_cfg.DIR_DATABASE, 'swimmer.sqlite3'))
+ return make_base(connection_url)
+
+def make_base(connection_url):
+ """Make a connection base from a connection URL"""
+ engine = create_engine(connection_url, encoding="utf-8", pool_recycle=3600)
+ Session = sessionmaker(bind=engine)
+ Base = declarative_base()
+ Base.metadata.bind = engine
+ db = SQLAlchemy()
+ return Session(), Base
+
+def make_classes(Base):
+ """Make classes from a base"""
+
+ from sqlalchemy import create_engine, Table, Column, Text, String, Integer, \
+ Boolean, Float, DateTime, JSON, ForeignKey
+ from sqlalchemy_utc import UtcDateTime, utcnow
+
+ class Upload(Base):
+ """Table for storing references to various media"""
+ __tablename__ = 'upload'
+ id = Column(Integer, primary_key=True)
+ graph_id = Column(Integer, ForeignKey('graph.id'), nullable=True)
+ sha256 = Column(String(256), nullable=False)
+ fn = Column(String(256), nullable=False)
+ ext = Column(String(4, convert_unicode=True), nullable=False)
+ tag = Column(String(64, convert_unicode=True), nullable=True)
+ username = Column(String(16, convert_unicode=True), nullable=False)
+ created_at = Column(UtcDateTime(), default=utcnow())
+
+ class Tile(Base):
+ """Table for storing references to tiles"""
+ __tablename__ = 'tile'
+ id = Column(Integer, primary_key=True)
+ graph_id = Column(Integer, ForeignKey('graph.id'), nullable=True)
+ page_id = Column(Integer, ForeignKey('page.id'), nullable=True)
+ target_page_id = Column(Integer, ForeignKey('page.id'), nullable=True)
+ type = Column(String(16, convert_unicode=True), nullable=False)
+ sort_order = Column(Integer, default=0)
+ settings = Column(JSON, default={}, nullable=True)
+ created_at = Column(UtcDateTime(), default=utcnow())
+ updated_at = Column(UtcDateTime(), onupdate=utcnow())
+
+ class Page(Base):
+ """Table for storing references to pages"""
+ __tablename__ = 'page'
+ id = Column(Integer, primary_key=True)
+ graph_id = Column(Integer, ForeignKey('graph.id'), nullable=True)
+ path = Column(String(64, convert_unicode=True), nullable=False)
+ title = Column(String(64, convert_unicode=True), nullable=False)
+ username = Column(String(32, convert_unicode=True), nullable=False)
+ description = Column(Text(convert_unicode=True), nullable=False)
+ settings = Column(JSON, default={}, nullable=True)
+ created_at = Column(UtcDateTime(), default=utcnow())
+ updated_at = Column(UtcDateTime(), onupdate=utcnow())
+
+ class Graph(Base):
+ """Table for storing references to graphs"""
+ __tablename__ = 'graph'
+ id = Column(Integer, primary_key=True)
+ home_page_id = Column(Integer, nullable=True)
+ path = Column(String(64, convert_unicode=True), nullable=False)
+ title = Column(String(64, convert_unicode=True), nullable=False)
+ username = Column(String(32, convert_unicode=True), nullable=False)
+ description = Column(Text(convert_unicode=True), nullable=False)
+ settings = Column(JSON, default={}, nullable=True)
+ created_at = Column(UtcDateTime(), default=utcnow())
+ updated_at = Column(UtcDateTime(), onupdate=utcnow())
+
+ return [ Graph, Page, Tile, Upload ]
diff --git a/cli/commands/site/export.py b/cli/commands/site/export.py
index 0ba6a62..78e7228 100644
--- a/cli/commands/site/export.py
+++ b/cli/commands/site/export.py
@@ -1,134 +1,17 @@
import click
from app.settings import app_cfg
-from app.utils.file_utils import load_text, write_json, write_text
-from os.path import join
-import os
+from app.site.export import export_site
@click.command('info')
@click.option('-g', '--graph', 'opt_graph_path', required=True,
help='Graph name')
@click.option('-o', '--output', 'opt_output_dir', required=True, default=app_cfg.DIR_EXPORTS,
help='Output dir')
+@click.option('-j', '--js/--no-js', 'opt_build_js', required=False, default=False,
+ help='Whether to rebuild the Javascript bundle')
@click.pass_context
-def cli(ctx, opt_graph_path, opt_output_dir):
+def cli(ctx, opt_graph_path, opt_output_dir, opt_build_js):
"""Export a graph"""
- # ------------------------------------------------
- # imports
-
- from app.sql.common import db, Session, Graph, Page, Tile
- from distutils.dir_util import copy_tree
-
- # ------------------------------------------------
- # generate HTML for index and all pages
-
- session = Session()
- graph = session.query(Graph).filter(Graph.path == opt_graph_path).first()
- if graph is None:
- print(f"Not a graph: {opt_graph_path}")
- return
-
- # build everything here
- graph_dir = os.path.abspath(join(opt_output_dir, graph.path))
-
- # load site index
- index_html = load_text(join(app_cfg.DIR_STATIC, 'site.html'), split=False)
- index_html = index_html.replace('SITE_PATH', '/' + graph.path)
-
- # write site JSON data
- site_data = { 'graph': sanitize_graph(graph.toSiteJSON()) }
- write_json(site_data, join(graph_dir, 'index.json'), default=str, minify=False)
-
- # import custom css
- site_css = load_text(join(app_cfg.DIR_STATIC, 'site.css'), split=False)
- site_css = site_css.replace('SITE_PATH', '/' + graph.path)
- write_text(site_css, join(graph_dir, 'site.css'))
- copy_tree(join(app_cfg.DIR_STATIC, 'fonts'), join(graph_dir, 'static/fonts'))
- copy_tree(join(app_cfg.DIR_STATIC, 'img'), join(graph_dir, 'static/img'))
-
- # write index file, redirects to homepage
- home_page = site_data['graph']['home_page']
- if home_page is None:
- print("Homepage not set! Shift-click a page on the graph to make it the homepage.")
- return
- write_text(f'<meta http-equiv="refresh" content="0; url={home_page}">', join(graph_dir, 'index.html'))
-
- index_path = ""
- for page in graph.pages:
- page_path = f'{graph.path}/{page.path}'
- if page.id == graph.home_page_id:
- index_path = page_path
- print(f'/{page_path} [index]')
- else:
- print(f'/{page_path}')
- write_index(graph, page, index_html, join(graph_dir, page.path, 'index.html'))
-
- # ------------------------------------------------
- # build javascript
-
- print("Building javascript...")
- print(f'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js --config ./webpack.config.site.js -o {graph_dir}/bundle.js')
- os.chdir(app_cfg.DIR_PROJECT_ROOT)
- os.system(f'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js --config ./webpack.config.site.js -o {graph_dir}/bundle.js')
-
- print("Site export complete!")
- print(f"Graph exported to: {graph_dir}")
-
-def write_index(graph, page, index_html, fp_out):
- if page is None:
- page_title = graph.title
- else:
- page_title = page.title
- index_html = index_html.replace('BUNDLE_PATH', join('/', graph.path, 'bundle.js'))
- index_html = index_html.replace('PAGE_TITLE', page_title)
- write_text(index_html, fp_out)
-
-def sanitize_graph(graph):
- page_path_lookup = {}
- page_lookup = {}
- for page in graph['pages']:
- page_path = join('/', graph['path'], page['path'])
- if page_path in page_path_lookup:
- print(f"/!\\ WARNING! Duplicate found of {page_path}")
- else:
- page_path_lookup[page['id']] = page_path
- for page in graph['pages']:
- sanitize_page(page)
- if page['id'] == 12:
- print(page)
- for tile in page['tiles']:
- if tile['target_page_id']:
- if tile['target_page_id'] == -1:
- tile['href'] = tile['settings']['external_link_url']
- elif tile['target_page_id'] > 0:
- tile['href'] = page_path_lookup[tile['target_page_id']]
- sanitize_tile(tile)
- page_path = page_path_lookup[page['id']]
- page_lookup[page_path] = page
- # print(page_lookup['/asdf/testttt'])
- graph['pages'] = page_lookup
- graph['home_page'] = page_path_lookup[graph['home_page_id']]
- return graph
-
-def sanitize_page(data):
- if 'created_at' in data:
- del data['created_at']
- if 'updated_at' in data:
- del data['updated_at']
- if 'graph_id' in data:
- del data['graph_id']
-
-def sanitize_tile(data):
- if 'created_at' in data:
- del data['created_at']
- if 'updated_at' in data:
- del data['updated_at']
- if 'username' in data:
- del data['username']
- if 'graph_id' in data:
- del data['graph_id']
- if 'page_id' in data:
- del data['page_id']
- if 'target_page_id' in data:
- del data['target_page_id']
+ export_site(opt_graph_path, opt_output_dir, opt_build_js) \ No newline at end of file
diff --git a/cli/commands/site/populate.py b/cli/commands/site/populate.py
new file mode 100644
index 0000000..b1b9691
--- /dev/null
+++ b/cli/commands/site/populate.py
@@ -0,0 +1,79 @@
+import click
+
+lines = """/static/media/last-museum/nicole-foreshew/establishing1.mp4
+/static/media/last-museum/nicole-foreshew/sequence1b.mp4
+/static/media/last-museum/nicole-foreshew/sequence2.mp4
+/static/media/last-museum/nicole-foreshew/sequence3.mp4
+/static/media/last-museum/nicole-foreshew/sequence4.mp4
+/static/media/last-museum/nicole-foreshew/sequence5.mp4""".split("\n")
+
+letters = ['a','b','c','d','e','f','g','h','i','j']
+
+@click.command('populate')
+@click.pass_context
+def cli(ctx):
+ """Populate video pages"""
+
+ import requests
+
+ def post(endpoint, data):
+ resp = requests.post(endpoint, json=data)
+ return None if resp.status_code != 200 else resp.json()
+
+ graph_id = 3
+ name = "Nicole Foreshew"
+ index = 0
+
+ for url in lines:
+ # slug = url.split("/")[5].replace(".mp4", "").lower()
+ slug = "foreshew-" + str(index) # + letters[index]
+ print(slug)
+ index += 1
+
+ page_data = {
+ "graph_id": graph_id,
+ "path": slug,
+ "title": name, # + str(index),
+ "username": "jules",
+ "description":"",
+ "settings": {
+ "x": 0.05,
+ "y": 0.05,
+ "background_color": "#000000",
+ "background_audio_id": 0,
+ "restart_audio": False
+ }
+ }
+ page_res = post("http://0.0.0.0:5000/api/v1/page/", page_data)
+ page_id = page_res['res']['id']
+
+ tile_data = {
+ "graph_id": graph_id,
+ "page_id": page_id,
+ "target_page_id": None,
+ "type": "video",
+ "settings": {
+ "x": 0,
+ "y": 0,
+ "width": 1920,
+ "height": 1080,
+ "rotation": 0,
+ "scale": 1,
+ "opacity": 1,
+ "units": False,
+ "align": "center_center",
+ "has_audio": False,
+ "audio_on_click_id": 0,
+ "audio_on_hover_id": 0,
+ "navigate_when_audio_finishes": False,
+ "video_style": "cover",
+ "url": url,
+ "external_link_url": "",
+ "cursor": "none",
+ "muted": True,
+ "loop": True,
+ "autoadvance": False
+ }
+ }
+
+    tile_res = post("http://0.0.0.0:5000/api/v1/tile/", tile_data)