Diffstat (limited to 'animism-align/cli/commands/site/export.py')
 animism-align/cli/commands/site/export.py | 104
 1 file changed, 92 insertions(+), 12 deletions(-)
diff --git a/animism-align/cli/commands/site/export.py b/animism-align/cli/commands/site/export.py
index 4bd5ba1..a00ef0d 100644
--- a/animism-align/cli/commands/site/export.py
+++ b/animism-align/cli/commands/site/export.py
@@ -4,6 +4,7 @@ from app.settings import app_cfg
from app.utils.file_utils import load_text, write_json, write_text
from os.path import join
from functools import reduce
+from shutil import copyfile
import os
@click.command('info')
@@ -22,25 +23,27 @@ def cli(ctx, opt_output_dir):
from distutils.dir_util import copy_tree
# ------------------------------------------------
- # load the db
-
- db = export_db()
- prune_db(db)
-
- # ------------------------------------------------
# export settings
page_title = "Animism: Episode 1"
page_name = "episode1"
page_desc = "A Report on Migrating Souls in Museums and Moving Pictures"
+
page_url = "/" + page_name
+ media_url = "/" + page_name + "/media"
site_title = f"{page_title}: {page_desc}"
-
- # where to build everything
site_path = opt_output_dir or datetime.datetime.now().strftime("animism_%Y%m%d%H%M")
- site_static = join(app_cfg.DIR_EXPORTS, site_path, 'static')
+ site_fp_static = join(app_cfg.DIR_EXPORTS, site_path, 'static')
site_fp_out = join(app_cfg.DIR_EXPORTS, site_path, page_name)
+ site_fp_media = join(app_cfg.DIR_EXPORTS, site_path, page_name, 'media')
+
+ # ------------------------------------------------
+ # load the db
+
+ db = export_db()
+ prune_db(db)
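+    # rewrite upload URLs in the db and collect a sha256 -> {src, dst} map of files to copy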
+ media_to_copy = rewrite_db_media(db, site_fp_media, media_url)
# ------------------------------------------------
# build the index.html
@@ -66,10 +69,15 @@ def cli(ctx, opt_output_dir):
# write_text(site_css, join(site_fp_out, 'site.css'))
# ------------------------------------------------
+ # copy media from the exhibition
+
+ copy_media(site_fp_media, media_to_copy)
+
+ # ------------------------------------------------
# copy any static assets
- copy_tree(join(app_cfg.DIR_STATIC, 'fonts'), join(site_static, 'fonts'))
- copy_tree(join(app_cfg.DIR_STATIC, 'img'), join(site_static, 'img'))
+ copy_tree(join(app_cfg.DIR_STATIC, 'fonts'), join(site_fp_static, 'fonts'))
+ copy_tree(join(app_cfg.DIR_STATIC, 'img'), join(site_fp_static, 'img'))
# ------------------------------------------------
# build javascript
@@ -86,9 +94,81 @@ def cli(ctx, opt_output_dir):
# Database Functions
######################################################################
+def copy_media(fp_media, to_copy):
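+    """Copy the collected uploads into the export media directory and report the total size written."""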
+ os.makedirs(fp_media, exist_ok=True)
+ print(f"copying {len(to_copy.keys())} uploaded files")
+ total_size = 0
+ for fp in to_copy.values():
+ copyfile(fp['src'], fp['dst'])
+ total_size += os.path.getsize(fp['dst'])
+ print(f"wrote {round(total_size / 1000000, 2)} MB")
+
+def rewrite_db_media(db, fp_out, url_out):
+ """
+ Go over all the media and find any Upload objects.
+ Figure out which to copy, and rewrite DB to use the export URL schema.
+ """
+ to_copy = {}
+ for item in IterateTable(db['media']):
+ # images - various sizes
+ settings = item['settings']
+ if item['type'] == 'image':
+            # the fullsize original is not exported; drop it before rewriting the resized fields
+            settings.pop('fullsize', None)
+            for field in app_cfg.IMAGE_UPLOAD_FIELDS:
+                if field in settings:
+                    settings[field] = rewrite_upload(to_copy, settings[field], fp_out, url_out)
+ # videos - poster images
+ elif item['type'] == 'video':
+ if 'poster' in settings:
+ settings['poster'] = rewrite_upload(to_copy, settings['poster'], fp_out, url_out)
+ # galleries - a bunch of lookups
+ elif item['type'] == 'gallery':
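+            # each lookup field maps a stringified image id to an upload record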
+ for field in app_cfg.IMAGE_UPLOAD_GALLERY_LOOKUPS:
+                for image_id in settings['image_order']:
+                    image_id = str(image_id)
+                    if image_id in settings[field]:
+                        settings[field][image_id] = rewrite_upload(to_copy, settings[field][image_id], fp_out, url_out)
+ # files - singleton file uploads
+ elif item['type'] == 'file':
+ if 'file' in settings:
+ settings['file'] = rewrite_upload(to_copy, settings['file'], fp_out, url_out)
+ return to_copy
+
+def rewrite_upload(to_copy, item, fp_out, url_out):
+ """
+ # rewriting uploads. they look like this:
+ "fn": "koester.gif",
+ "sha256": "c7c25e8d9be8b3e5db89df0f4a35f8a599dfdcf8bf9bc1f6c4137c7b6522d710",
+ "tag": "file",
+ "url": "/static/data_store/uploads/file/koester.gif",
+ "username": "animism"
+ """
+ if 'sha256' not in item:
+ return item
+ sha = item['sha256']
+ out_fn = sha + item['ext']
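+    # the exported record only needs the new URL; all other upload metadata is dropped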
+ out_obj = {
+ "url": join(url_out, out_fn),
+ }
+
+ if sha not in to_copy:
+ # print(f"SHA: {sha}")
+ in_fn = item['fn']
+ in_path = join(app_cfg.DIR_UPLOADS, item['tag'], in_fn)
+ if os.path.exists(in_path):
+ to_copy[sha] = {
+ "src": in_path,
+ "dst": join(fp_out, out_fn)
+ }
+ else:
+ print(f"Missing path: {in_path}")
+
+ return out_obj
+
def prune_db(db):
"""Remove random stuff from the JSON that doesn't need to be there
- extraneous paragraphs
+ - extraneous media
"""
seen_paras = {}
seen_media = {}
@@ -101,7 +181,7 @@ def prune_db(db):
db['media'] = filter_db(db, 'media', seen_media)
def filter_db(db, table, seen):
- order = filter(lambda i: i in seen, db[table]['order'])
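+    # list() so 'order' is not a one-shot iterator exhausted by the lookup comprehension below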
+ order = list(filter(lambda i: i in seen, db[table]['order']))
lookup = { id: db[table]['lookup'][id] for id in order }
return { 'order': order, 'lookup': lookup }