Diffstat (limited to 'cli')
-rw-r--r--  cli/app/sql/models/graph.py |  5
-rw-r--r--  cli/app/utils/file_utils.py | 12
-rw-r--r--  cli/commands/site/export.py | 74
3 files changed, 81 insertions(+), 10 deletions(-)
diff --git a/cli/app/sql/models/graph.py b/cli/app/sql/models/graph.py
index fbfb09c..8e068a0 100644
--- a/cli/app/sql/models/graph.py
+++ b/cli/app/sql/models/graph.py
@@ -42,6 +42,11 @@ class Graph(Base):
     data['pages'] = [ page.toLinkJSON() for page in self.pages ]
     return data
 
+  def toSiteJSON(self):
+    data = self.toJSON()
+    data['pages'] = [ page.toFullJSON() for page in self.pages ]
+    return data
+
 class GraphForm(ModelForm):
   class Meta:
     model = Graph
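A minimal usage sketch of the new serializer (the Session/Graph imports follow cli/commands/site/export.py below; toFullJSON is the existing Page serializer this method relies on, and 'my-site' is an illustrative graph path):

    from app.sql.common import Session, Graph

    session = Session()
    graph = session.query(Graph).filter(Graph.path == 'my-site').first()
    site_json = graph.toSiteJSON()  # pages carry full tile data, not just link stubs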
diff --git a/cli/app/utils/file_utils.py b/cli/app/utils/file_utils.py
index 7f1f417..0e672fc 100644
--- a/cli/app/utils/file_utils.py
+++ b/cli/app/utils/file_utils.py
@@ -195,12 +195,14 @@ def load_yaml(fp_in):
     cfg = yaml.load(fp, Loader=yaml.Loader)
   return cfg
 
-def load_text(fp_in):
+def load_text(fp_in, split=True):
   """Load a text file into an array
   :param fp_in: (str) filepath
   """
   with open(fp_in, 'rt') as fp:
-    lines = fp.read().rstrip('\n').split('\n')
+    lines = fp.read().rstrip('\n')
+    if split:
+      lines = lines.split('\n')
   return lines
 
 def load_line_lookup(fp_in):
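With split=False the caller now gets the raw file body as a single string, which export.py below uses to load an HTML template. A sketch of both call styles (filenames illustrative):

    lines = load_text('pages.txt')               # default: list of lines, trailing newline stripped
    html = load_text('site.html', split=False)   # whole file as one string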
@@ -264,16 +266,16 @@ def write_pickle(data, fp_out, ensure_path=True):
     pickle.dump(data, fp)
 
-def write_json(data, fp_out, minify=True, ensure_path=True, sort_keys=True, verbose=False):
+def write_json(data, fp_out, minify=True, ensure_path=True, sort_keys=True, verbose=False, default=None):
   """
   """
   if ensure_path:
     mkdirs(fp_out)
   with open(fp_out, 'w') as fp:
     if minify:
-      json.dump(data, fp, separators=(',',':'), sort_keys=sort_keys)
+      json.dump(data, fp, separators=(',',':'), sort_keys=sort_keys, default=default)
     else:
-      json.dump(data, fp, indent=2, sort_keys=sort_keys)
+      json.dump(data, fp, indent=2, sort_keys=sort_keys, default=default)
   if verbose:
     log.info('Wrote JSON: {}'.format(fp_out))
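The default argument is passed through to json.dump, which calls it on any value the encoder cannot serialize natively. export.py below passes default=str so SQLAlchemy datetime columns fall back to their string form. A sketch (output path illustrative):

    from datetime import datetime

    write_json({'exported_at': datetime(2020, 1, 1)}, '/tmp/meta.json', default=str)
    # minified output: {"exported_at":"2020-01-01 00:00:00"}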
diff --git a/cli/commands/site/export.py b/cli/commands/site/export.py
index 8212f55..c8e687a 100644
--- a/cli/commands/site/export.py
+++ b/cli/commands/site/export.py
@@ -1,6 +1,8 @@
 import click
 
 from app.settings import app_cfg
+from app.utils.file_utils import load_text, write_json, write_text
+from os.path import join
 
 @click.command('info')
 @click.option('-g', '--graph', 'opt_graph_path', required=True,
@@ -14,12 +16,10 @@ def cli(ctx, opt_graph_path, opt_output_dir):
   # ------------------------------------------------
   # imports
 
-  from os.path import join
-
   from app.sql.common import db, Session, Graph, Page, Tile
 
   # ------------------------------------------------
-  # generate HTML for all pages
+  # generate HTML for index and all pages
 
   session = Session()
   graph = session.query(Graph).filter(Graph.path == opt_graph_path).first()
@@ -27,12 +27,76 @@ def cli(ctx, opt_graph_path, opt_output_dir):
     print(f"Not a graph: {opt_graph_path}")
     return
 
+  print(f"Output site to {opt_output_dir}")
+
+  site_data = { 'graph': sanitize_graph(graph.toSiteJSON()) }
+
+  index_html = load_text(join(app_cfg.DIR_STATIC, 'site.html'), split=False)
+  write_json(site_data, join(opt_output_dir, graph.path, 'index.json'), default=str)
+  write_index(graph, None, index_html, join(opt_output_dir, graph.path, 'index.html'))
+
+  index_path = ""
   for page in graph.pages:
     page_path = f'{graph.path}/{page.path}'
     if page.id == graph.home_page_id:
+      index_path = page_path
       print(f'/{page_path} [index]')
     else:
       print(f'/{page_path}')
-    #
+    write_index(graph, page, index_html, join(opt_output_dir, graph.path, page.path, 'index.html'))
 
   # ------------------------------------------------
-  # cat all the relevant CSS from the main site
+  # generate javascript...
+
+  # NODE_ENV=production webpack --config ./webpack.config.site.js -o ./data_store/exports/asdf/bundle.js
+
+def write_index(graph, page, index_html, fp_out):
+  if page is None:
+    page_title = graph.title
+  else:
+    page_title = page.title
+  index_html = index_html.replace('BUNDLE_PATH', join('/', graph.path, 'bundle.js'))
+  index_html = index_html.replace('PAGE_TITLE', page_title)
+  write_text(index_html, fp_out)
+
+def sanitize_graph(graph):
+  page_path_lookup = {}
+  page_lookup = {}
+  for page in graph['pages']:
+    page_path_lookup[page['id']] = join('/', graph['path'], page['path'])
+  for page in graph['pages']:
+    sanitize_page(page)
+    for tile in page['tiles']:
+      if tile['target_page_id']:
+        if tile['target_page_id'] == -1:
+          tile['href'] = tile['external_link_url']
+        elif tile['target_page_id'] > 0:
+          tile['href'] = page_path_lookup[tile['target_page_id']]
+      sanitize_tile(tile)
+    page_path = page_path_lookup[page['id']]
+    page_lookup[page_path] = page
+  graph['pages'] = page_lookup
+  graph['home_page'] = page_path_lookup[graph['home_page_id']]
+  return graph
+
+def sanitize_page(data):
+  if 'created_at' in data:
+    del data['created_at']
+  if 'updated_at' in data:
+    del data['updated_at']
+  if 'graph_id' in data:
+    del data['graph_id']
+
+def sanitize_tile(data):
+  if 'created_at' in data:
+    del data['created_at']
+  if 'updated_at' in data:
+    del data['updated_at']
+  if 'username' in data:
+    del data['username']
+  if 'graph_id' in data:
+    del data['graph_id']
+  if 'page_id' in data:
+    del data['page_id']
+  if 'target_page_id' in data:
+    del data['target_page_id']
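For reference, a sketch of the index.json shape this export produces (field values illustrative; pages also keep whatever other fields toFullJSON emits): page keys are absolute paths, home_page points at the home page's path, and each tile's target_page_id is resolved into an href before being stripped:

    {
      "graph": {
        "path": "my-site",
        "home_page": "/my-site/home",
        "pages": {
          "/my-site/home": {
            "title": "Home",
            "tiles": [
              {"title": "About", "href": "/my-site/about"},
              {"title": "Blog", "href": "https://example.com/blog"}
            ]
          }
        }
      }
    }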