import os
from os.path import join
from distutils.dir_util import copy_tree

import click

from app.settings import app_cfg
from app.sql.common import db, Session, Graph, Page, Tile
from app.utils.file_utils import load_text, write_json, write_text


def export_site(opt_graph_path, opt_output_dir=app_cfg.DIR_EXPORTS, opt_build_js=False):
"""Export a graph"""
# ------------------------------------------------
# generate HTML for index and all pages
session = Session()
graph = session.query(Graph).filter(Graph.path == opt_graph_path).first()
if graph is None:
print(f"Not a graph: {opt_graph_path}")
return
# build everything here
graph_dir = os.path.abspath(join(opt_output_dir, graph.path))
# load site index
index_html = load_text(join(app_cfg.DIR_STATIC, 'site.html'), split=False)
index_html = index_html.replace('SITE_PATH', '/' + graph.path)
# write site JSON data
site_data = { 'graph': sanitize_graph(graph.toSiteJSON()) }
write_json(site_data, join(graph_dir, 'index.json'), default=str, minify=False)
# import custom css
site_css = load_text(join(app_cfg.DIR_STATIC, 'site.css'), split=False)
site_css = site_css.replace('SITE_PATH', '/' + graph.path)
write_text(site_css, join(graph_dir, 'site.css'))
copy_tree(join(app_cfg.DIR_STATIC, 'fonts'), join(graph_dir, 'static/fonts'))
copy_tree(join(app_cfg.DIR_STATIC, 'img'), join(graph_dir, 'static/img'))
# write index file, redirects to homepage
home_page = site_data['graph']['home_page']
if home_page is None:
print("Homepage not set! Shift-click a page on the graph to make it the homepage.")
session.close()
return
write_text(f'<meta http-equiv="refresh" content="0; url={home_page}">', join(graph_dir, 'index.html'))
index_path = ""
for page in graph.pages:
page_path = f'{graph.path}/{page.path}'
if page.id == graph.home_page_id:
index_path = page_path
print(f'/{page_path} [index]')
else:
print(f'/{page_path}')
write_index(graph, page, index_html, join(graph_dir, page.path, 'index.html'))
if opt_build_js or not os.path.exists(f"{graph_dir}/bundle.js"):
build_javascript(graph_dir)
session.close()
print("Site export complete!")
print(f"Graph exported to: {graph_dir}")
def build_javascript(graph_dir):
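    """Run webpack in production mode to build the site's JS bundle into graph_dir."""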
print("Building javascript...")
print(f'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js --config ./webpack.config.site.js -o {graph_dir}/bundle.js')
os.chdir(app_cfg.DIR_PROJECT_ROOT)
os.system(f'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js --config ./webpack.config.site.js -o {graph_dir}/bundle.js')
def write_index(graph, page, index_html, fp_out):
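    """Render a page's index.html from the site template, filling in the bundle path and page title."""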
    # fall back to the graph title when no page is given
    if page is None:
        page_title = graph.title
    else:
        page_title = page.title
    index_html = index_html.replace('BUNDLE_PATH', join('/', graph.path, 'bundle.js'))
    index_html = index_html.replace('PAGE_TITLE', page_title)
    write_text(index_html, fp_out)


def sanitize_graph(graph):
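    """Prepare a graph's JSON for export: resolve page paths and tile links,
    rewrite static asset URLs under the site path, and strip internal-only fields.
    """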
    page_path_lookup = {}
    page_lookup = {}

    # map each page id to its absolute path, warning on duplicate paths
    for page in graph['pages']:
        page_path = join('/', graph['path'], page['path'])
        if page_path in page_path_lookup.values():
            print(f"/!\\ WARNING! Duplicate page path found: {page_path}")
        page_path_lookup[page['id']] = page_path

    for page in graph['pages']:
        sanitize_page(page)
        for tile in page['tiles']:
            # resolve tile links: -1 marks an external URL, positive ids point at pages
            if tile['target_page_id']:
                if tile['target_page_id'] == -1:
                    tile['href'] = tile['settings']['external_link_url']
                elif tile['target_page_id'] > 0:
                    tile['href'] = page_path_lookup[tile['target_page_id']]
            # rewrite static asset URLs so they resolve under the exported site path
            if 'url' in tile['settings'] and tile['settings']['url'].startswith('/static'):
                tile['settings']['url'] = '/' + graph['path'] + tile['settings']['url']
            sanitize_tile(tile)
        page_path = page_path_lookup[page['id']]
        page_lookup[page_path] = page

    for upload in graph['uploads']:
        sanitize_upload(upload)
        if upload['url'].startswith('/static'):
            upload['url'] = '/' + graph['path'] + upload['url']

    # key pages by path and resolve the homepage path (None if no homepage is set)
    graph['pages'] = page_lookup
    graph['home_page'] = page_path_lookup.get(graph['home_page_id'])
    return graph


def sanitize_upload(data):
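    """Strip internal-only fields from an upload record before export."""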
    for key in ('created_at', 'username', 'graph_id'):
        data.pop(key, None)


def sanitize_page(data):
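    """Strip internal-only fields from a page record before export."""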
    for key in ('created_at', 'updated_at', 'graph_id'):
        data.pop(key, None)


def sanitize_tile(data):
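    """Strip internal-only fields from a tile record before export."""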
    for key in ('created_at', 'updated_at', 'username', 'graph_id', 'page_id', 'target_page_id'):
        data.pop(key, None)
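

# Example usage, assuming a graph saved under the (hypothetical) path 'my-graph':
#   export_site('my-graph', opt_build_js=True)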