import os
from os.path import join
# Note: distutils was removed from the standard library in Python 3.12;
# shutil.copytree(..., dirs_exist_ok=True) is the modern equivalent.
from distutils.dir_util import copy_tree

import click

from app.settings import app_cfg
from app.utils.file_utils import load_text, write_json, write_text
from app.sql.common import db, Session, Graph, Page, Tile


def export_site(opt_graph_path, opt_output_dir=app_cfg.DIR_EXPORTS, opt_build_js=False):
    """Export a graph"""
    # ------------------------------------------------
    # generate HTML for index and all pages
    session = Session()
    graph = session.query(Graph).filter(Graph.path == opt_graph_path).first()
    if graph is None:
        print(f"Not a graph: {opt_graph_path}")
        return

    # build everything here
    graph_dir = os.path.abspath(join(opt_output_dir, graph.path))

    # load site index
    index_html = load_text(join(app_cfg.DIR_STATIC, 'site.html'), split=False)
    index_html = index_html.replace('CUSTOM_HEADER', graph.settings.get('custom_header', ''))
    index_html = index_html.replace('SITE_PATH', '/' + graph.path)

    # write site JSON data
    site_data = {
        'graph': sanitize_graph(graph.toSiteJSON())
    }
    write_json(site_data, join(graph_dir, 'index.json'), default=str, minify=False)

    # import custom css
    site_css = load_text(join(app_cfg.DIR_STATIC, 'site.css'), split=False)
    site_css = site_css.replace('SITE_PATH', '/' + graph.path)
    write_text(site_css, join(graph_dir, 'site.css'))

    copy_tree(join(app_cfg.DIR_STATIC, 'fonts'), join(graph_dir, 'static/fonts'))
    copy_tree(join(app_cfg.DIR_STATIC, 'img'), join(graph_dir, 'static/img'))

    # write index file, redirects to homepage
    home_page = site_data['graph']['home_page']
    if home_page is None:
        print("Homepage not set! Shift-click a page on the graph to make it the homepage.")
        session.close()
        return

    # write_text(f'', join(graph_dir, 'index.html'))
    write_index(graph=graph, page=None, index_html=index_html, fp_out=join(graph_dir, 'index.html'))

    index_path = ""

    for page in graph.pages:
        page_path = f'{graph.path}/{page.path}'
        if page.id == graph.home_page_id:
            index_path = page_path
            print(f'/{page_path} [index]')
        else:
            print(f'/{page_path}')
        write_index(graph, page, index_html, join(graph_dir, page.path, 'index.html'))

    if opt_build_js or not os.path.exists(f"{graph_dir}/bundle.js"):
        build_javascript(graph_dir)

    session.close()
    print("Site export complete!")
    print(f"Graph exported to: {graph_dir}")


def build_javascript(graph_dir):
    print("Building javascript...")
    cmd = (
        'NODE_ENV=production node ./node_modules/webpack-cli/bin/cli.js '
        f'--config ./webpack.config.site.js -o {graph_dir}/bundle.js'
    )
    print(cmd)
    os.chdir(app_cfg.DIR_PROJECT_ROOT)
    os.system(cmd)


def write_index(graph, page, index_html, fp_out):
    if page is None:
        page_title = graph.title
    else:
        page_title = page.title
    index_html = index_html.replace('BUNDLE_PATH', join('/', graph.path, 'bundle.js'))
    index_html = index_html.replace('PAGE_TITLE', page_title)
    write_text(index_html, fp_out)


def sanitize_graph(graph):
    page_path_lookup = {}
    page_lookup = {}

    # map page ids to their exported paths, warning on duplicate paths
    for page in graph['pages']:
        page_path = join('/', graph['path'], page['path'])
        if page_path in page_path_lookup.values():
            print(f"/!\\ WARNING! Duplicate found of {page_path}")
        else:
            page_path_lookup[page['id']] = page_path

    for page in graph['pages']:
        sanitize_page(page)
        for tile in page['tiles']:
            if tile['target_page_id']:
                # negative ids are sentinel values for special link behaviors
                if tile['target_page_id'] == -1:
                    tile['href'] = tile['settings']['external_link_url']
                elif tile['target_page_id'] == -2:
                    tile['href'] = '__open_popup'
                elif tile['target_page_id'] == -3:
                    tile['href'] = '__close_popup'
                elif tile['target_page_id'] == -4:
                    tile['href'] = '__toggle_popup'
                elif tile['target_page_id'] > 0:
                    tile['href'] = page_path_lookup[tile['target_page_id']]
            # rewrite static asset URLs so they are rooted at the graph path
            if 'url' in tile['settings'] and tile['settings']['url'].startswith('/static'):
                tile['settings']['url'] = '/' + graph['path'] + tile['settings']['url']
            if len(tile['settings'].get('appear_after', "")):
                tile['settings']['appear_after'] = timestampToSeconds(tile['settings']['appear_after']) or 0
            sanitize_tile(tile)
        page_path = page_path_lookup[page['id']]
        page_lookup[page_path] = page

    for upload in graph['uploads']:
        sanitize_upload(upload)
        if upload['url'].startswith('/static'):
            upload['url'] = '/' + graph['path'] + upload['url']

    # print(page_lookup['/asdf/testttt'])
    graph['pages'] = page_lookup
    # use .get() so export_site can report a missing homepage instead of raising KeyError
    graph['home_page'] = page_path_lookup.get(graph['home_page_id'])
    return graph


def sanitize_upload(data):
    # strip fields that should not appear in the exported JSON
    for key in ('created_at', 'username', 'graph_id'):
        data.pop(key, None)


def sanitize_page(data):
    for key in ('created_at', 'updated_at', 'graph_id'):
        data.pop(key, None)


def sanitize_tile(data):
    for key in ('created_at', 'updated_at', 'username', 'graph_id', 'page_id', 'target_page_id'):
        data.pop(key, None)


def timestampToSeconds(time_str):
    # convert "HH:MM:SS", "MM:SS", or plain-seconds strings to a number of seconds
    try:
        time_str_parts = list(map(float, time_str.strip().split(":")))
        if len(time_str_parts) == 3:
            return (time_str_parts[0] * 60 + time_str_parts[1]) * 60 + time_str_parts[2]
        if len(time_str_parts) == 2:
            return time_str_parts[0] * 60 + time_str_parts[1]
        return time_str_parts[0]
    except (AttributeError, ValueError):
        return 0
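

# --- Hypothetical CLI wiring (illustration only) ---------------------------
# `click` is imported above, but the command definition itself is not part of
# this excerpt. The sketch below is only an assumption about how export_site
# might be exposed on the command line; the command name and option names are
# invented for illustration, not taken from the original source.
@click.command('export')
@click.option('-g', '--graph', 'opt_graph_path', required=True,
              help='Path of the graph to export')
@click.option('-o', '--output', 'opt_output_dir', default=app_cfg.DIR_EXPORTS,
              show_default=True, help='Directory to export the site into')
@click.option('--build-js', 'opt_build_js', is_flag=True,
              help='Force a webpack rebuild of bundle.js')
def cli_export(opt_graph_path, opt_output_dir, opt_build_js):
    export_site(opt_graph_path, opt_output_dir, opt_build_js)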