From 194564985418192f73627c97dbb46c0d748db96f Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Sat, 9 Mar 2019 12:39:30 +0100
Subject: using jinja to parse the includes

---
 megapixels/app/site/loader.py                      |  2 +
 megapixels/app/site/parser.py                      | 32 ++++++----
 site/includes/chart.html                           |  2 +
 site/public/about/index.html                       | 11 +++-
 site/public/about/press/index.html                 | 11 +++-
 site/public/about/privacy/index.html               | 11 +++-
 site/public/about/research/index.html              | 11 +++-
 site/public/about/terms/index.html                 | 11 +++-
 .../datasets/50_people_one_question/index.html     | 57 +++++++++++++++++-
 site/public/datasets/brainwash/index.html          | 69 +++++++++++++++++++++-
 site/public/datasets/celeba/index.html             | 57 +++++++++++++++++-
 site/public/datasets/cofw/index.html               | 69 +++++++++++++++++++++-
 site/public/datasets/lfw/index.html                | 69 +++++++++++++++++++++-
 site/public/datasets/mars/index.html               | 57 +++++++++++++++++-
 14 files changed, 446 insertions(+), 23 deletions(-)

diff --git a/megapixels/app/site/loader.py b/megapixels/app/site/loader.py
index 8fd7a2f8..a544333b 100644
--- a/megapixels/app/site/loader.py
+++ b/megapixels/app/site/loader.py
@@ -85,6 +85,8 @@ def parse_metadata(fn, sections):
     metadata['meta'] = load_json(dataset_path)
     if not metadata['meta']:
       print("Bad metadata? {}".format(dataset_path))
+  if 'meta' not in metadata or not metadata['meta']:  # no dataset JSON for this page
+    metadata['meta'] = {}  # fall back to an empty dict so includes can still be rendered
   return metadata, valid_sections

diff --git a/megapixels/app/site/parser.py b/megapixels/app/site/parser.py
index f6e308f3..79093bc7 100644
--- a/megapixels/app/site/parser.py
+++ b/megapixels/app/site/parser.py
@@ -4,6 +4,7 @@ import re
 import glob
 import simplejson as json
 import mistune
+from jinja2 import Environment, FileSystemLoader, select_autoescape
 
 import app.settings.app_cfg as cfg
 import app.site.s3 as s3
@@ -11,6 +12,11 @@ import app.site.s3 as s3
 renderer = mistune.Renderer(escape=False)
 markdown = mistune.Markdown(renderer=renderer)
 
+includes_env = Environment(
+  loader=FileSystemLoader(cfg.DIR_SITE_INCLUDES),
+  autoescape=select_autoescape([])
+)
+
 footnote_count = 0
 
 def parse_markdown(metadata, sections, s3_path, skip_h1=False):
@@ -63,7 +69,7 @@ def parse_markdown(metadata, sections, s3_path, skip_h1=False):
         current_group = []
       current_group.append(section)
       if section.strip().endswith(' %}'):
-        groups.append(format_include("\n\n".join(current_group)))
+        groups.append(format_include("\n\n".join(current_group), metadata))
         current_group = []
     elif section.strip().startswith('```'):
       groups.append(format_section(current_group, s3_path))
@@ -232,20 +238,22 @@ def format_footnotes(footnotes, s3_path):
   footnote_txt = '…'
   return footnote_txt, footnote_index_lookup
 
-def format_include(section):
+def format_include(section, metadata):
   """ Include html template
   """
-  include_dir = cfg.DIR_SITE_INCLUDES
-  fp_html = section.strip().strip('\n').strip().strip('{%').strip().strip('%}').strip()
-  fp_html = fp_html.strip('include').strip().strip('"').strip().strip("'").strip()
-  try:
-    with open(join(include_dir, fp_html), 'r') as fp:
-      html = fp.read().replace('\n', '')
-      return html
-  except Exception as e:
-    print(f'Error parsing include: {e}')
-    return ''
+  include_fn = section.strip().strip('\n').strip().strip('{%').strip().strip('%}').strip()
+  include_fn = include_fn.strip('include').strip().strip('"').strip().strip("'").strip()
+  return includes_env.get_template(include_fn).render(metadata=metadata)
+  # include_dir = cfg.DIR_SITE_INCLUDES
+  # try:
+  #   includes_env.get_template(fp_html)
+  #   with open(join(include_dir, fp_html), 'r') as fp:
+  #     html = fp.read().replace('\n', '')
+  #     return html
+  # except Exception as e:
+  #   print(f'Error parsing include: {e}')
+  #   return ''
 
 def format_applet(section, s3_path):
   """
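For context, a rough sketch of the include flow after this change. The directory path and the metadata values below are illustrative stand-ins, not values taken from the repo; the real metadata dict comes from parse_metadata() in loader.py:

    from jinja2 import Environment, FileSystemLoader, select_autoescape

    # Mirrors the environment created in parser.py; 'site/includes' stands in
    # for cfg.DIR_SITE_INCLUDES.
    includes_env = Environment(
        loader=FileSystemLoader('site/includes'),
        autoescape=select_autoescape([])  # autoescaping stays off, as in the patch
    )

    # A markdown section such as '{% include "chart.html" %}' is reduced to the
    # bare filename by format_include() and rendered with the page's metadata dict.
    metadata = {'meta': {'dataset': {'name_display': 'Example Dataset'}}}  # illustrative shape
    html = includes_env.get_template('chart.html').render(metadata=metadata)

    # Unlike the old open()/read().replace('\n', '') path, Jinja keeps the
    # template's newlines, which is why the regenerated pages below gain so many lines.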
diff --git a/site/includes/chart.html b/site/includes/chart.html
index 63108df1..913e09b2 100644
--- a/site/includes/chart.html
+++ b/site/includes/chart.html
@@ -1,4 +1,6 @@
+
 Who used {{ metadata.meta.dataset.name_display }}?
+
 This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns to see yearly totals. Colors are only assigned to the top 10 overall countries.
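The template's title line reads the dataset name from the metadata dict that format_include() now passes in. A minimal sketch of that lookup, using an inline copy of the expression and an illustrative metadata value (the nesting is inferred from the template and from loader.py):

    from jinja2 import Environment, DictLoader

    # Inline stand-in for the 'Who used ...' line in chart.html.
    env = Environment(loader=DictLoader({
        'chart_title.html': 'Who used {{ metadata.meta.dataset.name_display }}?'
    }))
    title = env.get_template('chart_title.html').render(
        metadata={'meta': {'dataset': {'name_display': 'Example Dataset'}}})
    # title == 'Who used Example Dataset?'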

diff --git a/site/public/about/index.html b/site/public/about/index.html
index a1370663..125d1feb 100644
--- a/site/public/about/index.html
+++ b/site/public/about/index.html
@@ -28,7 +28,16 @@
 About MegaPixels
-
+
+
 MegaPixels is an art and research project by Adam Harvey about the origins and ethics of facial analysis datasets. Where do they come from? Who's included? Who created it and for what reason?
 MegaPixels sets out to answer these questions and reveal the stories behind the millions of images used to train, evaluate, and power the facial recognition surveillance algorithms used today. MegaPixels is authored by Adam Harvey, developed in collaboration with Jules LaPlace, and produced in partnership with Mozilla.
 Notes

diff --git a/site/public/about/press/index.html b/site/public/about/press/index.html
index 1efe1999..d6dbdb2c 100644
--- a/site/public/about/press/index.html
+++ b/site/public/about/press/index.html
@@ -28,7 +28,16 @@
 Press
-
+
+
 (list of press articles and images will go here)

diff --git a/site/public/about/privacy/index.html b/site/public/about/privacy/index.html
index 9a0836a1..a934b7e9 100644
--- a/site/public/about/privacy/index.html
+++ b/site/public/about/privacy/index.html
@@ -28,7 +28,16 @@
 Privacy Policy
-
+
+
 A summary of our privacy policy is as follows:
 The MegaPixels site does not use any analytics programs or collect any data besides the necessary IP address of your connection, which is deleted every 30 days and used only for security and to prevent misuse.
 The image processing sections of the site do not collect any data whatsoever. All processing takes place in temporary memory (RAM), and the result is then displayed back to the user over an SSL-secured HTTPS connection. It is the sole responsibility of the user whether they discard their analyzed information (by closing the page) or share it, and the user bears any potential consequences that may arise from doing so.
 A more complete legal version is below:
diff --git a/site/public/about/research/index.html b/site/public/about/research/index.html
index db21fbc3..559cadd0 100644
--- a/site/public/about/research/index.html
+++ b/site/public/about/research/index.html
@@ -28,7 +28,16 @@
+

diff --git a/site/public/about/terms/index.html b/site/public/about/terms/index.html
index b5b9a457..b8253a33 100644
--- a/site/public/about/terms/index.html
+++ b/site/public/about/terms/index.html
@@ -28,7 +28,16 @@
 Terms and Conditions ("Terms")
-
+
+
 (FPO: this is only example text)
 Last updated: December 04, 2018
 Please read these Terms and Conditions ("Terms", "Terms and Conditions") carefully before using the MegaPixels website (the "Service") operated by megapixels.cc ("us", "we", or "our").
 Your access to and use of the Service is conditioned on your acceptance of and compliance with these Terms.

diff --git a/site/public/datasets/50_people_one_question/index.html b/site/public/datasets/50_people_one_question/index.html
index e0b3581a..945a1233 100644
--- a/site/public/datasets/50_people_one_question/index.html
+++ b/site/public/datasets/50_people_one_question/index.html
@@ -31,7 +31,62 @@
 50 People 1 Question
 At vero eos et accusamus et iusto odio dignissimos ducimus, qui blanditiis praesentium voluptatum deleniti atque corrupti, quos dolores et quas molestias excepturi sint, obcaecati cupiditate non-provident, similique sunt in culpa, qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio.
 Nam libero tempore, cum soluta nobis est eligendi optio, cumque nihil impedit, quo minus id, quod maxime placeat, facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet, ut et voluptates repudiandae sint et molestiae non-recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
-
+
+
 Biometric Trade Routes (beta)
 To understand how this dataset has been used around the world and how it has affected global research on computer vision, surveillance, defense, and consumer technology, this visualization plots the locations of each organization that used or referenced the dataset.
 Academic
 Industry
 Government
 Data is compiled from Semantic Scholar and not yet manually verified.
 The data is generated by collecting all citations for all original research papers associated with the dataset. The PDFs are then converted to text, and the organization names are extracted and geocoded. Because of the automated approach to extracting data, actual use of the dataset cannot yet be confirmed. This visualization is provided to help locate and confirm usage and will be updated as data noise is reduced.
 Supplementary Information
 Citations
 Citations were collected from Semantic Scholar, a website which aggregates and indexes research papers. Metadata was extracted from these papers, including extracting names of institutions automatically from PDFs, and then the addresses were geocoded. Data is not yet manually verified, and reflects any time the paper was cited. Some papers may only mention the dataset in passing, while others use it as part of their research methodology.
 Add button/link to download CSV
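The methodology copy in the hunk above describes the citation pipeline only in prose: collect citations, convert the PDFs to text, extract organization names, geocode them. A very rough illustration of that flow is below; it is not part of this patch, and every name in it (the callables, the 'pdf_url' field) is a hypothetical stand-in rather than code from the MegaPixels repo:

    # Hypothetical sketch of the pipeline described on the dataset pages:
    # citations -> PDF text -> organization names -> geocoded locations.
    def build_citation_locations(paper_ids, fetch_citations, pdf_to_text, extract_orgs, geocode):
        """Each callable is a stand-in for a tool the page copy implies; none ship with this patch."""
        rows = []
        for paper_id in paper_ids:
            for citation in fetch_citations(paper_id):   # e.g. Semantic Scholar citation records
                text = pdf_to_text(citation['pdf_url'])  # 'pdf_url' is an assumed field name
                for org in extract_orgs(text):           # institution names found in the PDF text
                    lat, lon = geocode(org)
                    rows.append({'paper': paper_id, 'org': org, 'lat': lat, 'lon': lon})
        return rows  # unverified hits; the copy notes usage still needs manual confirmation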