summaryrefslogtreecommitdiff
path: root/site
diff options
context:
space:
mode:
authorJules Laplace <julescarbon@gmail.com>2018-12-16 20:01:23 +0100
committerJules Laplace <julescarbon@gmail.com>2018-12-16 20:01:23 +0100
commit76f36c6c5dafe754b066903b1ee8ecdd1b92dcab (patch)
tree374deb919344611e642c83926dd36a12f31f709a /site
parent6431d06048791763f3644b3a0457cc9c4f1df6d3 (diff)
faceSearch client
Diffstat (limited to 'site')
-rw-r--r--site/assets/css/applets.css45
-rw-r--r--site/assets/css/css.css16
-rw-r--r--site/assets/img/icon_camera.svg2
-rw-r--r--site/public/about/privacy/index.html8
-rw-r--r--site/public/about/terms/index.html10
-rw-r--r--site/public/datasets/lfw/index.html12
-rw-r--r--site/public/datasets/vgg_face2/index.html6
-rw-r--r--site/public/research/00_introduction/index.html5
-rw-r--r--site/public/research/01_from_1_to_100_pixels/index.html4
-rw-r--r--site/public/research/index.html2
-rw-r--r--site/public/test/citations/index.html2
-rw-r--r--site/public/test/csv/index.html2
-rw-r--r--site/public/test/datasets/index.html2
-rw-r--r--site/public/test/face_search/index.html2
-rw-r--r--site/public/test/gallery/index.html2
-rw-r--r--site/public/test/index.html10
-rw-r--r--site/public/test/map/index.html2
-rw-r--r--site/public/test/name_search/index.html2
-rw-r--r--site/public/test/style/index.html8
19 files changed, 97 insertions, 45 deletions
diff --git a/site/assets/css/applets.css b/site/assets/css/applets.css
index fc71ecc4..2b531908 100644
--- a/site/assets/css/applets.css
+++ b/site/assets/css/applets.css
@@ -1,7 +1,52 @@
.applet {
margin-bottom: 40px;
+ transition: opacity 0.2s cubic-bezier(0,0,1,1);
+ opacity: 0;
}
.applet.map {
width: 100vw;
height: 50vh;
+}
+.applet.loaded {
+ opacity: 1;
+}
+
+.row {
+ display: flex;
+ flex-direction: row;
+ justify-content: flex-start;
+}
+
+.query h2 {
+ margin-top: 0; padding-top: 0;
+}
+.cta {
+ padding-left: 20px;
+}
+.uploadContainer > div {
+ position: relative;
+ width: 300px;
+ height: 300px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ background: #333;
+ border: 3px dashed #fff;
+ border-radius: 10px;
+ opacity: 0.3;
+ transition: opacity 0.2s cubic-bezier(0,0,1,1);
+}
+.uploadContainer.active,
+.desktop .uploadContainer > div:hover {
+ opacity: 1;
+}
+.uploadContainer input {
+ position: absolute;
+ top: 0; left: 0;
+ width: 100%; height: 100%;
+ opacity: 0;
+ cursor: pointer;
+}
+.uploadContainer img {
+ max-width: 40px;
} \ No newline at end of file
diff --git a/site/assets/css/css.css b/site/assets/css/css.css
index b6742cdc..4f2d7c6e 100644
--- a/site/assets/css/css.css
+++ b/site/assets/css/css.css
@@ -131,23 +131,31 @@ h1 {
padding: 0;
transition: color 0.2s cubic-bezier(0,0,1,1);
}
-h2, h3 {
+h2 {
+ color: #ddd;
+ font-weight: 300;
+ font-size: 18pt;
+ margin: 20px 0 10px;
+ padding: 0;
+ transition: color 0.2s cubic-bezier(0,0,1,1);
+}
+h3 {
margin: 0 0 20px 0;
padding: 0;
font-size: 11pt;
font-weight: 500;
transition: color 0.2s cubic-bezier(0,0,1,1);
}
-.content h2 a {
+.content h3 a {
color: #888;
text-decoration: none;
}
-.desktop .content h2 a:hover {
+.desktop .content h3 a:hover {
color: #fff;
text-decoration: underline;
}
-th, .gray, h2, h3 {
+th, .gray, h3 {
font-family: 'Roboto Mono', monospace;
font-weight: 400;
text-transform: uppercase;
diff --git a/site/assets/img/icon_camera.svg b/site/assets/img/icon_camera.svg
index b349072e..605fcfe1 100644
--- a/site/assets/img/icon_camera.svg
+++ b/site/assets/img/icon_camera.svg
@@ -1,2 +1,2 @@
<?xml version="1.0"?>
-<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 -256 1950 1950"><path d="m975.19 549.05q119 0 203.5 84.5t84.5 203.5-84.5 203.5-203.5 84.5-203.5-84.5-84.5-203.5 84.5-203.5 203.5-84.5m704-416q106 0 181 75t75 181v896q0 106-75 181t-181 75h-1408q-106 0-181-75t-75-181v-896q0-106 75-181t181-75h224l51-136q19-49 69.5-84.5t103.5-35.5h512q53 0 103.5 35.5t69.5 84.5l51 136h224m-704 1152q185 0 316.5-131.5t131.5-316.5-131.5-316.5-316.5-131.5-316.5 131.5-131.5 316.5 131.5 316.5 316.5 131.5"/></svg> \ No newline at end of file
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 -256 1950 1950"><path d="m975.19 549.05q119 0 203.5 84.5t84.5 203.5-84.5 203.5-203.5 84.5-203.5-84.5-84.5-203.5 84.5-203.5 203.5-84.5m704-416q106 0 181 75t75 181v896q0 106-75 181t-181 75h-1408q-106 0-181-75t-75-181v-896q0-106 75-181t181-75h224l51-136q19-49 69.5-84.5t103.5-35.5h512q53 0 103.5 35.5t69.5 84.5l51 136h224m-704 1152q185 0 316.5-131.5t131.5-316.5-131.5-316.5-316.5-131.5-316.5 131.5-131.5 316.5 131.5 316.5 316.5 131.5" fill='#fff'/></svg> \ No newline at end of file
diff --git a/site/public/about/privacy/index.html b/site/public/about/privacy/index.html
index 6b4ac42f..ab4f12b8 100644
--- a/site/public/about/privacy/index.html
+++ b/site/public/about/privacy/index.html
@@ -49,15 +49,15 @@
<p>Usage Data is data collected automatically either generated by the use of the Service or from the Service infrastructure itself</p>
<h2>Information Collection and Use</h2>
<p>We collect several different types of information for various purposes to provide and improve our Service to you.</p>
-<h3>Types of Data Collected</h3>
-<h4>Personal Data</h4>
+<h2>Types of Data Collected</h2>
+<h3>Personal Data</h3>
<p>While using our Service, we may ask you to provide us with certain personally identifiable information that can be used to contact or identify you ("Personal Data"). Personally identifiable information may include, but is not limited to:</p>
<ul>
<li>Cookies and Usage Data</li>
</ul>
-<h4>Usage Data</h4>
+<h3>Usage Data</h3>
<p>We may also collect information how the Service is accessed and used ("Usage Data"). This Usage Data may include information such as your computer's Internet Protocol address (e.g. IP address), browser type, browser version, the pages of our Service that you visit, the time and date of your visit, the time spent on those pages, unique device identifiers and other diagnostic data.</p>
-<h4>Tracking &amp; Cookies Data</h4>
+<h3>Tracking &amp; Cookies Data</h3>
<p>We use cookies and similar tracking technologies to track the activity on our Service and we hold certain information.
Cookies are files with a small amount of data which may include an anonymous unique identifier. Cookies are sent to your browser from a website and stored on your device. Other tracking technologies are also used such as beacons, tags and scripts to collect and track information and to improve and analyse our Service.</p>
<p>You can instruct your browser to refuse all cookies or to indicate when a cookie is being sent. However, if you do not accept cookies, you may not be able to use some portions of our Service.
diff --git a/site/public/about/terms/index.html b/site/public/about/terms/index.html
index 1e317715..72c1b670 100644
--- a/site/public/about/terms/index.html
+++ b/site/public/about/terms/index.html
@@ -33,20 +33,20 @@
<p>Please read these Terms and Conditions ("Terms", "Terms and Conditions") carefully before using the MegaPixels website (the "Service") operated by megapixels.cc ("us", "we", or "our").</p>
<p>Your access to and use of the Service is conditioned on your acceptance of and compliance with these Terms.</p>
<p>By accessing or using the Service you agree to be bound by these Terms. If you disagree with any part of the terms then you may not access the Service.</p>
-<h2>Links To Other Web Sites</h2>
+<h3>Links To Other Web Sites</h3>
<p>Our Service may contain links to third-party web sites or services that are not owned or controlled by megapixels.cc.</p>
<p>megapixels.cc has no control over, and assumes no responsibility for, the content, privacy policies, or practices of any third party web sites or services. You further acknowledge and agree that megapixels.cc shall not be responsible or liable, directly or indirectly, for any damage or loss caused or alleged to be caused by or in connection with use of or reliance on any such content, goods or services available on or through any such web sites or services.</p>
<p>We strongly advise you to read the terms and conditions and privacy policies of any third-party web sites or services that you visit.</p>
-<h2>Termination</h2>
+<h3>Termination</h3>
<p>We may terminate or suspend access to our Service immediately, without prior notice or liability, for any reason whatsoever, including without limitation if you breach the Terms.</p>
<p>All provisions of the Terms which by their nature should survive termination shall survive termination, including, without limitation, ownership provisions, warranty disclaimers, indemnity and limitations of liability.</p>
-<h2>Governing Law</h2>
+<h3>Governing Law</h3>
<p>These Terms shall be governed and construed in accordance with the laws of Berlin, Germany, without regard to its conflict of law provisions.</p>
<p>Our failure to enforce any right or provision of these Terms will not be considered a waiver of those rights. If any provision of these Terms is held to be invalid or unenforceable by a court, the remaining provisions of these Terms will remain in effect. These Terms constitute the entire agreement between us regarding our Service, and supersede and replace any prior agreements we might have between us regarding the Service.</p>
-<h2>Changes</h2>
+<h3>Changes</h3>
<p>We reserve the right, at our sole discretion, to modify or replace these Terms at any time. If a revision is material we will try to provide at least 30 days notice prior to any new terms taking effect. What constitutes a material change will be determined at our sole discretion.</p>
<p>By continuing to access or use our Service after those revisions become effective, you agree to be bound by the revised terms. If you do not agree to the new terms, please stop using the Service.</p>
-<h2>Contact Us</h2>
+<h3>Contact Us</h3>
<p>If you have any questions about these Terms, please contact us.</p>
</section>
diff --git a/site/public/datasets/lfw/index.html b/site/public/datasets/lfw/index.html
index 9adf29b1..ddb3f1d1 100644
--- a/site/public/datasets/lfw/index.html
+++ b/site/public/datasets/lfw/index.html
@@ -30,9 +30,9 @@
<section><h1>Labeled Faces in the Wild</h1>
</section><section><div class='meta'><div><div class='gray'>Created</div><div>2007</div></div><div><div class='gray'>Images</div><div>13,233</div></div><div><div class='gray'>People</div><div>5,749</div></div><div><div class='gray'>Created From</div><div>Yahoo News images</div></div><div><div class='gray'>Search available</div><div>Searchable</div></div></div></section><section><p>Labeled Faces in The Wild (LFW) is amongst the most widely used facial recognition training datasets in the world and is the first of its kind to be created entirely from images posted online. The LFW dataset includes 13,233 images of 5,749 people that were collected between 2002-2004. Use the tools below to check if you were included in this dataset or scroll down to read the analysis.</p>
-</section><section class='applet_container'><div class='applet' data-payload='{"command": "face_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "name_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file assets/lfw_names_gender_kg_min.csv", "fields": ["Name, Images, Gender, Description"]}'></div></section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_feature.jpg' alt='Eighteen of the 5,749 people in the Labeled Faces in the Wild Dataset. The most widely used face dataset for benchmarking commercial face recognition algorithms.'><div class='caption'>Eighteen of the 5,749 people in the Labeled Faces in the Wild Dataset. The most widely used face dataset for benchmarking commercial face recognition algorithms.</div></div></section><section><h2>Intro</h2>
+</section><section class='applet_container'><div class='applet' data-payload='{"command": "face_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "name_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file assets/lfw_names_gender_kg_min.csv", "fields": ["Name, Images, Gender, Description"]}'></div></section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_feature.jpg' alt='Eighteen of the 5,749 people in the Labeled Faces in the Wild Dataset. The most widely used face dataset for benchmarking commercial face recognition algorithms.'><div class='caption'>Eighteen of the 5,749 people in the Labeled Faces in the Wild Dataset. The most widely used face dataset for benchmarking commercial face recognition algorithms.</div></div></section><section><h3>Intro</h3>
<p>Three paragraphs describing the LFW dataset in a format that can be easily replicated for the other datasets. Nothing too custom. An analysis of the initial research papers with context relative to all the other dataset papers.</p>
-</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_montage_everyone_nocrop_1920.jpg' alt=' all 5,749 people in the LFW Dataset sorted from most to least images collected.'><div class='caption'> all 5,749 people in the LFW Dataset sorted from most to least images collected.</div></div></section><section><h2>LFW by the Numbers</h2>
+</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_montage_everyone_nocrop_1920.jpg' alt=' all 5,749 people in the LFW Dataset sorted from most to least images collected.'><div class='caption'> all 5,749 people in the LFW Dataset sorted from most to least images collected.</div></div></section><section><h3>LFW by the Numbers</h3>
<ul>
<li>Was first published in 2007</li>
<li>Developed out of a prior dataset from Berkely called "Faces in the Wild" or "Names and Faces" [^lfw_original_paper]</li>
@@ -46,7 +46,7 @@
<li>In all the LFW publications provided by the authors the words "ethics", "consent", and "privacy" appear 0 times [^lfw_original_paper], [^lfw_survey], [^lfw_tech_report] , [^lfw_website]</li>
<li>The word "future" appears 71 times</li>
</ul>
-<h1>Facts</h1>
+<h3>Facts</h3>
<ul>
<li>Was created for the purpose of improving "unconstrained face recognition" [^lfw_original_paper]</li>
<li>All images in LFW were obtained "in the wild" meaning without any consent from the subject or from the photographer</li>
@@ -61,7 +61,7 @@
<li>SenseTime, who has relied on LFW for benchmarking their facial recognition performance, is the leading provider of surveillance to the Chinese Government (need citation)</li>
</ul>
</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_montage_top1_640.jpg' alt=' former President George W. Bush'><div class='caption'> former President George W. Bush</div></div>
-<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_montage_top2_4_640.jpg' alt=' Colin Powel (236), Tony Blair (144), and Donald Rumsfeld (121)'><div class='caption'> Colin Powel (236), Tony Blair (144), and Donald Rumsfeld (121)</div></div></section><section><h2>People and Companies using the LFW Dataset</h2>
+<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_montage_top2_4_640.jpg' alt=' Colin Powel (236), Tony Blair (144), and Donald Rumsfeld (121)'><div class='caption'> Colin Powel (236), Tony Blair (144), and Donald Rumsfeld (121)</div></div></section><section><h3>People and Companies using the LFW Dataset</h3>
<p>This section describes who is using the dataset and for what purposes. It should include specific examples of people or companies with citations and screenshots. This section is followed up by the graph, the map, and then the supplementary material.</p>
<p>The LFW dataset is used by numerous companies for <a href="about/glossary#benchmarking">benchmarking</a> algorithms and in some cases <a href="about/glossary#training">training</a>. According to the benchmarking results page [^lfw_results] provided by the authors, over 2 dozen companies have contributed their benchmark results.</p>
<p>According to BiometricUpdate.com [^lfw_pingan], LFW is "the most widely used evaluation set in the field of facial recognition, LFW attracts a few dozen teams from around the globe including Google, Facebook, Microsoft Research Asia, Baidu, Tencent, SenseTime, Face++ and Chinese University of Hong Kong."</p>
@@ -97,10 +97,10 @@
<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_screenshot_02.jpg' alt=' "Face Recognition Performance in LFW benchmark"'><div class='caption'> "Face Recognition Performance in LFW benchmark"</div></div>
<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/lfw_screenshot_03.jpg' alt=' "The 1st place in face verification challenge, LFW"'><div class='caption'> "The 1st place in face verification challenge, LFW"</div></div></section><section><p>In benchmarking, companies use a dataset to evaluate their algorithms which are typically trained on other data. After training, researchers will use LFW as a benchmark to compare results with other algorithms.</p>
<p>For example, Baidu (est. net worth $13B) uses LFW to report results for their "Targeting Ultimate Accuracy: Face Recognition via Deep Embedding". According to the three Baidu researchers who produced the paper:</p>
-<h2>Citations</h2>
+<h3>Citations</h3>
<p>Overall, LFW has at least 456 citations from 123 countries. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. Nemo enim ipsam voluptatem, quia voluptas sit, aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos.</p>
<p>Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. Nemo enim ipsam voluptatem, quia voluptas sit, aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos.</p>
-</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/temp_graph.jpg' alt='Distribution of citations per year per country for the top 5 countries with citations for the LFW Dataset'><div class='caption'>Distribution of citations per year per country for the top 5 countries with citations for the LFW Dataset</div></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "map"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "citations"}'></div></section><section><h2>Conclusion</h2>
+</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/temp_graph.jpg' alt='Distribution of citations per year per country for the top 5 countries with citations for the LFW Dataset'><div class='caption'>Distribution of citations per year per country for the top 5 countries with citations for the LFW Dataset</div></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "map"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "citations"}'></div></section><section><h3>Conclusion</h3>
<p>The LFW face recognition training and evaluation dataset is a historically important face dataset as it was the first popular dataset to be created entirely from Internet images, paving the way for a global trend towards downloading anyone’s face from the Internet and adding it to a dataset. As will be evident with other datasets, LFW’s approach has now become the norm.</p>
<p>For all the 5,000 people in this datasets, their face is forever a part of facial recognition history. It would be impossible to remove anyone from the dataset because it is so ubiquitous. For their rest of the lives and forever after, these 5,000 people will continue to be used for training facial recognition surveillance.</p>
<h2>Right to Removal</h2>
diff --git a/site/public/datasets/vgg_face2/index.html b/site/public/datasets/vgg_face2/index.html
index 63715a4f..6e6c7ac1 100644
--- a/site/public/datasets/vgg_face2/index.html
+++ b/site/public/datasets/vgg_face2/index.html
@@ -30,7 +30,7 @@
<section><h1>VGG Faces2</h1>
</section><section><div class='meta'><div><div class='gray'>Created</div><div>2018</div></div><div><div class='gray'>Images</div><div>3.3M</div></div><div><div class='gray'>People</div><div>9,000</div></div><div><div class='gray'>Created From</div><div>Scraping search engines</div></div><div><div class='gray'>Search available</div><div>[Searchable](#)</div></div></div></section><section><p>VGG Face2 is the updated version of the VGG Face dataset and now includes over 3.3M face images from over 9K people. The identities were selected by taking the top 500K identities in Google's Knowledge Graph of celebrities and then selecting only the names that yielded enough training images. The dataset was created in the UK but funded by Office of Director of National Intelligence in the United States.</p>
-</section><section class='applet_container'><div class='applet' data-payload='{"command": "face_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "name_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file assets/lfw_names_gender_kg_min.csv", "fields": ["Name, Images, Gender, Description"]}'></div></section><section><h2>VGG Face2 by the Numbers</h2>
+</section><section class='applet_container'><div class='applet' data-payload='{"command": "face_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "name_search"}'></div></section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file assets/lfw_names_gender_kg_min.csv", "fields": ["Name, Images, Gender, Description"]}'></div></section><section><h3>VGG Face2 by the Numbers</h3>
<ul>
<li>1,331 actresses, 139 presidents</li>
<li>3 husbands and 16 wives</li>
@@ -39,14 +39,14 @@
<li>1 pornographic actress</li>
<li>3 computer programmer</li>
</ul>
-<h1>Names and descriptions</h1>
+<h3>Names and descriptions</h3>
<ul>
<li>The original VGGF2 name list has been updated with the results returned from Google Knowledge</li>
<li>Names with a similarity score greater than 0.75 where automatically updated. Scores computed using <code>import difflib; seq = difflib.SequenceMatcher(a=a.lower(), b=b.lower()); score = seq.ratio()</code></li>
<li>The 97 names with a score of 0.75 or lower were manually reviewed and includes name changes validating using Wikipedia.org results for names such as "Bruce Jenner" to "Caitlyn Jenner", spousal last-name changes, and discretionary changes to improve search results such as combining nicknames with full name when appropriate, for example changing "Aleksandar Petrović" to "Aleksandar 'Aco' Petrović" and minor changes such as "Mohammad Ali" to "Muhammad Ali"</li>
<li>The 'Description' text was automatically added when the Knowledge Graph score was greater than 250</li>
</ul>
-<h1>TODO</h1>
+<h2>TODO</h2>
<ul>
<li>create name list, and populate with Knowledge graph information like LFW</li>
<li>make list of interesting number stats, by the numbers</li>
diff --git a/site/public/research/00_introduction/index.html b/site/public/research/00_introduction/index.html
index 7b132cd5..edaf8206 100644
--- a/site/public/research/00_introduction/index.html
+++ b/site/public/research/00_introduction/index.html
@@ -46,7 +46,7 @@
<section><div class='meta'><div><div class='gray'>Posted</div><div>Dec. 15</div></div><div><div class='gray'>Author</div><div>Adam Harvey</div></div></div></section><section><p>It was the early 2000s. Face recognition was new and no one seemed sure exactly how well it was going to perform in practice. In theory, face recognition was poised to be a game changer, a force multiplier, a strategic military advantage, a way to make cities safer and to secure borders. This was the future John Ashcroft demanded with the Total Information Awareness act of the 2003 and that spooks had dreamed of for decades. It was a future that academics at Carnegie Mellon Universtiy and Colorado State University would help build. It was also a future that celebrities would play a significant role in building. And to the surprise of ordinary Internet users like myself and perhaps you, it was a future that millions of Internet users would unwittingly play role in creating.</p>
<p>Now the future has arrived and it doesn't make sense. Facial recognition works yet it doesn't actually work. Facial recognition is cheap and accessible but also expensive and out of control. Facial recognition research has achieved headline grabbing superhuman accuracies over 99.9% yet facial recognition is also dangerously inaccurate. During a trial installation at Sudkreuz station in Berlin in 2018, 20% of the matches were wrong, a number so low that it should not have any connection to law enforcement or justice. And in London, the Metropolitan police had been using facial recognition software that mistakenly identified an alarming 98% of people as criminals <sup class="footnote-ref" id="fnref-met_police"><a href="#fn-met_police">1</a></sup>, which perhaps is a crime itself.</p>
<p>MegaPixels is an online art project that explores the history of facial recognition from the perspective of datasets. To paraphrase the artist Trevor Paglen, whoever controls the dataset controls the meaning. MegaPixels aims to unravel the meanings behind the data and expose the darker corners of the biometric industry that have contributed to its growth. MegaPixels does not start with a conclusion, a moralistic slant, or a</p>
-<p>Whether or not to build facial recognition was a question that can no longer be asked. As an outspoken critic of face recognition I've developed, and hopefully furthered, my understanding during the last 10 years I've spent working with computer vision. Though I initially disagreed, I've come to see technocratic perspective as a non-negotiable reality. As Oren (nytimes article) wrote in NYT Op-Ed "the horse is out of the barn" and the only thing we can do collectively or individually is to steer towards the least worse outcome. Computational communication has entered a new era and it's both exciting and frightening to explore the potentials and opportunities. In 1997 getting access to 1 teraFLOPS of computational power would have cost you $55 million and required a strategic partnership with the Department of Defense. At the time of writing, anyone can rent 1 teraFLOPS on a cloud GPU marketplace for less than $1/day. <sup class="footnote-ref" id="fnref-asci_option_red"><a href="#fn-asci_option_red">2</a></sup>.</p>
+<p>Whether or not to build facial recognition was a question that can no longer be asked. As an outspoken critic of face recognition I've developed, and hopefully furthered, my understanding during the last 10 years I've spent working with computer vision. Though I initially disagreed, I've come to see technocratic perspective as a non-negotiable reality. As Oren (nytimes article) wrote in NYT Op-Ed "the horse is out of the barn" and the only thing we can do collectively or individually is to steer towards the least worse outcome. Computational communication has entered a new era and it's both exciting and frightening to explore the potentials and opportunities. In 1997 getting access to 1 teraFLOPS of computational power would have cost you $55 million and required a strategic partnership with the Department of Defense. At the time of writing, anyone can rent 1 teraFLOPS on a cloud GPU marketplace for less than $1/day. <sup class="footnote-ref" id="fnref-asci_option_red"><a href="#fn-asci_option_red">2</a></sup>.</p>
<p>I hope that this project will illuminate the darker areas of strange world of facial recognition that have not yet received attention and encourage discourse in academic, industry, and . By no means do I believe discourse can save the day. Nor do I think creating artwork can. In fact, I'm not exactly sure what the outcome of this project will be. The project is not so much what I publish here but what happens after. This entire project is only a prologue.</p>
<p>As McLuhan wrote, "You can't have a static, fixed position in the electric age". And in our hyper-connected age of mass surveillance, artificial intelligece, and unevenly distributed virtual futures the most irrational thing to be is rational. Increasingly the world is becoming a contradiction where people use surveillance to protest surveillance, use</p>
<p>Like many projects, MegaPixels had spent years meandering between formats, unfeasible budgets, and was generally too niche of a subject. The basic idea for this project, as proposed to the original <a href="https://tacticaltech.org/projects/the-glass-room-nyc/">Glass Room</a> installation in 2016 in NYC, was to build an interactive mirror that showed people if they had been included in the <a href="/datasets/lfw">LFW</a> facial recognition dataset. The idea was based on my reaction to all the datasets I'd come across during research for the CV Dazzle project. I'd noticed strange datasets created for training and testing face detection algorithms. Most were created in labratory settings and their interpretation of face data was very strict.</p>
@@ -55,10 +55,11 @@
<p>About me</p>
<p>About the team</p>
<p>Conclusion</p>
-<h2>for other post</h2>
+<h3>for other post</h3>
<p>It was the early 2000s. Face recognition was new and no one seemed sure how well it was going to perform in practice. In theory, face recognition was poised to be a game changer, a force multiplier, a strategic military advantage, a way to make cities safer and to secure the borders. It was the future that John Ashcroft demanded with the Total Information Awareness act of the 2003. It was a future that academics helped build. It was a future that celebrities helped build. And it was a future that</p>
<p>A decade earlier the Department of Homeland Security and the Counterdrug Technology Development Program Office initated a feasibilty study called FERET (FacE REcognition Technology) to "develop automatic face recognition capabilities that could be employed to assist security, intelligence, and law enforcement personnel in the performance of their duties [^feret_website]."</p>
<p>One problem with FERET dataset was that the photos were in controlled settings. For face recognition to work it would have to be used in uncontrolled settings. Even newer datasets such as the Multi-PIE (Pose, Illumination, and Expression) from Carnegie Mellon University included only indoor photos of cooperative subjects. Not only were the photos completely unrealistic, CMU's Multi-Pie included only 18 individuals and cost $500 for academic use [^cmu_multipie_cost], took years to create, and required consent from every participant.</p>
+<h2>Add progressive gan of FERET</h2>
<div class="footnotes">
<hr>
<ol><li id="fn-met_police"><p>Sharman, Jon. "Metropolitan Police's facial recognition technology 98% inaccurate, figures show". 2018. <a href="https://www.independent.co.uk/news/uk/home-news/met-police-facial-recognition-success-south-wales-trial-home-office-false-positive-a8345036.html">https://www.independent.co.uk/news/uk/home-news/met-police-facial-recognition-success-south-wales-trial-home-office-false-positive-a8345036.html</a><a href="#fnref-met_police" class="footnote">&#8617;</a></p></li>
diff --git a/site/public/research/01_from_1_to_100_pixels/index.html b/site/public/research/01_from_1_to_100_pixels/index.html
index aac4b7e1..e24e5d9a 100644
--- a/site/public/research/01_from_1_to_100_pixels/index.html
+++ b/site/public/research/01_from_1_to_100_pixels/index.html
@@ -43,7 +43,7 @@
</div>
</section>
- <section><h2>High resolution insights from low resolution data</h2>
+ <section><h3>High resolution insights from low resolution data</h3>
<p>This post will be about the meaning of "face". How do people define it? How do biometrics researchers define it? How has it changed during the last decade?</p>
<p>What can you know from a very small amount of information?</p>
<ul>
@@ -64,7 +64,7 @@
<li>100x100 0.5% of one Instagram photo</li>
</ul>
<p>Find specific cases of facial resolution being used in legal cases, forensic investigations, or military footage</p>
-<p>Research</p>
+<h3>Research</h3>
<ul>
<li>NIST report on sres states several resolutions</li>
<li>"Results show that the tested face recognition systems yielded similar performance for query sets with eye-to-eye distance from 60 pixels to 30 pixels" <sup class="footnote-ref" id="fnref-nist_sres"><a href="#fn-nist_sres">1</a></sup></li>
diff --git a/site/public/research/index.html b/site/public/research/index.html
index dc69cdc5..cfaa039b 100644
--- a/site/public/research/index.html
+++ b/site/public/research/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Research Blog</h1>
-<h2>The darkside of datasets and the future of computer vision</h2>
+<h3>The darkside of datasets and the future of computer vision</h3>
</section><div class='research_index'><a href='/research/00_introduction/'><section class='wide'><img src='data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==' alt='Research post' /><section><h1>00: Introduction</h1><h2></h2></section></section></a><a href='/research/01_from_1_to_100_pixels/'><section class='wide'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/research/01_from_1_to_100_pixels/assets/intro.jpg' alt='Research post' /><section><h1>From 1 to 100 Pixels</h1><h2>Photographs are for romantics. For the rest of us, it's all about data. And a photo contains a massive amount of information about who you are.</h2></section></section></a></div>
</div>
diff --git a/site/public/test/citations/index.html b/site/public/test/citations/index.html
index c2bed996..60860190 100644
--- a/site/public/test/citations/index.html
+++ b/site/public/test/citations/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Citations</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='applet_container'><div class='applet' data-payload='{"command": "citations lfw"}'></div></section>
</div>
diff --git a/site/public/test/csv/index.html b/site/public/test/csv/index.html
index e53c1421..b9a0ba7b 100644
--- a/site/public/test/csv/index.html
+++ b/site/public/test/csv/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>CSV Test</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file /datasets/lfw/assets/lfw_names_gender_kg_min.csv", "fields": ["Name, Images, Gender, Description"]}'></div></section>
</div>
diff --git a/site/public/test/datasets/index.html b/site/public/test/datasets/index.html
index 421ecb97..e310bc48 100644
--- a/site/public/test/datasets/index.html
+++ b/site/public/test/datasets/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Index of datasets</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file https://megapixels.nyc3.digitaloceanspaces.com/v1/citations/datasets.csv"}'></div></section>
</div>
diff --git a/site/public/test/face_search/index.html b/site/public/test/face_search/index.html
index 1823318d..569b4d41 100644
--- a/site/public/test/face_search/index.html
+++ b/site/public/test/face_search/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Face search</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='applet_container'><div class='applet' data-payload='{"command": "face_search lfw"}'></div></section>
</div>
diff --git a/site/public/test/gallery/index.html b/site/public/test/gallery/index.html
index 8ead03eb..14f10b7c 100644
--- a/site/public/test/gallery/index.html
+++ b/site/public/test/gallery/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Gallery test</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='Modal image 1'><div class='caption'>Modal image 1</div></div>
<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='Modal image 2'><div class='caption'>Modal image 2</div></div>
<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='Modal image 3'><div class='caption'>Modal image 3</div></div></section>
diff --git a/site/public/test/index.html b/site/public/test/index.html
index 62837b3b..b4d16036 100644
--- a/site/public/test/index.html
+++ b/site/public/test/index.html
@@ -30,15 +30,13 @@
<section><h1>Megapixels UI Tests</h1>
<ul>
-<li><p><a href="/test/style">Style Guide</a></p>
-</li>
-<li><p><a href="/test/csv">CSV</a></p>
-</li>
+<li><a href="/test/style">Style Guide</a></li>
+<li><a href="/test/csv">CSV</a></li>
<li><a href="/test/datasets/">Dataset list</a></li>
<li><a href="/test/citations/">Citation list</a></li>
<li><a href="/test/map/">Citation map</a></li>
-<li><a href="/test/search_face/">Face search</a></li>
-<li><a href="/test/search_name/">Name search</a></li>
+<li><a href="/test/face_search/">Face search</a></li>
+<li><a href="/test/name_search/">Name search</a></li>
<li><a href="/test/gallery/">Modal image gallery</a></li>
</ul>
</section>
diff --git a/site/public/test/map/index.html b/site/public/test/map/index.html
index c1f67471..206aef5a 100644
--- a/site/public/test/map/index.html
+++ b/site/public/test/map/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Map test</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='applet_container'><div class='applet' data-payload='{"command": "map lfw"}'></div></section>
</div>
diff --git a/site/public/test/name_search/index.html b/site/public/test/name_search/index.html
index db38ba04..1b6769c8 100644
--- a/site/public/test/name_search/index.html
+++ b/site/public/test/name_search/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Name search</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='applet_container'><div class='applet' data-payload='{"command": "name_search lfw"}'></div></section>
</div>
diff --git a/site/public/test/style/index.html b/site/public/test/style/index.html
index 3ef7d918..6d99a236 100644
--- a/site/public/test/style/index.html
+++ b/site/public/test/style/index.html
@@ -29,7 +29,7 @@
<div class="content">
<section><h1>Style Examples</h1>
-<h2><a href="/test/">&larr; Back to test index</a></h2>
+<h3><a href="/test/">&larr; Back to test index</a></h3>
</section><section class='wide'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/test.jpg' alt='Style Guide Test'><div class='caption'>Style Guide Test</div></div></section><section><div class='meta'><div><div class='gray'>Date</div><div>17-Jan-2019</div></div><div><div class='gray'>Numbers</div><div>17</div></div><div><div class='gray'>Identities</div><div>12,139</div></div><div><div class='gray'>But also</div><div>This is a test of the stylesheet</div></div></div></section><section><h1>Header 1</h1>
<h2>Header 2</h2>
<h3>Header 3</h3>
@@ -45,10 +45,10 @@
<li>Odit aut fugit, sed quia consequuntur magni dolores eos</li>
<li>Qui ratione voluptatem sequi nesciunt, neque porro quisquam </li>
</ul>
-<h2>single image test</h2>
-</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='This person is alone'><div class='caption'>This person is alone</div></div></section><section><h2>double image test</h2>
+<h3>single image test</h3>
+</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='This person is alone'><div class='caption'>This person is alone</div></div></section><section><h3>double image test</h3>
</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='This person is on the left'><div class='caption'>This person is on the left</div></div>
-<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='This person is on the right'><div class='caption'>This person is on the right</div></div></section><section><h2>triple image test</h2>
+<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='This person is on the right'><div class='caption'>This person is on the right</div></div></section><section><h3>triple image test</h3>
</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='Person 1'><div class='caption'>Person 1</div></div>
<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='Person 2'><div class='caption'>Person 2</div></div>
<div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/site/test/assets/man.jpg' alt='Person 3. Let me tell you about Person 3. This person has a very long description with text which wraps like crazy'><div class='caption'>Person 3. Let me tell you about Person 3. This person has a very long description with text which wraps like crazy</div></div></section><section><blockquote><p>est, qui dolorem ipsum, quia dolor sit amet consectetur adipisci[ng] velit, sed quia non-numquam [do] eius modi tempora inci[di]dunt, ut labore et dolore magnam aliquam quaerat voluptatem.</p>