diff options
| author | jules@lens <julescarbon@gmail.com> | 2019-04-02 20:36:51 +0200 |
|---|---|---|
| committer | jules@lens <julescarbon@gmail.com> | 2019-04-02 20:36:51 +0200 |
| commit | 1d238346b5609e9454a4917c75631a550b5b43d1 (patch) | |
| tree | 8a936e721e78c7b5948b303e6a1686c96b882d51 /site | |
| parent | b4b58f2279fb01fa0240006c460c0b5ec95c1126 (diff) | |
| parent | f58d41731fc07d94d594d5582aef203564f990ec (diff) | |
Merge branch 'master' of asdf.us:megapixels_dev
Diffstat (limited to 'site')
84 files changed, 1687 insertions, 1259 deletions
diff --git a/site/assets/css/applets.css b/site/assets/css/applets.css index e5c427b2..7fac3e27 100644 --- a/site/assets/css/applets.css +++ b/site/assets/css/applets.css @@ -3,6 +3,7 @@ .applet_container { min-height: 340px; clear: left; + margin: 20px auto 40px auto; } .applet_container.autosize { min-height: 0; @@ -168,6 +169,10 @@ .desktop .map_cover:hover { opacity: 1; } +.leaflet-popup-content-wrapper { + max-height: 230px; + overflow-y: auto; +} .leaflet-popup-content a { color: #0078A8; border-bottom: 1px solid rgba(0, 128, 160, 0.4); @@ -218,6 +223,10 @@ } .piechart .chart > div { width: 50%; + display: flex; + flex-direction: column; + justify-content: space-between; + align-items: center; } .piechart .chart .c3-chart-arc text { fill: #fff; @@ -227,4 +236,10 @@ } .piechart .c3 path, .piechart .c3 line { stroke: rgba(64,64,64,0.3); -}
\ No newline at end of file +} +.piechart .chartCaption { + color: #888; + font-size: 12px; + font-family: 'Roboto', sans-serif; + font-weight: 400; +} diff --git a/site/assets/css/css.css b/site/assets/css/css.css index 47fc0652..824819a9 100644 --- a/site/assets/css/css.css +++ b/site/assets/css/css.css @@ -1,10 +1,11 @@ -da* { box-sizing: border-box; outline: 0; } +* { box-sizing: border-box; outline: 0; } html, body { margin: 0; padding: 0; width: 100%; min-height: 100%; - font-family: 'Roboto Mono', sans-serif; + /*font-family: 'Roboto Mono', sans-serif;*/ + font-family: 'Roboto', sans-serif; color: #eee; overflow-x: hidden; } @@ -63,10 +64,16 @@ header .site_name { line-height: 11px; letter-spacing: 3px; } -header .site_name.splash{ - font-size: 20px; - line-height: 20px; - font-weight: 400; +header .splash{ + font-size: 15px; + font-weight: 400; + color: #888; + padding-left: 8px; + opacity: 0; + transition: 0.3s opacity cubic-bezier(0,0,1,1); +} +.scrolled header .splash { + opacity: 1; } header .links{ font-size: 18px; @@ -163,33 +170,33 @@ h1 { margin: 75px 0 10px; padding: 0; transition: color 0.1s cubic-bezier(0,0,1,1); - font-family: 'Roboto'; + font-family: 'Roboto Mono', monospace; } h2 { color: #eee; font-weight: 400; - font-size: 32pt; - line-height: 43pt; - margin: 20px 0 10px; + font-size: 32px; + line-height: 43px; + margin: 20px 0 20px; padding: 0; transition: color 0.1s cubic-bezier(0,0,1,1); - font-family: 'Roboto'; + font-family: 'Roboto Mono', monospace; } h3 { margin: 0 0 20px 0; padding: 20px 0 0 0; font-size: 22pt; - font-weight: 500; + font-weight: 400; transition: color 0.1s cubic-bezier(0,0,1,1); - font-family: 'Roboto'; + font-family: 'Roboto Mono', monospace; } h4 { margin: 0 0 10px 0; padding: 0; font-size: 11pt; - font-weight: 500; + font-weight: 400; transition: color 0.1s cubic-bezier(0,0,1,1); - font-family: 'Roboto'; + font-family: 'Roboto Mono', monospace; } .content h3 a { color: #888; @@ -212,11 +219,11 @@ h4 { 
border-bottom: 0; } th, .gray { - font-family: 'Roboto Mono', monospace; + font-family: 'Roboto', monospace; font-weight: 500; text-transform: uppercase; letter-spacing: .15rem; - color: #999; + color: #777; } th, .gray { font-size: 9pt; @@ -248,8 +255,9 @@ section { p { margin: 0 10px 20px 0; line-height: 2; - font-size: 16px; + font-size: 18px; font-weight: 300; + color: #dedede; } p.subp{ font-size: 14px; @@ -272,18 +280,16 @@ p.subp{ flex-direction: row; justify-content: flex-start; align-items: flex-start; - font-size: 14px; + font-size: 12px; + color: #ccc; margin-bottom: 20px; font-family: 'Roboto', sans-serif; -} -.meta > div { margin-right: 20px; - line-height: 19px - /*font-size:11px;*/ + line-height: 17px } .meta .gray { font-size: 9pt; - padding-bottom: 4px; + padding-bottom: 5px; line-height: 14px } .right-sidebar { @@ -303,7 +309,7 @@ p.subp{ padding-top: 10px; padding-right: 20px; /*margin-right: 20px;*/ - margin-bottom: 30px; + margin-bottom: 10px; /*border-right: 1px solid #444;*/ font-family: 'Roboto'; font-size: 14px; @@ -313,12 +319,6 @@ p.subp{ .left-sidebar .meta, .right-sidebar .meta { flex-direction: column; } -.right-sidebar .meta > div { - margin-bottom: 10px; -} -.left-sidebar .meta > div { - margin-bottom: 15px; -} .right-sidebar ul { margin-bottom: 10px; color: #aaa; @@ -345,6 +345,9 @@ ul { } ul li { margin-bottom: 8px; + color: #dedede; + font-weight: 300; + font-size: 13px; } /* misc formatting */ @@ -370,6 +373,35 @@ pre code { table { margin-bottom: 40px; } +table thead{ + text-align: left; +} +table, tr, td, th { + border: none; + border-collapse:collapse; +} +.chart table tr{ + width: auto; +} +table tr{ + display:table; + table-layout: fixed; + width:100%; +} +table td, table th{ + padding:10px; + +} +table tr td{ + font-size:12px; +} +table tbody tr:nth-child(odd){ + background-color:#292929; +} +table tbody tr:nth-child(even){ + background-color:#333; +} + hr { height: 1px; background: #888; @@ -464,10 +496,10 @@ 
section.fullwidth .image { text-align: center; } .image .caption.intro-caption{ - text-align: left; + text-align: center; } .caption { - text-align: left; + text-align: center; font-size: 10pt; color: #999; max-width: 960px; @@ -484,50 +516,6 @@ section.fullwidth .image { } -/* about page */ - - -.flex-container { - padding: 0; - margin: 0; - list-style: none; - - display: -webkit-box; - display: -moz-box; - display: -ms-flexbox; - display: -webkit-flex; - display: flex; - - -webkit-flex-flow: row wrap; - justify-content: space-around; -} - -.team-photos-container{ - margin-top:40px; -} -.team-member { - height: auto; - margin-top: 10px; - color: white; - width: 400px; - font-weight: bold; - flex-grow: 1; - margin:0 40px 0 0; - padding:20px; - border-radius:6px; - background: #202020; -} -.team-member&:last-child{ - margin:0 0 0 40px; -} -.team-member p{ - font-size:14px; -} -.team-member img{ - margin:0; - display: block; -} - .sideimage { margin: 0px 0 40px 0; @@ -771,7 +759,7 @@ section.intro_section { background-size: cover; background-position: center left; padding: 50px 0; - /*min-height: 40vh;*/ + min-height: 40vh; display: flex; justify-content: center; align-items: center; @@ -829,7 +817,7 @@ page-specific formatting position: relative; width: 100%; height: 50px; - margin-top:100px; + margin-top:50px; } .hr-wave-line1 { @@ -848,39 +836,59 @@ page-specific formatting /* map */ -.map-legend-item{ +ul.map-legend{ display: inline-block; + margin:0; + font-size:14px; +} +ul.map-legend li{ margin-right: 10px; padding-top:4px; + display: inline-block; } -.map-legend-item:before { +ul.map-legend li:before { content: ''; display: inline-block; width: 10px; height: 10px; - /*margin-bottom: 2px;*/ margin-right: 6px; } -.map-legend-item.edu:before { +ul.map-legend li.edu:before { background-color: #f2f293; } -.map-legend-item.com:before { +ul.map-legend li.com:before { background-color: #3264f6; } -.map-legend-item.gov:before { +ul.map-legend li.gov:before { 
background-color: #f30000; } +ul.map-legend li.source:before { + content: none; + display: inline-block; + width: 0px; + margin-right: 10px; +} /* about */ .content-about { color: #fff; } +.content-about p { + font-size: 16px; +} +.content-about ul{ + font-size: 14px; + font-weight: 300; +} +.content-about section:first-of-type > p:first-of-type { + font-size: 23px; + line-height: 39px; +} .content-about .about-menu ul li { display: inline-block; font-size: 14px; font-weight: 400; - margin-right: 8px; text-transform: uppercase; margin-right: 15px; font-family: 'Roboto'; @@ -895,6 +903,84 @@ page-specific formatting color: #ddd; } +/* columns */ + +.columns { + display: flex; + flex-direction: row; + justify-content: flex-start; +} +.content-about .columns .column{ + /* on the about page use padding and bg color */ + background: #202020; + padding: 20px; +} +.columns .column { + font-family: Roboto, sans-serif; + font-weight: 400; + margin: 10px; +} +.columns .column:first-of-type { + margin-left: 0; +} +.columns-2 .column { + width: 50%; +} +.columns-3 .column { + width: 33%; +} +.columns-4 .column { + width: 25%; +} + +/* about page */ + + +.flex-container { + padding: 0; + margin: 0; + list-style: none; + + display: -webkit-box; + display: -moz-box; + display: -ms-flexbox; + display: -webkit-flex; + display: flex; + + -webkit-flex-flow: row wrap; + justify-content: space-around; +} + +.content-about .team-photos-container{ + margin:40px 0; +} +.content-about .team-member { + height: auto; + margin-top: 10px; + color: white; + width: 400px; + font-weight: bold; + flex-grow: 1; + margin:0 40px 0 0; + padding:20px; + border-radius:6px; + background: #202020; +} +.content-about .team-member&:last-child{ + margin:0 0 0 40px; +} +.content-about .team-member p{ + font-size:16px; +} +.content-about .team-member img{ + margin:0; + display: block; +} +.content-about .team-member h3{ + font-size:18px; +} + + /* footnotes */ @@ -944,4 +1030,10 @@ ul.footnotes { li p { 
margin: 0; padding: 0; display: inline; +} + +/* citation browser */ + +.citationHeader { + padding-bottom: 10px }
\ No newline at end of file diff --git a/site/assets/css/tabulator.css b/site/assets/css/tabulator.css index 41c7ffa4..d26b5cfc 100755 --- a/site/assets/css/tabulator.css +++ b/site/assets/css/tabulator.css @@ -1,7 +1,7 @@ /* Tabulator v4.1.3 (c) Oliver Folkerd */ .tabulator { position: relative; - font-size: 14px; + font-size: 13px; text-align: left; overflow: hidden; -ms-transform: translatez(0); @@ -23,7 +23,7 @@ position: relative; box-sizing: border-box; width: 100%; - border-bottom: 1px solid #999; + border-bottom: 1px solid #333; color: #ddd; white-space: nowrap; overflow: hidden; @@ -41,6 +41,7 @@ text-align: left; vertical-align: bottom; overflow: hidden; + padding: 5px 0; } .tabulator .tabulator-header .tabulator-col.tabulator-moving { @@ -63,6 +64,9 @@ overflow: hidden; text-overflow: ellipsis; vertical-align: bottom; + /* AH */ + font-weight: 500; + font-size:14px; } .tabulator .tabulator-header .tabulator-col .tabulator-col-content .tabulator-col-title .tabulator-title-editor { @@ -76,13 +80,13 @@ .tabulator .tabulator-header .tabulator-col .tabulator-col-content .tabulator-arrow { display: inline-block; position: absolute; - top: 9px; + top: 11px; right: 8px; width: 0; height: 0; border-left: 6px solid transparent; border-right: 6px solid transparent; - border-bottom: 6px solid #bbb; + border-bottom: 6px solid #666; } .tabulator .tabulator-header .tabulator-col.tabulator-col-group .tabulator-col-group-cols { @@ -129,6 +133,7 @@ .tabulator .tabulator-header .tabulator-col.tabulator-sortable .tabulator-col-title { padding-right: 25px; + padding-left: 6px; } .tabulator .tabulator-header .tabulator-col.tabulator-sortable:hover { @@ -137,7 +142,7 @@ .tabulator .tabulator-header .tabulator-col.tabulator-sortable[aria-sort="none"] .tabulator-col-content .tabulator-arrow { border-top: none; - border-bottom: 6px solid #bbb; + border-bottom: 6px solid #666; } .tabulator .tabulator-header .tabulator-col.tabulator-sortable[aria-sort="asc"] .tabulator-col-content 
.tabulator-arrow { @@ -723,7 +728,7 @@ max-height: 200px; background: #fff; border: 1px solid #333; - font-size: 14px; + font-size: 13px; overflow-y: auto; -webkit-overflow-scrolling: touch; z-index: 10000; diff --git a/site/assets/img/icon-search.png b/site/assets/img/icon-search.png Binary files differnew file mode 100644 index 00000000..43684627 --- /dev/null +++ b/site/assets/img/icon-search.png diff --git a/site/content/pages/about/credits.md b/site/content/pages/about/credits.md deleted file mode 100644 index bc2283fd..00000000 --- a/site/content/pages/about/credits.md +++ /dev/null @@ -1,48 +0,0 @@ ------------- - -status: published -title: MegaPixels Press and News -desc: MegaPixels Press and News -slug: press -cssclass: about -published: 2018-12-04 -updated: 2018-12-04 -authors: Adam Harvey - ------------- - -# Credits - -<section class="about-menu"> -<ul> - <li><a href="/about/">About</a></li> - <li><a href="/about/press/">Press</a></li> - <li><a class="current" href="/about/credits/">Credits</a></li> - <li><a href="/about/disclaimer/">Disclaimer</a></li> - <li><a href="/about/terms/">Terms and Conditions</a></li> - <li><a href="/about/privacy/">Privacy Policy</a></li> -</ul> -</section> - - -#### Team - -- Research and image analysis: Adam Harvey -- Development and visualizations: Jules LaPlace -- Produced in Partnership with Mozilla -- Contributing researchers: Berit Gilma, Mathana Stender - -#### Code - -<<<<<<< HEAD -- This site uses D3.js, C3.js, and ThreeJS for visualizations. -- Data aggregation uses Pandas and PDFMiner.Six. -======= -- This site uses D3 and C2 for visuzations -- Add more here ->>>>>>> 26646e6adf3833f6282e9515c14ad61e485440c0 - -#### Data - -- link to github -- how it was gathered
\ No newline at end of file diff --git a/site/content/pages/about/disclaimer.md b/site/content/pages/about/disclaimer.md deleted file mode 100644 index f82a09a0..00000000 --- a/site/content/pages/about/disclaimer.md +++ /dev/null @@ -1,41 +0,0 @@ ------------- - -status: published -title: Disclaimer -desc: MegaPixels Disclaimer -slug: disclaimer -cssclass: about -published: 2018-12-04 -updated: 2018-12-04 -authors: Adam Harvey - ------------- - -# Disclaimer - -<section class="about-menu"> -<ul> -<li><a href="/about/">About</a></li> -<li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a class="current" href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> -</ul> -</section> - -(TEMPORARY PAGE) - -Last updated: December 04, 2018 - -The information contained on MegaPixels.cc website (the "Service") is for academic and artistic purposes only. - -MegaPixels.cc assumes no responsibility for errors or omissions in the contents on the Service. - -In no event shall MegaPixels.cc be liable for any special, direct, indirect, consequential, or incidental damages or any damages whatsoever, whether in an action of contract, negligence or other tort, arising out of or in connection with the use of the Service or the contents of the Service. MegaPixels.cc reserves the right to make additions, deletions, or modification to the contents on the Service at any time without prior notice. - -## External links disclaimer - -MegaPixels.cc website may contain links to external websites that are not provided or maintained by or in any way affiliated with MegaPixels.cc - -Please note that the MegaPixels.cc does not guarantee the accuracy, relevance, timeliness, or completeness of any information on these external websites. 
diff --git a/site/content/pages/about/faq.md b/site/content/pages/about/faq.md new file mode 100644 index 00000000..8b780262 --- /dev/null +++ b/site/content/pages/about/faq.md @@ -0,0 +1,26 @@ +------------ + +status: published +title: MegaPixels Press and News +desc: MegaPixels Press and News +slug: faqs +cssclass: about +published: 2018-12-04 +updated: 2018-12-04 +authors: Adam Harvey + +------------ + +# FAQs + +<section class="about-menu"> +<ul> +<li><a href="/about/">About</a></li> +<li><a class="current" href="/about/faq/">FAQs</a></li> +<li><a href="/about/press/">Press</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> +</ul> +</section> + +[ page under development ] diff --git a/site/content/pages/about/index.md b/site/content/pages/about/index.md index 4fec0777..5fc147c9 100644 --- a/site/content/pages/about/index.md +++ b/site/content/pages/about/index.md @@ -16,41 +16,63 @@ authors: Adam Harvey <section class="about-menu"> <ul> <li><a class="current" href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> <li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> </ul> </section> -(PAGE UNDER DEVELOPMENT) +MegaPixels is an independent art and research project by Adam Harvey and Jules LaPlace investigating the ethics and individual privacy implications of publicly available face recognition datasets, and their role in industry and governmental expansion into biometric surveillance technologies. 
-<p><div style="font-size:20px;line-height:36px">Ever since government agencies began developing face recognition in the early 1960's, datasets of face images have always been central to technological advancements. Today, these datasets no longer originate in labs, but instead from family photo albums posted on photo sharing sites, surveillance cameras on college campuses, search engine queries for celebrities, cafe livestreams, and <a href="https://www.theverge.com/2017/8/22/16180080/transgender-youtubers-ai-facial-recognition-dataset">personal videos</a> posted on YouTube. </div></p> - -Collectively, facial recognition datasets are now gathered "in the wild". - -<p>MegaPixels is art and research by <a href="https://ahprojects.com">Adam Harvey</a> about facial recognition datasets that unravels their histories, futures, geographies, and meanings. Throughout 2019 this site this site will publish research reports, visualizations, raw data, and interactive tools to explore how publicly available facial recognition datasets contribute to a global supply chain of biometric data that powers the global facial recognition industry.</p> - -During the last year, hundreds of these facial analysis datasets created "in the wild" have been collected to understand how they contribute to a global supply chain of biometric data that is powering the global facial recognition industry. - -The MegaPixels website is produced in partnership with [Mozilla](https://mozilla.org). +The MegaPixels site is made possible with support from <a href="http://mozilla.org">Mozilla</a> <div class="flex-container team-photos-container"> <div class="team-member"> - <img src="https://nyc3.digitaloceanspaces.com/megapixels/v1/site/about/assets/adam-harvey-3d.jpg" /> <h3>Adam Harvey</h3> - <p>is Berlin-based American artist and researcher. His previous projects (CV Dazzle, Stealth Wear, and SkyLift) explore the potential for countersurveillance as artwork. 
He is the founder of VFRAME (visual forensics software for human rights groups), the recipient of 2 PrototypeFund grants, and is a researcher in residence at Karlsruhe HfG. - <br> - <a href="https://ahprojects.com">ahprojects.com</a> + <p>is Berlin-based American artist and researcher. His previous projects (CV Dazzle, Stealth Wear, and SkyLift) explore the potential for counter-surveillance as artwork. He is the founder of VFRAME (visual forensics software for human rights groups) and is a currently researcher in residence at Karlsruhe HfG.</p> + <p><a href="https://ahprojects.com">ahprojects.com</a></p> </p> </div> <div class="team-member"> - <img src="https://nyc3.digitaloceanspaces.com/megapixels/v1/site/about/assets/jules-laplace-3d.jpg" /> <h3>Jules LaPlace</h3> - <p>is an American creative technologist also based in Berlin. He was previously the CTO of a digital agency in NYC and now also works at VFRAME, developing computer vision for human rights groups. Jules also builds creative software for artists and musicians. - <br> - <a href="https://asdf.us/">asdf.us</a> + <p>is an American technologist and artist also based in Berlin. He was previously the CTO of a digital agency in NYC and now also works at VFRAME, developing computer vision and data analysis software for human rights groups. Jules also builds experimental software for artists and musicians. </p> + <p><a href="https://asdf.us/">asdf.us</a></p> </div> </div> + +MegaPixels.cc is an independent research project about publicly available face recognition datasets. This website is based, in part, on earlier installations and research projects about facial recognition datasets in 2016-2018, which focused particularly on the MegaFace dataset. Since then it has evolved into a large-scale survey of publicly-available face and person analysis datasets, covering their usage, geographies, and ethics. + +An academic report and presentation on the findings is forthcoming. 
This site is published to make the research more accessible to a wider audience and to include visualizations and interactive features not possible in PDF publications. Continued research on MegaPixels is supported by a 1 year Researcher-in-Residence grant from Karlsruhe HfG. + +When possible, and once thoroughly verified, data generated for MegaPixels will be made available for download on [github.com/adamhrv/megapixels](https://github.com/adamhrv/megapixels) + +=== columns 3 + +#### Team + +- Adam Harvey: Concept, research, design, computer vision +- Jules LaPlace: Information and systems architecture, data retrieval, web applications + +=========== + +#### Contributing Researchers + +- Berit Gilma: Dataset statistics and discovery +- Beth (aka Ms. Celeb): Dataset usage verification and research +- Mathana Stender: Commercial usage verification and research on LFW + +=========== + +#### Code and Libraries + +- [Semantic Scholar](https://semanticscholar.org) for citation aggregation +- Leaflet.js for maps +- C3.js for charts +- ThreeJS for 3D visualizations +- PDFMiner.Six and Pandas for research paper data analysis + +=== end columns + +Please direct questions, comments, or feedback to [mastodon.social/@adamhrv](https://mastodon.social/@adamhrv)
\ No newline at end of file diff --git a/site/content/pages/about/legal.md b/site/content/pages/about/legal.md new file mode 100644 index 00000000..089f32d7 --- /dev/null +++ b/site/content/pages/about/legal.md @@ -0,0 +1,88 @@ +------------ + +status: published +title: Privacy Policy +desc: MegaPixels Privacy Policy +slug: privacy-policy +cssclass: about +published: 2018-12-04 +updated: 2018-12-04 +authors: Adam Harvey + +------------ + +# Legal + +<section class="about-menu"> +<ul> +<li><a href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> +<li><a href="/about/press/">Press</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a class="current" href="/about/privacy/">Privacy</a></li> +</ul> +</section> + +MegaPixels.cc Terms and Privacy + +MegaPixels is an independent art and research project about the origins and ethics of publicly available face analysis image datasets. By accessing MegaPixels (the *Service* or *Services*) you agree to the terms and conditions set forth below. + +### Changes + +We reserve the right, at our sole discretion, to modify or replace these Terms at any time. If a revision is material we will try to provide at least 30 days notice prior to any new terms taking effect. What constitutes a material change will be determined at our sole discretion. + +By continuing to access or use our Service after those revisions become effective, you agree to be bound by the revised terms. If you do not agree to the new terms, please stop using the Service. + + +## Privacy + +The MegaPixels site has been designed to minimize the amount of network requests to 3rd party services and therefore prioritize the privacy of the viewer by only loading local dependencies. Additionaly, this site does not use any anaytics programs to monitor site viewers. In fact, the only data collected are the necessary server logs that used only for preventing misuse, which are deleteted at regular short-term intervals. 
+ +## 3rd Party Services + +In order to provide certain features of the site, some 3rd party services are needed. Currently, the MegaPixels.cc site uses two 3rd party services: (1) Leaflet.js for the interactive map and (2 Digital Ocean Spaces as a condent delivery network. Both services encrypt your requests to their server using HTTPS and neither service requires storing any cookies or authentication. However, both services will store files in your web browser's local cache (local storage) to improve loading performance. None of these local storage files are using for analytics, cookie-like technologies, tracking, or any similar purpose. + +### Links To Other Web Sites + +The MegaPixels.cc contains many links to 3rd party websites, especically in the list of citations that are provided for each dataset. This website has no control over and assumes no responsibility for, the content, privacy policies, or practices of any third party web sites or services. You further acknowledge and agree that megapixels.cc shall not be responsible or liable, directly or indirectly, for any damage or loss caused or alleged to be caused by or in connection with use of or reliance on any such content, goods or services available on or through any such web sites or services. + +We advise you to read the terms and conditions and privacy policies of any third-party web sites or services that you visit. + + +### The Information We Provide + +While every intention is made to verify and publish only verifiablenformation, at times amendments to accuracy may be required. In no event will the operators of this site be liable for your use or misuse of the information provided. + +We may terminate or suspend access to our Service immediately, without prior notice or liability, for any reason whatsoever, including without limitation if you breach the Terms. 
+ +All provisions of the Terms which by their nature should survive termination shall survive termination, including, without limitation, ownership provisions, warranty disclaimers, indemnity and limitations of liability. + +### Prohibited Uses + +You may not access or use, or attempt to access or use, the Services to take any action that could harm us or a third party. You may not use the Services in violation of applicable laws or in violation of our or any third party’s intellectual property or other proprietary or legal rights. You further agree that you shall not attempt (or encourage or support anyone else's attempt) to circumvent, reverse engineer, decrypt, or otherwise alter or interfere with the Services, or any content thereof, or make any unauthorized use thereof. + +Without prior written consent, you shall not: + +(i) access any part of the Services, Content, data or information you do not have permission or authorization to access; + +(ii) use robots, spiders, scripts, service, software or any manual or automatic device, tool, or process designed to data mine or scrape the Content, data or information from the Services, or otherwise access or collect the Content, data or information from the Services using automated means; + +(iii) use services, software or any manual or automatic device, tool, or process designed to circumvent any restriction, condition, or technological measure that controls access to the Services in any way, including overriding any security feature or bypassing or circumventing any access controls or use limits of the Services; + +(iv) cache or archive the Content (except for a public search engine’s use of spiders for creating search indices); + +(v) take action that imposes an unreasonable or disproportionately large load on our network or infrastructure; and + +(vi) do anything that could disable, damage or change the functioning or appearance of the Services, including the presentation of advertising. 
+ +Engaging in a prohibited use of the Services may result in civil, criminal, and/or administrative penalties, fines, or sanctions against the user and those assisting the user. + +### Governing Law + +These Terms shall be governed and construed in accordance with the laws of Berlin, Germany, without regard to its conflict of law provisions. + +Our failure to enforce any right or provision of these Terms will not be considered a waiver of those rights. If any provision of these Terms is held to be invalid or unenforceable by a court, the remaining provisions of these Terms will remain in effect. These Terms constitute the entire agreement between us regarding our Service, and supersede and replace any prior agreements we might have between us regarding the Service. + +### Indemnity + +You hereby indemnify, defend and hold harmless MegaPixels (and its creators) and all officers, directors, owners, agents, information providers, affiliates, licensors and licensees (collectively, the "Indemnified Parties") from and against any and all liability and costs, including, without limitation, reasonable attorneys' fees, incurred by the Indemnified Parties in connection with any claim arising out of any breach by you or any user of your account of these Terms of Service or the foregoing representations, warranties and covenants. You shall cooperate as fully as reasonably required in the defense of any such claim. We reserves the right, at its own expense, to assume the exclusive defense and control of any matter subject to indemnification by you.
\ No newline at end of file diff --git a/site/content/pages/about/press.md b/site/content/pages/about/press.md index 47e1af52..a0780d64 100644 --- a/site/content/pages/about/press.md +++ b/site/content/pages/about/press.md @@ -16,11 +16,10 @@ authors: Adam Harvey <section class="about-menu"> <ul> <li><a href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> <li><a class="current" href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> </ul> </section> diff --git a/site/content/pages/about/privacy.md b/site/content/pages/about/privacy.md deleted file mode 100644 index e36daf2a..00000000 --- a/site/content/pages/about/privacy.md +++ /dev/null @@ -1,165 +0,0 @@ ------------- - -status: published -title: Privacy Policy -desc: MegaPixels Privacy Policy -slug: privacy-policy -cssclass: about -published: 2018-12-04 -updated: 2018-12-04 -authors: Adam Harvey - ------------- - -# Privacy Policy - -<section class="about-menu"> -<ul> -<li><a href="/about/">About</a></li> -<li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a class="current" href="/about/privacy/">Privacy Policy</a></li> -</ul> -</section> - -(TEMPORARY PAGE) - -A summary of our privacy policy is as follows: - -The MegaPixels site does not use any analytics programs or collect any data besides the necessary IP address of your connection, which are deleted every 30 days and used only for security and to prevent misuse. - -The image processing sections of the site do not collect any data whatsoever. 
All processing takes place in temporary memory (RAM) and then is displayed back to the user over a SSL secured HTTPS connection. It is the sole responsibility of the user whether they discard, by closing the page, or share their analyzed information and any potential consequences that may arise from doing so. - ----- - -A more complete legal version is below: - -**This is a boilerplate Privacy policy from <https://termsfeed.com/>** - -**Needs to be reviewed** - -Effective date: December 04, 2018 - -megapixels.cc ("us", "we", or "our") operates the WebsiteName website (hereinafter referred to as the "Service"). - -This page informs you of our policies regarding the collection, use, and disclosure of personal data when you use our Service and the choices you have associated with that data. - -We use your data to provide and improve the Service. By using the Service, you agree to the collection and use of information in accordance with this policy. Unless otherwise defined in this Privacy Policy, the terms used in this Privacy Policy have the same meanings as in our Terms and Conditions, accessible from WebsiteName - -## Definitions - -**Service** - -Service is the MegaPixels website operated by megapixels.cc - -**Personal Data** - -Personal Data means data about a living individual who can be identified from those data (or from those and other information either in our possession or likely to come into our possession). - -**Usage Data** - -Usage Data is data collected automatically either generated by the use of the Service or from the Service infrastructure itself - -## Information Collection and Use - -We collect several different types of information for various purposes to provide and improve our Service to you. - -## Types of Data Collected - -### Personal Data - -While using our Service, we may ask you to provide us with certain personally identifiable information that can be used to contact or identify you ("Personal Data"). 
Personally identifiable information may include, but is not limited to: - -- Cookies and Usage Data - -### Usage Data - -We may also collect information how the Service is accessed and used ("Usage Data"). This Usage Data may include information such as your computer's Internet Protocol address (e.g. IP address), browser type, browser version, the pages of our Service that you visit, the time and date of your visit, the time spent on those pages, unique device identifiers and other diagnostic data. - -### Tracking & Cookies Data - -We use cookies and similar tracking technologies to track the activity on our Service and we hold certain information. -Cookies are files with a small amount of data which may include an anonymous unique identifier. Cookies are sent to your browser from a website and stored on your device. Other tracking technologies are also used such as beacons, tags and scripts to collect and track information and to improve and analyse our Service. - -You can instruct your browser to refuse all cookies or to indicate when a cookie is being sent. However, if you do not accept cookies, you may not be able to use some portions of our Service. -Examples of Cookies we use: - -- <strong>Session Cookies.</strong> We use Session Cookies to operate our Service. -- <strong>Preference Cookies.</strong> We use Preference Cookies to remember your preferences and various settings. -- <strong>Security Cookies.</strong> We use Security Cookies for security purposes. 
- - -## Use of Data - -megapixels.cc uses the collected data for various purposes: - -- To provide and maintain the Service -- To notify you about changes to our Service -- To allow you to participate in interactive features of our Service when you choose to do so -- To provide customer care and support -- To provide analysis or valuable information so that we can improve the Service -- To monitor the usage of the Service -- To detect, prevent and address technical issues - -## Transfer Of Data - -Your information, including Personal Data, may be transferred to — and maintained on — computers located outside of your state, province, country or other governmental jurisdiction where the data protection laws may differ than those from your jurisdiction. - -If you are located outside Germany and choose to provide information to us, please note that we transfer the data, including Personal Data, to Germany and process it there. -Your consent to this Privacy Policy followed by your submission of such information represents your agreement to that transfer. -megapixels.cc will take all steps reasonably necessary to ensure that your data is treated securely and in accordance with this Privacy Policy and no transfer of your Personal Data will take place to an organization or a country unless there are adequate controls in place including the security of your data and other personal information. 
- -## Disclosure Of Data - -### Legal Requirements - -megapixels.cc may disclose your Personal Data in the good faith belief that such action is necessary to: - -- To comply with a legal obligation -- To protect and defend the rights or property of megapixels.cc -- To prevent or investigate possible wrongdoing in connection with the Service -- To protect the personal safety of users of the Service or the public -- To protect against legal liability - - - -## Security of Data - -The security of your data is important to us but remember that no method of transmission over the Internet or method of electronic storage is 100% secure. While we strive to use commercially acceptable means to protect your Personal Data, we cannot guarantee its absolute security. - -## Service Providers - -We may employ third party companies and individuals to facilitate our Service ("Service Providers"), to provide the Service on our behalf, to perform Service-related services or to assist us in analyzing how our Service is used. - -These third parties have access to your Personal Data only to perform these tasks on our behalf and are obligated not to disclose or use it for any other purpose. - - - - -## Links to Other Sites - -Our Service may contain links to other sites that are not operated by us. If you click a third party link, you will be directed to that third party's site. We strongly advise you to review the Privacy Policy of every site you visit. -We have no control over and assume no responsibility for the content, privacy policies or practices of any third party sites or services. - - -## Children's Privacy - -Our Service does not address anyone under the age of 18 ("Children"). - -We do not knowingly collect personally identifiable information from anyone under the age of 18. If you are a parent or guardian and you are aware that your Child has provided us with Personal Data, please contact us. 
If we become aware that we have collected Personal Data from children without verification of parental consent, we take steps to remove that information from our servers. - - -## Changes to This Privacy Policy - -We may update our Privacy Policy from time to time. We will notify you of any changes by posting the new Privacy Policy on this page. -We will let you know via email and/or a prominent notice on our Service, prior to the change becoming effective and update the "effective date" at the top of this Privacy Policy. -You are advised to review this Privacy Policy periodically for any changes. Changes to this Privacy Policy are effective when they are posted on this page. - - -## Contact Us - -If you have any questions about this Privacy Policy, please contact us: - -- By visiting this page on our website: <https://megapixels.cc/contact>
\ No newline at end of file diff --git a/site/content/pages/about/terms.md b/site/content/pages/about/terms.md deleted file mode 100644 index 7ae6dac7..00000000 --- a/site/content/pages/about/terms.md +++ /dev/null @@ -1,74 +0,0 @@ ------------- - -status: published -title: Terms of Use -desc: MegaPixels Terms of Use and Privacy Policy -slug: terms -cssclass: about -published: 2018-12-04 -updated: 2018-12-04 -authors: Adam Harvey - ------------- - - -# Terms and Conditions ("Terms") - -<section class="about-menu"> -<ul> -<li><a href="/about/">About</a></li> -<li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a class="current" href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> -</ul> -</section> - -(TEMPORARY PAGE) - -(FPO: this is only example text) - -Last updated: December 04, 2018 - -Please read these Terms and Conditions ("Terms", "Terms and Conditions") carefully before using the MegaPixels website (the "Service") operated by megapixels.cc ("us", "we", or "our"). - -Your access to and use of the Service is conditioned on your acceptance of and compliance with these Terms. - -By accessing or using the Service you agree to be bound by these Terms. If you disagree with any part of the terms then you may not access the Service. - - -### Links To Other Web Sites - -Our Service may contain links to third-party web sites or services that are not owned or controlled by megapixels.cc. - -megapixels.cc has no control over, and assumes no responsibility for, the content, privacy policies, or practices of any third party web sites or services. 
You further acknowledge and agree that megapixels.cc shall not be responsible or liable, directly or indirectly, for any damage or loss caused or alleged to be caused by or in connection with use of or reliance on any such content, goods or services available on or through any such web sites or services. - -We strongly advise you to read the terms and conditions and privacy policies of any third-party web sites or services that you visit. - - -### Termination - -We may terminate or suspend access to our Service immediately, without prior notice or liability, for any reason whatsoever, including without limitation if you breach the Terms. - -All provisions of the Terms which by their nature should survive termination shall survive termination, including, without limitation, ownership provisions, warranty disclaimers, indemnity and limitations of liability. - - - -### Governing Law - -These Terms shall be governed and construed in accordance with the laws of Berlin, Germany, without regard to its conflict of law provisions. - -Our failure to enforce any right or provision of these Terms will not be considered a waiver of those rights. If any provision of these Terms is held to be invalid or unenforceable by a court, the remaining provisions of these Terms will remain in effect. These Terms constitute the entire agreement between us regarding our Service, and supersede and replace any prior agreements we might have between us regarding the Service. - - -### Changes - -We reserve the right, at our sole discretion, to modify or replace these Terms at any time. If a revision is material we will try to provide at least 30 days notice prior to any new terms taking effect. What constitutes a material change will be determined at our sole discretion. - -By continuing to access or use our Service after those revisions become effective, you agree to be bound by the revised terms. If you do not agree to the new terms, please stop using the Service. 
- - -### Contact Us - -If you have any questions about these Terms, please contact us.
\ No newline at end of file diff --git a/site/content/pages/datasets/brainwash/assets/00818000_640x480.jpg b/site/content/pages/datasets/brainwash/assets/00818000_640x480.jpg Binary files differdeleted file mode 100644 index 30c0fcb1..00000000 --- a/site/content/pages/datasets/brainwash/assets/00818000_640x480.jpg +++ /dev/null diff --git a/site/content/pages/datasets/brainwash/assets/background_540.jpg b/site/content/pages/datasets/brainwash/assets/background_540.jpg Binary files differdeleted file mode 100644 index 5c8c0ad4..00000000 --- a/site/content/pages/datasets/brainwash/assets/background_540.jpg +++ /dev/null diff --git a/site/content/pages/datasets/brainwash/assets/background_600.jpg b/site/content/pages/datasets/brainwash/assets/background_600.jpg Binary files differdeleted file mode 100755 index 8f2de697..00000000 --- a/site/content/pages/datasets/brainwash/assets/background_600.jpg +++ /dev/null diff --git a/site/content/pages/datasets/brainwash/assets/brainwash_mean_overlay.jpg b/site/content/pages/datasets/brainwash/assets/brainwash_mean_overlay.jpg Binary files differnew file mode 100755 index 00000000..2f5917e3 --- /dev/null +++ b/site/content/pages/datasets/brainwash/assets/brainwash_mean_overlay.jpg diff --git a/site/content/pages/datasets/brainwash/assets/brainwash_mean_overlay_wm.jpg b/site/content/pages/datasets/brainwash/assets/brainwash_mean_overlay_wm.jpg Binary files differnew file mode 100755 index 00000000..790dbb79 --- /dev/null +++ b/site/content/pages/datasets/brainwash/assets/brainwash_mean_overlay_wm.jpg diff --git a/site/content/pages/datasets/brainwash/index.md b/site/content/pages/datasets/brainwash/index.md index 0bf67455..db88d949 100644 --- a/site/content/pages/datasets/brainwash/index.md +++ b/site/content/pages/datasets/brainwash/index.md @@ -2,8 +2,8 @@ status: published title: Brainwash -desc: Brainwash is a dataset of webcam images taken from the Brainwash Cafe in San Francisco -subdesc: The Brainwash dataset includes 
11,918 images of "everyday life of a busy downtown cafe" and is used for training head detection algorithms +desc: Brainwash is a dataset of webcam images taken from the Brainwash Cafe in San Francisco in 2014 +subdesc: The Brainwash dataset includes 11,918 images of "everyday life of a busy downtown cafe" and is used for training head detection surveillance algorithms slug: brainwash cssclass: dataset image: assets/background.jpg @@ -15,32 +15,18 @@ authors: Adam Harvey ------------ ### sidebar - -+ Published: 2015 -+ Images: 11,918 -+ Faces: 91,146 -+ Created by: Stanford Department of Computer Science -+ Funded by: Max Planck Center for Visual Computing and Communication -+ Location: Brainwash Cafe, San Franscisco -+ Purpose: Training face detection -+ Website: <a href="https://exhibits.stanford.edu/data/catalog/sx925dc9385">stanford.edu</a> -+ Paper: <a href="http://arxiv.org/abs/1506.04878">End-to-End People Detection in Crowded Scenes</a> -+ Explicit Consent: No - +### end sidebar ## Brainwash Dataset -(PAGE UNDER DEVELOPMENT) +*Brainwash* is a head detection dataset created from San Francisco's Brainwash Cafe livecam footage. It includes 11,918 images of "everyday life of a busy downtown cafe"[^readme] captured at 100 second intervals throught the entire day. Brainwash dataset was captured during 3 days in 2014: October 27, November 13, and November 24. According the author's reserach paper introducing the dataset, the images were acquired with the help of Angelcam.com [cite orig paper]. -*Brainwash* is a face detection dataset created from the Brainwash Cafe's livecam footage including 11,918 images of "everyday life of a busy downtown cafe[^readme]". The images are used to develop face detection algorithms for the "challenging task of detecting people in crowded scenes" and tracking them. 
+Brainwash is not a widely used dataset but since its publication by Stanford University in 2015, it has notably appeared in several research papers from the National University of Defense Technology in Changsha, China. In 2016 and in 2017 researchers there conducted studies on detecting people's heads in crowded scenes for the purpose of surveillance [^localized_region_context] [^replacement_algorithm]. -Before closing in 2017, Brainwash Cafe was a "cafe and laundromat" located in San Francisco's SoMA district. The cafe published a publicy available livestream from the cafe with a view of the cash register, performance stage, and seating area. +If you happen to have been at Brainwash cafe in San Franscisco at any time on October 26, November 13, or November 24 in 2014 you are most likely included in the Brainwash dataset. -Since it's publication by Stanford in 2015, the Brainwash dataset has appeared in several notable research papers. In September 2016 four researchers from the National University of Defense Technology in Changsha, China used the Brainwash dataset for a research study on "people head detection in crowded scenes", concluding that their algorithm "achieves superior head detection performance on the crowded scenes dataset[^localized_region_context]". And again in 2017 three researchers at the National University of Defense Technology used Brainwash for a study on object detection noting "the data set used in our experiment is shown in Table 1, which includes one scene of the brainwash dataset[^replacement_algorithm]". 
+ - - - {% include 'chart.html' %} @@ -48,19 +34,27 @@ Since it's publication by Stanford in 2015, the Brainwash dataset has appeared i {% include 'map.html' %} -Add more analysis here - +{% include 'citations.html' %} {% include 'supplementary_header.html' %} -{% include 'citations.html' %} + + -### Additional Information +#### Additional Resources - The dataset author spoke about his research at the CVPR conference in 2016 <https://www.youtube.com/watch?v=Nl2fBKxwusQ> +TODO + +- add bounding boxes to the header image +- remake montage with randomized images, with bboxes +- clean up intro text +- verify quote citations + + ### Footnotes [^readme]: "readme.txt" https://exhibits.stanford.edu/data/catalog/sx925dc9385. diff --git a/site/content/pages/datasets/duke_mtmc/assets/duke_mtmc_cam5_average_comp.jpg b/site/content/pages/datasets/duke_mtmc/assets/duke_mtmc_cam5_average_comp.jpg Binary files differnew file mode 100755 index 00000000..3cd64df1 --- /dev/null +++ b/site/content/pages/datasets/duke_mtmc/assets/duke_mtmc_cam5_average_comp.jpg diff --git a/site/content/pages/datasets/duke_mtmc/index.md b/site/content/pages/datasets/duke_mtmc/index.md index de1fa14c..c626ef4e 100644 --- a/site/content/pages/datasets/duke_mtmc/index.md +++ b/site/content/pages/datasets/duke_mtmc/index.md @@ -2,8 +2,8 @@ status: published title: Duke Multi-Target, Multi-Camera Tracking -desc: <span class="dataset-name">Duke MTMC</span> is a dataset of CCTV footage of students at Duke University -subdesc: Duke MTMC contains over 2 million video frames and 2,000 unique identities collected from 8 cameras at Duke University campus in March 2014 +desc: <span class="dataset-name">Duke MTMC</span> is a dataset of surveillance camera footage of students on Duke University campus +subdesc: Duke MTMC contains over 2 million video frames and 2,000 unique identities collected from 8 HD cameras at Duke University campus in March 2014 slug: duke_mtmc cssclass: dataset image: assets/background.jpg @@ 
-15,17 +15,27 @@ authors: Adam Harvey ### sidebar -+ Collected: March 19, 2014 -+ Cameras: 8 -+ Video Frames: 2,000,000 -+ Identities: Over 2,000 -+ Used for: Person re-identification, <br>face recognition -+ Sector: Academic ++ Created: 2014 ++ Identities: Over 2,700 ++ Used for: Face recognition, person re-identification ++ Created by: Computer Science Department, Duke University, Durham, US + Website: <a href="http://vision.cs.duke.edu/DukeMTMC/">duke.edu</a> ## Duke Multi-Target, Multi-Camera Tracking Dataset (Duke MTMC) -(PAGE UNDER DEVELOPMENT) +[ PAGE UNDER DEVELOPMENT ] + +Duke MTMC is a dataset of video recorded on Duke University campus during for the purpose of training, evaluating, and improving *multi-target multi-camera tracking*. The videos were recorded during February and March 2014 and cinclude + +Includes a total of 888.8 minutes of video (ind. verified) + +"We make available a new data set that has more than 2 million frames and more than 2,700 identities. It consists of 8×85 minutes of 1080p video recorded at 60 frames per second from 8 static cameras deployed on the Duke University campus during periods between lectures, when pedestrian traffic is heavy." + +The dataset includes approximately 2,000 annotated identities appearing in 85 hours of video from 8 cameras located throughout Duke University's campus. + + + +According to the dataset authors, {% include 'map.html' %} diff --git a/site/content/pages/datasets/index.md b/site/content/pages/datasets/index.md index 2e943fbe..c0373d60 100644 --- a/site/content/pages/datasets/index.md +++ b/site/content/pages/datasets/index.md @@ -13,4 +13,4 @@ sync: false # Facial Recognition Datasets -### Survey +Explore publicly available facial recognition datasets. More datasets will be added throughout 2019. 
diff --git a/site/content/pages/datasets/msceleb/assets/background.jpg b/site/content/pages/datasets/msceleb/assets/background.jpg Binary files differnew file mode 100644 index 00000000..c1cd486e --- /dev/null +++ b/site/content/pages/datasets/msceleb/assets/background.jpg diff --git a/site/content/pages/datasets/msceleb/assets/index.jpg b/site/content/pages/datasets/msceleb/assets/index.jpg Binary files differnew file mode 100644 index 00000000..fb3a934a --- /dev/null +++ b/site/content/pages/datasets/msceleb/assets/index.jpg diff --git a/site/content/pages/datasets/msceleb/index.md b/site/content/pages/datasets/msceleb/index.md new file mode 100644 index 00000000..eb084eaa --- /dev/null +++ b/site/content/pages/datasets/msceleb/index.md @@ -0,0 +1,56 @@ +------------ + +status: published +title: MS Celeb +desc: MS Celeb is a dataset of web images used for training and evaluating face recognition algorithms +subdesc: The MS Celeb dataset includes over 10,000,000 images and 93,000 identities of semi-public figures collected using the Bing search engine +slug: msceleb +cssclass: dataset +image: assets/background.jpg +year: 2015 +published: 2019-2-23 +updated: 2019-2-23 +authors: Adam Harvey + +------------ + +### sidebar + ++ Published: TBD ++ Images: TBD ++ Faces: TBD ++ Created by: TBD + + +## Microsoft Celeb Dataset (MS Celeb) + +(PAGE UNDER DEVELOPMENT) + +At vero eos et accusamus et iusto odio dignissimos ducimus, qui blanditiis praesentium voluptatum deleniti atque corrupti, quos dolores et quas molestias excepturi sint, obcaecati cupiditate non-provident, similique sunt in culpa, qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. + +Nam libero tempore, cum soluta nobis est eligendi optio, cumque nihil impedit, quo minus id, quod maxime placeat, facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. 
Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet, ut et voluptates repudiandae sint et molestiae non-recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat + +{% include 'chart.html' %} + +{% include 'piechart.html' %} + +{% include 'map.html' %} + +Add more analysis here + + +{% include 'supplementary_header.html' %} + +{% include 'citations.html' %} + + +### Additional Information + +- The dataset author spoke about his research at the CVPR conference in 2016 <https://www.youtube.com/watch?v=Nl2fBKxwusQ> + + +### Footnotes + +[^readme]: "readme.txt" https://exhibits.stanford.edu/data/catalog/sx925dc9385. +[^localized_region_context]: Li, Y. and Dou, Y. and Liu, X. and Li, T. Localized Region Context and Object Feature Fusion for People Head Detection. ICIP16 Proceedings. 2016. Pages 594-598. +[^replacement_algorithm]: Zhao. X, Wang Y, Dou, Y. A Replacement Algorithm of Non-Maximum Suppression Base on Graph Clustering.
\ No newline at end of file diff --git a/site/content/pages/datasets/uccs/assets/uccs_bboxes_clr_fill.jpg b/site/content/pages/datasets/uccs/assets/uccs_bboxes_clr_fill.jpg Binary files differdeleted file mode 100644 index c8002bb9..00000000 --- a/site/content/pages/datasets/uccs/assets/uccs_bboxes_clr_fill.jpg +++ /dev/null diff --git a/site/content/pages/datasets/uccs/assets/uccs_bboxes_grayscale.jpg b/site/content/pages/datasets/uccs/assets/uccs_bboxes_grayscale.jpg Binary files differdeleted file mode 100644 index 6e2833dd..00000000 --- a/site/content/pages/datasets/uccs/assets/uccs_bboxes_grayscale.jpg +++ /dev/null diff --git a/site/content/pages/datasets/uccs/assets/uccs_mean_bboxes_comp.jpg b/site/content/pages/datasets/uccs/assets/uccs_mean_bboxes_comp.jpg Binary files differnew file mode 100644 index 00000000..18f4c5ec --- /dev/null +++ b/site/content/pages/datasets/uccs/assets/uccs_mean_bboxes_comp.jpg diff --git a/site/content/pages/datasets/uccs/index.md b/site/content/pages/datasets/uccs/index.md index 092638c0..8ae1f324 100644 --- a/site/content/pages/datasets/uccs/index.md +++ b/site/content/pages/datasets/uccs/index.md @@ -2,11 +2,12 @@ status: published title: Unconstrained College Students -desc: <span class="dataset-name">Unconstrained College Students (UCCS)</span> is a dataset of images ... -subdesc: The UCCS dataset includes ... 
slug: uccs +desc: <span class="dataset-name">Unconstrained College Students (UCCS)</span> is a dataset of long-range surveillance photos of students taken without their knowledge +subdesc: The UCCS dataset includes 16,149 images and 1,732 identities of students at University of Colorado Colorado Springs campus and is used for face recognition and face detection cssclass: dataset image: assets/background.jpg +slug: uccs published: 2019-2-23 updated: 2019-2-23 authors: Adam Harvey @@ -15,30 +16,75 @@ authors: Adam Harvey ### sidebar -+ Collected: TBD -+ Published: TBD -+ Images: TBD -+ Faces: TBD ++ Published: 2018 ++ Images: 16,149 ++ Identities: 1,732 ++ Used for: Face recognition, face detection ++ Created by: Unviversity of Colorado Colorado Springs (US) ++ Funded by: ODNI, IARPA, ONR MURI, Amry SBIR, SOCOM SBIR ++ Website: <a href="https://vast.uccs.edu/Opensetface/">vast.uccs.edu</a> ## Unconstrained College Students ... (PAGE UNDER DEVELOPMENT) +Unconstrained College Students (UCCS) is a dataset of long-range surveillance photos captured at University of Colorado Colorado Springs. According to the authors of two papers associated with the dataset, subjects were "photographed using a long-range high-resolution surveillance camera without their knowledge" [^funding_sb]. The images were captured using a Canon 7D digital camera fitted with a Sigma 800mm telephoto lens pointed out the window of an office. + +The UCCS dataset was funded by ODNI (Office of Director of National Intelligence), IARPA (Intelligence Advance Research Projects Activity), ONR MURI Office of Naval Research and The Department of Defense Multidisciplinary University Research Initiative, Army SBIR (Small Business Innovation Research), SOCOM SBIR (Special Operations Command and Small Business Innovation Research), and the National Science Foundation. + +The images in UCCS include students walking between classes on campus over 19 days in 2012 - 2013. 
The dates include: + +| Year | Month | Day | Date | Time Range | Photos | +| --- | --- | --- | --- | --- | --- | +| 2012 | Februay | --- | 23 | - | 132 | +| 2012 | March | --- | 6 | - | - | +| 2012 | March | --- | 8 | - | - | +| 2012 | March | --- | 13 | - | - | +| 2012 | Februay | --- | 23 | - | 132 | +| 2012 | March | --- | 6 | - | - | +| 2012 | March | --- | 8 | - | - | +| 2012 | March | --- | 13 | - | - | +| 2012 | Februay | --- | 23 | - | 132 | +| 2012 | March | --- | 6 | - | - | +| 2012 | March | --- | 8 | - | - | +| 2012 | March | --- | 13 | - | - | +| 2012 | Februay | --- | 23 | - | 132 | +| 2012 | March | --- | 6 | - | - | +| 2012 | March | --- | 8 | - | - | +| 2012 | March | --- | 13 | - | - | +| 2012 | Februay | --- | 23 | - | 132 | +| 2012 | March | --- | 6 | - | - | +| 2012 | March | --- | 8 | - | - | + + +2012-03-20 +2012-03-22 +2012-04-03 +2012-04-12 +2012-04-17 +2012-04-24 +2012-04-25 +2012-04-26 +2013-01-28 +2013-01-29 +2013-02-13 +2013-02-19 +2013-02-20 +2013-02-26 + + + + {% include 'map.html' %} {% include 'chart.html' %} {% include 'piechart.html' %} -{% include 'supplementary_header.html' %} - {% include 'citations.html' %} - - - -### Research Notes +{% include 'supplementary_header.html' %} The original Sapkota and Boult dataset, from which UCCS is derived, received funding from[^funding_sb]: @@ -53,6 +99,14 @@ The more recent UCCS version of the dataset received funding from [^funding_uccs - IARPA (Intelligence Advance Research Projects Activity) R&D contract 2014-14071600012 +### TODO + +- add tabulator module for dates +- parse dates into CSV using Python +- get google image showing line of sight? +- fix up quote/citations + +### footnotes [^funding_sb]: Sapkota, Archana and Boult, Terrance. "Large Scale Unconstrained Open Set Face Database." 2013. [^funding_uccs]: Günther, M. et. al. "Unconstrained Face Detection and Open-Set Face Recognition Challenge," 2018. Arxiv 1708.02337v3.
\ No newline at end of file diff --git a/site/content/pages/research/01_from_1_to_100_pixels/index.md b/site/content/pages/research/01_from_1_to_100_pixels/index.md index a7b863a9..b219dffb 100644 --- a/site/content/pages/research/01_from_1_to_100_pixels/index.md +++ b/site/content/pages/research/01_from_1_to_100_pixels/index.md @@ -56,3 +56,55 @@ Ideas: - "Note that we only keep the images with a minimal side length of 80 pixels." and "a face will be labeled as “Ignore” if it is very difficult to be detected due to blurring, severe deformation and unrecognizable eyes, or the side length of its bounding box is less than 32 pixels." Ge_Detecting_Masked_Faces_CVPR_2017_paper.pdf - IBM DiF: "Faces with region size less than 50x50 or inter-ocular distance of less than 30 pixels were discarded. Faces with non-frontal pose, or anything beyond being slightly tilted to the left or the right, were also discarded." + + + + +As the resolution +formatted as rectangular databases of 16 bit RGB-tuples or 8 bit grayscale values + + +To consider how visual privacy applies to real world surveillance situations, the first + +A single 8-bit grayscale pixel with 256 values is enough to represent the entire alphabet `a-Z0-9` with room to spare. + +A 2x2 pixels contains + +Using no more than a 42 pixel (6x7 image) face image researchers [cite] were able to correctly distinguish between a group of 50 people. Yet + +The likely outcome of face recognition research is that more data is needed to improve. Indeed, resolution is the determining factor for all biometric systems, both as training data to increase + +Pixels, typically considered the buiding blocks of images and vidoes, can also be plotted as a graph of sensor values corresponding to the intensity of RGB-calibrated sensors. + + +Wi-Fi and cameras presents elevated risks for transmitting videos and image documentation from conflict zones, high-risk situations, or even sharing on social media. 
How can new developments in computer vision also be used in reverse, as a counter-forensic tool, to minimize an individual's privacy risk? + +As the global Internet becomes increasingly effecient at turning the Internet into a giant dataset for machine learning, forensics, and data analysing, it would be prudent to also consider tools for decreasing the resolution. The Visual Defense module is just that. What are new ways to minimize the adverse effects of surveillance by dulling the blade. For example, a researcher paper showed that by decreasing a face size to 12x16 it was possible to do 98% accuracy with 50 people. This is clearly an example of + +This research module, tentatively called Visual Defense Tools, aims to explore the + + +### Prior Research + +- MPI visual privacy advisor +- NIST: super resolution +- YouTube blur tool +- WITNESS: blur tool +- Pixellated text +- CV Dazzle +- Bellingcat guide to geolocation +- Peng! magic passport + +### Notes + +- In China, out of the approximately 200 million surveillance cameras only about 15% have enough resolution for face recognition. +- In Apple's FaceID security guide, the probability of someone else's face unlocking your phone is 1 out of 1,000,000. +- In England, the Metropolitan Police reported a false-positive match rate of 98% when attempting to use face recognition to locate wanted criminals. +- In a face recognition trial at Berlin's Sudkreuz station, the false-match rate was 20%. + + +What all 3 examples illustrate is that face recognition is anything but absolute. In a 2017 talk, Jason Matheny the former directory of IARPA, admitted the face recognition is so brittle it can be subverted by using a magic marker and drawing "a few dots on your forehead". In fact face recognition is a misleading term. Face recognition is search engine for faces that can only ever show you the mos likely match. 
This presents real a real threat to privacy and lends + + +Globally, iPhone users unwittingly agree to 1/1,000,000 probably +relying on FaceID and TouchID to protect their information agree to a
\ No newline at end of file diff --git a/site/content/pages/research/02_what_computers_can_see/index.md b/site/content/pages/research/02_what_computers_can_see/index.md index ab4c7884..51621f46 100644 --- a/site/content/pages/research/02_what_computers_can_see/index.md +++ b/site/content/pages/research/02_what_computers_can_see/index.md @@ -100,6 +100,7 @@ A list of 100 things computer vision can see, eg: - Wearing Necktie - Wearing Necklace +for i in {1..9};do wget http://visiond1.cs.umbc.edu/webpage/codedata/ADLdataset/ADL_videos/P_0$i.MP4;done;for i in {10..20}; do wget http://visiond1.cs.umbc.edu/webpage/codedata/ADLdataset/ADL_videos/P_$i.MP4;done ## From Market 1501 @@ -149,4 +150,26 @@ Visibility boolean for each keypoint Region annotations (upper clothes, lower clothes, dress, socks, shoes, hands, gloves, neck, face, hair, hat, sunglasses, bag, occluder) Body type (male, female or child) -source: https://www2.eecs.berkeley.edu/Research/Projects/CS/vision/shape/h3d/
\ No newline at end of file +source: https://www2.eecs.berkeley.edu/Research/Projects/CS/vision/shape/h3d/ + +## From Leeds Sports Pose + +=INDEX(A2:A9,MATCH(datasets!D1,B2:B9,0)) +=VLOOKUP(A2, datasets!A:J, 7, FALSE) + +Right ankle +Right knee +Right hip +Left hip +Left knee +Left ankle +Right wrist +Right elbow +Right shoulder +Left shoulder +Left elbow +Left wrist +Neck +Head top + +source: http://web.archive.org/web/20170915023005/sam.johnson.io/research/lsp.html
\ No newline at end of file diff --git a/site/content/pages/test/assets/test.csv b/site/content/pages/test/assets/test.csv new file mode 100644 index 00000000..7156a814 --- /dev/null +++ b/site/content/pages/test/assets/test.csv @@ -0,0 +1,8 @@ +name,images,year,gender,description,url +aardvark,100,2019,m,bim da,https://asdf.us/ +bobcat,10,2017,f,in a tree,https://asdf.us/ +cow,20,2012,f,moooo,https://asdf.us/ +doe,2,2016,f,doe a deer,https://asdf.us/ +earwig,1,2017,m,just a bug,https://i.asdf.us/ +frog,17,2018,f,ribbit ribbit,https://i.asdf.us/ +giraffe,23,2009,m,i get around,https://adsf.us/ diff --git a/site/content/pages/test/csv.md b/site/content/pages/test/csv.md index b5f37754..85f714b4 100644 --- a/site/content/pages/test/csv.md +++ b/site/content/pages/test/csv.md @@ -15,6 +15,6 @@ authors: Megapixels ### [← Back to test index](/test/) ``` -load_file /datasets/lfw/assets/lfw_names_gender_kg_min.csv -Name, Images, Gender, Description +load_file /site/test/assets/test.csv +Name, Images, Year, Gender, Description, URL ``` diff --git a/site/includes/chart.html b/site/includes/chart.html index 45c13493..01c2e83b 100644 --- a/site/includes/chart.html +++ b/site/includes/chart.html @@ -2,8 +2,7 @@ <h3>Who used {{ metadata.meta.dataset.name_display }}?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. 
</p> </section> diff --git a/site/includes/citations.html b/site/includes/citations.html index 058a1834..5cd40a29 100644 --- a/site/includes/citations.html +++ b/site/includes/citations.html @@ -1,12 +1,8 @@ <section class="applet_container"> - <h3>Citations</h3> + <h3>Dataset Citations</h3> <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> - <p> - Add [button/link] to download CSV. Add search input field to filter. Expand number of rows to 10. Reduce URL text to show only the domain (ie https://arxiv.org/pdf/123456 --> arxiv.org) + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/includes/map.html b/site/includes/map.html index 74771768..7511d4c7 100644 --- a/site/includes/map.html +++ b/site/includes/map.html @@ -1,6 +1,6 @@ <section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -12,27 +12,28 @@ </div> --> <p> - To understand how {{ metadata.meta.dataset.name_display }} has been used around the world... 
- affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how {{ metadata.meta.dataset.name_display }} has been used around the world for commercial, military and academic research; publicly available research citing {{ metadata.meta.dataset.name_full }} is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> <ul class="map-legend"> <li class="edu">Academic</li> - <li class="com">Industry</li> - <li class="gov">Government / Military</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> </ul> </div> -<section> +<!-- <section> <p class='subp'> [section under development] {{ metadata.meta.dataset.name_display }} ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> </section> + -->
\ No newline at end of file diff --git a/site/includes/piechart.html b/site/includes/piechart.html index e739bb28..94c8aae7 100644 --- a/site/includes/piechart.html +++ b/site/includes/piechart.html @@ -1,10 +1,3 @@ -<section> - <p> - These pie charts show overall totals based on country and institution type. - </p> - - </section> - <section class="applet_container"> <div class="applet" data-payload="{"command": "piechart"}"></div> </section> diff --git a/site/includes/sidebar.html b/site/includes/sidebar.html new file mode 100644 index 00000000..0f7d2dad --- /dev/null +++ b/site/includes/sidebar.html @@ -0,0 +1,6 @@ +{% for item in metadata.sidebar %} + <div class='meta'> + <div class='gray'>{{ item.title }}</div> + <div>{{ item.value }}</div> + </div> +{% endfor %}
\ No newline at end of file diff --git a/site/includes/supplementary_header.html b/site/includes/supplementary_header.html index 5fd4b2b4..be0967e4 100644 --- a/site/includes/supplementary_header.html +++ b/site/includes/supplementary_header.html @@ -1,10 +1,10 @@ <section> - <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section> diff --git a/site/public/about/credits/index.html b/site/public/about/credits/index.html deleted file mode 100644 index b4c17c4d..00000000 --- a/site/public/about/credits/index.html +++ /dev/null @@ -1,88 +0,0 @@ -<!doctype html> -<html> -<head> - <title>MegaPixels</title> - <meta charset="utf-8" /> - <meta name="author" content="Adam Harvey" /> - <meta name="description" content="MegaPixels Press and News" /> - <meta name="referrer" content="no-referrer" /> - <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> - <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> - <link rel='stylesheet' href='/assets/css/css.css' /> - <link rel='stylesheet' href='/assets/css/leaflet.css' /> - <link rel='stylesheet' href='/assets/css/applets.css' /> -</head> -<body> - <header> - <a class='slogan' href="/"> - <div class='logo'></div> - <div class='site_name'>MegaPixels</div> - </a> - <div class='links'> - <a href="/datasets/">Datasets</a> - <a href="/about/">About</a> - </div> - </header> - <div class="content content-about"> - - <section><h1>Credits</h1> -<section class="about-menu"> -<ul> - <li><a href="/about/">About</a></li> - <li><a href="/about/press/">Press</a></li> - <li><a class="current" href="/about/credits/">Credits</a></li> - <li><a href="/about/disclaimer/">Disclaimer</a></li> - <li><a href="/about/terms/">Terms and Conditions</a></li> - <li><a href="/about/privacy/">Privacy 
Policy</a></li> -</ul> -</section><h4>Team</h4> -<ul> -<li>Research and image analysis: Adam Harvey</li> -<li>Development and visualizations: Jules LaPlace</li> -<li>Produced in Partnership with Mozilla</li> -<li>Contributing researchers: Berit Gilma, Mathana Stender</li> -</ul> -<h4>Code</h4> -<p><<<<<<< HEAD</p> -<ul> -<li>This site uses D3.js, C3.js, and ThreeJS for visualizations.</li> -<li><h1>Data aggregation uses Pandas and PDFMiner.Six.</h1> -</li> -<li>This site uses D3 and C2 for visuzations</li> -<li>Add more here<blockquote><blockquote><blockquote><blockquote><blockquote><blockquote><blockquote><p>> 26646e6adf3833f6282e9515c14ad61e485440c0</p> -</blockquote> -</blockquote> -</blockquote> -</blockquote> -</blockquote> -</blockquote> -</blockquote> -</li> -</ul> -<h4>Data</h4> -<ul> -<li>link to github</li> -<li>how it was gathered</li> -</ul> -</section> - - </div> - <footer> - <div> - <a href="/">MegaPixels.cc</a> - <a href="/about/disclaimer/">Disclaimer</a> - <a href="/about/terms/">Terms of Use</a> - <a href="/about/privacy/">Privacy</a> - <a href="/about/">About</a> - <a href="/about/team/">Team</a> - </div> - <div> - MegaPixels ©2017-19 Adam R. Harvey / - <a href="https://ahprojects.com">ahprojects.com</a> - </div> - </footer> -</body> - -<script src="/assets/js/dist/index.js"></script> -</html>
\ No newline at end of file diff --git a/site/public/about/disclaimer/index.html b/site/public/about/disclaimer/index.html deleted file mode 100644 index 28588708..00000000 --- a/site/public/about/disclaimer/index.html +++ /dev/null @@ -1,67 +0,0 @@ -<!doctype html> -<html> -<head> - <title>MegaPixels</title> - <meta charset="utf-8" /> - <meta name="author" content="Adam Harvey" /> - <meta name="description" content="MegaPixels Disclaimer" /> - <meta name="referrer" content="no-referrer" /> - <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> - <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> - <link rel='stylesheet' href='/assets/css/css.css' /> - <link rel='stylesheet' href='/assets/css/leaflet.css' /> - <link rel='stylesheet' href='/assets/css/applets.css' /> -</head> -<body> - <header> - <a class='slogan' href="/"> - <div class='logo'></div> - <div class='site_name'>MegaPixels</div> - </a> - <div class='links'> - <a href="/datasets/">Datasets</a> - <a href="/about/">About</a> - </div> - </header> - <div class="content content-about"> - - <section><h1>Disclaimer</h1> -<section class="about-menu"> -<ul> -<li><a href="/about/">About</a></li> -<li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a class="current" href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> -</ul> -</section><p>(TEMPORARY PAGE)</p> -<p>Last updated: December 04, 2018</p> -<p>The information contained on MegaPixels.cc website (the "Service") is for academic and artistic purposes only.</p> -<p>MegaPixels.cc assumes no responsibility for errors or omissions in the contents on the Service.</p> -<p>In no event shall MegaPixels.cc be liable for any special, direct, indirect, consequential, or incidental damages or any damages whatsoever, 
whether in an action of contract, negligence or other tort, arising out of or in connection with the use of the Service or the contents of the Service. MegaPixels.cc reserves the right to make additions, deletions, or modification to the contents on the Service at any time without prior notice.</p> -<h2>External links disclaimer</h2> -<p>MegaPixels.cc website may contain links to external websites that are not provided or maintained by or in any way affiliated with MegaPixels.cc</p> -<p>Please note that the MegaPixels.cc does not guarantee the accuracy, relevance, timeliness, or completeness of any information on these external websites.</p> -</section> - - </div> - <footer> - <div> - <a href="/">MegaPixels.cc</a> - <a href="/about/disclaimer/">Disclaimer</a> - <a href="/about/terms/">Terms of Use</a> - <a href="/about/privacy/">Privacy</a> - <a href="/about/">About</a> - <a href="/about/team/">Team</a> - </div> - <div> - MegaPixels ©2017-19 Adam R. Harvey / - <a href="https://ahprojects.com">ahprojects.com</a> - </div> - </footer> -</body> - -<script src="/assets/js/dist/index.js"></script> -</html>
\ No newline at end of file diff --git a/site/public/about/research/index.html b/site/public/about/faq/index.html index 5ad30e20..168abd0b 100644 --- a/site/public/about/research/index.html +++ b/site/public/about/faq/index.html @@ -4,11 +4,10 @@ <title>MegaPixels</title> <meta charset="utf-8" /> <meta name="author" content="Adam Harvey" /> - <meta name="description" content="About MegaPixels Research Methodologies" /> + <meta name="description" content="MegaPixels Press and News" /> <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -26,15 +26,16 @@ </header> <div class="content content-about"> - <section><h1>Research Methodologies</h1> -</section><section class="about-menu"> + <section><h1>FAQs</h1> +<section class="about-menu"> <ul> <li><a href="/about/">About</a></li> +<li><a class="current" href="/about/faq/">FAQs</a></li> <li><a href="/about/press/">Press</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> </ul> +</section><p>[ page under development ]</p> </section> </div> diff --git a/site/public/about/index.html b/site/public/about/index.html index 3c270ee1..c379ec43 100644 --- a/site/public/about/index.html +++ b/site/public/about/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> 
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -30,34 +30,50 @@ <section class="about-menu"> <ul> <li><a class="current" href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> <li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> </ul> -</section><p>(PAGE UNDER DEVELOPMENT)</p> -<p><div style="font-size:20px;line-height:36px">Ever since government agencies began developing face recognition in the early 1960's, datasets of face images have always been central to technological advancements. Today, these datasets no longer originate in labs, but instead from family photo albums posted on photo sharing sites, surveillance cameras on college campuses, search engine queries for celebrities, cafe livestreams, and <a href="https://www.theverge.com/2017/8/22/16180080/transgender-youtubers-ai-facial-recognition-dataset">personal videos</a> posted on YouTube. </div></p><p>Collectively, facial recognition datasets are now gathered "in the wild".</p> -<p>MegaPixels is art and research by <a href="https://ahprojects.com">Adam Harvey</a> about facial recognition datasets that unravels their histories, futures, geographies, and meanings. 
Throughout 2019 this site this site will publish research reports, visualizations, raw data, and interactive tools to explore how publicly available facial recognition datasets contribute to a global supply chain of biometric data that powers the global facial recognition industry.</p><p>During the last year, hundreds of these facial analysis datasets created "in the wild" have been collected to understand how they contribute to a global supply chain of biometric data that is powering the global facial recognition industry.</p> -<p>The MegaPixels website is produced in partnership with <a href="https://mozilla.org">Mozilla</a>.</p> +</section><p>MegaPixels is an independent art and research project by Adam Harvey and Jules LaPlace investigating the ethics and individual privacy implications of publicly available face recognition datasets, and their role in industry and governmental expansion into biometric surveillance technologies.</p> +<p>The MegaPixels site is made possible with support from <a href="http://mozilla.org">Mozilla</a></p> <div class="flex-container team-photos-container"> <div class="team-member"> - <img src="https://nyc3.digitaloceanspaces.com/megapixels/v1/site/about/assets/adam-harvey-3d.jpg" /> <h3>Adam Harvey</h3> - <p>is Berlin-based American artist and researcher. His previous projects (CV Dazzle, Stealth Wear, and SkyLift) explore the potential for countersurveillance as artwork. He is the founder of VFRAME (visual forensics software for human rights groups), the recipient of 2 PrototypeFund grants, and is a researcher in residence at Karlsruhe HfG. - <br> - <a href="https://ahprojects.com">ahprojects.com</a> + <p>is Berlin-based American artist and researcher. His previous projects (CV Dazzle, Stealth Wear, and SkyLift) explore the potential for counter-surveillance as artwork. 
He is the founder of VFRAME (visual forensics software for human rights groups) and is a currently researcher in residence at Karlsruhe HfG.</p> + <p><a href="https://ahprojects.com">ahprojects.com</a></p> </p> </div> <div class="team-member"> - <img src="https://nyc3.digitaloceanspaces.com/megapixels/v1/site/about/assets/jules-laplace-3d.jpg" /> <h3>Jules LaPlace</h3> - <p>is an American creative technologist also based in Berlin. He was previously the CTO of a digital agency in NYC and now also works at VFRAME, developing computer vision for human rights groups. Jules also builds creative software for artists and musicians. - <br> - <a href="https://asdf.us/">asdf.us</a> + <p>is an American technologist and artist also based in Berlin. He was previously the CTO of a digital agency in NYC and now also works at VFRAME, developing computer vision and data analysis software for human rights groups. Jules also builds experimental software for artists and musicians. </p> + <p><a href="https://asdf.us/">asdf.us</a></p> </div> -</div></section> +</div><p>MegaPixels.cc is an independent research project about publicly available face recognition datasets. This website is based, in part, on earlier installations and research projects about facial recognition datasets in 2016-2018, which focused particularly on the MegaFace dataset. Since then it has evolved into a large-scale survey of publicly-available face and person analysis datasets, covering their usage, geographies, and ethics.</p> +<p>An academic report and presentation on the findings is forthcoming. This site is published to make the research more accessible to a wider audience and to include visualizations and interactive features not possible in PDF publications. 
Continued research on MegaPixels is supported by a 1 year Researcher-in-Residence grant from Karlsruhe HfG.</p> +<p>When possible, and once thoroughly verified, data generated for MegaPixels will be made available for download on <a href="https://github.com/adamhrv/megapixels">github.com/adamhrv/megapixels</a></p> +</section><section><div class='columns columns-3'><div class='column'><h4>Team</h4> +<ul> +<li>Adam Harvey: Concept, research, design, computer vision</li> +<li>Jules LaPlace: Information and systems architecture, data retrieval, web applications</li> +</ul> +</div><div class='column'><h4>Contributing Researchers</h4> +<ul> +<li>Berit Gilma: Dataset statistics and discovery</li> +<li>Beth (aka Ms. Celeb): Dataset usage verification and research</li> +<li>Mathana Stender: Commercial usage verification and research on LFW</li> +</ul> +</div><div class='column'><h4>Code and Libraries</h4> +<ul> +<li><a href="https://semanticscholar.org">Semantic Scholar</a> for citation aggregation</li> +<li>Leaflet.js for maps</li> +<li>C3.js for charts</li> +<li>ThreeJS for 3D visualizations</li> +<li>PDFMiner.Six and Pandas for research paper data analysis</li> +</ul> +</div></div></section><section><p>Please direct questions, comments, or feedback to <a href="https://mastodon.social/@adamhrv">mastodon.social/@adamhrv</a></p> +</section> </div> <footer> diff --git a/site/public/about/legal/index.html b/site/public/about/legal/index.html new file mode 100644 index 00000000..4e84a601 --- /dev/null +++ b/site/public/about/legal/index.html @@ -0,0 +1,89 @@ +<!doctype html> +<html> +<head> + <title>MegaPixels</title> + <meta charset="utf-8" /> + <meta name="author" content="Adam Harvey" /> + <meta name="description" content="MegaPixels Privacy Policy" /> + <meta name="referrer" content="no-referrer" /> + <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> + <link rel='stylesheet' href='/assets/css/fonts.css' /> + <link rel='stylesheet' 
href='/assets/css/css.css' /> + <link rel='stylesheet' href='/assets/css/leaflet.css' /> + <link rel='stylesheet' href='/assets/css/applets.css' /> +</head> +<body> + <header> + <a class='slogan' href="/"> + <div class='logo'></div> + <div class='site_name'>MegaPixels</div> + + </a> + <div class='links'> + <a href="/datasets/">Datasets</a> + <a href="/about/">About</a> + </div> + </header> + <div class="content content-about"> + + <section><h1>Legal</h1> +<section class="about-menu"> +<ul> +<li><a href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> +<li><a href="/about/press/">Press</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a class="current" href="/about/privacy/">Privacy</a></li> +</ul> +</section><p>MegaPixels.cc Terms and Privacy</p> +<p>MegaPixels is an independent art and research project about the origins and ethics of publicly available face analysis image datasets. By accessing MegaPixels (the <em>Service</em> or <em>Services</em>) you agree to the terms and conditions set forth below.</p> +<h3>Changes</h3> +<p>We reserve the right, at our sole discretion, to modify or replace these Terms at any time. If a revision is material we will try to provide at least 30 days notice prior to any new terms taking effect. What constitutes a material change will be determined at our sole discretion.</p> +<p>By continuing to access or use our Service after those revisions become effective, you agree to be bound by the revised terms. If you do not agree to the new terms, please stop using the Service.</p> +<h2>Privacy</h2> +<p>The MegaPixels site has been designed to minimize the amount of network requests to 3rd party services and therefore prioritize the privacy of the viewer by only loading local dependencies. Additionaly, this site does not use any anaytics programs to monitor site viewers. 
In fact, the only data collected are the necessary server logs that used only for preventing misuse, which are deleteted at regular short-term intervals.</p> +<h2>3rd Party Services</h2> +<p>In order to provide certain features of the site, some 3rd party services are needed. Currently, the MegaPixels.cc site uses two 3rd party services: (1) Leaflet.js for the interactive map and (2 Digital Ocean Spaces as a condent delivery network. Both services encrypt your requests to their server using HTTPS and neither service requires storing any cookies or authentication. However, both services will store files in your web browser's local cache (local storage) to improve loading performance. None of these local storage files are using for analytics, cookie-like technologies, tracking, or any similar purpose.</p> +<h3>Links To Other Web Sites</h3> +<p>The MegaPixels.cc contains many links to 3rd party websites, especically in the list of citations that are provided for each dataset. This website has no control over and assumes no responsibility for, the content, privacy policies, or practices of any third party web sites or services. You further acknowledge and agree that megapixels.cc shall not be responsible or liable, directly or indirectly, for any damage or loss caused or alleged to be caused by or in connection with use of or reliance on any such content, goods or services available on or through any such web sites or services.</p> +<p>We advise you to read the terms and conditions and privacy policies of any third-party web sites or services that you visit.</p> +<h3>The Information We Provide</h3> +<p>While every intention is made to verify and publish only verifiablenformation, at times amendments to accuracy may be required. 
In no event will the operators of this site be liable for your use or misuse of the information provided.</p> +<p>We may terminate or suspend access to our Service immediately, without prior notice or liability, for any reason whatsoever, including without limitation if you breach the Terms.</p> +<p>All provisions of the Terms which by their nature should survive termination shall survive termination, including, without limitation, ownership provisions, warranty disclaimers, indemnity and limitations of liability.</p> +<h3>Prohibited Uses</h3> +<p>You may not access or use, or attempt to access or use, the Services to take any action that could harm us or a third party. You may not use the Services in violation of applicable laws or in violation of our or any third party’s intellectual property or other proprietary or legal rights. You further agree that you shall not attempt (or encourage or support anyone else's attempt) to circumvent, reverse engineer, decrypt, or otherwise alter or interfere with the Services, or any content thereof, or make any unauthorized use thereof.</p> +<p>Without prior written consent, you shall not:</p> +<p>(i) access any part of the Services, Content, data or information you do not have permission or authorization to access;</p> +<p>(ii) use robots, spiders, scripts, service, software or any manual or automatic device, tool, or process designed to data mine or scrape the Content, data or information from the Services, or otherwise access or collect the Content, data or information from the Services using automated means;</p> +<p>(iii) use services, software or any manual or automatic device, tool, or process designed to circumvent any restriction, condition, or technological measure that controls access to the Services in any way, including overriding any security feature or bypassing or circumventing any access controls or use limits of the Services;</p> +<p>(iv) cache or archive the Content (except for a public search engine’s use of 
spiders for creating search indices);</p> +<p>(v) take action that imposes an unreasonable or disproportionately large load on our network or infrastructure; and</p> +<p>(vi) do anything that could disable, damage or change the functioning or appearance of the Services, including the presentation of advertising.</p> +<p>Engaging in a prohibited use of the Services may result in civil, criminal, and/or administrative penalties, fines, or sanctions against the user and those assisting the user.</p> +<h3>Governing Law</h3> +<p>These Terms shall be governed and construed in accordance with the laws of Berlin, Germany, without regard to its conflict of law provisions.</p> +<p>Our failure to enforce any right or provision of these Terms will not be considered a waiver of those rights. If any provision of these Terms is held to be invalid or unenforceable by a court, the remaining provisions of these Terms will remain in effect. These Terms constitute the entire agreement between us regarding our Service, and supersede and replace any prior agreements we might have between us regarding the Service.</p> +<h3>Indemnity</h3> +<p>You hereby indemnify, defend and hold harmless MegaPixels (and its creators) and all officers, directors, owners, agents, information providers, affiliates, licensors and licensees (collectively, the "Indemnified Parties") from and against any and all liability and costs, including, without limitation, reasonable attorneys' fees, incurred by the Indemnified Parties in connection with any claim arising out of any breach by you or any user of your account of these Terms of Service or the foregoing representations, warranties and covenants. You shall cooperate as fully as reasonably required in the defense of any such claim. 
We reserves the right, at its own expense, to assume the exclusive defense and control of any matter subject to indemnification by you.</p> +</section> + + </div> + <footer> + <div> + <a href="/">MegaPixels.cc</a> + <a href="/about/disclaimer/">Disclaimer</a> + <a href="/about/terms/">Terms of Use</a> + <a href="/about/privacy/">Privacy</a> + <a href="/about/">About</a> + <a href="/about/team/">Team</a> + </div> + <div> + MegaPixels ©2017-19 Adam R. Harvey / + <a href="https://ahprojects.com">ahprojects.com</a> + </div> + </footer> +</body> + +<script src="/assets/js/dist/index.js"></script> +</html>
\ No newline at end of file diff --git a/site/public/about/press/index.html b/site/public/about/press/index.html index e2e646da..610fda6e 100644 --- a/site/public/about/press/index.html +++ b/site/public/about/press/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -30,11 +30,10 @@ <section class="about-menu"> <ul> <li><a href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> <li><a class="current" href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> </ul> </section><p>(TEMPORARY PAGE)</p> <ul> diff --git a/site/public/about/privacy/index.html b/site/public/about/privacy/index.html index f6915d66..6e760cf7 100644 --- a/site/public/about/privacy/index.html +++ b/site/public/about/privacy/index.html @@ -30,96 +30,20 @@ <section class="about-menu"> <ul> <li><a href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> <li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a href="/about/terms/">Terms and Conditions</a></li> -<li><a class="current" href="/about/privacy/">Privacy Policy</a></li> 
-</ul> -</section><p>(TEMPORARY PAGE)</p> -<p>A summary of our privacy policy is as follows:</p> -<p>The MegaPixels site does not use any analytics programs or collect any data besides the necessary IP address of your connection, which are deleted every 30 days and used only for security and to prevent misuse.</p> -<p>The image processing sections of the site do not collect any data whatsoever. All processing takes place in temporary memory (RAM) and then is displayed back to the user over a SSL secured HTTPS connection. It is the sole responsibility of the user whether they discard, by closing the page, or share their analyzed information and any potential consequences that may arise from doing so.</p> -<p>A more complete legal version is below:</p> -<p><strong>This is a boilerplate Privacy policy from <a href="https://termsfeed.com/">https://termsfeed.com/</a></strong></p> -<p><strong>Needs to be reviewed</strong></p> -<p>Effective date: December 04, 2018</p> -<p>megapixels.cc ("us", "we", or "our") operates the WebsiteName website (hereinafter referred to as the "Service").</p> -<p>This page informs you of our policies regarding the collection, use, and disclosure of personal data when you use our Service and the choices you have associated with that data.</p> -<p>We use your data to provide and improve the Service. By using the Service, you agree to the collection and use of information in accordance with this policy. 
Unless otherwise defined in this Privacy Policy, the terms used in this Privacy Policy have the same meanings as in our Terms and Conditions, accessible from WebsiteName</p> -<h2>Definitions</h2> -<p><strong>Service</strong></p> -<p>Service is the MegaPixels website operated by megapixels.cc</p> -<p><strong>Personal Data</strong></p> -<p>Personal Data means data about a living individual who can be identified from those data (or from those and other information either in our possession or likely to come into our possession).</p> -<p><strong>Usage Data</strong></p> -<p>Usage Data is data collected automatically either generated by the use of the Service or from the Service infrastructure itself</p> -<h2>Information Collection and Use</h2> -<p>We collect several different types of information for various purposes to provide and improve our Service to you.</p> -<h2>Types of Data Collected</h2> -<h3>Personal Data</h3> -<p>While using our Service, we may ask you to provide us with certain personally identifiable information that can be used to contact or identify you ("Personal Data"). Personally identifiable information may include, but is not limited to:</p> -<ul> -<li>Cookies and Usage Data</li> -</ul> -<h3>Usage Data</h3> -<p>We may also collect information how the Service is accessed and used ("Usage Data"). This Usage Data may include information such as your computer's Internet Protocol address (e.g. IP address), browser type, browser version, the pages of our Service that you visit, the time and date of your visit, the time spent on those pages, unique device identifiers and other diagnostic data.</p> -<h3>Tracking & Cookies Data</h3> -<p>We use cookies and similar tracking technologies to track the activity on our Service and we hold certain information. -Cookies are files with a small amount of data which may include an anonymous unique identifier. Cookies are sent to your browser from a website and stored on your device. 
Other tracking technologies are also used such as beacons, tags and scripts to collect and track information and to improve and analyse our Service.</p> -<p>You can instruct your browser to refuse all cookies or to indicate when a cookie is being sent. However, if you do not accept cookies, you may not be able to use some portions of our Service. -Examples of Cookies we use:</p> -<ul> -<li><strong>Session Cookies.</strong> We use Session Cookies to operate our Service.</li> -<li><strong>Preference Cookies.</strong> We use Preference Cookies to remember your preferences and various settings.</li> -<li><strong>Security Cookies.</strong> We use Security Cookies for security purposes.</li> -</ul> -<h2>Use of Data</h2> -<p>megapixels.cc uses the collected data for various purposes:</p> -<ul> -<li>To provide and maintain the Service</li> -<li>To notify you about changes to our Service</li> -<li>To allow you to participate in interactive features of our Service when you choose to do so</li> -<li>To provide customer care and support</li> -<li>To provide analysis or valuable information so that we can improve the Service</li> -<li>To monitor the usage of the Service</li> -<li>To detect, prevent and address technical issues</li> -</ul> -<h2>Transfer Of Data</h2> -<p>Your information, including Personal Data, may be transferred to — and maintained on — computers located outside of your state, province, country or other governmental jurisdiction where the data protection laws may differ than those from your jurisdiction.</p> -<p>If you are located outside Germany and choose to provide information to us, please note that we transfer the data, including Personal Data, to Germany and process it there. -Your consent to this Privacy Policy followed by your submission of such information represents your agreement to that transfer. 
-megapixels.cc will take all steps reasonably necessary to ensure that your data is treated securely and in accordance with this Privacy Policy and no transfer of your Personal Data will take place to an organization or a country unless there are adequate controls in place including the security of your data and other personal information.</p> -<h2>Disclosure Of Data</h2> -<h3>Legal Requirements</h3> -<p>megapixels.cc may disclose your Personal Data in the good faith belief that such action is necessary to:</p> -<ul> -<li>To comply with a legal obligation</li> -<li>To protect and defend the rights or property of megapixels.cc</li> -<li>To prevent or investigate possible wrongdoing in connection with the Service</li> -<li>To protect the personal safety of users of the Service or the public</li> -<li>To protect against legal liability</li> -</ul> -<h2>Security of Data</h2> -<p>The security of your data is important to us but remember that no method of transmission over the Internet or method of electronic storage is 100% secure. While we strive to use commercially acceptable means to protect your Personal Data, we cannot guarantee its absolute security.</p> -<h2>Service Providers</h2> -<p>We may employ third party companies and individuals to facilitate our Service ("Service Providers"), to provide the Service on our behalf, to perform Service-related services or to assist us in analyzing how our Service is used.</p> -<p>These third parties have access to your Personal Data only to perform these tasks on our behalf and are obligated not to disclose or use it for any other purpose.</p> -<h2>Links to Other Sites</h2> -<p>Our Service may contain links to other sites that are not operated by us. If you click a third party link, you will be directed to that third party's site. We strongly advise you to review the Privacy Policy of every site you visit. 
-We have no control over and assume no responsibility for the content, privacy policies or practices of any third party sites or services.</p> -<h2>Children's Privacy</h2> -<p>Our Service does not address anyone under the age of 18 ("Children").</p> -<p>We do not knowingly collect personally identifiable information from anyone under the age of 18. If you are a parent or guardian and you are aware that your Child has provided us with Personal Data, please contact us. If we become aware that we have collected Personal Data from children without verification of parental consent, we take steps to remove that information from our servers.</p> -<h2>Changes to This Privacy Policy</h2> -<p>We may update our Privacy Policy from time to time. We will notify you of any changes by posting the new Privacy Policy on this page. -We will let you know via email and/or a prominent notice on our Service, prior to the change becoming effective and update the "effective date" at the top of this Privacy Policy. -You are advised to review this Privacy Policy periodically for any changes. Changes to this Privacy Policy are effective when they are posted on this page.</p> -<h2>Contact Us</h2> -<p>If you have any questions about this Privacy Policy, please contact us:</p> -<ul> -<li>By visiting this page on our website: <a href="https://megapixels.cc/contact">https://megapixels.cc/contact</a></li> +<li><a href="/about/terms/">Terms</a></li> +<li><a class="current" href="/about/privacy/">Privacy</a></li> </ul> +</section><p>MegaPixels.cc Terms and Privacy</p> +<p>MegaPixels is an independent art and research project about the origins and ethics of publicly available face analysis image datasets. By accessing this site you agree to the terms and conditions set forth below.</p> +<h2>Privacy</h2> +<p>The MegaPixels site has been designed to minimize the amount of network requests to 3rd party services and therefore prioritize the privacy of the viewer by only loading local dependencies. 
Additionaly, this site does not use any anaytics programs to monitor site viewers. In fact, the only data collected are the necessary server logs that used only for preventing misuse, which are deleteted at regular short-term intervals.</p> +<h2>3rd Party Services</h2> +<p>In order to provide certain features of the site, some 3rd party services are needed. Currently, the MegaPixels.cc site uses two 3rd party services: (1) Leaflet.js for the interactive map and (2 Digital Ocean Spaces as a condent delivery network. Both services encrypt your requests to their server using HTTPS and neither service requires storing any cookies or authentication. However, both services will store files in your web browser's local cache (local storage) to improve loading performance. None of these local storage files are using for analytics, cookie-like technologies, tracking, or any similar purpose.</p> +<h3>Links To Other Web Sites</h3> +<p>The MegaPixels.cc contains many links to 3rd party websites, especically in the list of citations that are provided for each dataset. This website has no control over and assumes no responsibility for, the content, privacy policies, or practices of any third party web sites or services. 
You further acknowledge and agree that megapixels.cc shall not be responsible or liable, directly or indirectly, for any damage or loss caused or alleged to be caused by or in connection with use of or reliance on any such content, goods or services available on or through any such web sites or services.</p> +<p>We advise you to read the terms and conditions and privacy policies of any third-party web sites or services that you visit.</p> </section> </div> diff --git a/site/public/about/terms/index.html b/site/public/about/terms/index.html index b86eae88..58e49b78 100644 --- a/site/public/about/terms/index.html +++ b/site/public/about/terms/index.html @@ -30,11 +30,10 @@ <section class="about-menu"> <ul> <li><a href="/about/">About</a></li> +<li><a href="/about/faq/">FAQs</a></li> <li><a href="/about/press/">Press</a></li> -<li><a href="/about/credits/">Credits</a></li> -<li><a href="/about/disclaimer/">Disclaimer</a></li> -<li><a class="current" href="/about/terms/">Terms and Conditions</a></li> -<li><a href="/about/privacy/">Privacy Policy</a></li> +<li><a class="current" href="/about/terms/">Terms</a></li> +<li><a href="/about/privacy/">Privacy</a></li> </ul> </section><p>(TEMPORARY PAGE)</p> <p>(FPO: this is only example text)</p> diff --git a/site/public/datasets/50_people_one_question/index.html b/site/public/datasets/50_people_one_question/index.html index bded7fbd..796af8d6 100644 --- a/site/public/datasets/50_people_one_question/index.html +++ b/site/public/datasets/50_people_one_question/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a 
class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>50 People One Question</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,13 +27,25 @@ <div class="content content-dataset"> <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/50_people_one_question/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span style="color:#ffaa00">People One Question</span> is a dataset of people from an online video series on YouTube and Vimeo used for building facial recogntion algorithms</span></div><div class='hero_subdesc'><span class='bgpad'>People One Question dataset includes ... -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>50 People 1 Question</h2> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2013</div> + </div><div class='meta'> + <div class='gray'>Videos</div> + <div>33 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Facial landmark estimation in the wild</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://www.vision.caltech.edu/~dhall/projects/MergingPoseEstimates/' target='_blank' rel='nofollow noopener'>caltech.edu</a></div> + </div><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>50 People 1 Question</h2> <p>(PAGE UNDER DEVELOPMENT)</p> <p>At vero eos et accusamus 
et iusto odio dignissimos ducimus, qui blanditiis praesentium voluptatum deleniti atque corrupti, quos dolores et quas molestias excepturi sint, obcaecati cupiditate non-provident, similique sunt in culpa, qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio.</p> <p>Nam libero tempore, cum soluta nobis est eligendi optio, cumque nihil impedit, quo minus id, quod maxime placeat, facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet, ut et voluptates repudiandae sint et molestiae non-recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat</p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -45,45 +57,44 @@ </div> --> <p> - To understand how 50 People One Question Dataset has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how 50 People One Question Dataset has been used around the world for commercial, military and academic research; publicly available research citing 50 People One Question is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. 
</p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of 50 People One Question Dataset. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] 50 People One Question Dataset ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. 
The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/afad/index.html b/site/public/datasets/afad/index.html index a0aea1a6..ac025a80 100644 --- a/site/public/datasets/afad/index.html +++ b/site/public/datasets/afad/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>AFAD</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/aflw/index.html b/site/public/datasets/aflw/index.html index 7aaa9af0..476f390c 100644 --- a/site/public/datasets/aflw/index.html +++ b/site/public/datasets/aflw/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta 
name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/brainwash/index.html b/site/public/datasets/brainwash/index.html index 41484257..ec5ee434 100644 --- a/site/public/datasets/brainwash/index.html +++ b/site/public/datasets/brainwash/index.html @@ -4,11 +4,10 @@ <title>MegaPixels</title> <meta charset="utf-8" /> <meta name="author" content="Adam Harvey" /> - <meta name="description" content="Brainwash is a dataset of webcam images taken from the Brainwash Cafe in San Francisco" /> + <meta name="description" content="Brainwash is a dataset of webcam images taken from the Brainwash Cafe in San Francisco in 2014" /> <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>Brainwash</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -26,36 +26,50 @@ </header> <div class="content content-dataset"> - <section class='intro_section' style='background-image: 
url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'>Brainwash is a dataset of webcam images taken from the Brainwash Cafe in San Francisco</span></div><div class='hero_subdesc'><span class='bgpad'>The Brainwash dataset includes 11,918 images of "everyday life of a busy downtown cafe" and is used for training head detection algorithms -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Published</div><div>2015</div></div><div><div class='gray'>Images</div><div>11,918</div></div><div><div class='gray'>Faces</div><div>91,146</div></div><div><div class='gray'>Created by</div><div>Stanford Department of Computer Science</div></div><div><div class='gray'>Funded by</div><div>Max Planck Center for Visual Computing and Communication</div></div><div><div class='gray'>Location</div><div>Brainwash Cafe, San Franscisco</div></div><div><div class='gray'>Purpose</div><div>Training face detection</div></div><div><div class='gray'>Website</div><div><a href="https://exhibits.stanford.edu/data/catalog/sx925dc9385">stanford.edu</a></div></div><div><div class='gray'>Paper</div><div><a href="http://arxiv.org/abs/1506.04878">End-to-End People Detection in Crowded Scenes</a></div></div><div><div class='gray'>Explicit Consent</div><div>No</div></div></div></div><h2>Brainwash Dataset</h2> -<p>(PAGE UNDER DEVELOPMENT)</p> -<p><em>Brainwash</em> is a face detection dataset created from the Brainwash Cafe's livecam footage including 11,918 images of "everyday life of a busy downtown cafe<a class="footnote_shim" name="[^readme]_1"> </a><a href="#[^readme]" class="footnote" title="Footnote 1">1</a>". 
The images are used to develop face detection algorithms for the "challenging task of detecting people in crowded scenes" and tracking them.</p> -<p>Before closing in 2017, Brainwash Cafe was a "cafe and laundromat" located in San Francisco's SoMA district. The cafe published a publicy available livestream from the cafe with a view of the cash register, performance stage, and seating area.</p> -<p>Since it's publication by Stanford in 2015, the Brainwash dataset has appeared in several notable research papers. In September 2016 four researchers from the National University of Defense Technology in Changsha, China used the Brainwash dataset for a research study on "people head detection in crowded scenes", concluding that their algorithm "achieves superior head detection performance on the crowded scenes dataset<a class="footnote_shim" name="[^localized_region_context]_1"> </a><a href="#[^localized_region_context]" class="footnote" title="Footnote 2">2</a>". And again in 2017 three researchers at the National University of Defense Technology used Brainwash for a study on object detection noting "the data set used in our experiment is shown in Table 1, which includes one scene of the brainwash dataset<a class="footnote_shim" name="[^replacement_algorithm]_1"> </a><a href="#[^replacement_algorithm]" class="footnote" title="Footnote 3">3</a>".</p> -</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/00425000_960.jpg' alt=' An sample image from the Brainwash dataset used for training face and head detection algorithms for surveillance. The datset contains about 12,000 images. License: Open Data Commons Public Domain Dedication (PDDL)'><div class='caption'> An sample image from the Brainwash dataset used for training face and head detection algorithms for surveillance. The datset contains about 12,000 images. 
License: Open Data Commons Public Domain Dedication (PDDL)</div></div></section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/brainwash_montage.jpg' alt=' 49 of the 11,918 images included in the Brainwash dataset. License: Open Data Commons Public Domain Dedication (PDDL)'><div class='caption'> 49 of the 11,918 images included in the Brainwash dataset. License: Open Data Commons Public Domain Dedication (PDDL)</div></div></section><section> + <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'>Brainwash is a dataset of webcam images taken from the Brainwash Cafe in San Francisco in 2014</span></div><div class='hero_subdesc'><span class='bgpad'>The Brainwash dataset includes 11,918 images of "everyday life of a busy downtown cafe" and is used for training head detection surveillance algorithms +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2015</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>11,917 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Head detection</div> + </div><div class='meta'> + <div class='gray'>Created by</div> + <div>Stanford University (US), Max Planck Institute for Informatics (DE)</div> + </div><div class='meta'> + <div class='gray'>Funded by</div> + <div>Max Planck Center for Visual Computing and Communication</div> + </div><div class='meta'> + <div class='gray'>Download Size</div> + <div>4.1 GB</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='https://purl.stanford.edu/sx925dc9385' target='_blank' rel='nofollow noopener'>stanford.edu</a></div> + </div></div><h2>Brainwash Dataset</h2> +<p><em>Brainwash</em> is a head detection dataset 
created from San Francisco's Brainwash Cafe livecam footage. It includes 11,918 images of "everyday life of a busy downtown cafe"<a class="footnote_shim" name="[^readme]_1"> </a><a href="#[^readme]" class="footnote" title="Footnote 1">1</a> captured at 100 second intervals throught the entire day. Brainwash dataset was captured during 3 days in 2014: October 27, November 13, and November 24. According the author's reserach paper introducing the dataset, the images were acquired with the help of Angelcam.com [cite orig paper].</p> +<p>Brainwash is not a widely used dataset but since its publication by Stanford University in 2015, it has notably appeared in several research papers from the National University of Defense Technology in Changsha, China. In 2016 and in 2017 researchers there conducted studies on detecting people's heads in crowded scenes for the purpose of surveillance <a class="footnote_shim" name="[^localized_region_context]_1"> </a><a href="#[^localized_region_context]" class="footnote" title="Footnote 2">2</a> <a class="footnote_shim" name="[^replacement_algorithm]_1"> </a><a href="#[^replacement_algorithm]" class="footnote" title="Footnote 3">3</a>.</p> +<p>If you happen to have been at Brainwash cafe in San Franscisco at any time on October 26, November 13, or November 24 in 2014 you are most likely included in the Brainwash dataset.</p> +</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/brainwash_mean_overlay.jpg' alt=' The pixel-averaged image of all Brainwash dataset images is shown with 81,973 head annotations drawn from the Brainwash training partition. (c) Adam Harvey'><div class='caption'> The pixel-averaged image of all Brainwash dataset images is shown with 81,973 head annotations drawn from the Brainwash training partition. 
(c) Adam Harvey</div></div></section><section> <h3>Who used Brainwash Dataset?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. </p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> -</section><section> - <p> - These pie charts show overall totals based on country and institution type. - </p> - - </section> - -<section class="applet_container"> +</section><section class="applet_container"> <div class="applet" data-payload="{"command": "piechart"}"></div> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -67,53 +81,58 @@ </div> --> <p> - To understand how Brainwash Dataset has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how Brainwash Dataset has been used around the world for commercial, military and academic research; publicly available research citing Brainwash Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. 
</p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of Brainwash Dataset. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] Brainwash Dataset ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section><p>Add more analysis here</p> -</section><section> +</section> + --><section class="applet_container"> + <h3>Dataset Citations</h3> + <p> + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. 
These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. + </p> + + <div class="applet" data-payload="{"command": "citations"}"></div> +</section><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> -</section><section class="applet_container"> - - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> - <p> - Add button/link to download CSV - </p> - - <div class="applet" data-payload="{"command": "citations"}"></div> -</section><section><h3>Additional Information</h3> + <h3>Supplementary Information</h3> + +</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/00425000_960.jpg' alt=' An sample image from the Brainwash dataset used for training face and head detection algorithms for surveillance. The datset contains about 12,000 images. License: Open Data Commons Public Domain Dedication (PDDL)'><div class='caption'> An sample image from the Brainwash dataset used for training face and head detection algorithms for surveillance. The datset contains about 12,000 images. License: Open Data Commons Public Domain Dedication (PDDL)</div></div></section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/brainwash/assets/brainwash_montage.jpg' alt=' 49 of the 11,918 images included in the Brainwash dataset. 
License: Open Data Commons Public Domain Dedication (PDDL)'><div class='caption'> 49 of the 11,918 images included in the Brainwash dataset. License: Open Data Commons Public Domain Dedication (PDDL)</div></div></section><section><h4>Additional Resources</h4> <ul> <li>The dataset author spoke about his research at the CVPR conference in 2016 <a href="https://www.youtube.com/watch?v=Nl2fBKxwusQ">https://www.youtube.com/watch?v=Nl2fBKxwusQ</a></li> </ul> +<p>TODO</p> +<ul> +<li>add bounding boxes to the header image</li> +<li>remake montage with randomized images, with bboxes</li> +<li>clean up intro text</li> +<li>verify quote citations</li> +</ul> </section><section><ul class="footnotes"><li><a name="[^readme]" class="footnote_shim"></a><span class="backlinks"><a href="#[^readme]_1">a</a></span><p>"readme.txt" <a href="https://exhibits.stanford.edu/data/catalog/sx925dc9385">https://exhibits.stanford.edu/data/catalog/sx925dc9385</a>.</p> </li><li><a name="[^localized_region_context]" class="footnote_shim"></a><span class="backlinks"><a href="#[^localized_region_context]_1">a</a></span><p>Li, Y. and Dou, Y. and Liu, X. and Li, T. Localized Region Context and Object Feature Fusion for People Head Detection. ICIP16 Proceedings. 2016. Pages 594-598.</p> </li><li><a name="[^replacement_algorithm]" class="footnote_shim"></a><span class="backlinks"><a href="#[^replacement_algorithm]_1">a</a></span><p>Zhao. X, Wang Y, Dou, Y. 
A Replacement Algorithm of Non-Maximum Suppression Base on Graph Clustering.</p> diff --git a/site/public/datasets/caltech_10k/index.html b/site/public/datasets/caltech_10k/index.html index 6615bb1a..9aa0b2c3 100644 --- a/site/public/datasets/caltech_10k/index.html +++ b/site/public/datasets/caltech_10k/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/celeba/index.html b/site/public/datasets/celeba/index.html index 09347f10..26a43803 100644 --- a/site/public/datasets/celeba/index.html +++ b/site/public/datasets/celeba/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>CelebA</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,13 +27,31 @@ <div class="content content-dataset"> <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/celeba/assets/background.jpg)'><div 
class='inner'><div class='hero_desc'><span class='bgpad'><span style="color:#ffaa00">CelebA</span> is a dataset of people...</span></div><div class='hero_subdesc'><span class='bgpad'>CelebA includes... -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>CelebA</h2> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2015</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>202,599 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>10,177 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>face attribute recognition, face detection, and landmark (or facial part) localization</div> + </div><div class='meta'> + <div class='gray'>Download Size</div> + <div>1.4 GB</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html' target='_blank' rel='nofollow noopener'>edu.hk</a></div> + </div><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>CelebA</h2> <p>(PAGE UNDER DEVELOPMENT)</p> <p>At vero eos et accusamus et iusto odio dignissimos ducimus, qui blanditiis praesentium voluptatum deleniti atque corrupti, quos dolores et quas molestias excepturi sint, obcaecati cupiditate non-provident, similique sunt in culpa, qui officia deserunt mollitia animi, id est laborum et dolorum fuga. 
Et harum quidem rerum facilis est et expedita distinctio.</p> <p>Nam libero tempore, cum soluta nobis est eligendi optio, cumque nihil impedit, quo minus id, quod maxime placeat, facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet, ut et voluptates repudiandae sint et molestiae non-recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat</p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -45,45 +63,44 @@ </div> --> <p> - To understand how CelebA Dataset has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how CelebA Dataset has been used around the world for commercial, military and academic research; publicly available research citing Large-scale CelebFaces Attributes Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of CelebA Dataset. 
+ <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] CelebA Dataset ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. 
Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/cofw/index.html b/site/public/datasets/cofw/index.html index eac1f7a6..8925d4b8 100644 --- a/site/public/datasets/cofw/index.html +++ b/site/public/datasets/cofw/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>COFW</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -26,7 +26,19 @@ </header> <div class="content content-"> - <section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Years</div><div>1993-1996</div></div><div><div class='gray'>Images</div><div>14,126</div></div><div><div class='gray'>Identities</div><div>1,199 </div></div><div><div class='gray'>Origin</div><div>Web Searches</div></div><div><div class='gray'>Funded by</div><div>ODNI, IARPA, Microsoft</div></div></div></div><h2>Caltech Occluded Faces in the Wild</h2> + <section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2013</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>1,007 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>challenging dataset (sunglasses, hats, interaction with 
objects)</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://www.vision.caltech.edu/xpburgos/ICCV13/' target='_blank' rel='nofollow noopener'>caltech.edu</a></div> + </div><div class='meta'><div><div class='gray'>Years</div><div>1993-1996</div></div><div><div class='gray'>Images</div><div>14,126</div></div><div><div class='gray'>Identities</div><div>1,199 </div></div><div><div class='gray'>Origin</div><div>Web Searches</div></div><div><div class='gray'>Funded by</div><div>ODNI, IARPA, Microsoft</div></div></div></div><h2>Caltech Occluded Faces in the Wild</h2> <p>(PAGE UNDER DEVELOPMENT)</p> <p>COFW is "is designed to benchmark face landmark algorithms in realistic conditions, which include heavy occlusions and large shape variations" [Robust face landmark estimation under occlusion].</p> <p>RESEARCH below this line</p> @@ -43,7 +55,7 @@ To increase the number of training images, and since COFW has the exact same la <p><a href="https://www.cs.cmu.edu/~peiyunh/topdown/">https://www.cs.cmu.edu/~peiyunh/topdown/</a></p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -55,45 +67,44 @@ To increase the number of training images, and since COFW has the exact same la </div> --> <p> - To understand how COFW Dataset has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how COFW Dataset has been used around the world for commercial, military and academic research; publicly available research citing Caltech Occluded Faces in the Wild is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. 
</p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of COFW Dataset. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] COFW Dataset ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. 
The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> @@ -101,13 +112,14 @@ To increase the number of training images, and since COFW has the exact same la <h3>Who used COFW Dataset?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. 
</p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> </section><section><p>TODO</p> <h2>- replace graphic</h2> diff --git a/site/public/datasets/duke_mtmc/index.html b/site/public/datasets/duke_mtmc/index.html index 299331d7..37de48ad 100644 --- a/site/public/datasets/duke_mtmc/index.html +++ b/site/public/datasets/duke_mtmc/index.html @@ -4,11 +4,10 @@ <title>MegaPixels</title> <meta charset="utf-8" /> <meta name="author" content="Adam Harvey" /> - <meta name="description" content="Duke MTMC is a dataset of CCTV footage of students at Duke University" /> + <meta name="description" content="Duke MTMC is a dataset of surveillance camera footage of students on Duke University campus" /> <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>Duke MTMC</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -26,12 +26,35 @@ </header> <div class="content content-dataset"> - <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/duke_mtmc/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name">Duke MTMC</span> is a dataset of CCTV footage of students at Duke University</span></div><div 
class='hero_subdesc'><span class='bgpad'>Duke MTMC contains over 2 million video frames and 2,000 unique identities collected from 8 cameras at Duke University campus in March 2014 -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>March 19, 2014</div></div><div><div class='gray'>Cameras</div><div>8</div></div><div><div class='gray'>Video Frames</div><div>2,000,000</div></div><div><div class='gray'>Identities</div><div>Over 2,000</div></div><div><div class='gray'>Used for</div><div>Person re-identification, <br>face recognition</div></div><div><div class='gray'>Sector</div><div>Academic</div></div><div><div class='gray'>Website</div><div><a href="http://vision.cs.duke.edu/DukeMTMC/">duke.edu</a></div></div></div></div><h2>Duke Multi-Target, Multi-Camera Tracking Dataset (Duke MTMC)</h2> -<p>(PAGE UNDER DEVELOPMENT)</p> + <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/duke_mtmc/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name">Duke MTMC</span> is a dataset of surveillance camera footage of students on Duke University campus</span></div><div class='hero_subdesc'><span class='bgpad'>Duke MTMC contains over 2 million video frames and 2,000 unique identities collected from 8 HD cameras at Duke University campus in March 2014 +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2016</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>2,000,000 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>1,812 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Person re-identification and multi-camera tracking</div> + </div><div class='meta'> + <div class='gray'>Created by</div> + <div>Computer Science Department, Duke University, 
Durham, US</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://vision.cs.duke.edu/DukeMTMC/' target='_blank' rel='nofollow noopener'>duke.edu</a></div> + </div><div class='meta'><div><div class='gray'>Created</div><div>2014</div></div><div><div class='gray'>Identities</div><div>Over 2,700</div></div><div><div class='gray'>Used for</div><div>Face recognition, person re-identification</div></div><div><div class='gray'>Created by</div><div>Computer Science Department, Duke University, Durham, US</div></div><div><div class='gray'>Website</div><div><a href="http://vision.cs.duke.edu/DukeMTMC/">duke.edu</a></div></div></div></div><h2>Duke Multi-Target, Multi-Camera Tracking Dataset (Duke MTMC)</h2> +<p>[ PAGE UNDER DEVELOPMENT ]</p> +<p>Duke MTMC is a dataset of video recorded on Duke University campus during for the purpose of training, evaluating, and improving <em>multi-target multi-camera tracking</em>. The videos were recorded during February and March 2014 and cinclude</p> +<p>Includes a total of 888.8 minutes of video (ind. verified)</p> +<p>"We make available a new data set that has more than 2 million frames and more than 2,700 identities. It consists of 8×85 minutes of 1080p video recorded at 60 frames per second from 8 static cameras deployed on the Duke University campus during periods between lectures, when pedestrian traffic is heavy."</p> +<p>The dataset includes approximately 2,000 annotated identities appearing in 85 hours of video from 8 cameras located throughout Duke University's campus.</p> +</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/duke_mtmc/assets/duke_mtmc_cam5_average_comp.jpg' alt=' Duke MTMC pixel-averaged image of camera #5 is shown with the bounding boxes for each student drawn in white. 
(c) Adam Harvey'><div class='caption'> Duke MTMC pixel-averaged image of camera #5 is shown with the bounding boxes for each student drawn in white. (c) Adam Harvey</div></div></section><section><p>According to the dataset authors,</p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -43,71 +66,66 @@ </div> --> <p> - To understand how Duke MTMC Dataset has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how Duke MTMC Dataset has been used around the world for commercial, military and academic research; publicly available research citing Duke Multi-Target, Multi-Camera Tracking Project is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of Duke MTMC Dataset. 
+ <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] Duke MTMC Dataset ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> +</section> + --><section> <h3>Who used Duke MTMC Dataset?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. </p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> -</section><section> - <p> - These pie charts show overall totals based on country and institution type. 
- </p> - - </section> - -<section class="applet_container"> +</section><section class="applet_container"> <div class="applet" data-payload="{"command": "piechart"}"></div> </section><section> - <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> </section><section><h2>Research Notes</h2> <ul> +<li>"We make available a new data set that has more than 2 million frames and more than 2,700 identities. It consists of 8×85 minutes of 1080p video recorded at 60 frames per second from 8 static cameras deployed on the Duke University campus during periods between lectures, when pedestrian traffic is heavy." 
- 27a2fad58dd8727e280f97036e0d2bc55ef5424c</li> +<li>"This work was supported in part by the EPSRC Programme Grant (FACER2VM) EP/N007743/1, EPSRC/dstl/MURI project EP/R018456/1, the National Natural Science Foundation of China (61373055, 61672265, 61602390, 61532009, 61571313), Chinese Ministry of Education (Z2015101), Science and Technology Department of Sichuan Province (2017RZ0009 and 2017FZ0029), Education Department of Sichuan Province (15ZB0130), the Open Research Fund from Province Key Laboratory of Xihua University (szjj2015-056) and the NVIDIA GPU Grant Program." - ec9c20ed6cce15e9b63ac96bb5a6d55e69661e0b</li> <li>"DukeMTMC aims to accelerate advances in multi-target multi-camera tracking. It provides a tracking system that works within and across cameras, a new large scale HD video data set recorded by 8 synchronized cameras with more than 7,000 single camera trajectories and over 2,000 unique identities, and a new performance evaluation method that measures how often a system is correct about who is where"</li> <li><p>DukeMTMC is a new, manually annotated, calibrated, multi-camera data set recorded outdoors on the Duke University campus with 8 synchronized cameras. 
It consists of:</p> <p>8 static cameras x 85 minutes of 1080p 60 fps video diff --git a/site/public/datasets/facebook/index.html b/site/public/datasets/facebook/index.html index 7fb1901a..b2943e1f 100644 --- a/site/public/datasets/facebook/index.html +++ b/site/public/datasets/facebook/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/feret/index.html b/site/public/datasets/feret/index.html index ce60f3de..45510f64 100644 --- a/site/public/datasets/feret/index.html +++ b/site/public/datasets/feret/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>LFW</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/hrt_transgender/index.html b/site/public/datasets/hrt_transgender/index.html index e38e134b..80a4f40b 100644 --- a/site/public/datasets/hrt_transgender/index.html +++ 
b/site/public/datasets/hrt_transgender/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>HRT Transgender</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,31 +27,40 @@ <div class="content content-dataset"> <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/hrt_transgender/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'>TBD</span></div><div class='hero_subdesc'><span class='bgpad'>TBD -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div></div></div><h2>HRT Transgender Dataset</h2> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2013</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>10,564 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>38 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>gender transition and facial recognition</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://www.faceaginggroup.com/hrt-transgender/' target='_blank' rel='nofollow noopener'>faceaginggroup.com</a></div> + </div><div class='meta'><div><div class='gray'>Published</div><div>TBD</div></div><div><div 
class='gray'>Images</div><div>TBD</div></div></div></div><h2>HRT Transgender Dataset</h2> </section><section> <h3>Who used HRT Transgender?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. </p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> -</section><section> - <p> - These pie charts show overall totals based on country and institution type. - </p> - - </section> - -<section class="applet_container"> +</section><section class="applet_container"> <div class="applet" data-payload="{"command": "piechart"}"></div> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -63,45 +72,44 @@ </div> --> <p> - To understand how HRT Transgender has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how HRT Transgender has been used around the world for commercial, military and academic research; publicly available research citing HRT Transgender Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. 
</p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of HRT Transgender. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] HRT Transgender ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. 
The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/index.html b/site/public/datasets/index.html index f618e86b..f4776f6a 100644 --- a/site/public/datasets/index.html +++ b/site/public/datasets/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -28,7 +28,7 @@ <section><h1>Facial Recognition Datasets</h1> -<h3>Survey</h3> +<p>Explore publicly available facial recognition datasets. 
More datasets will be added throughout 2019.</p> </section> <section class='applet_container autosize'><div class='applet' data-payload='{"command":"dataset_list"}'></div></section> @@ -42,8 +42,8 @@ <span class='title'>Brainwash</span> <div class='fields'> <div class='year visible'><span>2015</span></div> - <div class='purpose'><span>Decoding image into set of people detections.</span></div> - <div class='images'><span>11,918 images</span></div> + <div class='purpose'><span>Head detection</span></div> + <div class='images'><span>11,917 images</span></div> <div class='identities'><span></span></div> </div> </div> @@ -85,6 +85,18 @@ </div> </a> + <a href="/datasets/msceleb/" style="background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/msceleb/assets/index.jpg)"> + <div class="dataset"> + <span class='title'>MS Celeb</span> + <div class='fields'> + <div class='year visible'><span>2016</span></div> + <div class='purpose'><span>Large-scale face recognition</span></div> + <div class='images'><span>1,000,000 images</span></div> + <div class='identities'><span>100,000 </span></div> + </div> + </div> + </a> + <a href="/datasets/pipa/" style="background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/pipa/assets/index.jpg)"> <div class="dataset"> <span class='title'>People in Photo Albums</span> @@ -102,9 +114,9 @@ <span class='title'>Unconstrained College Students</span> <div class='fields'> <div class='year visible'><span>2018</span></div> - <div class='purpose'><span>Unconstrained face recognition</span></div> + <div class='purpose'><span>Face recognition, face detection</span></div> <div class='images'><span>16,149 images</span></div> - <div class='identities'><span>4,362 </span></div> + <div class='identities'><span>1,732 </span></div> </div> </div> </a> @@ -114,7 +126,7 @@ <span class='title'>VIPeR</span> <div class='fields'> <div class='year visible'><span>2007</span></div> - <div class='purpose'><span>pedestrian 
re-identification</span></div> + <div class='purpose'><span>Person re-identification</span></div> <div class='images'><span>1,264 images</span></div> <div class='identities'><span>632 </span></div> </div> diff --git a/site/public/datasets/lfpw/index.html b/site/public/datasets/lfpw/index.html index 087d8b1d..77189ce7 100644 --- a/site/public/datasets/lfpw/index.html +++ b/site/public/datasets/lfpw/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>LFWP</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/lfw/index.html b/site/public/datasets/lfw/index.html index b4923877..d451d0cd 100644 --- a/site/public/datasets/lfw/index.html +++ b/site/public/datasets/lfw/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>LFW</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,7 +27,22 @@ <div class="content content-"> <section class='intro_section' 
style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/lfw/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name">Labeled Faces in The Wild (LFW)</span> is the first facial recognition dataset created entirely from online photos</span></div><div class='hero_subdesc'><span class='bgpad'>It includes 13,456 images of 4,432 people's images copied from the Internet during 2002-2004 and is the most frequently used dataset in the world for benchmarking face recognition algorithms. -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Created</div><div>2002 – 2004</div></div><div><div class='gray'>Images</div><div>13,233</div></div><div><div class='gray'>Identities</div><div>5,749</div></div><div><div class='gray'>Origin</div><div>Yahoo! News Images</div></div><div><div class='gray'>Used by</div><div>Facebook, Google, Microsoft, Baidu, Tencent, SenseTime, Face++, CIA, NSA, IARPA</div></div><div><div class='gray'>Website</div><div><a href="http://vis-www.cs.umass.edu/lfw">umass.edu</a></div></div></div><ul> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2007</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>13,233 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>5,749 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>face recognition</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://vis-www.cs.umass.edu/lfw/' target='_blank' rel='nofollow noopener'>umass.edu</a></div> + </div><div class='meta'><div><div class='gray'>Created</div><div>2002 – 2004</div></div><div><div class='gray'>Images</div><div>13,233</div></div><div><div class='gray'>Identities</div><div>5,749</div></div><div><div class='gray'>Origin</div><div>Yahoo! 
News Images</div></div><div><div class='gray'>Used by</div><div>Facebook, Google, Microsoft, Baidu, Tencent, SenseTime, Face++, CIA, NSA, IARPA</div></div><div><div class='gray'>Website</div><div><a href="http://vis-www.cs.umass.edu/lfw">umass.edu</a></div></div></div><ul> <li>There are about 3 men for every 1 woman in the LFW dataset<a class="footnote_shim" name="[^lfw_www]_1"> </a><a href="#[^lfw_www]" class="footnote" title="Footnote 1">1</a></li> <li>The person with the most images is <a href="http://vis-www.cs.umass.edu/lfw/person/George_W_Bush_comp.html">George W. Bush</a> with 530</li> <li>There are about 3 George W. Bush's for every 1 <a href="http://vis-www.cs.umass.edu/lfw/person/Tony_Blair.html">Tony Blair</a></li> @@ -46,7 +61,7 @@ <p>The <em>Names and Faces</em> dataset was the first face recognition dataset created entire from online photos. However, <em>Names and Faces</em> and <em>LFW</em> are not the first face recognition dataset created entirely "in the wild". That title belongs to the <a href="/datasets/ucd_faces/">UCD dataset</a>. Images obtained "in the wild" means using an image without explicit consent or awareness from the subject or photographer.</p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -58,66 +73,59 @@ </div> --> <p> - To understand how LFW has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how LFW has been used around the world for commercial, military and academic research; publicly available research citing Labeled Faces in the Wild is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. 
</p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of LFW. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] LFW ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> +</section> + --><section> <h3>Who used LFW?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. 
</p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> -</section><section> - <p> - These pie charts show overall totals based on country and institution type. - </p> - - </section> - -<section class="applet_container"> +</section><section class="applet_container"> <div class="applet" data-payload="{"command": "piechart"}"></div> </section><section> - <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. 
</p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/market_1501/index.html b/site/public/datasets/market_1501/index.html index a80c23fa..1ffd7e6c 100644 --- a/site/public/datasets/market_1501/index.html +++ b/site/public/datasets/market_1501/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>Market 1501</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,11 +27,26 @@ <div class="content content-dataset"> <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/market_1501/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name">Market-1501</span> is a dataset is collection of CCTV footage from ...</span></div><div class='hero_subdesc'><span class='bgpad'>The Market-1501 dataset includes ... 
-</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>Market-1501 ...</h2> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2015</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>32,668 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>1,501 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Person re-identification</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://www.liangzheng.org/Project/project_reid.html' target='_blank' rel='nofollow noopener'>liangzheng.org</a></div> + </div><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>Market-1501 ...</h2> <p>(PAGE UNDER DEVELOPMENT)</p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -43,45 +58,44 @@ </div> --> <p> - To understand how Market 1501 has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how Market 1501 has been used around the world for commercial, military and academic research; publicly available research citing Market 1501 Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. 
Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of Market 1501. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] Market 1501 ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. 
</p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. 
</p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/mars/index.html b/site/public/datasets/mars/index.html deleted file mode 100644 index b053b456..00000000 --- a/site/public/datasets/mars/index.html +++ /dev/null @@ -1,110 +0,0 @@ -<!doctype html> -<html> -<head> - <title>MegaPixels</title> - <meta charset="utf-8" /> - <meta name="author" content="Adam Harvey" /> - <meta name="description" content="Motion Analysis and Re-identification Set (MARS) is a dataset is collection of CCTV footage " /> - <meta name="referrer" content="no-referrer" /> - <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> - <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> - <link rel='stylesheet' href='/assets/css/css.css' /> - <link rel='stylesheet' href='/assets/css/leaflet.css' /> - <link rel='stylesheet' href='/assets/css/applets.css' /> -</head> -<body> - <header> - <a class='slogan' href="/"> - <div class='logo'></div> - <div class='site_name'>MegaPixels</div> - </a> - <div class='links'> - <a href="/datasets/">Datasets</a> - <a href="/about/">About</a> - </div> - </header> - <div class="content content-dataset"> - - <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/mars/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span style="color:#99ccee">Motion Analysis and Re-identification Set (MARS)</span> is a dataset is collection of CCTV footage </span></div><div class='hero_subdesc'><span class='bgpad'>The MARS dataset includes 1,191,003 of people used for training person re-identification algorithms -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div 
class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>Motion Analysis and Re-identification Set (MARS)</h2> -<p>(PAGE UNDER DEVELOPMENT)</p> -<p>At vero eos et accusamus et iusto odio dignissimos ducimus, qui blanditiis praesentium voluptatum deleniti atque corrupti, quos dolores et quas molestias excepturi sint, obcaecati cupiditate non-provident, similique sunt in culpa, qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio.</p> -<p>Nam libero tempore, cum soluta nobis est eligendi optio, cumque nihil impedit, quo minus id, quod maxime placeat, facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet, ut et voluptates repudiandae sint et molestiae non-recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat</p> -</section><section> - - <h3>Biometric Trade Routes (beta)</h3> -<!-- - <div class="map-sidebar right-sidebar"> - <h3>Legend</h3> - <ul> - <li><span style="color: #f2f293">■</span> Industry</li> - <li><span style="color: #f30000">■</span> Academic</li> - <li><span style="color: #3264f6">■</span> Government</li> - </ul> - </div> - --> - <p> - To understand how MARS has been used around the world... 
- affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast - </p> - - </section> - -<section class="applet_container"> - <div class="applet" data-payload="{"command": "map"}"></div> -</section> - -<div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and not yet manually verified. -</div> - -<section> - <p class='subp'> - The data is generated by collecting all citations for all original research papers associated with the dataset. Then the PDFs are then converted to text and the organization names are extracted and geocoded. Because of the automated approach to extracting data, actual use of the dataset can not yet be confirmed. This visualization is provided to help locate and confirm usage and will be updated as data noise is reduced. - </p> -</section><section> - - - <div class="hr-wave-holder"> - <div class="hr-wave-line hr-wave-line1"></div> - <div class="hr-wave-line hr-wave-line2"></div> - </div> - - <h2>Supplementary Information</h2> -</section><section class="applet_container"> - - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. Metadata was extracted from these papers, including extracting names of institutions automatically from PDFs, and then the addresses were geocoded. Data is not yet manually verified, and reflects anytime the paper was cited. Some papers may only mention the dataset in passing, while others use it as part of their research methodology. 
- </p> - <p> - Add button/link to download CSV - </p> - - <div class="applet" data-payload="{"command": "citations"}"></div> -</section> - - </div> - <footer> - <div> - <a href="/">MegaPixels.cc</a> - <a href="/about/disclaimer/">Disclaimer</a> - <a href="/about/terms/">Terms of Use</a> - <a href="/about/privacy/">Privacy</a> - <a href="/about/">About</a> - <a href="/about/team/">Team</a> - </div> - <div> - MegaPixels ©2017-19 Adam R. Harvey / - <a href="https://ahprojects.com">ahprojects.com</a> - </div> - </footer> -</body> - -<script src="/assets/js/dist/index.js"></script> -</html>
\ No newline at end of file diff --git a/site/public/datasets/msceleb/index.html b/site/public/datasets/msceleb/index.html new file mode 100644 index 00000000..fb08c737 --- /dev/null +++ b/site/public/datasets/msceleb/index.html @@ -0,0 +1,153 @@ +<!doctype html> +<html> +<head> + <title>MegaPixels</title> + <meta charset="utf-8" /> + <meta name="author" content="Adam Harvey" /> + <meta name="description" content="MS Celeb is a dataset of web images used for training and evaluating face recognition algorithms" /> + <meta name="referrer" content="no-referrer" /> + <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> + <link rel='stylesheet' href='/assets/css/fonts.css' /> + <link rel='stylesheet' href='/assets/css/css.css' /> + <link rel='stylesheet' href='/assets/css/leaflet.css' /> + <link rel='stylesheet' href='/assets/css/applets.css' /> +</head> +<body> + <header> + <a class='slogan' href="/"> + <div class='logo'></div> + <div class='site_name'>MegaPixels</div> + <div class='splash'>MsCeleb</div> + </a> + <div class='links'> + <a href="/datasets/">Datasets</a> + <a href="/about/">About</a> + </div> + </header> + <div class="content content-dataset"> + + <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/msceleb/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'>MS Celeb is a dataset of web images used for training and evaluating face recognition algorithms</span></div><div class='hero_subdesc'><span class='bgpad'>The MS Celeb dataset includes over 10,000,000 images and 93,000 identities of semi-public figures collected using the Bing search engine +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2016</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>1,000,000 </div> + </div><div class='meta'> + <div 
class='gray'>Identities</div> + <div>100,000 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Large-scale face recognition</div> + </div><div class='meta'> + <div class='gray'>Created by</div> + <div>Microsoft Research</div> + </div><div class='meta'> + <div class='gray'>Funded by</div> + <div>Microsoft Research</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://www.msceleb.org/' target='_blank' rel='nofollow noopener'>msceleb.org</a></div> + </div><div class='meta'><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div><div><div class='gray'>Created by</div><div>TBD</div></div></div></div><h2>Microsoft Celeb Dataset (MS Celeb)</h2> +<p>(PAGE UNDER DEVELOPMENT)</p> +<p>At vero eos et accusamus et iusto odio dignissimos ducimus, qui blanditiis praesentium voluptatum deleniti atque corrupti, quos dolores et quas molestias excepturi sint, obcaecati cupiditate non-provident, similique sunt in culpa, qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio.</p> +<p>Nam libero tempore, cum soluta nobis est eligendi optio, cumque nihil impedit, quo minus id, quod maxime placeat, facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet, ut et voluptates repudiandae sint et molestiae non-recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat</p> +</section><section> + <h3>Who used MsCeleb?</h3> + + <p> + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. 
+ </p> + + </section> + +<section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> + <div class="applet" data-payload="{"command": "chart"}"></div> +</section><section class="applet_container"> + <div class="applet" data-payload="{"command": "piechart"}"></div> +</section><section> + + <h3>Biometric Trade Routes</h3> +<!-- + <div class="map-sidebar right-sidebar"> + <h3>Legend</h3> + <ul> + <li><span style="color: #f2f293">■</span> Industry</li> + <li><span style="color: #f30000">■</span> Academic</li> + <li><span style="color: #3264f6">■</span> Government</li> + </ul> + </div> + --> + <p> + To help understand how MsCeleb has been used around the world for commercial, military and academic research; publicly available research citing Microsoft Celebrity Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. + </p> + + </section> + +<section class="applet_container fullwidth"> + <div class="applet" data-payload="{"command": "map"}"></div> + +</section> + +<div class="caption"> + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> +</div> + +<!-- <section> + <p class='subp'> + [section under development] MsCeleb ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. 
+ </p> +</section> + --><section><p>Add more analysis here</p> +</section><section> + + <div class="hr-wave-holder"> + <div class="hr-wave-line hr-wave-line1"></div> + <div class="hr-wave-line hr-wave-line2"></div> + </div> + + <h3>Supplementary Information</h3> + +</section><section class="applet_container"> + + <h3>Dataset Citations</h3> + <p> + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. + </p> + + <div class="applet" data-payload="{"command": "citations"}"></div> +</section><section><h3>Additional Information</h3> +<ul> +<li>The dataset author spoke about his research at the CVPR conference in 2016 <a href="https://www.youtube.com/watch?v=Nl2fBKxwusQ">https://www.youtube.com/watch?v=Nl2fBKxwusQ</a></li> +</ul> +</section><section><ul class="footnotes"><li><a name="[^readme]" class="footnote_shim"></a><span class="backlinks"></span><p>"readme.txt" <a href="https://exhibits.stanford.edu/data/catalog/sx925dc9385">https://exhibits.stanford.edu/data/catalog/sx925dc9385</a>.</p> +</li><li><a name="[^localized_region_context]" class="footnote_shim"></a><span class="backlinks"></span><p>Li, Y. and Dou, Y. and Liu, X. and Li, T. Localized Region Context and Object Feature Fusion for People Head Detection. ICIP16 Proceedings. 2016. Pages 594-598.</p> +</li><li><a name="[^replacement_algorithm]" class="footnote_shim"></a><span class="backlinks"></span><p>Zhao. X, Wang Y, Dou, Y. 
A Replacement Algorithm of Non-Maximum Suppression Base on Graph Clustering.</p> +</li></ul></section> + + </div> + <footer> + <div> + <a href="/">MegaPixels.cc</a> + <a href="/about/disclaimer/">Disclaimer</a> + <a href="/about/terms/">Terms of Use</a> + <a href="/about/privacy/">Privacy</a> + <a href="/about/">About</a> + <a href="/about/team/">Team</a> + </div> + <div> + MegaPixels ©2017-19 Adam R. Harvey / + <a href="https://ahprojects.com">ahprojects.com</a> + </div> + </footer> +</body> + +<script src="/assets/js/dist/index.js"></script> +</html>
\ No newline at end of file diff --git a/site/public/datasets/pipa/index.html b/site/public/datasets/pipa/index.html index 62754070..27168c5c 100644 --- a/site/public/datasets/pipa/index.html +++ b/site/public/datasets/pipa/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>PIPA</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,11 +27,29 @@ <div class="content content-dataset"> <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/pipa/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name"> is a dataset...</span></div><div class='hero_subdesc'><span class='bgpad'>PIPA subdescription -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>Dataset Title TBD</h2> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2015</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>37,107 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>2,356 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Face 
recognition</div> + </div><div class='meta'> + <div class='gray'>Download Size</div> + <div>12 GB</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='https://people.eecs.berkeley.edu/~nzhang/piper.html' target='_blank' rel='nofollow noopener'>berkeley.edu</a></div> + </div><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>Dataset Title TBD</h2> <p>(PAGE UNDER DEVELOPMENT)</p> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -43,45 +61,44 @@ </div> --> <p> - To understand how PIPA Dataset has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how PIPA Dataset has been used around the world for commercial, military and academic research; publicly available research citing People in Photo Albums Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of PIPA Dataset. 
+ <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] PIPA Dataset ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. 
Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/uccs/index.html b/site/public/datasets/uccs/index.html index 08000c6e..593ac498 100644 --- a/site/public/datasets/uccs/index.html +++ b/site/public/datasets/uccs/index.html @@ -4,11 +4,10 @@ <title>MegaPixels</title> <meta charset="utf-8" /> <meta name="author" content="Adam Harvey" /> - <meta name="description" content="Unconstrained College Students (UCCS) is a dataset of images ..." /> + <meta name="description" content="Unconstrained College Students (UCCS) is a dataset of long-range surveillance photos of students taken without their knowledge" /> <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>UCCS</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -26,12 +26,215 @@ </header> <div class="content content-dataset"> - <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/uccs/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name">Unconstrained College Students (UCCS)</span> is a dataset of images ...</span></div><div class='hero_subdesc'><span class='bgpad'>The UCCS 
dataset includes ... -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Collected</div><div>TBD</div></div><div><div class='gray'>Published</div><div>TBD</div></div><div><div class='gray'>Images</div><div>TBD</div></div><div><div class='gray'>Faces</div><div>TBD</div></div></div></div><h2>Unconstrained College Students ...</h2> + <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/uccs/assets/background.jpg)'><div class='inner'><div class='hero_desc'><span class='bgpad'><span class="dataset-name">Unconstrained College Students (UCCS)</span> is a dataset of long-range surveillance photos of students taken without their knowledge</span></div><div class='hero_subdesc'><span class='bgpad'>The UCCS dataset includes 16,149 images and 1,732 identities of students at University of Colorado Colorado Springs campus and is used for face recognition and face detection +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2018</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>16,149 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>1,732 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Face recognition, face detection</div> + </div><div class='meta'> + <div class='gray'>Created by</div> + <div>University of Colorado Colorado Springs (US)</div> + </div><div class='meta'> + <div class='gray'>Funded by</div> + <div>ODNI, IARPA, ONR MURI, Amry SBIR, SOCOM SBIR</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='http://vast.uccs.edu/Opensetface/' target='_blank' rel='nofollow noopener'>uccs.edu</a></div> + </div><div class='meta'><div><div class='gray'>Published</div><div>2018</div></div><div><div class='gray'>Images</div><div>16,149</div></div><div><div 
class='gray'>Identities</div><div>1,732</div></div><div><div class='gray'>Used for</div><div>Face recognition, face detection</div></div><div><div class='gray'>Created by</div><div>Unviversity of Colorado Colorado Springs (US)</div></div><div><div class='gray'>Funded by</div><div>ODNI, IARPA, ONR MURI, Amry SBIR, SOCOM SBIR</div></div><div><div class='gray'>Website</div><div><a href="https://vast.uccs.edu/Opensetface/">vast.uccs.edu</a></div></div></div></div><h2>Unconstrained College Students ...</h2> <p>(PAGE UNDER DEVELOPMENT)</p> -</section><section> +<p>Unconstrained College Students (UCCS) is a dataset of long-range surveillance photos captured at University of Colorado Colorado Springs. According to the authors of two papers associated with the dataset, subjects were "photographed using a long-range high-resolution surveillance camera without their knowledge" [^funding_sb]. The images were captured using a Canon 7D digital camera fitted with a Sigma 800mm telephoto lens pointed out the window of an office.</p> +<p>The UCCS dataset was funded by ODNI (Office of Director of National Intelligence), IARPA (Intelligence Advance Research Projects Activity), ONR MURI Office of Naval Research and The Department of Defense Multidisciplinary University Research Initiative, Army SBIR (Small Business Innovation Research), SOCOM SBIR (Special Operations Command and Small Business Innovation Research), and the National Science Foundation.</p> +<p>The images in UCCS include students walking between classes on campus over 19 days in 2012 - 2013. 
The dates include:</p> +<table> +<thead><tr> +<th>Year</th> +<th>Month</th> +<th>Day</th> +<th>Date</th> +<th>Time Range</th> +<th>Photos</th> +</tr> +</thead> +<tbody> +<tr> +<td>2012</td> +<td>Februay</td> +<td>---</td> +<td>23</td> +<td>-</td> +<td>132</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>6</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>8</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>13</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>Februay</td> +<td>---</td> +<td>23</td> +<td>-</td> +<td>132</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>6</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>8</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>13</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>Februay</td> +<td>---</td> +<td>23</td> +<td>-</td> +<td>132</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>6</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>8</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>13</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>Februay</td> +<td>---</td> +<td>23</td> +<td>-</td> +<td>132</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>6</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>8</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>13</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>Februay</td> +<td>---</td> +<td>23</td> +<td>-</td> +<td>132</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>6</td> +<td>-</td> +<td>-</td> +</tr> +<tr> +<td>2012</td> +<td>March</td> +<td>---</td> +<td>8</td> +<td>-</td> +<td>-</td> +</tr> +</tbody> 
+</table> +<p>2012-03-20 +2012-03-22 +2012-04-03 +2012-04-12 +2012-04-17 +2012-04-24 +2012-04-25 +2012-04-26 +2013-01-28 +2013-01-29 +2013-02-13 +2013-02-19 +2013-02-20 +2013-02-26</p> +</section><section class='images'><div class='image'><img src='https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/uccs/assets/uccs_mean_bboxes_comp.jpg' alt=' The pixel-average of all Uconstrained College Students images is shown with all 51,838 face annotations. (c) Adam Harvey'><div class='caption'> The pixel-average of all Uconstrained College Students images is shown with all 51,838 face annotations. (c) Adam Harvey</div></div></section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -43,71 +246,63 @@ </div> --> <p> - To understand how UCCS has been used around the world... - affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how UCCS has been used around the world for commercial, military and academic research; publicly available research citing UnConstrained College Students Dataset is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of UCCS. 
+ <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] UCCS ... Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> +</section> + --><section> <h3>Who used UCCS?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. 
</p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> -</section><section> +</section><section class="applet_container"> + <div class="applet" data-payload="{"command": "piechart"}"></div> +</section><section class="applet_container"> + + <h3>Dataset Citations</h3> <p> - These pie charts show overall totals based on country and institution type. + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. </p> - - </section> -<section class="applet_container"> - <div class="applet" data-payload="{"command": "piechart"}"></div> + <div class="applet" data-payload="{"command": "citations"}"></div> </section><section> - <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> -</section><section class="applet_container"> - - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. 
- </p> - <p> - Add button/link to download CSV - </p> - - <div class="applet" data-payload="{"command": "citations"}"></div> -</section><section><h3>Research Notes</h3> -<p>The original Sapkota and Boult dataset, from which UCCS is derived, received funding from<sup class="footnote-ref" id="fnref-funding_sb"><a href="#fn-funding_sb">1</a></sup>:</p> + <h3>Supplementary Information</h3> + +</section><section><p>The original Sapkota and Boult dataset, from which UCCS is derived, received funding from<sup class="footnote-ref" id="fnref-funding_sb"><a href="#fn-funding_sb">1</a></sup>:</p> <ul> <li>ONR (Office of Naval Research) MURI (The Department of Defense Multidisciplinary University Research Initiative) grant N00014-08-1-0638</li> <li>Army SBIR (Small Business Innovation Research) grant W15P7T-12-C-A210</li> @@ -119,6 +314,14 @@ <li>ODNI (Office of Director of National Intelligence)</li> <li>IARPA (Intelligence Advance Research Projects Activity) R&D contract 2014-14071600012</li> </ul> +<h3>TODO</h3> +<ul> +<li>add tabulator module for dates</li> +<li>parse dates into CSV using Python</li> +<li>get google image showing line of sight?</li> +<li>fix up quote/citations</li> +</ul> +<h3>footnotes</h3> <div class="footnotes"> <hr> <ol><li id="fn-funding_sb"><p>Sapkota, Archana and Boult, Terrance. "Large Scale Unconstrained Open Set Face Database." 
2013.<a href="#fnref-funding_sb" class="footnote">↩</a></p></li> diff --git a/site/public/datasets/vgg_face2/index.html b/site/public/datasets/vgg_face2/index.html index 42e3b961..d5c1d98c 100644 --- a/site/public/datasets/vgg_face2/index.html +++ b/site/public/datasets/vgg_face2/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/datasets/viper/index.html b/site/public/datasets/viper/index.html index 5acd0845..6d27b15b 100644 --- a/site/public/datasets/viper/index.html +++ b/site/public/datasets/viper/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>VIPeR</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -27,7 +27,25 @@ <div class="content content-dataset"> <section class='intro_section' style='background-image: url(https://nyc3.digitaloceanspaces.com/megapixels/v1/datasets/viper/assets/background.jpg)'><div class='inner'><div 
class='hero_desc'><span class='bgpad'><span class="dataset-name">VIPeR</span> is a person re-identification dataset of images captured at UC Santa Cruz in 2007</span></div><div class='hero_subdesc'><span class='bgpad'>VIPeR contains 1,264 images and 632 persons on the UC Santa Cruz campus and is used to train person re-identification algorithms for surveillance -</span></div></div></section><section><div class='left-sidebar'><div class='meta'><div><div class='gray'>Published</div><div>2007</div></div><div><div class='gray'>Images</div><div>1,264</div></div><div><div class='gray'>Persons</div><div>632</div></div><div><div class='gray'>Created by</div><div>UC Santa Cruz</div></div></div></div><h2>VIPeR Dataset</h2> +</span></div></div></section><section><div class='left-sidebar'><div class='meta'> + <div class='gray'>Published</div> + <div>2007</div> + </div><div class='meta'> + <div class='gray'>Images</div> + <div>1,264 </div> + </div><div class='meta'> + <div class='gray'>Identities</div> + <div>632 </div> + </div><div class='meta'> + <div class='gray'>Purpose</div> + <div>Person re-identification</div> + </div><div class='meta'> + <div class='gray'>Created by</div> + <div>University of California Santa Cruz</div> + </div><div class='meta'> + <div class='gray'>Website</div> + <div><a href='https://vision.soe.ucsc.edu/node/178' target='_blank' rel='nofollow noopener'>ucsc.edu</a></div> + </div><div class='meta'><div><div class='gray'>Published</div><div>2007</div></div><div><div class='gray'>Images</div><div>1,264</div></div><div><div class='gray'>Persons</div><div>632</div></div><div><div class='gray'>Created by</div><div>UC Santa Cruz</div></div></div></div><h2>VIPeR Dataset</h2> <p>(PAGE UNDER DEVELOPMENT)</p> <p><em>VIPeR (Viewpoint Invariant Pedestrian Recognition)</em> is a dataset of pedestrian images captured at University of California Santa Cruz in 2007. 
Accoriding to the reserachers 2 "cameras were placed in different locations in an academic setting and subjects were notified of the presence of cameras, but were not coached or instructed in any way."</p> <p>VIPeR is amongst the most widely used publicly available person re-identification datasets. In 2017 the VIPeR dataset was combined into a larger person re-identification created by the Chinese University of Hong Kong called PETA (PEdesTrian Attribute).</p> @@ -35,26 +53,20 @@ <h3>Who used VIPeR?</h3> <p> - This bar chart presents a ranking of the top countries where citations originated. Mouse over individual columns - to see yearly totals. These charts show at most the top 10 countries. + This bar chart presents a ranking of the top countries where dataset citations originated. Mouse over individual columns to see yearly totals. These charts show at most the top 10 countries. </p> </section> <section class="applet_container"> +<!-- <div style="position: absolute;top: 0px;right: -55px;width: 180px;font-size: 14px;">Labeled Faces in the Wild Dataset<br><span class="numc" style="font-size: 11px;">20 citations</span> +</div> --> <div class="applet" data-payload="{"command": "chart"}"></div> -</section><section> - <p> - These pie charts show overall totals based on country and institution type. - </p> - - </section> - -<section class="applet_container"> +</section><section class="applet_container"> <div class="applet" data-payload="{"command": "piechart"}"></div> </section><section> - <h3>Information Supply Chain</h3> + <h3>Biometric Trade Routes</h3> <!-- <div class="map-sidebar right-sidebar"> <h3>Legend</h3> @@ -66,45 +78,44 @@ </div> --> <p> - To understand how VIPeR has been used around the world... 
- affected global research on computer vision, surveillance, defense, and consumer technology, the and where this dataset has been used the locations of each organization that used or referenced the datast + To help understand how VIPeR has been used around the world for commercial, military and academic research; publicly available research citing Viewpoint Invariant Pedestrian Recognition is collected, verified, and geocoded to show the biometric trade routes of people appearing in the images. Click on the markers to reveal reserach projects at that location. </p> </section> -<section class="applet_container"> +<section class="applet_container fullwidth"> <div class="applet" data-payload="{"command": "map"}"></div> + </section> <div class="caption"> - <div class="map-legend-item edu">Academic</div> - <div class="map-legend-item com">Industry</div> - <div class="map-legend-item gov">Government</div> - Data is compiled from <a href="https://www.semanticscholar.org">Semantic Scholar</a> and has been manually verified to show usage of VIPeR. + <ul class="map-legend"> + <li class="edu">Academic</li> + <li class="com">Commercial</li> + <li class="gov">Military / Government</li> + <li class="source">Citation data is collected using <a href="https://semanticscholar.org" target="_blank">SemanticScholar.org</a> then dataset usage verified and geolocated.</li> + </ul> </div> -<section> +<!-- <section> <p class='subp'> - Standardized paragraph of text about the map. Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. + [section under development] VIPeR ... Standardized paragraph of text about the map. 
Sed ut perspiciatis, unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam eaque ipsa, quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt, explicabo. </p> -</section><section> - +</section> + --><section> <div class="hr-wave-holder"> <div class="hr-wave-line hr-wave-line1"></div> <div class="hr-wave-line hr-wave-line2"></div> </div> - <h2>Supplementary Information</h2> + <h3>Supplementary Information</h3> + </section><section class="applet_container"> - <h3>Citations</h3> - <p> - Citations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates - and indexes research papers. The citations were geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train and/or test machine learning algorithms. - </p> + <h3>Dataset Citations</h3> <p> - Add button/link to download CSV + The dataset citations used in the visualizations were collected from <a href="https://www.semanticscholar.org">Semantic Scholar</a>, a website which aggregates and indexes research papers. Each citation was geocoded using names of institutions found in the PDF front matter, or as listed on other resources. These papers have been manually verified to show that researchers downloaded and used the dataset to train or test machine learning algorithms. 
</p> <div class="applet" data-payload="{"command": "citations"}"></div> diff --git a/site/public/datasets/youtube_celebrities/index.html b/site/public/datasets/youtube_celebrities/index.html index dd230926..ee11f3c0 100644 --- a/site/public/datasets/youtube_celebrities/index.html +++ b/site/public/datasets/youtube_celebrities/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + <div class='splash'>YouTube Celebrities</div> </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/info/index.html b/site/public/info/index.html index ef7dc8db..eb78b260 100644 --- a/site/public/info/index.html +++ b/site/public/info/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/research/00_introduction/index.html b/site/public/research/00_introduction/index.html index 5c536dc4..43f95d2a 100644 --- a/site/public/research/00_introduction/index.html +++ 
b/site/public/research/00_introduction/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/research/01_from_1_to_100_pixels/index.html b/site/public/research/01_from_1_to_100_pixels/index.html index c91d17ad..ea296960 100644 --- a/site/public/research/01_from_1_to_100_pixels/index.html +++ b/site/public/research/01_from_1_to_100_pixels/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -80,6 +80,38 @@ <li>"Note that we only keep the images with a minimal side length of 80 pixels." and "a face will be labeled as “Ignore” if it is very difficult to be detected due to blurring, severe deformation and unrecognizable eyes, or the side length of its bounding box is less than 32 pixels." 
Ge_Detecting_Masked_Faces_CVPR_2017_paper.pdf </li> <li>IBM DiF: "Faces with region size less than 50x50 or inter-ocular distance of less than 30 pixels were discarded. Faces with non-frontal pose, or anything beyond being slightly tilted to the left or the right, were also discarded."</li> </ul> +<p>As the resolution +formatted as rectangular databases of 16 bit RGB-tuples or 8 bit grayscale values</p> +<p>To consider how visual privacy applies to real world surveillance situations, the first</p> +<p>A single 8-bit grayscale pixel with 256 values is enough to represent the entire alphabet <code>a-Z0-9</code> with room to spare.</p> +<p>A 2x2 pixels contains</p> +<p>Using no more than a 42 pixel (6x7 image) face image researchers [cite] were able to correctly distinguish between a group of 50 people. Yet</p> +<p>The likely outcome of face recognition research is that more data is needed to improve. Indeed, resolution is the determining factor for all biometric systems, both as training data to increase</p> +<p>Pixels, typically considered the building blocks of images and videos, can also be plotted as a graph of sensor values corresponding to the intensity of RGB-calibrated sensors.</p> +<p>Wi-Fi and cameras present elevated risks for transmitting videos and image documentation from conflict zones, high-risk situations, or even sharing on social media. How can new developments in computer vision also be used in reverse, as a counter-forensic tool, to minimize an individual's privacy risk?</p> +<p>As the global Internet becomes increasingly efficient at turning the Internet into a giant dataset for machine learning, forensics, and data analysis, it would be prudent to also consider tools for decreasing the resolution. The Visual Defense module is just that. What are new ways to minimize the adverse effects of surveillance by dulling the blade. 
For example, a research paper showed that by decreasing a face size to 12x16 it was possible to achieve 98% accuracy with 50 people. This is clearly an example of</p> +<p>This research module, tentatively called Visual Defense Tools, aims to explore the</p> +<h3>Prior Research</h3> +<ul> +<li>MPI visual privacy advisor</li> +<li>NIST: super resolution</li> +<li>YouTube blur tool</li> +<li>WITNESS: blur tool</li> +<li>Pixellated text </li> +<li>CV Dazzle</li> +<li>Bellingcat guide to geolocation</li> +<li>Peng! magic passport</li> +</ul> +<h3>Notes</h3> +<ul> +<li>In China, out of the approximately 200 million surveillance cameras only about 15% have enough resolution for face recognition. </li> +<li>In Apple's FaceID security guide, the probability of someone else's face unlocking your phone is 1 out of 1,000,000. </li> +<li>In England, the Metropolitan Police reported a false-positive match rate of 98% when attempting to use face recognition to locate wanted criminals. </li> +<li>In a face recognition trial at Berlin's Sudkreuz station, the false-match rate was 20%. </li> +</ul> +<p>What all 3 examples illustrate is that face recognition is anything but absolute. In a 2017 talk, Jason Matheny the former director of IARPA, admitted that face recognition is so brittle it can be subverted by using a magic marker and drawing "a few dots on your forehead". In fact face recognition is a misleading term. Face recognition is a search engine for faces that can only ever show you the most likely match. This presents a real threat to privacy and lends</p> +<p>Globally, iPhone users unwittingly agree to 1/1,000,000 probability +relying on FaceID and TouchID to protect their information agree to a</p> <div class="footnotes"> <hr> <ol><li id="fn-nist_sres"><p>NIST 906932. Performance Assessment of Face Recognition Using Super-Resolution. Shuowen Hu, Robert Maschal, S. Susan Young, Tsai Hong Hong, Jonathon P. 
Phillips<a href="#fnref-nist_sres" class="footnote">↩</a></p></li> diff --git a/site/public/research/02_what_computers_can_see/index.html b/site/public/research/02_what_computers_can_see/index.html index 9389bf84..23641328 100644 --- a/site/public/research/02_what_computers_can_see/index.html +++ b/site/public/research/02_what_computers_can_see/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -126,6 +126,7 @@ <li>Wearing Necktie</li> <li>Wearing Necklace</li> </ul> +<p>for i in {1..9};do wget <a href="http://visiond1.cs.umbc.edu/webpage/codedata/ADLdataset/ADL_videos/P_0$i.MP4;done;for">http://visiond1.cs.umbc.edu/webpage/codedata/ADLdataset/ADL_videos/P_0$i.MP4;done;for</a> i in {10..20}; do wget <a href="http://visiond1.cs.umbc.edu/webpage/codedata/ADLdataset/ADL_videos/P_$i.MP4;done">http://visiond1.cs.umbc.edu/webpage/codedata/ADLdataset/ADL_videos/P_$i.MP4;done</a></p> <h2>From Market 1501</h2> <p>The 27 attributes are:</p> <table> @@ -269,6 +270,24 @@ Visibility boolean for each keypoint Region annotations (upper clothes, lower clothes, dress, socks, shoes, hands, gloves, neck, face, hair, hat, sunglasses, bag, occluder) Body type (male, female or child)</p> <p>source: <a href="https://www2.eecs.berkeley.edu/Research/Projects/CS/vision/shape/h3d/">https://www2.eecs.berkeley.edu/Research/Projects/CS/vision/shape/h3d/</a></p> +<h2>From Leeds Sports Pose</h2> 
+<p>=INDEX(A2:A9,MATCH(datasets!D1,B2:B9,0)) +=VLOOKUP(A2, datasets!A:J, 7, FALSE)</p> +<p>Right ankle +Right knee +Right hip +Left hip +Left knee +Left ankle +Right wrist +Right elbow +Right shoulder +Left shoulder +Left elbow +Left wrist +Neck +Head top</p> +<p>source: <a href="http://web.archive.org/web/20170915023005/sam.johnson.io/research/lsp.html">http://web.archive.org/web/20170915023005/sam.johnson.io/research/lsp.html</a></p> </section> </div> diff --git a/site/public/research/index.html b/site/public/research/index.html index 303732f8..d3ed6ef3 100644 --- a/site/public/research/index.html +++ b/site/public/research/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/chart/index.html b/site/public/test/chart/index.html index 93e12b3c..53f41d6a 100644 --- a/site/public/test/chart/index.html +++ b/site/public/test/chart/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> 
+ </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/citations/index.html b/site/public/test/citations/index.html index 70b3fe55..e7140177 100644 --- a/site/public/test/citations/index.html +++ b/site/public/test/citations/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/csv/index.html b/site/public/test/csv/index.html index 3257716f..c47bcd57 100644 --- a/site/public/test/csv/index.html +++ b/site/public/test/csv/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> @@ -28,7 +28,7 @@ <section><h1>CSV Test</h1> <h3><a href="/test/">← Back to test index</a></h3> -</section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file /datasets/lfw/assets/lfw_names_gender_kg_min.csv", "fields": ["Name, Images, Gender, Description"]}'></div></section> 
+</section><section class='applet_container'><div class='applet' data-payload='{"command": "load_file /site/test/assets/test.csv", "fields": ["Name, Images, Year, Gender, Description, URL"]}'></div></section> </div> <footer> diff --git a/site/public/test/datasets/index.html b/site/public/test/datasets/index.html index 15edf039..61e4ef86 100644 --- a/site/public/test/datasets/index.html +++ b/site/public/test/datasets/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/face_search/index.html b/site/public/test/face_search/index.html index 93dc2bc6..cad7ceec 100644 --- a/site/public/test/face_search/index.html +++ b/site/public/test/face_search/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/gallery/index.html b/site/public/test/gallery/index.html index 9e2c54f6..7dbe020b 100644 --- 
a/site/public/test/gallery/index.html +++ b/site/public/test/gallery/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/index.html b/site/public/test/index.html index 0fc839d0..730d10c9 100644 --- a/site/public/test/index.html +++ b/site/public/test/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/map/index.html b/site/public/test/map/index.html index 4f4e7093..19c09314 100644 --- a/site/public/test/map/index.html +++ b/site/public/test/map/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link 
rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/name_search/index.html b/site/public/test/name_search/index.html index 4e3ef428..f14f6f83 100644 --- a/site/public/test/name_search/index.html +++ b/site/public/test/name_search/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/public/test/pie_chart/index.html b/site/public/test/pie_chart/index.html index 7dd159a3..a3167090 100644 --- a/site/public/test/pie_chart/index.html +++ b/site/public/test/pie_chart/index.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + </a> <div class='links'> <a href="/datasets/">Datasets</a> diff --git a/site/templates/layout.html b/site/templates/layout.html 
index b5b7880c..d0630310 100644 --- a/site/templates/layout.html +++ b/site/templates/layout.html @@ -8,7 +8,6 @@ <meta name="referrer" content="no-referrer" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" /> <link rel='stylesheet' href='/assets/css/fonts.css' /> - <link rel='stylesheet' href='/assets/css/tabulator.css' /> <link rel='stylesheet' href='/assets/css/css.css' /> <link rel='stylesheet' href='/assets/css/leaflet.css' /> <link rel='stylesheet' href='/assets/css/applets.css' /> @@ -18,6 +17,7 @@ <a class='slogan' href="/"> <div class='logo'></div> <div class='site_name'>MegaPixels</div> + {% if metadata.meta.dataset %}<div class='splash'>{{ metadata.meta.dataset.name_short }}</div>{% endif %} </a> <div class='links'> <a href="/datasets/">Datasets</a> |
