| author | Jules Laplace <julescarbon@gmail.com> | 2018-11-03 18:41:23 +0100 |
|---|---|---|
| committer | Jules Laplace <julescarbon@gmail.com> | 2018-11-03 18:41:23 +0100 |
| commit | 2278adead1ff16115f8b989dc316bdf9efe9e37d (patch) | |
| tree | 96fae400f35025e2565b9e0e8d7c6a2d020d822b /s2-dump-db-pdf-urls.py | |
| parent | fde14c19ef77f1bbe67f4cac7cadddbd9d3129b3 (diff) | |
s2-dump-db-pdf-urls.py
Diffstat (limited to 's2-dump-db-pdf-urls.py')
| -rw-r--r-- | s2-dump-db-pdf-urls.py | 37 |
1 files changed, 37 insertions, 0 deletions
diff --git a/s2-dump-db-pdf-urls.py b/s2-dump-db-pdf-urls.py
new file mode 100644
index 00000000..520b513e
--- /dev/null
+++ b/s2-dump-db-pdf-urls.py
@@ -0,0 +1,37 @@
+import os
+import glob
+import simplejson as json
+import click
+from util import *
+
+PAPER_JSON_DIR = 'datasets/s2/db_papers'
+
+@click.command()
+def s2_dump_pdf_urls():
+    # Loop over all the papers in db_papers, collect the PDF URLs for each,
+    # pick the best one, and store it alongside the paper id.
+    # Another script will fetch the URLs this produces.
+    rows = [process_paper(fn) for fn in glob.iglob('{}/**/paper.json'.format(PAPER_JSON_DIR), recursive=True)]
+    print("Wrote {} rows".format(len(rows)))
+    write_csv('db_paper_pdf_list.csv', keys=['Paper ID', 'PDF URL', 'IEEE URL', 'Extra URL'], rows=rows)
+
+def process_paper(fn):
+    paper = read_json(fn)
+    paper_id = paper['id']
+    pdf_url = None
+    ieee_url = None
+    extra_url = None
+    if paper['s2PdfUrl']:
+        pdf_url = paper['s2PdfUrl']
+    for url in paper['pdfUrls']:
+        if 'ieeexplore.ieee.org' in url:
+            ieee_url = url
+        elif pdf_url is None and 'pdf' in url:
+            pdf_url = url
+        else:
+            extra_url = url
+    return [paper_id, pdf_url, ieee_url, extra_url]
+
+if __name__ == '__main__':
+    s2_dump_pdf_urls()
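The script pulls `read_json` and `write_csv` from the project's `util` module, which is not part of this diff. A minimal sketch of what those helpers might look like, assuming `read_json` loads a single `paper.json` into a dict and `write_csv` writes a header row followed by one row per paper; the real implementations may differ:

```python
# Hypothetical stand-ins for the util helpers used by s2-dump-db-pdf-urls.py.
# Names come from the script's calls; signatures and behavior are assumptions.
import csv
import simplejson as json

def read_json(path):
    # Load one paper.json file into a dict.
    with open(path) as f:
        return json.load(f)

def write_csv(path, keys, rows):
    # Write the header (keys) and then one row per paper.
    with open(path, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(keys)
        writer.writerows(rows)
```

Since the command takes no options, running `python s2-dump-db-pdf-urls.py` walks `datasets/s2/db_papers` and writes `db_paper_pdf_list.csv` in the working directory.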
