import os
import glob

import simplejson as json
import click

from util import *

# Root directory holding one paper.json per paper, nested arbitrarily deep.
PAPER_JSON_DIR = 'datasets/s2/db_papers'


@click.command()
def s2_dump_pdf_urls():
    """Scan every paper.json under PAPER_JSON_DIR, pick the best PDF URL
    for each paper, and dump the results to db_paper_pdf_list.csv.

    A separate script is expected to fetch the URLs listed in the CSV.
    """
    rows = [process_paper(fn)
            for fn in glob.iglob('{}/**/paper.json'.format(PAPER_JSON_DIR),
                                 recursive=True)]
    print("Wrote {} rows".format(len(rows)))
    write_csv('db_paper_pdf_list.csv',
              keys=['Paper ID', 'PDF URL', 'IEEE URL', 'Extra URL'],
              rows=rows)


def process_paper(fn, lookups=None):
    """Return ``[paper_id, pdf_url, ieee_url, extra_url]`` for one paper.json.

    URL selection: the S2-hosted PDF (``s2PdfUrl``) wins if present;
    otherwise the first entry of ``pdfUrls`` containing 'pdf'. IEEE Xplore
    URLs are recorded separately in ``ieee_url``; any other URL is kept as
    a last-resort ``extra_url`` (later matches overwrite earlier ones).

    ``lookups`` is unused; it now defaults to None because the only call
    site (in s2_dump_pdf_urls) passes a single argument — the previous
    required parameter made every call raise TypeError.
    """
    paper = read_json(fn)
    paper_id = paper['id']
    pdf_url = None
    ieee_url = None
    extra_url = None
    if paper['s2PdfUrl']:
        pdf_url = paper['s2PdfUrl']
    for url in paper['pdfUrls']:
        if 'ieeexplore.ieee.org' in url:
            ieee_url = url
        elif pdf_url is None and 'pdf' in url:
            pdf_url = url
        else:
            extra_url = url
    return [paper_id, pdf_url, ieee_url, extra_url]


if __name__ == '__main__':
    s2_dump_pdf_urls()