From 17b049ccc6e5c3a3b5dd7e4dd787bfe984e55fd6 Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Sat, 3 Nov 2018 17:22:47 +0100
Subject: dump pdf urls

---
 s2-dump-pdf-urls.py | 37 +++++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)
 create mode 100644 s2-dump-pdf-urls.py

diff --git a/s2-dump-pdf-urls.py b/s2-dump-pdf-urls.py
new file mode 100644
index 00000000..111ed830
--- /dev/null
+++ b/s2-dump-pdf-urls.py
@@ -0,0 +1,37 @@
+import os
+import glob
+import simplejson as json
+import click
+from util import *
+
+PAPER_JSON_DIR = 'datasets/s2/db_papers'
+
+@click.command()
+def s2_dump_pdf_urls():
+    # loop over all the papers in db_papers
+    # get all the PDF urls, pick the best one
+    # store it and the paper id
+    # another script will fetch the urls from this process
+    ids = {}
+    for fn in glob.iglob('{}/**/paper.json'.format(PAPER_JSON_DIR), recursive=True):
+        process_paper(fn, ids)
+    id_list = list(ids.keys())
+    print("Wrote {} ids".format(len(id_list)))
+    write_csv('pdf_list.csv', id_list)
+
+    for line in lines:
+        label = line[0]
+        title = re.sub(r'[^-0-9a-zA-Z ]+', '', line[1])
+        entry_fn = './datasets/s2/entries/{}.json'.format(title)
+        if not os.path.exists(entry_fn):
+            results = s2.search(title)
+            write_json(dump_fn, results)
+            if len(results['results']) == 0:
+                print("No results for {}".format(title))
+            else:
+                print(title)
+                write_json(entry_fn, results['results'][0])
+            time.sleep(random.randint(10, 20))
+
+if __name__ == '__main__':
+    s2_dump_pdf_urls()
--
cgit v1.2.3-70-g09d2
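
A note on the helper this patch relies on: process_paper(fn, ids) is called but not
defined in the file, presumably coming in through "from util import *". Below is a
minimal sketch of what such a helper could look like, assuming each paper.json holds
Semantic Scholar-style metadata with "pdfUrls" and "id" fields; those field names and
the preference for arxiv links are assumptions, not something the patch confirms.

    import simplejson as json

    # Hypothetical process_paper(): read one paper.json, pick a "best" PDF URL,
    # and record it against the paper id. Field names are assumed, not confirmed
    # by the patch above.
    def process_paper(fn, ids):
        with open(fn) as f:
            paper = json.load(f)
        urls = paper.get('pdfUrls') or []
        if not urls:
            return
        # Prefer an arxiv link when one exists, otherwise fall back to the first URL.
        best = next((u for u in urls if 'arxiv.org' in u), urls[0])
        ids[paper['id']] = best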