From 05fc975a313aa38483d904cb9ad07a029641d086 Mon Sep 17 00:00:00 2001
From: Jules Laplace
Date: Sun, 16 Dec 2018 16:29:04 +0100
Subject: rebuild

---
 scraper/s2-search.py | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

(limited to 'scraper/s2-search.py')

diff --git a/scraper/s2-search.py b/scraper/s2-search.py
index 9ec20cc9..d9b1beca 100644
--- a/scraper/s2-search.py
+++ b/scraper/s2-search.py
@@ -38,22 +38,24 @@ def fetch_entries(index, refresh):
         if len(clean_title) < 2:
             continue
         dump_fn = './datasets/s2/dumps/{}.json'.format(key)
-        entry_fn = './datasets/s2/entries/{}.json'.format(key)
         result = None
-        if not refresh and os.path.exists(entry_fn):
-            result = read_json(entry_fn)
+        if not refresh and os.path.exists(dump_fn):
+            results = read_json(dump_fn)
         else:
             results = s2.search(clean_title)
             write_json(dump_fn, results)
-            if len(results['results']) == 0:
-                print("- {}".format(title))
-            else:
-                print("+ {}".format(title))
-                result = results['results'][0]
-                write_json(entry_fn, result)
+
+        if len(results['results']) == 0:
+            print("- {}".format(title))
+        else:
+            print("+ {}".format(title))
+            result = results['results'][0]
+
         if result:
             paper_id = result['id']
             paper = fetch_paper(s2, paper_id)
+            entry_fn = './datasets/s2/entries/{}.json'.format(paper_id)
+            write_json(entry_fn, result)
             citation_lookup.append([key, name, title, paper_id])

     write_csv("datasets/citation_lookup.csv", keys=['key', 'name', 'title', 'paper_id'], rows=citation_lookup)
--
cgit v1.2.3-70-g09d2