summaryrefslogtreecommitdiff
path: root/s2-dump-pdf-urls.py
diff options
context:
space:
mode:
Diffstat (limited to 's2-dump-pdf-urls.py')
-rw-r--r--s2-dump-pdf-urls.py37
1 file changed, 37 insertions, 0 deletions
diff --git a/s2-dump-pdf-urls.py b/s2-dump-pdf-urls.py
new file mode 100644
index 00000000..111ed830
--- /dev/null
+++ b/s2-dump-pdf-urls.py
@@ -0,0 +1,37 @@
+import os
+import glob
+import simplejson as json
+import click
+from util import *
+
# Root directory scanned for per-paper metadata files ('**/paper.json').
PAPER_JSON_DIR = 'datasets/s2/db_papers'
+
@click.command()
def s2_dump_pdf_urls():
    """Collect a PDF URL for every paper under PAPER_JSON_DIR.

    Walks every ``paper.json`` below ``PAPER_JSON_DIR`` and lets
    ``process_paper`` (from ``util``) choose and record the best PDF URL
    for each paper, keyed by paper id, into ``ids``. The collected ids
    are then written to ``pdf_list.csv`` for a separate fetcher script
    to consume.

    NOTE: a previous revision contained a trailing loop over undefined
    names (``lines``, ``re``, ``s2``, ``time``, ``random``, ``dump_fn``)
    pasted from another script; it always raised ``NameError`` and has
    been removed.
    """
    # paper id -> chosen PDF URL; populated as a side effect of
    # process_paper(). We only persist the ids (keys) below.
    ids = {}
    for fn in glob.iglob('{}/**/paper.json'.format(PAPER_JSON_DIR), recursive=True):
        process_paper(fn, ids)
    id_list = list(ids.keys())
    print("Wrote {} ids".format(len(id_list)))
    write_csv('pdf_list.csv', id_list)
+
# Script entry point: click parses CLI arguments and invokes the command.
if __name__ == '__main__':
    s2_dump_pdf_urls()