Diffstat (limited to 's2-fetch-doi.py')
-rw-r--r--   s2-fetch-doi.py   53
1 file changed, 53 insertions(+), 0 deletions(-)
diff --git a/s2-fetch-doi.py b/s2-fetch-doi.py
new file mode 100644
index 00000000..a021fd2b
--- /dev/null
+++ b/s2-fetch-doi.py
@@ -0,0 +1,53 @@
+import os
+import sys
+import csv
+import subprocess
+import time
+import random
+import re
+import json
+import click
+from urllib.parse import urlparse
+from s2 import SemanticScholarAPI
+from util import *  # local helpers; provides read_csv() used below
+
+s2 = SemanticScholarAPI()
+
+@click.command()
+@click.option('--fn', '-i', default='db_paper_doi.csv', help='Filename of CSV with (paper_id, url) rows')
+def fetch_doi_list(fn):
+    lines = read_csv(fn, keys=False)
+    for line in lines:
+        paper_id, url = line
+        fetch_doi(paper_id, url)
+    print("{} papers processed".format(len(lines)))
+
+def fetch_doi(paper_id, url):
+    os.makedirs(make_doi_path(paper_id), exist_ok=True)
+    doi_fn = make_doi_fn(paper_id)
+    url_fn = make_url_fn(paper_id)
+    txt_fn = make_txt_fn(paper_id)
+    if os.path.exists(doi_fn) or os.path.exists(txt_fn):  # already fetched, or already converted to text
+        # return read_json(doi_fn)
+        return
+    size, final_url = s2.fetch_file(url, doi_fn)  # download url into doi_fn
+    if size is None:
+        print("{} empty?".format(paper_id))
+        time.sleep(random.randint(5, 10))  # back off before the next request
+        return None
+    print("{} {} kb".format(paper_id, int(size / 1024)))
+    time.sleep(random.randint(5, 10))  # throttle between downloads
+    return
+    # return paper
+
+def make_doi_path(paper_id):
+    return './datasets/s2/doi/{}/{}'.format(paper_id[0:2], paper_id)  # sharded by first two chars of the id
+def make_doi_fn(paper_id):
+    return './datasets/s2/doi/{}/{}/paper.doi'.format(paper_id[0:2], paper_id)
+def make_url_fn(paper_id):
+    return './datasets/s2/doi/{}/{}/paper.url'.format(paper_id[0:2], paper_id)
+def make_txt_fn(paper_id):
+    return './datasets/s2/pdf/{}/{}/paper.txt'.format(paper_id[0:2], paper_id)
+
+if __name__ == '__main__':
+    fetch_doi_list()
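
Two helpers used above are not part of this commit: read_csv (pulled in via `from util import *`) and SemanticScholarAPI.fetch_file (imported from s2.py). The sketch below shows what this script appears to assume about them, inferred only from the call sites here; the names, signatures, and return values are assumptions, not the real implementations. The command itself is run as, e.g., `python s2-fetch-doi.py --fn db_paper_doi.csv`, and each download lands in ./datasets/s2/doi/<first two chars>/<paper_id>/paper.doi.

    import csv
    import urllib.request

    def read_csv(fn, keys=False):
        # Assumed behaviour of util.read_csv(): with keys=False, return the raw
        # rows as lists, matching `paper_id, url = line` in fetch_doi_list().
        with open(fn, newline='') as f:
            rows = [row for row in csv.reader(f) if row]
        if not keys:
            return rows
        header, body = rows[0], rows[1:]
        return [dict(zip(header, row)) for row in body]

    class SemanticScholarAPI:
        def fetch_file(self, url, out_fn):
            # Assumed contract: download `url` into `out_fn` and return
            # (bytes_written, final_url); return (None, url) when nothing usable
            # came back, which is what the `size is None` branch above checks.
            try:
                with urllib.request.urlopen(url) as resp:
                    data = resp.read()
                    final_url = resp.geturl()
            except Exception:
                return None, url
            if not data:
                return None, final_url
            with open(out_fn, 'wb') as f:
                f.write(data)
            return len(data), final_url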