summaryrefslogtreecommitdiff
path: root/s2-papers.py
diff options
context:
space:
mode:
Diffstat (limited to 's2-papers.py')
-rw-r--r--s2-papers.py89
1 files changed, 89 insertions, 0 deletions
diff --git a/s2-papers.py b/s2-papers.py
new file mode 100644
index 00000000..7320c095
--- /dev/null
+++ b/s2-papers.py
@@ -0,0 +1,89 @@
+import os
+import sys
+import csv
+import subprocess
+import time
+import random
+import re
+import json
+import click
+from s2 import SemanticScholarAPI
+
'''
Fields in an s2 search API response:
matchedAuthors
matchedPresentations
query
querySuggestions
results
stats
totalPages
totalResults
'''
+
# Shared Semantic Scholar API client reused by every fetch helper below.
s2 = SemanticScholarAPI()
+
@click.command()
@click.option('--index', '-n', default=0, help='Index of CSV.')
@click.option('--depth', '-d', default=1, help='Depth to recurse.')
def fetch_papers(index, depth):
    """Fetch the S2 paper record for every row of the citation CSV.

    For each row the title is reduced to filename-safe characters and the
    matching search-entry JSON under ./datasets/s2/entries/ is looked up;
    rows without an entry file are reported and skipped.
    """
    _keys, rows = read_citation_list(index)
    for row in rows:
        label = row[0]
        # Keep only alphanumerics, spaces and hyphens so the title can
        # double as the entry filename.
        safe_title = re.sub(r'[^-0-9a-zA-Z ]+', '', row[1])
        entry_fn = './datasets/s2/entries/{}.json'.format(safe_title)
        if not os.path.exists(entry_fn):
            print('not found: {}'.format(entry_fn))
            continue
        entry = read_json(entry_fn)
        paper = fetch_paper(entry['id'])
        # get all of the paper's citations
+
def fetch_paper(paper_id, max_retries=1):
    """Return the S2 paper record for *paper_id*, caching it on disk.

    Records are cached at ./datasets/s2/papers/<id[:2]>/<id>/paper.json and
    a cached copy is returned without touching the API. When the API yields
    None, the call is retried up to *max_retries* times (default preserves
    the original single-retry behavior) after a long random back-off.
    Returns None if the paper still cannot be fetched.
    """
    paper_dir = './datasets/s2/papers/{}/{}'.format(paper_id[0:2], paper_id)
    os.makedirs(paper_dir, exist_ok=True)
    paper_fn = paper_dir + '/paper.json'
    if os.path.exists(paper_fn):
        return read_json(paper_fn)
    print(paper_id)
    paper = s2.paper(paper_id)
    for _ in range(max_retries):
        if paper is not None:
            break
        print("Got none paper??")
        # Long back-off before retrying — presumably to dodge rate limiting.
        time.sleep(random.randint(20, 30))
        paper = s2.paper(paper_id)
    if paper is None:
        print("Paper not found")
        return None
    write_json(paper_fn, paper)
    # Polite pause between successful API calls.
    time.sleep(random.randint(5, 10))
    return paper
+
def read_citation_list(index=0):
    """Load the citation CSV and return (header_row, data_rows).

    An *index* greater than 0 selects the numbered variant
    './datasets/citations-<index>.csv' instead of the base file.
    """
    filename = './datasets/citations.csv'
    if index > 0:
        fn, ext = os.path.splitext(filename)
        filename = fn + '-' + str(index) + ext
    # newline='' is required by the csv module so quoted fields containing
    # embedded newlines are parsed correctly.
    with open(filename, 'r', newline='') as f:
        rows = list(csv.reader(f))
    keys = rows[0]
    return keys, rows[1:]
+
def read_json(fn):
    """Read the file at *fn* and return its decoded JSON content."""
    with open(fn, 'r') as handle:
        text = handle.read()
    return json.loads(text)
def write_json(fn, data):
    """Serialize *data* as JSON into the file at *fn*, overwriting it."""
    payload = json.dumps(data)
    with open(fn, 'w') as handle:
        handle.write(payload)
def write_csv(fn, keys, rows):
    """Write *rows* to the CSV file *fn*, preceded by header *keys*.

    Pass keys=None to omit the header row.
    """
    # newline='' is required when handing a file to csv.writer; without it
    # the module emits blank interleaved lines on \r\n platforms.
    with open(fn, 'w', newline='') as f:
        writer = csv.writer(f)
        if keys is not None:
            writer.writerow(keys)
        writer.writerows(rows)
+
if __name__ == '__main__':
    # click parses the CLI options (--index/--depth) and invokes the command.
    fetch_papers()