summaryrefslogtreecommitdiff
path: root/scraper/s2-papers.py
diff options
context:
space:
mode:
authorJules Laplace <julescarbon@gmail.com>2018-11-25 22:19:15 +0100
committerJules Laplace <julescarbon@gmail.com>2018-11-25 22:19:15 +0100
commitee3d0d98e19f1d8177d85af1866fd0ee431fe9ea (patch)
tree41372528e78d4328bc2a47bbbabac7e809c58894 /scraper/s2-papers.py
parent255b8178af1e25a71fd23703d30c0d1f74911f47 (diff)
moving stuff
Diffstat (limited to 'scraper/s2-papers.py')
-rw-r--r--scraper/s2-papers.py64
1 files changed, 64 insertions, 0 deletions
diff --git a/scraper/s2-papers.py b/scraper/s2-papers.py
new file mode 100644
index 00000000..4fe1f025
--- /dev/null
+++ b/scraper/s2-papers.py
@@ -0,0 +1,64 @@
+import os
+import sys
+import csv
+import subprocess
+import time
+import random
+import re
+import json
+import click
+from s2 import SemanticScholarAPI
+from util import *
+
+'''
+s2 search API format:
+results
+matchedAuthors
+matchedPresentations
+query
+querySuggestions
+results
+stats
+totalPages
+totalResults
+'''
+
+s2 = SemanticScholarAPI()
+
@click.command()
@click.option('--index', '-n', default=0, help='Index of CSV (query,)')
@click.option('--depth', '-d', default=1, help='Depth to recurse (not implemented).')
def fetch_papers(index, depth):
    """Fetch Semantic Scholar paper records for every row of citation list *index*.

    For each row, the previously-scraped search entry (keyed by a sanitized
    title) is loaded from ./datasets/s2/entries/, and the full paper record
    is fetched (or read from cache) via fetch_paper(). Rows whose entry file
    is missing are reported and skipped. *depth* is accepted but unused.
    """
    keys, rows = read_citation_list(index)
    for row in rows:
        label = row[0]
        # Strip everything except alphanumerics, dashes and spaces — this must
        # match the sanitization used when the entry files were written.
        safe_title = re.sub(r'[^-0-9a-zA-Z ]+', '', row[1])
        entry_fn = './datasets/s2/entries/{}.json'.format(safe_title)
        if not os.path.exists(entry_fn):
            print('not found: {}'.format(entry_fn))
            continue
        entry = read_json(entry_fn)
        fetch_paper(entry['id'])
        # get all of the paper's citations
+
def fetch_paper(paper_id):
    """Return the S2 record for *paper_id*, caching it on disk.

    Records live at ./datasets/s2/papers/<first-two-chars>/<id>/paper.json;
    if the file already exists it is returned without hitting the API.
    On an API miss we back off 20-30s and retry once; returns None if both
    attempts fail. Successful fetches are written through to the cache and
    followed by a 5-10s politeness sleep.
    """
    shard = paper_id[0:2]
    paper_dir = './datasets/s2/papers/{}/{}'.format(shard, paper_id)
    os.makedirs(paper_dir, exist_ok=True)
    paper_fn = paper_dir + '/paper.json'
    # Cache hit: skip the network entirely.
    if os.path.exists(paper_fn):
        return read_json(paper_fn)
    print(paper_id)
    paper = s2.paper(paper_id)
    if paper is None:
        print("Got none paper??")
        # Back off before the single retry — presumably rate limiting; verify.
        time.sleep(random.randint(20, 30))
        paper = s2.paper(paper_id)
        if paper is None:
            print("Paper not found")
            return None
    write_json(paper_fn, paper)
    # Throttle between successful requests to stay polite to the API.
    time.sleep(random.randint(5, 10))
    return paper
+
if __name__ == '__main__':
    # Click parses --index/--depth from the command line when run as a script.
    fetch_papers()