Skip to content

Commit

Permalink
manubot#337 Use a single default_value.
Browse files Browse the repository at this point in the history
  • Loading branch information
xihh87 committed Jun 15, 2022
1 parent 8f0f0f3 commit d4f69f9
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 17 deletions.
8 changes: 4 additions & 4 deletions manubot/cite/unpaywall.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,12 +100,12 @@ def __init__(self, doi, set_oa_locations=True):
if set_oa_locations:
self.set_oa_locations()

def set_oa_locations(self):
def set_oa_locations(self, timeout: int = 3):
from manubot.util import contact_email

url = f"https://api.unpaywall.org/v2/{self.doi}"
params = {"email": contact_email}
response = requests.get(url, params=params)
response = requests.get(url, params=params, timeout=timeout)
response.raise_for_status()
self.results = response.json()
self.oa_locations = [
Expand All @@ -124,11 +124,11 @@ def __init__(self, arxiv_id, set_oa_locations=True, use_doi=True):
if set_oa_locations:
self.set_oa_locations()

def set_oa_locations(self):
def set_oa_locations(self, timeout: int = 3):
from .arxiv import get_arxiv_csl_item

if not self.csl_item:
self.csl_item = get_arxiv_csl_item(self.arxiv_id)
self.csl_item = get_arxiv_csl_item(self.arxiv_id, timeout=timeout)
doi = self.csl_item.get("DOI")
if self.use_doi and doi:
unpaywall_doi = Unpaywall_DOI(doi)
Expand Down
32 changes: 19 additions & 13 deletions manubot/cite/url.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,27 +7,33 @@

CSLItem = Dict[str, Any]

# Single shared default (seconds) for HTTP request timeouts in this module,
# so every get_url_csl_item_* strategy agrees on one value.
default_timeout = 3


class Handler_URL(Handler):
    """Citekey handler for raw URLs (``url:``, ``http:``, ``https:`` prefixes)."""

    # NOTE: must be a plain string, not a tuple — standardize_prefix_accession
    # returns it as the standardized prefix component of the citekey.
    standard_prefix = "url"

    # Must be a flat list of strings: the registry iterates these to match
    # citekey prefixes, so wrapping the list in a tuple would break matching.
    prefixes = [
        "url",
        "http",
        "https",
    ]

    # Timeout (seconds) applied by get_csl_item; reuse the module-wide default
    # rather than repeating the literal.
    timeout_seconds = default_timeout

    def standardize_prefix_accession(self, accession):
        # For http/https citekeys, re-attach the scheme so the accession is a
        # complete URL; bare "url:" citekeys already carry the full URL.
        if self.prefix_lower != "url":
            accession = f"{self.prefix_lower}:{accession}"
        return self.standard_prefix, accession

    def get_csl_item(self, citekey):
        """Return a CSL Item for the citekey's URL, using the class timeout."""
        return get_url_csl_item(
            citekey.standard_accession, timeout=self.timeout_seconds
        )


def get_url_csl_item(url: str, timeout: int = 3) -> CSLItem:
def get_url_csl_item(url: str, timeout: int = default_timeout) -> CSLItem:
"""
Get csl_item for a URL trying a sequence of strategies.
Expand All @@ -48,7 +54,7 @@ def get_url_csl_item(url: str, timeout: int = 3) -> CSLItem:
raise Exception(f"all get_url_csl_item methods failed for {url}")


def get_url_csl_item_zotero(url: str, timeout: int = 3) -> CSLItem:
def get_url_csl_item_zotero(url: str, timeout: int = default_timeout) -> CSLItem:
"""
Use Zotero's translation-server to generate a CSL Item for the specified URL.
"""
Expand All @@ -63,7 +69,7 @@ def get_url_csl_item_zotero(url: str, timeout: int = 3) -> CSLItem:
return csl_item


def get_url_csl_item_greycite(url: str, timeout: int = 3) -> CSLItem:
def get_url_csl_item_greycite(url: str, timeout: int = default_timeout) -> CSLItem:
"""
    Uses Greycite which has experienced uptime problems in the past.
API calls seem to take at least 15 seconds. Browser requests are much
Expand Down Expand Up @@ -100,7 +106,7 @@ def get_url_csl_item_greycite(url: str, timeout: int = 3) -> CSLItem:
return csl_item


def get_url_csl_item_manual(url: str, timeout: int = 3) -> CSLItem:
def get_url_csl_item_manual(url: str, timeout: int = default_timeout) -> CSLItem:
"""
Manually create csl_item for a URL.
"""
Expand Down

0 comments on commit d4f69f9

Please sign in to comment.