From dd8cdc88f71e6a395ab5b10d84d6443f70e39048 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Thu, 4 Nov 2021 11:49:33 -0700
Subject: crossref grobid refs: another error case (ReadTimeout)

With this last exception handled, was able to get through millions of
rows of references, with only a few dozen errors (mostly invalid XML).
---
 python/sandcrawler/grobid.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

(limited to 'python/sandcrawler/grobid.py')

diff --git a/python/sandcrawler/grobid.py b/python/sandcrawler/grobid.py
index b9dd196..a6858ff 100644
--- a/python/sandcrawler/grobid.py
+++ b/python/sandcrawler/grobid.py
@@ -311,7 +311,7 @@ class CrossrefRefsWorker(SandcrawlerWorker):
             return self.grobid_client.crossref_refs(record)
         except xml.etree.ElementTree.ParseError:
             print(
-                f" GROBID returned bad XML for Crossref DOI: {record.get('DOI')}",
+                f"GROBID returned bad XML for Crossref DOI: {record.get('DOI')}",
                 file=sys.stderr,
             )
             # but add a small slow-down so we don't churn through these if
@@ -319,9 +319,11 @@ class CrossrefRefsWorker(SandcrawlerWorker):
             time.sleep(3)
             return None
         except requests.exceptions.HTTPError:
-            print(f" GROBID HTTP error for Crossref DOI: {record.get('DOI')}", file=sys.stderr)
-            # but add a small slow-down so we don't churn through these if
-            # GROBID is just misconfigured or something
+            print(f"GROBID HTTP error for Crossref DOI: {record.get('DOI')}", file=sys.stderr)
+            time.sleep(3)
+            return None
+        except requests.exceptions.ReadTimeout:
+            print(f"GROBID HTTP timeout for Crossref DOI: {record.get('DOI')}", file=sys.stderr)
             time.sleep(3)
             return None
 
-- 
cgit v1.2.3
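
For reference, requests raises ReadTimeout (a subclass of Timeout, not of
HTTPError) when the GROBID server accepts the connection but does not answer
within the client timeout, so the existing HTTPError handler did not cover this
case; the patch catches it and applies the same three-second back-off. Below is
a minimal sketch of how the exception handling reads with the patch applied.
The free-standing wrapper and its name process_crossref_record are assumptions
for illustration only; in sandcrawler this logic lives inside
CrossrefRefsWorker, where the grobid client and record come from the worker.

    # Sketch only: the imports and the standalone function are assumed for
    # illustration; the real code sits inside CrossrefRefsWorker and calls
    # self.grobid_client.crossref_refs(record).
    import sys
    import time
    import xml.etree.ElementTree

    import requests

    def process_crossref_record(grobid_client, record):
        try:
            return grobid_client.crossref_refs(record)
        except xml.etree.ElementTree.ParseError:
            # rare case of bad TEI-XML coming back from GROBID
            print(
                f"GROBID returned bad XML for Crossref DOI: {record.get('DOI')}",
                file=sys.stderr,
            )
            # small slow-down so we don't churn through records if GROBID
            # is misconfigured or overloaded
            time.sleep(3)
            return None
        except requests.exceptions.HTTPError:
            print(f"GROBID HTTP error for Crossref DOI: {record.get('DOI')}", file=sys.stderr)
            time.sleep(3)
            return None
        except requests.exceptions.ReadTimeout:
            # server accepted the request but never responded in time
            print(f"GROBID HTTP timeout for Crossref DOI: {record.get('DOI')}", file=sys.stderr)
            time.sleep(3)
            return None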