about summary refs log tree commit diff stats
path: root/python/sandcrawler/grobid.py
diff options
context:
space:
mode:
author: Bryan Newbold <bnewbold@archive.org> 2021-11-03 18:57:59 -0700
committer: Bryan Newbold <bnewbold@archive.org> 2021-11-04 17:19:52 -0700
commit: b4ceb130504cacbb75549e46719159f4e5ab5c51 (patch)
tree: fca88f1af4f0c15410029e86074afff252ebdf34 /python/sandcrawler/grobid.py
parent: 1f078fe94a5cf5322527b97dcdf0cb054e0c7540 (diff)
download: sandcrawler-b4ceb130504cacbb75549e46719159f4e5ab5c51.tar.gz
sandcrawler-b4ceb130504cacbb75549e46719159f4e5ab5c51.zip
grobid crossref refs: try to handle HTTP 5xx and XML parse errors
Diffstat (limited to 'python/sandcrawler/grobid.py')
-rw-r--r-- python/sandcrawler/grobid.py  28
1 file changed, 24 insertions(+), 4 deletions(-)
diff --git a/python/sandcrawler/grobid.py b/python/sandcrawler/grobid.py
index 7d7f6b5..8ed6d7e 100644
--- a/python/sandcrawler/grobid.py
+++ b/python/sandcrawler/grobid.py
@@ -1,5 +1,7 @@
import html
import sys
+import time
+import xml.etree.ElementTree
from typing import Any, Dict, List, Optional
import requests
@@ -242,9 +244,9 @@ class GrobidClient(object):
)
unstructured_refs = unstructured_refs[:2000]
- refs = self.process_citation_list(
- [clean_crossref_unstructured(r["unstructured"]) for r in unstructured_refs]
- )
+ clean_refs = [clean_crossref_unstructured(r["unstructured"]) for r in unstructured_refs]
+ refs = self.process_citation_list(clean_refs)
+
assert len(refs) == len(unstructured_refs)
refs_json = []
for i in range(len(refs)):
@@ -302,7 +304,25 @@ class CrossrefRefsWorker(SandcrawlerWorker):
self.sink = sink
def process(self, record: Any, key: Optional[str] = None) -> Any:
- return self.grobid_client.crossref_refs(record)
+ # handle the rare case of bad TEI-XML response
+ # eg: https://github.com/kermitt2/grobid/issues/848
+ try:
+ return self.grobid_client.crossref_refs(record)
+ except xml.etree.ElementTree.ParseError:
+ print(
+ f" GROBID returned bad XML for Crossref DOI: {record.get('DOI')}",
+ file=sys.stderr,
+ )
+ # but add a small slow-down so we don't churn through these if
+ # GROBID is just misconfigured or something
+ time.sleep(3)
+ return None
+ except requests.exceptions.HTTPError:
+ print(f" GROBID HTTP error for Crossref DOI: {record.get('DOI')}", file=sys.stderr)
+ # but add a small slow-down so we don't churn through these if
+ # GROBID is just misconfigured or something
+ time.sleep(3)
+ return None
class GrobidBlobWorker(SandcrawlerWorker):