author    Bryan Newbold <bnewbold@archive.org>  2018-04-10 17:26:56 -0700
committer Bryan Newbold <bnewbold@archive.org>  2018-04-10 19:13:53 -0700
commit    41c79a8bd9b01ba52dc19e7e3ba13221bf23c560 (patch)
tree      68f9915844c012ada4afaebde427ccf5d8ed9a39 /mapreduce/tests
parent    60f29aaa1efd90628c6a6dc503e23d694e0389ce (diff)
extraction test fixes
Diffstat (limited to 'mapreduce/tests')
-rw-r--r--  mapreduce/tests/test_extraction_cdx_grobid.py | 27
1 file changed, 23 insertions(+), 4 deletions(-)
diff --git a/mapreduce/tests/test_extraction_cdx_grobid.py b/mapreduce/tests/test_extraction_cdx_grobid.py
index 713e501..729e68b 100644
--- a/mapreduce/tests/test_extraction_cdx_grobid.py
+++ b/mapreduce/tests/test_extraction_cdx_grobid.py
@@ -158,7 +158,7 @@ def test_parse_cdx_skip(job):
@responses.activate
def test_grobid_503(mock_fetch, job):
- status = b"{'status': 'done broke due to 503'}"
+ status = b'{"status": "done broke due to 503"}'
responses.add(responses.POST, 'http://localhost:8070/api/processFulltextDocument', status=503,
body=status)
@@ -176,9 +176,9 @@ def test_grobid_503(mock_fetch, job):
@responses.activate
def test_grobid_not_xml(mock_fetch, job):
- status = b"{'status': 'done broke'}"
+ payload = b'this is not XML'
responses.add(responses.POST, 'http://localhost:8070/api/processFulltextDocument', status=200,
- body=status)
+ body=payload)
raw = io.BytesIO(b"""com,sagepub,cep)/content/28/9/960.full.pdf 20170705062200 http://cep.sagepub.com/content/28/9/960.full.pdf application/pdf 200 ABCDEF12345Q2MSVX7XZKYAYSCX5QBYJ - - 401 313356621 CITESEERX-CRAWL-2017-06-20-20170705061647307-00039-00048-wbgrp-svc284/CITESEERX-CRAWL-2017-06-20-20170705062052659-00043-31209~wbgrp-svc284.us.archive.org~8443.warc.gz""")
@@ -186,5 +186,24 @@ def test_grobid_not_xml(mock_fetch, job):
job.sandbox(stdin=raw, stdout=output)
job.run_mapper()
row = job.hb_table.row(b'sha1:ABCDEF12345Q2MSVX7XZKYAYSCX5QBYJ')
- assert json.loads(row[b'grobid0:status'].decode('utf-8')) == status
+ assert struct.unpack("!q", row[b'grobid0:status_code'])[0] == 200
+ assert row[b'grobid0:tei_xml'] == payload
+ assert b'grobid0:tei_json' not in row
+
+
+@mock.patch('extraction_cdx_grobid.MRExtractCdxGrobid.fetch_warc_content', return_value=(FAKE_PDF_BYTES, None))
+@responses.activate
+def test_grobid_invalid_connection(mock_fetch, job):
+
+ status = b'{"status": "done broke"}'
+ job.options.grobid_uri = 'http://host.invalid:8070/api/processFulltextDocument'
+
+ raw = io.BytesIO(b"""com,sagepub,cep)/content/28/9/960.full.pdf 20170705062200 http://cep.sagepub.com/content/28/9/960.full.pdf application/pdf 200 ABCDEF12345Q2MSVX7XZKYAYSCX5QBYJ - - 401 313356621 CITESEERX-CRAWL-2017-06-20-20170705061647307-00039-00048-wbgrp-svc284/CITESEERX-CRAWL-2017-06-20-20170705062052659-00043-31209~wbgrp-svc284.us.archive.org~8443.warc.gz""")
+
+ output = io.BytesIO()
+ job.sandbox(stdin=raw, stdout=output)
+ #with pytest.raises...
+ job.run_mapper()
+ assert job.hb_table.row(b'sha1:ABCDEF12345Q2MSVX7XZKYAYSCX5QBYJ') == {}
+# TODO: failure to fetch from wayback
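Not part of the commit above, but for context on that trailing TODO: a minimal sketch of a wayback-fetch-failure test following the same pattern as the tests in this diff. It assumes fetch_warc_content returns a (content, error) tuple and that the mapper writes no row when the fetch fails; the test name, error string, and empty-row assertion are assumptions, not taken from this commit. It relies on the same imports (io, mock, responses) and the `job` fixture used by the tests above.

import io
from unittest import mock

import responses


# Hypothetical sketch (not in this commit): mock a failed wayback fetch by
# returning no content plus an error string from fetch_warc_content.
@mock.patch('extraction_cdx_grobid.MRExtractCdxGrobid.fetch_warc_content',
            return_value=(None, "failed to fetch from wayback"))
@responses.activate
def test_wayback_failure(mock_fetch, job):

    raw = io.BytesIO(b"""com,sagepub,cep)/content/28/9/960.full.pdf 20170705062200 http://cep.sagepub.com/content/28/9/960.full.pdf application/pdf 200 ABCDEF12345Q2MSVX7XZKYAYSCX5QBYJ - - 401 313356621 CITESEERX-CRAWL-2017-06-20-20170705061647307-00039-00048-wbgrp-svc284/CITESEERX-CRAWL-2017-06-20-20170705062052659-00043-31209~wbgrp-svc284.us.archive.org~8443.warc.gz""")

    output = io.BytesIO()
    job.sandbox(stdin=raw, stdout=output)
    job.run_mapper()
    # Assumption: on a fetch failure, no row is written for this SHA1.
    assert job.hb_table.row(b'sha1:ABCDEF12345Q2MSVX7XZKYAYSCX5QBYJ') == {}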