author     Bryan Newbold <bnewbold@robocracy.org>  2019-11-13 00:27:48 -0800
committer  Bryan Newbold <bnewbold@robocracy.org>  2019-11-15 16:46:26 -0800
commit     a4db9ee2e18a18b23eb7ece484f95914421f877d (patch)
tree       6d4856c8fb7854a75dbd43c983179d4492769039 /python/tests/import_ingest.py
parent     169477c39dc772c0eb1d45f8097215e73f0f6044 (diff)
download   fatcat-a4db9ee2e18a18b23eb7ece484f95914421f877d.tar.gz
           fatcat-a4db9ee2e18a18b23eb7ece484f95914421f877d.zip
ingest file result importer
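For context, the test below drives the new importer through JsonLinePusher, the same way the fixture and tests in this commit do. A minimal standalone sketch of that usage, assuming a local fatcat API instance reachable via the fatcat_tools authenticated_api() helper; the host URL and the input file name are placeholders, not part of this commit:

    from fatcat_tools import authenticated_api
    from fatcat_tools.importers import IngestFileResultImporter, JsonLinePusher

    # Assumed local fatcat API instance; auth token is taken from the environment.
    api = authenticated_api("http://localhost:9411/v0")
    importer = IngestFileResultImporter(api)

    # Push one JSON ingest-file result per line through the importer.
    with open('ingest_results.json', 'r') as f:
        counts = JsonLinePusher(importer, f).run()
    print(counts)  # counters such as 'insert', 'exists', 'skip'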
Diffstat (limited to 'python/tests/import_ingest.py')
-rw-r--r--  python/tests/import_ingest.py  58
1 file changed, 58 insertions(+), 0 deletions(-)
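The skipped test in this file also shows how an import can be verified through the changelog API (get_changelog, get_changelog_entry, and the merged editgroup's description and extra fields). A condensed sketch of that check, assuming the fatcat_tools public_api() helper and a local API endpoint (the host URL is a placeholder):

    from fatcat_tools import public_api

    # Assumed local fatcat API endpoint.
    api = public_api("http://localhost:9411/v0")
    last_index = api.get_changelog(limit=1)[0].index

    # ... run an import that merges an editgroup ...

    change = api.get_changelog_entry(index=last_index + 1)
    eg = change.editgroup
    assert eg.description
    assert eg.extra['git_rev']
    assert "fatcat_tools.IngestFileResultImporter" in eg.extra['agent']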
diff --git a/python/tests/import_ingest.py b/python/tests/import_ingest.py
new file mode 100644
index 00000000..7c0a85cd
--- /dev/null
+++ b/python/tests/import_ingest.py
@@ -0,0 +1,58 @@
+
+import json
+import pytest
+from fatcat_tools.importers import IngestFileResultImporter, JsonLinePusher
+from fixtures import api
+
+
+@pytest.fixture(scope="function")
+def ingest_importer(api):
+    yield IngestFileResultImporter(api)
+
+# TODO: use the API to check that entities were actually created...
+def test_ingest_importer_basic(ingest_importer):
+    with open('tests/files/example_ingest.json', 'r') as f:
+        JsonLinePusher(ingest_importer, f).run()
+
+@pytest.mark.skip("tests not fleshed out yet")
+def test_ingest_importer(ingest_importer):
+    last_index = ingest_importer.api.get_changelog(limit=1)[0].index
+    with open('tests/files/example_ingest.json', 'r') as f:
+        ingest_importer.bezerk_mode = True
+        counts = JsonLinePusher(ingest_importer, f).run()
+    assert counts['insert'] == 2
+    assert counts['exists'] == 0
+    assert counts['skip'] == 11
+
+    # fetch most recent editgroup
+    change = ingest_importer.api.get_changelog_entry(index=last_index+1)
+    eg = change.editgroup
+    assert eg.description
+    assert "crawled from web" in eg.description.lower()
+    assert eg.extra['git_rev']
+    assert "fatcat_tools.IngestFileResultImporter" in eg.extra['agent']
+
+    # re-insert; should skip
+    with open('tests/files/example_ingest.json', 'r') as f:
+        ingest_importer.reset()
+        ingest_importer.bezerk_mode = False
+        counts = JsonLinePusher(ingest_importer, f).run()
+    assert counts['insert'] == 0
+    assert counts['exists'] == 2
+    assert counts['skip'] == 11
+
+def test_ingest_dict_parse(ingest_importer):
+    with open('tests/files/example_ingest.json', 'r') as f:
+        raw = json.loads(f.readline())
+        file_entity = ingest_importer.parse_record(raw)
+        assert file_entity.sha1 == "00242a192acc258bdfdb151943419437f440c313"
+        assert file_entity.md5 == "f4de91152c7ab9fdc2a128f962faebff"
+        assert file_entity.mimetype == "application/pdf"
+        assert file_entity.size == 255629
+        assert len(file_entity.urls) == 2
+        for u in file_entity.urls:
+            if u.rel == "web":
+                assert u.url.startswith("http://journals.plos.org")
+            if u.rel == "webarchive":
+                assert u.url.startswith("https://web.archive.org/")
+        assert len(file_entity.release_ids) == 1
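
Beyond the pusher interface, parse_record() can also be exercised on a single ingest result, which is what test_ingest_dict_parse does above. A minimal standalone sketch under the same assumptions (fatcat_tools public_api() helper, a local API endpoint, and the example fixture shipped with the tests):

    import json
    from fatcat_tools import public_api
    from fatcat_tools.importers import IngestFileResultImporter

    # Assumed local fatcat API endpoint.
    api = public_api("http://localhost:9411/v0")
    importer = IngestFileResultImporter(api)

    # Read one ingest result from the fixture file.
    with open('tests/files/example_ingest.json', 'r') as f:
        raw = json.loads(f.readline())

    # parse_record() maps one ingest result to a FileEntity (hashes, size,
    # mimetype, web + webarchive URLs, and a linked release).
    file_entity = importer.parse_record(raw)
    print(file_entity.sha1, file_entity.mimetype, file_entity.size)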