author     Bryan Newbold <bnewbold@robocracy.org>    2021-11-02 18:13:14 -0700
committer  Bryan Newbold <bnewbold@robocracy.org>    2021-11-02 18:13:14 -0700
commit     cdfd6b85b386b7bbf9d5a5179ef26970b6e5a4e7 (patch)
tree       5e4034027b51f3ee4d2a488bb2cbb7a75c3bd0d8 /python/tests/import_fileset_generic.py
parent     78f08280edea4ff65ca613ad30005c45cc48dea6 (diff)
download   fatcat-cdfd6b85b386b7bbf9d5a5179ef26970b6e5a4e7.tar.gz
           fatcat-cdfd6b85b386b7bbf9d5a5179ef26970b6e5a4e7.zip
fmt (black): tests/
Diffstat (limited to 'python/tests/import_fileset_generic.py')
-rw-r--r--  python/tests/import_fileset_generic.py  37
1 file changed, 21 insertions(+), 16 deletions(-)
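
As the commit message says, the diff below is the output of running the black code formatter over the tests directory. As a rough illustration only (not the project's actual tooling setup), black's Python API can reproduce the quote normalization seen in the hunks; this sketch assumes black is installed and uses its default mode rather than any repo-specific configuration:

    # Hedged sketch: reformat one line from this test file the way black would.
    # Assumption: default black settings; fatcat's real configuration may differ.
    import black

    src = (
        "with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:\n"
        "    JsonLinePusher(fileset_importer, f).run()\n"
    )
    # format_str() parses and reprints the source; single-quoted strings come
    # back double-quoted, matching the '+' lines in the diff below.
    print(black.format_str(src, mode=black.Mode()))

Running a snippet like this prints the same double-quoted form that appears on the '+' lines of the corresponding hunk.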
diff --git a/python/tests/import_fileset_generic.py b/python/tests/import_fileset_generic.py
index 8d30a83b..eadf5c43 100644
--- a/python/tests/import_fileset_generic.py
+++ b/python/tests/import_fileset_generic.py
@@ -1,4 +1,3 @@
-
 import json
 
 import pytest
@@ -11,48 +10,54 @@ from fatcat_tools.importers import FilesetImporter, JsonLinePusher
 def fileset_importer(api):
     yield FilesetImporter(api)
 
+
 # TODO: use API to check that entities actually created...
 def test_fileset_importer_basic(fileset_importer):
-    with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+    with open("tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json", "r") as f:
         JsonLinePusher(fileset_importer, f).run()
 
+
 def test_fileset_importer(fileset_importer):
     last_index = fileset_importer.api.get_changelog(limit=1)[0].index
-    with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+    with open("tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json", "r") as f:
         fileset_importer.bezerk_mode = True
         counts = JsonLinePusher(fileset_importer, f).run()
-    assert counts['insert'] == 1
-    assert counts['exists'] == 0
-    assert counts['skip'] == 0
+    assert counts["insert"] == 1
+    assert counts["exists"] == 0
+    assert counts["skip"] == 0
 
     # fetch most recent editgroup
-    change = fileset_importer.api.get_changelog_entry(index=last_index+1)
+    change = fileset_importer.api.get_changelog_entry(index=last_index + 1)
     eg = change.editgroup
     assert eg.description
     assert "generic fileset" in eg.description.lower()
-    assert eg.extra['git_rev']
-    assert "fatcat_tools.FilesetImporter" in eg.extra['agent']
+    assert eg.extra["git_rev"]
+    assert "fatcat_tools.FilesetImporter" in eg.extra["agent"]
 
     # re-insert; should skip
-    with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+    with open("tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json", "r") as f:
         fileset_importer.reset()
         fileset_importer.bezerk_mode = False
         counts = JsonLinePusher(fileset_importer, f).run()
-    assert counts['insert'] == 0
-    assert counts['exists'] == 1
-    assert counts['skip'] == 0
+    assert counts["insert"] == 0
+    assert counts["exists"] == 1
+    assert counts["skip"] == 0
 
+
 def test_fileset_dict_parse(fileset_importer):
-    with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+    with open("tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json", "r") as f:
         raw = json.loads(f.readline())
         fs = fileset_importer.parse_record(raw)
 
     assert fs.manifest[0].sha1 == "cc9bd558ca79b30b2966714da7ef4129537fde0c"
     assert fs.manifest[0].md5 == "742c40404c9a4dbbd77c0985201c639f"
-    assert fs.manifest[0].sha256 == "3a7c07ad17ce3638d5a1dd21f995a496e430b952eef00270ad741d506984370f"
+    assert (
+        fs.manifest[0].sha256
+        == "3a7c07ad17ce3638d5a1dd21f995a496e430b952eef00270ad741d506984370f"
+    )
     assert fs.manifest[0].size == 640500
     assert fs.manifest[0].path == "070111_LatA_100nM.txt"
-    assert fs.manifest[0].extra['mimetype'] == "text/plain"
+    assert fs.manifest[0].extra["mimetype"] == "text/plain"
 
     assert len(fs.urls) == 3
     for u in fs.urls:
         if u.rel == "repo":