author     Bryan Newbold <bnewbold@robocracy.org>  2021-11-02 11:32:16 -0700
committer  Bryan Newbold <bnewbold@robocracy.org>  2021-11-02 11:32:16 -0700
commit     2137be14556a604a06e8b54bbc22d68f7d3cd695 (patch)
tree       30a23b96afac28d0af57877b0031f10d8569373b /python
parent     fdbfb8dc55df8c3739feca8c52c017c56b006573 (diff)
parent     6184ecca3a2e072c11482020938566dc8841bf52 (diff)
Merge branch 'bnewbold-import-fileset'
Diffstat (limited to 'python')
-rw-r--r--  python/.gitignore                                             2
-rwxr-xr-x  python/fatcat_import.py                                      95
-rw-r--r--  python/fatcat_tools/importers/__init__.py                     3
-rw-r--r--  python/fatcat_tools/importers/common.py                       4
-rw-r--r--  python/fatcat_tools/importers/fileset_generic.py             83
-rw-r--r--  python/fatcat_tools/importers/ingest.py                     248
-rw-r--r--  python/fatcat_tools/transforms/ingest.py                     16
-rw-r--r--  python/tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json    1
-rw-r--r--  python/tests/import_fileset_generic.py                       59
9 files changed, 507 insertions, 4 deletions
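
For orientation, the merge adds a generic FilesetImporter plus ingest-result and Save Paper Now fileset importers, wired into fatcat_import.py. A minimal sketch of driving the generic importer directly from Python, mirroring run_fileset() in the diff below; the API endpoint and input filename are assumptions, and creating entities requires an auth token (the authenticated_api() helper reads FATCAT_API_AUTH_TOKEN from the environment):

    # a minimal sketch, assuming a local fatcat API instance and an input
    # file of fileset entity JSON lines (both hypothetical)
    from fatcat_tools import authenticated_api
    from fatcat_tools.importers import FilesetImporter, JsonLinePusher

    api = authenticated_api("http://localhost:9411/v0")  # assumed endpoint
    fmi = FilesetImporter(api, edit_batch_size=100)
    with open("fileset_entities.json", "r") as f:  # hypothetical input
        counts = JsonLinePusher(fmi, f).run()
    print(counts)

The equivalent CLI invocation would be `./fatcat_import.py fileset fileset_entities.json`.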
diff --git a/python/.gitignore b/python/.gitignore
index e11492e6..b3d22ecf 100644
--- a/python/.gitignore
+++ b/python/.gitignore
@@ -6,6 +6,8 @@ dist/
*.egg-info
*.json.gz
fatcat-*.tar.gz
+old/
+TODO*
!example.env
!.coveragerc
diff --git a/python/fatcat_import.py b/python/fatcat_import.py
index b82e81c7..41a51ad4 100755
--- a/python/fatcat_import.py
+++ b/python/fatcat_import.py
@@ -164,6 +164,27 @@ def run_ingest_web(args):
else:
JsonLinePusher(iwri, args.json_file).run()
+def run_ingest_fileset(args):
+ ifri = IngestFilesetResultImporter(args.api,
+ editgroup_description=args.editgroup_description_override,
+ skip_source_allowlist=args.skip_source_allowlist,
+ do_updates=args.do_updates,
+ default_link_rel=args.default_link_rel,
+ edit_batch_size=args.batch_size)
+ if args.kafka_mode:
+ KafkaJsonPusher(
+ ifri,
+ args.kafka_hosts,
+ args.kafka_env,
+ "ingest-fileset-results",
+ "fatcat-{}-ingest-fileset-result".format(args.kafka_env),
+ kafka_namespace="sandcrawler",
+ consume_batch_size=args.batch_size,
+ force_flush=True,
+ ).run()
+ else:
+ JsonLinePusher(ifri, args.json_file).run()
+
def run_savepapernow_file(args):
ifri = SavePaperNowFileImporter(args.api,
editgroup_description=args.editgroup_description_override,
@@ -200,6 +221,24 @@ def run_savepapernow_web(args):
else:
JsonLinePusher(ifri, args.json_file).run()
+def run_savepapernow_fileset(args):
+ ifri = SavePaperNowFilesetImporter(args.api,
+ editgroup_description=args.editgroup_description_override,
+ edit_batch_size=args.batch_size)
+ if args.kafka_mode:
+ KafkaJsonPusher(
+ ifri,
+ args.kafka_hosts,
+ args.kafka_env,
+ "ingest-file-results",
+ "fatcat-{}-savepapernow-fileset-result".format(args.kafka_env),
+ kafka_namespace="sandcrawler",
+ consume_batch_size=args.batch_size,
+ force_flush=True,
+ ).run()
+ else:
+ JsonLinePusher(ifri, args.json_file).run()
+
def run_grobid_metadata(args):
fmi = GrobidMetadataImporter(args.api,
edit_batch_size=args.batch_size,
@@ -325,6 +364,14 @@ def run_file_meta(args):
)
JsonLinePusher(fmi, args.json_file).run()
+def run_fileset(args):
+ fmi = FilesetImporter(
+ args.api,
+ edit_batch_size=100,
+ skip_release_fileset_check=args.skip_release_fileset_check,
+ )
+ JsonLinePusher(fmi, args.json_file).run()
+
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
@@ -561,6 +608,28 @@ def main():
default="web",
help="default URL rel for matches (eg, 'publisher', 'web')")
+ sub_ingest_fileset = subparsers.add_parser('ingest-fileset-results',
+ help="add/update fileset entities linked to releases based on sandcrawler ingest results")
+ sub_ingest_fileset.set_defaults(
+ func=run_ingest_fileset,
+ auth_var="FATCAT_AUTH_WORKER_CRAWL",
+ )
+ sub_ingest_fileset.add_argument('json_file',
+ help="ingest_fileset JSON file to import from",
+ default=sys.stdin, type=argparse.FileType('r'))
+ sub_ingest_fileset.add_argument('--skip-source-allowlist',
+ action='store_true',
+ help="don't filter import based on request source allowlist")
+ sub_ingest_fileset.add_argument('--kafka-mode',
+ action='store_true',
+ help="consume from kafka topic (not stdin)")
+ sub_ingest_fileset.add_argument('--do-updates',
+ action='store_true',
+ help="update pre-existing fileset entities if new match (instead of skipping)")
+ sub_ingest_fileset.add_argument('--default-link-rel',
+ default="fileset",
+ help="default URL rel for matches (eg, 'publisher', 'web')")
+
sub_savepapernow_file = subparsers.add_parser('savepapernow-file-results',
help="add file entities crawled due to async Save Paper Now request")
sub_savepapernow_file.set_defaults(
@@ -587,6 +656,19 @@ def main():
action='store_true',
help="consume from kafka topic (not stdin)")
+ sub_savepapernow_fileset = subparsers.add_parser('savepapernow-fileset-results',
+ help="add fileset entities crawled due to async Save Paper Now request")
+ sub_savepapernow_fileset.set_defaults(
+ func=run_savepapernow_fileset,
+ auth_var="FATCAT_AUTH_WORKER_SAVEPAPERNOW",
+ )
+ sub_savepapernow_fileset.add_argument('json_file',
+ help="ingest-file JSON file to import from",
+ default=sys.stdin, type=argparse.FileType('r'))
+ sub_savepapernow_fileset.add_argument('--kafka-mode',
+ action='store_true',
+ help="consume from kafka topic (not stdin)")
+
sub_grobid_metadata = subparsers.add_parser('grobid-metadata',
help="create release and file entities based on GROBID PDF metadata extraction")
sub_grobid_metadata.set_defaults(
@@ -750,6 +832,19 @@ def main():
help="File with jsonlines from file_meta schema to import from",
default=sys.stdin, type=argparse.FileType('r'))
+ sub_fileset = subparsers.add_parser('fileset',
+ help="generic fileset importer")
+ sub_fileset.set_defaults(
+ func=run_fileset,
+ auth_var="FATCAT_API_AUTH_TOKEN",
+ )
+ sub_fileset.add_argument('json_file',
+ help="File with jsonlines of fileset entities to import",
+ default=sys.stdin, type=argparse.FileType('r'))
+ sub_fileset.add_argument('--skip-release-fileset-check',
+ action='store_true',
+ help="create without checking if releases already have related filesets")
+
args = parser.parse_args()
if not args.__dict__.get("func"):
print("tell me what to do!")
diff --git a/python/fatcat_tools/importers/__init__.py b/python/fatcat_tools/importers/__init__.py
index 9cb18506..a2224081 100644
--- a/python/fatcat_tools/importers/__init__.py
+++ b/python/fatcat_tools/importers/__init__.py
@@ -27,9 +27,10 @@ from .orcid import OrcidImporter
from .arabesque import ArabesqueMatchImporter, ARABESQUE_MATCH_WHERE_CLAUSE
from .wayback_static import auto_wayback_static
from .cdl_dash_dat import auto_cdl_dash_dat
-from .ingest import IngestFileResultImporter, SavePaperNowFileImporter, IngestWebResultImporter, SavePaperNowWebImporter
+from .ingest import IngestFileResultImporter, SavePaperNowFileImporter, IngestWebResultImporter, SavePaperNowWebImporter, IngestFilesetResultImporter, SavePaperNowFilesetImporter
from .shadow import ShadowLibraryImporter
from .file_meta import FileMetaImporter
from .doaj_article import DoajArticleImporter
from .dblp_release import DblpReleaseImporter
from .dblp_container import DblpContainerImporter
+from .fileset_generic import FilesetImporter
diff --git a/python/fatcat_tools/importers/common.py b/python/fatcat_tools/importers/common.py
index e936477c..680b4f9c 100644
--- a/python/fatcat_tools/importers/common.py
+++ b/python/fatcat_tools/importers/common.py
@@ -447,6 +447,10 @@ class EntityImporter:
existing.urls = [u for u in existing.urls if u.url not in redundant_urls]
return existing
+ @staticmethod
+ def generic_fileset_cleanups(existing):
+ return existing
+
def match_existing_release_fuzzy(self, release: ReleaseEntity) -> Optional[Tuple[str, str, ReleaseEntity]]:
"""
This helper function uses fuzzycat (and elasticsearch) to look for
diff --git a/python/fatcat_tools/importers/fileset_generic.py b/python/fatcat_tools/importers/fileset_generic.py
new file mode 100644
index 00000000..f0ad5460
--- /dev/null
+++ b/python/fatcat_tools/importers/fileset_generic.py
@@ -0,0 +1,83 @@
+
+import fatcat_openapi_client
+
+from fatcat_tools import entity_from_dict
+from .common import EntityImporter
+
+
+class FilesetImporter(EntityImporter):
+ """
+    General purpose importer for fileset entities. Simply takes fileset
+    schema JSON and inserts the entities.
+
+ By default requires release_ids to be non-empty, and will check each
+ release_id to see if a fileset is already associated; if so, skips the
+ import. This behavior may change in the future, and can be disabled.
+
+ Currently only creates (insert), no updates.
+ """
+
+ def __init__(self, api, **kwargs):
+
+ eg_desc = kwargs.pop('editgroup_description', None) or "Generic Fileset entity import"
+ eg_extra = kwargs.pop('editgroup_extra', dict())
+ eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.FilesetImporter')
+ kwargs['do_updates'] = bool(kwargs.get("do_updates", False))
+ self.skip_release_fileset_check = bool(kwargs.get("skip_release_fileset_check", False))
+ super().__init__(api,
+ editgroup_description=eg_desc,
+ editgroup_extra=eg_extra,
+ **kwargs)
+
+ # bezerk mode doesn't make sense for this importer
+ assert self.bezerk_mode == False
+
+ def want(self, row):
+ if not row.get('release_ids'):
+ self.counts['skip-no-release-ids'] += 1
+ return False
+ if not row.get('urls'):
+ self.counts['skip-no-urls'] += 1
+ return False
+ if not row.get('manifest'):
+ self.counts['skip-no-files'] += 1
+ return False
+
+ for f in row.get('manifest'):
+ for k in ('sha1', 'md5'):
+ if not f.get(k):
+ self.counts['skip-missing-file-field'] += 1
+ return False
+ return True
+
+ def parse_record(self, row):
+
+ fse = entity_from_dict(
+ row,
+ fatcat_openapi_client.FilesetEntity,
+ api_client=self.api.api_client,
+ )
+ fse = self.generic_fileset_cleanups(fse)
+ return fse
+
+ def try_update(self, fse):
+
+ if not self.skip_release_fileset_check:
+ for release_id in fse.release_ids:
+ # don't catch 404, that would be an error
+ release = self.api.get_release(release_id, expand='filesets', hide='abstracts,refs')
+ assert release.state == 'active'
+ if release.filesets:
+ self.counts['exists'] += 1
+ self.counts['exists-via-release-filesets'] += 1
+ return False
+
+ # do the insert
+ return True
+
+ def insert_batch(self, batch):
+ self.api.create_fileset_auto_batch(fatcat_openapi_client.FilesetAutoBatch(
+ editgroup=fatcat_openapi_client.Editgroup(
+ description=self.editgroup_description,
+ extra=self.editgroup_extra),
+ entity_list=batch))
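
The want() filter above requires non-empty release_ids, urls, and manifest, with sha1 and md5 present on every manifest entry. A minimal row that passes, reusing the `fmi` importer from the earlier sketch; the hashes and path are taken from the test fixture later in this diff, the URL is an assumption:

    row = {
        "release_ids": ["aaaaaaaaaaaaarceaaaaaaaaai"],
        "urls": [{"url": "https://example.org/dataset/", "rel": "web"}],  # assumed
        "manifest": [{
            "path": "070111_LatA_100nM.txt",
            "size": 640500,
            "md5": "742c40404c9a4dbbd77c0985201c639f",
            "sha1": "cc9bd558ca79b30b2966714da7ef4129537fde0c",
        }],
    }
    assert fmi.want(row)
    fse = fmi.parse_record(row)  # FilesetEntity, via entity_from_dict()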
diff --git a/python/fatcat_tools/importers/ingest.py b/python/fatcat_tools/importers/ingest.py
index bc759219..288c4cff 100644
--- a/python/fatcat_tools/importers/ingest.py
+++ b/python/fatcat_tools/importers/ingest.py
@@ -64,6 +64,9 @@ class IngestFileResultImporter(EntityImporter):
"application/jats+xml", "application/tei+xml", "text/xml"):
self.counts['skip-mimetype'] += 1
return False
+ elif row['request'].get('ingest_type') in ['component', 'src', 'dataset-file']:
+ # we rely on sandcrawler for these checks
+ pass
else:
self.counts['skip-ingest-type'] += 1
return False
@@ -336,7 +339,7 @@ class SavePaperNowFileImporter(IngestFileResultImporter):
eg_desc = kwargs.pop('editgroup_description', None) or "Files crawled after a public 'Save Paper Now' request"
eg_extra = kwargs.pop('editgroup_extra', dict())
- eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.IngestFileSavePaperNow')
+ eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.SavePaperNowFileImporter')
kwargs['submit_mode'] = submit_mode
kwargs['require_grobid'] = False
kwargs['do_updates'] = False
@@ -533,7 +536,7 @@ class SavePaperNowWebImporter(IngestWebResultImporter):
eg_desc = kwargs.pop('editgroup_description', None) or "Webcaptures crawled after a public 'Save Paper Now' request"
eg_extra = kwargs.pop('editgroup_extra', dict())
- eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.IngestWebSavePaperNow')
+ eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.SavePaperNowWebImporter')
kwargs['submit_mode'] = submit_mode
kwargs['do_updates'] = False
super().__init__(api,
@@ -573,3 +576,244 @@ class SavePaperNowWebImporter(IngestWebResultImporter):
return False
return True
+
+
+class IngestFilesetResultImporter(IngestFileResultImporter):
+ """
+    Variant of IngestFileResultImporter for processing ingest results (eg,
+    dataset ingests) into fileset entities.
+ """
+
+ def __init__(self, api, **kwargs):
+
+ eg_desc = kwargs.pop('editgroup_description', None) or "Filesets crawled from web using sandcrawler ingest tool"
+ eg_extra = kwargs.pop('editgroup_extra', dict())
+ eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.IngestFilesetResultImporter')
+ kwargs['do_updates'] = False
+ super().__init__(api,
+ editgroup_description=eg_desc,
+ editgroup_extra=eg_extra,
+ **kwargs)
+ self.max_file_count = 300
+
+ def want_fileset(self, row):
+
+        if not row.get('manifest'):
+ self.counts['skip-empty-manifest'] += 1
+ return False
+
+ if len(row.get('manifest')) == 1:
+ self.counts['skip-single-file'] += 1
+ return False
+
+ if len(row.get('manifest')) > self.max_file_count:
+ self.counts['skip-too-many-files'] += 1
+ return False
+
+ return True
+
+ def want(self, row):
+
+ if not self.want_ingest(row):
+ return False
+
+ # fileset-specific filters
+ if row['request'].get('ingest_type') not in ['dataset',]:
+ self.counts['skip-ingest-type'] += 1
+ return False
+
+ if not self.want_fileset(row):
+ return False
+
+ return True
+
+ def parse_fileset_urls(self, row):
+ if not row.get('strategy'):
+ return []
+ strategy = row['strategy']
+ urls = []
+ if strategy == 'archiveorg-fileset' and row.get('archiveorg_item_name'):
+ urls.append(fatcat_openapi_client.FilesetUrl(
+ url=f"https://archive.org/download/{row['archiveorg_item_name']}/",
+ rel="archive-base",
+ ))
+        # need both the wayback timestamp and URL to construct webarchive-base
+        if row['strategy'].startswith('web-') and row.get('web_base_url') and row.get('web_base_url_dt'):
+ urls.append(fatcat_openapi_client.FilesetUrl(
+ url=f"https://web.archive.org/web/{row['web_base_url_dt']}/{row['web_base_url']}",
+ rel="webarchive-base",
+ ))
+ # TODO: repository-base
+ # TODO: web-base
+
+        if row['strategy'] == 'archiveorg-fileset-bundle' and row.get('archiveorg_item_name'):
+            # TODO: confirm where the bundle path within the item comes from;
+            # assuming the ingest result carries it (hypothetical key)
+            bundle_path = row.get('archiveorg_bundle_path', '')
+            urls.append(fatcat_openapi_client.FilesetUrl(
+                url=f"https://archive.org/download/{row['archiveorg_item_name']}/{bundle_path}",
+                rel="archive-bundle",
+            ))
+
+ if row['strategy'] == 'web-fileset-bundle' and row.get('platform_bundle_url'):
+ urls.append(fatcat_openapi_client.FilesetUrl(
+ url=f"https://web.archive.org/web/{row['web_bundle_url_dt']}/{row['web_bundle_url']}",
+ rel="webarchive-bundle",
+ ))
+
+ # add any additional / platform URLs here
+ if row.get('platform_bundle_url'):
+ urls.append(fatcat_openapi_client.FilesetUrl(
+ url=row['platform_bundle_url'],
+ rel="repository-bundle",
+ ))
+        if row.get('platform_base_url'):
+            urls.append(fatcat_openapi_client.FilesetUrl(
+                url=row['platform_base_url'],
+                rel="repository-base",
+            ))
+ return urls
+
+ def parse_record(self, row):
+
+ request = row['request']
+
+ # double check that want() filtered request correctly
+ if request.get('ingest_type') not in ["dataset",]:
+ self.counts['skip-ingest-type'] += 1
+ return None
+
+ # identify release by fatcat ident, or extid lookup
+ release_ident = self.parse_ingest_release_ident(row)
+
+ if not release_ident:
+ self.counts['skip-release-not-found'] += 1
+ return None
+
+ entity_extra = dict()
+ edit_extra = self.parse_edit_extra(row)
+ edit_extra['ingest_strategy'] = row['ingest_strategy']
+ if row.get('platform'):
+ edit_extra['platform'] = row['platform']
+ if row.get('platform_id'):
+ edit_extra['platform_id'] = row['platform_id']
+
+ entity_urls = self.parse_fileset_urls(row)
+ if not entity_urls:
+ self.counts['skip-no-access-url'] += 1
+ return None
+
+ assert row['file_count'] == len(row['manifest'])
+ if row['file_count'] > self.max_file_count:
+ self.counts['skip-too-many-manifest-files'] += 1
+ return None
+
+ manifest = []
+ for ingest_file in row['manifest']:
+ fsf = fatcat_openapi_client.FilesetFile(
+ path=ingest_file['path'],
+ size=ingest_file['size'],
+ md5=ingest_file['md5'],
+ sha1=ingest_file['sha1'],
+ sha256=ingest_file.get('sha256'),
+ extra=dict(
+ mimetype=ingest_file['mimetype'],
+ ),
+ )
+ if not (fsf.md5 and fsf.sha1 and fsf.path and fsf.size):
+ self.counts['skip-partial-file-info'] += 1
+ return None
+ if ingest_file.get('platform_url'):
+ # XXX: should we include this?
+ fsf.extra['original_url'] = ingest_file['platform_url']
+ if ingest_file.get('terminal_url') and ingest_file.get('terminal_dt'):
+ fsf.extra['wayback_url'] = f"https://web.archive.org/web/{ingest_file['terminal_dt']}/{ingest_file['terminal_url']}"
+ manifest.append(fsf)
+
+ fe = fatcat_openapi_client.FilesetEntity(
+ manifest=manifest,
+ urls=entity_urls,
+ release_ids=[release_ident],
+ )
+
+ if entity_extra:
+ fe.extra = entity_extra
+ if edit_extra:
+ fe.edit_extra = edit_extra
+ return fe
+
+ def try_update(self, wc):
+
+        # check for existing edits-in-progress with the same URLs
+        for other in self._entity_queue:
+            # XXX: better duplicate check? fileset entities have no single
+            # original_url, so compare URL lists as a rough proxy
+            if set(u.url for u in other.urls) == set(u.url for u in wc.urls):
+                self.counts['skip-in-queue'] += 1
+                return False
+
+ # lookup sha1, or create new entity (TODO: API doesn't support this yet)
+ #existing = None
+
+ # NOTE: in lieu of existing checks (by lookup), only allow one fileset per release
+ release = self.api.get_release(wc.release_ids[0], expand="filesets")
+ if release.filesets:
+            # XXX: better duplicate check? fileset entities have no single
+            # original_url, so compare URL lists as a rough proxy; check if
+            # this is an existing match, or just a similar hit
+            for other in release.filesets:
+                if set(u.url for u in wc.urls) == set(u.url for u in other.urls):
+                    # TODO: compare very similar timestamps of same time (different formats)
+                    self.counts['exists'] += 1
+                    return False
+ self.counts['skip-release-has-fileset'] += 1
+ return False
+
+ return True
+
+ def insert_batch(self, batch):
+ if self.submit_mode:
+ eg = self.api.create_editgroup(fatcat_openapi_client.Editgroup(
+ description=self.editgroup_description,
+ extra=self.editgroup_extra))
+ for fe in batch:
+ self.api.create_fileset(eg.editgroup_id, fe)
+ self.api.update_editgroup(eg.editgroup_id, eg, submit=True)
+ else:
+ self.api.create_fileset_auto_batch(fatcat_openapi_client.FilesetAutoBatch(
+ editgroup=fatcat_openapi_client.Editgroup(
+ description=self.editgroup_description,
+ extra=self.editgroup_extra),
+ entity_list=batch))
+
+
+class SavePaperNowFilesetImporter(IngestFilesetResultImporter):
+ """
+ Like SavePaperNowFileImporter, but for fileset/dataset ingest.
+ """
+
+ def __init__(self, api, submit_mode=True, **kwargs):
+
+        eg_desc = kwargs.pop('editgroup_description', None) or "Filesets crawled after a public 'Save Paper Now' request"
+ eg_extra = kwargs.pop('editgroup_extra', dict())
+ eg_extra['agent'] = eg_extra.get('agent', 'fatcat_tools.SavePaperNowFilesetImporter')
+ kwargs['submit_mode'] = submit_mode
+ kwargs['do_updates'] = False
+ super().__init__(api,
+ editgroup_description=eg_desc,
+ editgroup_extra=eg_extra,
+ **kwargs)
+
+ def want(self, row):
+
+ source = row['request'].get('ingest_request_source')
+ if not source:
+ self.counts['skip-ingest_request_source'] += 1
+ return False
+ if not source.startswith('savepapernow'):
+ self.counts['skip-not-savepapernow'] += 1
+ return False
+
+ if row.get('hit') != True:
+ self.counts['skip-hit'] += 1
+ return False
+
+ if not self.want_fileset(row):
+ return False
+
+ return True
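
For reference, a hedged sketch of the kind of sandcrawler ingest-fileset result row that IngestFilesetResultImporter consumes. The field names are the ones the code above reads; the manifest values come from the test fixture below, the remaining values are invented for illustration, and the request fields used for release lookup by parse_ingest_release_ident() are elided:

    row = {
        "request": {"ingest_type": "dataset"},  # plus fields for release lookup
        "hit": True,
        "status": "success",
        "strategy": "archiveorg-fileset",
        "ingest_strategy": "archiveorg-fileset",
        "archiveorg_item_name": "example-dataset-item",  # invented
        "file_count": 2,
        "manifest": [
            {"path": "070111_LatA_100nM.txt", "size": 640500,
             "md5": "742c40404c9a4dbbd77c0985201c639f",
             "sha1": "cc9bd558ca79b30b2966714da7ef4129537fde0c",
             "mimetype": "text/plain"},
            {"path": "060111_Ctrl_DMSO.txt", "size": 1499484,
             "md5": "e45d39263eda5e7763002cf61f708ce4",
             "sha1": "3c90a1d86e7a2b11ce4994afe950031dfb99ddf2",
             "mimetype": "text/plain"},
        ],
    }

Note that want_fileset() rejects single-file manifests and caps imports at max_file_count (300) files.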
diff --git a/python/fatcat_tools/transforms/ingest.py b/python/fatcat_tools/transforms/ingest.py
index 42927b2a..9101a4ec 100644
--- a/python/fatcat_tools/transforms/ingest.py
+++ b/python/fatcat_tools/transforms/ingest.py
@@ -32,8 +32,22 @@ def release_ingest_request(release, ingest_request_source='fatcat', ingest_type=
if (not ingest_type) and release.container_id:
ingest_type = INGEST_TYPE_CONTAINER_MAP.get(release.container_id)
+
if not ingest_type:
- ingest_type = 'pdf'
+ if release.release_type == 'stub':
+ return None
+ elif release.release_type in ['component', 'graphic']:
+ ingest_type = 'component'
+ elif release.release_type == 'dataset':
+ ingest_type = 'dataset'
+ elif release.release_type == 'software':
+ ingest_type = 'software'
+ elif release.release_type == 'post-weblog':
+ ingest_type = 'html'
+ elif release.release_type in ['article-journal', 'article', 'chapter', 'paper-conference', 'book', 'report', 'thesis']:
+ ingest_type = 'pdf'
+ else:
+ ingest_type = 'pdf'
# generate a URL where we expect to find fulltext
url = None
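
The fallback chain added above amounts to a small lookup: 'stub' releases generate no ingest request at all, a few release types map to specialized ingest types, and everything else (including the explicitly listed article types) defaults to 'pdf'. An equivalent restatement, not the actual code:

    # equivalent restatement of the release_type -> ingest_type fallback above
    FALLBACK_INGEST_TYPE = {
        "component": "component",
        "graphic": "component",
        "dataset": "dataset",
        "software": "software",
        "post-weblog": "html",
    }

    def fallback_ingest_type(release_type):
        if release_type == "stub":
            return None  # no ingest request generated
        return FALLBACK_INGEST_TYPE.get(release_type, "pdf")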
diff --git a/python/tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json b/python/tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json
new file mode 100644
index 00000000..6ba9d573
--- /dev/null
+++ b/python/tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json
@@ -0,0 +1 @@
+{"extra":{"cdl_dash":{"version":1}},"ident":"ltjp7k2nrbes3or5h4na5qgxlu","manifest":[{"extra":{"mimetype":"text/plain"},"md5":"742c40404c9a4dbbd77c0985201c639f","path":"070111_LatA_100nM.txt","sha1":"cc9bd558ca79b30b2966714da7ef4129537fde0c","sha256":"3a7c07ad17ce3638d5a1dd21f995a496e430b952eef00270ad741d506984370f","size":640500},{"extra":{"mimetype":"text/plain"},"md5":"e45d39263eda5e7763002cf61f708ce4","path":"060111_Ctrl_DMSO.txt","sha1":"3c90a1d86e7a2b11ce4994afe950031dfb99ddf2","sha256":"c1d5a888bce2a12478b2aca1113c0db923fb3a9978858497889b81902165927c","size":1499484},{"extra":{"mimetype":"text/plain"},"md5":"548523f7b4c5170cc220a091bf480e32","path":"052311_CytoB.txt","sha1":"4862246b5f48ac8f446ff65fdffe0a5af5520dc9","sha256":"4203b83eab45d7adbe8f738527bef9ddbc1730ceeeff05dfa4193a442064049c","size":121910},{"extra":{"mimetype":"text/plain"},"md5":"b387e141c84292ec637eb4333287dd1e","path":"062812_CytoB4_8uL.txt","sha1":"ecd5dce5e1214401d6cf09c8df6565818610d75b","sha256":"fa40b428a57cf4a95f68286988c742cfc1e919130d42e5090de3fb6963c3eafb","size":215477},{"extra":{"mimetype":"text/plain"},"md5":"e143909e79f1a7ee70ac411c6e61ef8c","path":"021611_H929.txt","sha1":"135096114b2d993e6729c81a2e2df0909dc999bc","sha256":"b0d409b90c9f9c4e1d9c84b712cd955e37544cb5749e9e3f398b90ee86a760b6","size":173751},{"extra":{"mimetype":"text/plain"},"md5":"7a73dd72af5c59612bb2d3a5450aedee","path":"062912_LatA200nM.txt","sha1":"7211121b8f8fc7ba76c55c35cc5d7e64c554914e","sha256":"ef2ed92a5a5c21816d713b2bdbcc2a406a39d2ea8f8dde961f4a976dc116d1f8","size":578291},{"extra":{"mimetype":"text/plain"},"md5":"f3eeac4f72e71b6e58dff8c39d92220b","path":"022212_DMSO.txt","sha1":"ede26cbc8d9ebc881277f81c962c212c09b1680f","sha256":"3b5b613ae6c0a06b0c013e3f439a3fd709677c30758516661fcf8134880a7199","size":1748533},{"extra":{"mimetype":"text/plain"},"md5":"b9feaaed607da9054b8aa7ec949a821d","path":"062012_LatA50nM.txt","sha1":"45ed08cf4734c44146dd900785a7a9ba8bcf72d9","sha256":"39522709b157d1e479cd102e53221a367488c9f8535c4ce85aabbf8dc457b70b","size":1320060},{"extra":{"mimetype":"text/plain"},"md5":"a3c3cf340773ac2d16e946a7c10cfddb","path":"070612_DMSO.txt","sha1":"4296e1cd6ef8e32aa7224eac8377f47ca7d645c1","sha256":"700966fc77dddf7438863ed118d64ed31ffe57b239781bb8b9594e9136d2c91f","size":1565988},{"extra":{"mimetype":"text/plain"},"md5":"fdc8c5c7dced7b38752d5dd44a51e8e8","path":"030712_Ctrl.txt","sha1":"98e1c3f6af26ad4cf0d1ab6b43ac1b4aa779cef9","sha256":"7b0c8bcfaf0df17f206c977aaf7950b26d99480579a56bc53f447b3d7a630e7b","size":1326229},{"extra":{"mimetype":"text/plain"},"md5":"408220ddeef9a5741354ee544bf0b348","path":"022012_Taxol.txt","sha1":"3bb86d180ebf2b7179b38d90094cbc7bea7406f7","sha256":"d10e473da5557296df0954bf198678661e64b3656f3920e93127ad1d31684713","size":1635260},{"extra":{"mimetype":"text/plain"},"md5":"80a782103b95a22beb76cbb1f577aa68","path":"021912_DMSO.txt","sha1":"a79f45942044684a768e8bfda0c8e5f4c659882e","sha256":"23ba99985e98e2d87eda793e4e0c637716844c91051f217b0964cd8b7cc7345a","size":1082779},{"extra":{"mimetype":"text/plain"},"md5":"1c1f27ff119969d809dbeef3d47656af","path":"021812_LatA.txt","sha1":"f070f9265add073d052af20021b96ed8ef1ab57b","sha256":"cda3a1ff0d94a61577a267205797a8e4f5432b0cc586b91f2fd251b7019b16b3","size":1229189},{"extra":{"mimetype":"text/plain"},"md5":"1d7dfd73ae5fe71d2548746181de5758","path":"061611_CytoB.txt","sha1":"9e277c91d99f064566b3df67c2453d2c3d85b77e","sha256":"b4740947b9330f5f234eb597f25e917c801ad75106f0361a4bf4c795543636d6","size":548117},{"extra":{"mimetype":"text/plain"},"md5":"04
702e0535a62adebd50ccab4ef9b9d3","path":"040312_Ctrl.txt","sha1":"e9becc2ef210540bdd17ed9c42c25193721b69b2","sha256":"2fafb8eb7c61fc1832eb333b733f60d78cb5fdf39af3e3259fe38563029beb76","size":2027114},{"extra":{"mimetype":"text/plain"},"md5":"f5c291b532a534030046b722f68a362b","path":"072712_CH12.txt","sha1":"c90d5ab0f53f288d953ef84ee3433a8eb6a4578a","sha256":"6dd01fcef271facefa8a6db85dd67f0abfc21b59c7a53dc10c0358680b602752","size":625260},{"extra":{"mimetype":"text/plain"},"md5":"5709144abf8d960bd3d0bdbee87d6fb5","path":"030812_Noco.txt","sha1":"3577e12cc4b9432f95492ca6bb2a822e899a17b0","sha256":"8a421a72194eb815410e33b255dcb6763c46edaa19b8b39685cf1ae3b6a29eca","size":50747},{"extra":{"mimetype":"text/plain"},"md5":"e306220f25c5ad0863733a050de546c9","path":"071612_CH12.txt","sha1":"0fac76b0e579540a475f8ad44e14544cf9dd7e05","sha256":"3b2d040ae222f28774d78ef16c033fc5102a8707d385d0b35d6ff537d1763b5c","size":962172},{"extra":{"mimetype":"text/plain"},"md5":"5b72da6e75f1423bdee3c88cf6ae6233","path":"021712_LatA.txt","sha1":"5a6dd2e329716ccdbb7aeb29ef90b6d25111120c","sha256":"00e161b4a6197563fa58a670c6495378705a2b0aace260159a6a63c6ff4a9548","size":1440446},{"extra":{"mimetype":"text/plain"},"md5":"36ce1748c811d4ecc19c67c08533abcb","path":"062411_Bleb_half.txt","sha1":"0e6292cc9f3110063fdc6273986e2e5aa81bad89","sha256":"d02b01ede98936f9d321660089a3538aeb7db55109c6e1930ee8f8839d2aed14","size":836110},{"extra":{"mimetype":"text/plain"},"md5":"7aaeaaa4a84f32688402227ad118765c","path":"063012_CytoB2_4uL.txt","sha1":"2d36c589460c18f8cb9a8d99633cbe253dd7afbc","sha256":"06261efc45234416d52a16fe4e7ecf4fee894ab638c6412e1f8164cc6742094f","size":412193},{"extra":{"mimetype":"text/plain"},"md5":"a7befc9b21c8a299bde285029ace1db4","path":"061511_Ctrl_DMSO.txt","sha1":"768d79e65ce989720cbe309d9dac2648991a01f6","sha256":"25c9a83223533462c702f1fa0f7748752ac421cbdc71187fd80c3f2d2bbace1d","size":2023868},{"extra":{"mimetype":"text/plain"},"md5":"8e9223899deac53433f8462bb4f99127","path":"062212_Bleb20uM.txt","sha1":"d88b7041ceca633bd6a6947fa50fcdc939410e5f","sha256":"82b21305739ad3f0d42bb49fcba33eeed97a8fb503484aa95a33a9ebc7e06e44","size":1343606},{"extra":{"mimetype":"text/plain"},"md5":"df3f66a0a9b3cbc31869c089aa1ea1e7","path":"061412_Ctrl.txt","sha1":"99f35d510201666c424e5ea4ea8caa2462c5ee07","sha256":"0ae7603322afb3fdc02fc3ba8563524455d049bae3d6a26924a5fd658066d4eb","size":1009039},{"extra":{"mimetype":"text/plain"},"md5":"ff6e55043299d74316f505c2bb6291d2","path":"022312_Bleb.txt","sha1":"d91d552083f69fa0dffe240d71b5065a4d2adf31","sha256":"3435e8c66591ff309ecd276dd2173a7f7e6919afdb3963c6cb9ce82579006b8a","size":959741},{"extra":{"mimetype":"text/plain"},"md5":"1dbbd41e9c24456a4048b795b55293c3","path":"070312_Taxol100nM.txt","sha1":"2e9cc0865c89288157fd0d15196472e3fa254457","sha256":"ad0834349142a39435b731046e0aee32dccfae781d20686eb4195bb602da9d7d","size":1507593},{"extra":{"mimetype":"text/plain"},"md5":"40b6c4ae2a730d91d94a0f4d4a887996","path":"062311_Ctrl_DMSO.txt","sha1":"7956925a7d45ad24eeb4d960503f11a110717852","sha256":"c273a77f5f5ba065b8c1d658baf244cc2573a53d636c46e7f937d366fe3e5c6a","size":1936298},{"extra":{"mimetype":"text/plain"},"md5":"0ba45348b121e063ac5e4b3adb9a6eb8","path":"060911_CytoB.txt","sha1":"007437ea742ad21e940acafd3b5c814020e57ffb","sha256":"4c174a361aa9b436355bacb78100ab28c51a2cce0bc8114f05ea7dc398094a22","size":1210495},{"extra":{"mimetype":"text/plain"},"md5":"b7f3992920be473cd9ae332e484791c8","path":"061512_Noco30nM.txt","sha1":"88963bd98a9d65741b8c85f98305b713ed841144","sha256":"af56dfe
73cb0f23f44b3dfd9657fc234d41d0d294cfc8db0d41f4cfe673b35ce","size":1895883},{"extra":{"mimetype":"text/plain"},"md5":"caeb083db525282eba36112fe69f51f3","path":"030912_Noco150nM.txt","sha1":"d16fb6454e28ff974de9b73d1d7399a4c5513959","sha256":"b6c59323496e79f4e1a54cfbd78aa410e8fe0947f2beb44d049775b708c13a59","size":239209},{"extra":{"mimetype":"text/plain"},"md5":"6d465df98fa82951103f41a07136e95e","path":"062712_Taxol200nM.txt","sha1":"3c17307aa38adf576ce2c9946ba089713fe52592","sha256":"66aab2d864798ea15156bfb47f9bce3e69ba5f0d607177bf556642c2259d0bdc","size":1012783},{"extra":{"mimetype":"text/plain"},"md5":"def0ccab1abeeba8d3523e32ff87f6e5","path":"070212_Noco75nM.txt","sha1":"39f34aee4d6284f803519c38d7b533ed1331c30f","sha256":"ae001ff7b14064130c2398dd484278ec19bc904d996bc6eb65f93850ac55ad79","size":713317},{"extra":{"mimetype":"text/plain"},"md5":"09ba0f38b143884c4ef29a32cc4f7987","path":"051211_ctrl.txt","sha1":"586ff44c3fc0fe66dc5adbdf83918931ab4fec27","sha256":"70aa1727f06f5bf31113521d68d09100b5a0d1f836301e0f4f744bd1da385e6c","size":1804864},{"extra":{"mimetype":"text/plain"},"md5":"a7974029b9aef59c77f40d9e8ddd0d75","path":"031412_Noco75nM.txt","sha1":"c2fcd27d05f6200fc580d252fe07c07f74dbf2f1","sha256":"33125453c79bb7905d47e5d8c580c0de2b47c5761990e00564f53735e99242e4","size":952533},{"extra":{"mimetype":"text/plain"},"md5":"c4d6cc03d6b62bfb8831610ff1514c12","path":"062211_ctrl.txt","sha1":"53c50eb858664513cad21e2646914c57998006db","sha256":"1b33fb075ee7c01f46e83cff2e06cd5854de68bc4f9ad1b7c255e410c8c8fa95","size":1288861},{"extra":{"mimetype":"text/plain"},"md5":"a45357263abe633a7ea9466263d44487","path":"060811_Ctrl_DMSO.txt","sha1":"fcce389353151dec4078b18e86d444c276221541","sha256":"df9a3ca164a3bd5c21f867b78d1fd0968149dafc827d7909bd17020356306452","size":900320},{"extra":{"mimetype":"text/plain"},"md5":"0cb4014e77a7573052a7db8b75bd9754","path":"051811_Ctrl_DMSO.txt","sha1":"0d3fc44a1074b1a1c8dbdb7697d6e32ddfb9d300","sha256":"2a7b244469a20e9f199b9bb7194be36f08921fade689f94b1c4681b4c0265d0d","size":964288},{"extra":{"mimetype":"text/plain"},"md5":"b1bb04ddc494dbc10a839bd0bdd35887","path":"052011_Bleb.txt","sha1":"85f183e84317e3122f6c55e8922674d5e42b751a","sha256":"cbae5b6353ddbeeb1620066776ca0712cc3acb8ba57adae59bdca27d1d3db4e0","size":905367},{"extra":{"mimetype":"text/plain"},"md5":"8edcd44172814cf0676a0465246ddc90","path":"063011_Bleb_half.txt","sha1":"c8b5e178385556c754a51a1b1ea9ca1aae80cf33","sha256":"da90b25aa30dc9db8b0ee071b018f6a7531f79014170e82487c71f310fd2813a","size":988383},{"extra":{"mimetype":"text/plain"},"md5":"163ae1ff84bfd46682dc5ef6760386b6","path":"031312_Noco75nM.txt","sha1":"08b866ad28bef7cc8c331b6ef6a89a299e408eed","sha256":"df4c334b5f1c4a47e52ad1d1225c8bc918cb23ed399eef3e1b6f073d64d4e6cc","size":646355},{"extra":{"mimetype":"text/plain"},"md5":"c9088b2b788aeb8f510358ab87fcfa33","path":"062112_CytoB1_2uL.txt","sha1":"df43246d06270d4da382df677b28bb82851cb098","sha256":"72b907e677e96decaf149e09b7b8a7b07351dc46a04e74809258ee2c57ef116a","size":1999938},{"extra":{"mimetype":"text/plain"},"md5":"ba290d5d90b74a4cccb54898b30a76de","path":"022112_Taxol.txt","sha1":"d4db64e3cbde7100a5e9263b9cdb9f456b4d045b","sha256":"1cefbe823233b0b1e5064beb6b8480526b2551199b667542040837031d2604f1","size":1470448},{"extra":{"mimetype":"text/plain"},"md5":"57ab6a7a014191d21628b9953ec81cde","path":"021612_Ctrl.txt","sha1":"21ad38a1518520d16ca0fda04968ef2fa138f3e8","sha256":"48b6b152e9acff3c1343c6adf20d12e0ed40219e741213ed5038a557afc27655","size":1020372},{"extra":{"mimetype":"text/plain"},"md5":
"49c1c75ccf661c9b945147dcc2eb0b78","path":"102110_H929.txt","sha1":"f44a9270178424f14e38fcd581e38a6863562efa","sha256":"b6e55ee3afb5fc61ecf6ecfb59dcc86440c1b4f1b2dd7d426033ffc4928a377d","size":6018},{"extra":{"mimetype":"text/plain"},"md5":"d714083f1254002f22e512d870f913b4","path":"061812_Taxol50nM.txt","sha1":"883985161c76509020f6661d2253846d417fe26c","sha256":"f990df05db87819760d10b0c6385e670bfc5bb7746d278ecc1391ffe0a373371","size":732372},{"extra":{"mimetype":"text/plain"},"md5":"313f3fc6e698a477d399480bae9b43fc","path":"070512_Noco75nM.txt","sha1":"3e869ff3c6a42f3a0b041cf42484ce9885fcb3fd","sha256":"ccbeda703e57289e0a7926682b99ff538399c5592b2db9a430c950609d877a6e","size":836862},{"extra":{"mimetype":"text/plain"},"md5":"2960d8e6f01cbb5581f1dfeaf8129339","path":"062911_Ctrl_DMSO.txt","sha1":"a1426131c8558f08fa1776d328f68e1def657f44","sha256":"2c7cfe2232d5a9f41c93ab32c2870c002976e929a618b516cd14424e591fc225","size":822006},{"extra":{"mimetype":"text/plain"},"md5":"b33659f76a230de31b5ea3c49511672d","path":"060211_Bleb.txt","sha1":"3bb02bafcc8b54bfc11501c5e6adf5fcee93feeb","sha256":"db6d369dad1e7c90c157eb7e2705110ec23f8f58702f9fa26449cc6350cea56b","size":388758},{"extra":{"mimetype":"text/plain"},"md5":"a2b84ed9248649c93024638b8806416c","path":"073012_CH12.txt","sha1":"3c9ee5dcd56e07b67b32a797b5f9979c8a6765de","sha256":"758622d7d44fe7cc1550a851b94e0fdfaa39b559fe400e0cc840ce249999c817","size":4775762}],"release_ids":["aaaaaaaaaaaaarceaaaaaaaaai"],"revision":"0d6fd1f1-5784-44ef-927a-62f8bcb99c71","state":"active","urls":[{"rel":"repo-bundle","url":"https://merritt.cdlib.org/u/ark%3A%2Fb5068%2Fd1rp49/1"},{"rel":"repo","url":"https://merritt.cdlib.org/d/ark%3A%2Fb5068%2Fd1rp49/1/"},{"rel":"dweb","url":"dat://7f5f95752650ab2968ec6a0c491fe320937ab928f57bd88692b1086248ee2925/files/"}]}
diff --git a/python/tests/import_fileset_generic.py b/python/tests/import_fileset_generic.py
new file mode 100644
index 00000000..44310304
--- /dev/null
+++ b/python/tests/import_fileset_generic.py
@@ -0,0 +1,59 @@
+
+import json
+import pytest
+
+from fatcat_tools.importers import FilesetImporter, JsonLinePusher
+from fixtures import *
+
+
+@pytest.fixture(scope="function")
+def fileset_importer(api):
+ yield FilesetImporter(api)
+
+# TODO: use API to check that entities actually created...
+def test_fileset_importer_basic(fileset_importer):
+ with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+ JsonLinePusher(fileset_importer, f).run()
+
+def test_fileset_importer(fileset_importer):
+ last_index = fileset_importer.api.get_changelog(limit=1)[0].index
+ with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+ fileset_importer.bezerk_mode = True
+ counts = JsonLinePusher(fileset_importer, f).run()
+ assert counts['insert'] == 1
+ assert counts['exists'] == 0
+ assert counts['skip'] == 0
+
+ # fetch most recent editgroup
+ change = fileset_importer.api.get_changelog_entry(index=last_index+1)
+ eg = change.editgroup
+ assert eg.description
+ assert "generic fileset" in eg.description.lower()
+ assert eg.extra['git_rev']
+ assert "fatcat_tools.FilesetImporter" in eg.extra['agent']
+
+ # re-insert; should skip
+ with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+ fileset_importer.reset()
+ fileset_importer.bezerk_mode = False
+ counts = JsonLinePusher(fileset_importer, f).run()
+ assert counts['insert'] == 0
+ assert counts['exists'] == 1
+ assert counts['skip'] == 0
+
+def test_fileset_dict_parse(fileset_importer):
+ with open('tests/files/fileset_ltjp7k2nrbes3or5h4na5qgxlu.json', 'r') as f:
+ raw = json.loads(f.readline())
+ fs = fileset_importer.parse_record(raw)
+
+ assert fs.manifest[0].sha1 == "cc9bd558ca79b30b2966714da7ef4129537fde0c"
+ assert fs.manifest[0].md5 == "742c40404c9a4dbbd77c0985201c639f"
+ assert fs.manifest[0].sha256 == "3a7c07ad17ce3638d5a1dd21f995a496e430b952eef00270ad741d506984370f"
+ assert fs.manifest[0].size == 640500
+ assert fs.manifest[0].path == "070111_LatA_100nM.txt"
+ assert fs.manifest[0].extra['mimetype'] == "text/plain"
+ assert len(fs.urls) == 3
+ for u in fs.urls:
+ if u.rel == "repo":
+ assert u.url == "https://merritt.cdlib.org/d/ark%3A%2Fb5068%2Fd1rp49/1/"
+ assert len(fs.release_ids) == 1
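
The new tests run with the rest of the suite; from the python/ directory, something like `python -m pytest tests/import_fileset_generic.py` should exercise them (assuming the usual fatcat development environment and API fixtures).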