From f6f7450903bdbe36bd5fff146b942e34ad221557 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Fri, 25 Jan 2019 18:41:33 -0800
Subject: transform and import fixes/tweaks

---
 python/fatcat_tools/__init__.py                   |  4 +-
 python/fatcat_tools/importers/common.py           |  4 +-
 python/fatcat_tools/importers/journal_metadata.py | 10 ++++-
 python/fatcat_tools/transforms.py                 | 51 +++++++++++++++++------
 4 files changed, 51 insertions(+), 18 deletions(-)

(limited to 'python/fatcat_tools')

diff --git a/python/fatcat_tools/__init__.py b/python/fatcat_tools/__init__.py
index e2b1e3a2..64c45062 100644
--- a/python/fatcat_tools/__init__.py
+++ b/python/fatcat_tools/__init__.py
@@ -1,4 +1,6 @@
 
 from .api_auth import authenticated_api, public_api
 from .fcid import fcid2uuid, uuid2fcid
-from .transforms import entity_to_dict, entity_from_json, release_to_elasticsearch
+from .transforms import entity_to_dict, entity_from_json, \
+    release_to_elasticsearch, container_to_elasticsearch, \
+    changelog_to_elasticsearch
diff --git a/python/fatcat_tools/importers/common.py b/python/fatcat_tools/importers/common.py
index ebdce56f..a29b3019 100644
--- a/python/fatcat_tools/importers/common.py
+++ b/python/fatcat_tools/importers/common.py
@@ -236,8 +236,8 @@ class EntityImporter:
             self._entity_queue.append(entity)
             if len(self._entity_queue) >= self.edit_batch_size:
                 self.insert_batch(self._entity_queue)
-                self.counts['insert'] += len(_entity_queue)
-                self._entity_queue = 0
+                self.counts['insert'] += len(self._entity_queue)
+                self._entity_queue = []
 
     def want(self, raw_record):
         """
diff --git a/python/fatcat_tools/importers/journal_metadata.py b/python/fatcat_tools/importers/journal_metadata.py
index 7f6b1ee8..be62d63a 100644
--- a/python/fatcat_tools/importers/journal_metadata.py
+++ b/python/fatcat_tools/importers/journal_metadata.py
@@ -44,7 +44,7 @@ class JournalMetadataImporter(EntityImporter):
             editgroup_extra=eg_extra)
 
     def want(self, raw_record):
-        if raw_record.get('issnl'):
+        if raw_record.get('issnl') and raw_record.get('name'):
             return True
         return False
 
@@ -55,6 +55,10 @@ class JournalMetadataImporter(EntityImporter):
 
         returns a ContainerEntity (or None if invalid or couldn't parse)
         """
+        if not row.get('name'):
+            # Name is required (by schema)
+            return None
+
         extra = dict()
         for key in ('issne', 'issnp', 'languages', 'country', 'urls', 'abbrev',
                     'coden', 'aliases', 'original_name', 'first_year', 'last_year',
@@ -76,8 +80,10 @@ class JournalMetadataImporter(EntityImporter):
         extra_ia = dict()
         # TODO: would like an ia.longtail_ia flag
         if row.get('sim'):
+            # NB: None case of the .get() here is blech, but othrwise
+            # extra['ia'].get('sim') would be false-y, breaking 'any_ia_sim' later on
             extra_ia['sim'] = {
-                'year_spans': row['sim']['year_spans'],
+                'year_spans': row['sim'].get('year_spans'),
             }
         if extra_ia:
             extra['ia'] = extra_ia
diff --git a/python/fatcat_tools/transforms.py b/python/fatcat_tools/transforms.py
index a85c877c..7bb75c3e 100644
--- a/python/fatcat_tools/transforms.py
+++ b/python/fatcat_tools/transforms.py
@@ -231,20 +231,12 @@ def container_to_elasticsearch(entity):
         container_type = entity.container_type,
         issnl = entity.issnl,
         wikidata_qid = entity.wikidata_qid,
-
-        entity_status = entity.entity_status,
-        language = entity.language,
-        license = entity.license_slug,
-        doi = entity.doi,
-        pmid = entity.pmid,
-        isbn13 = entity.isbn13,
-        core_id = entity.core_id,
-        arxiv_id = entity.core_id,
-        jstor_id = entity.jstor_id,
     )
 
     # TODO: region, discipline
     # TODO: single primary language?
+    if not entity.extra:
+        entity.extra = dict()
     for key in ('country', 'languages', 'mimetypes', 'first_year', 'last_year'):
         if entity.extra.get(key):
             t[key] = entity.extra[key]
@@ -285,13 +277,46 @@ def container_to_elasticsearch(entity):
         if extra['ia'].get('sim'):
             any_ia_sim = True
 
-    t['in_doaj'] = is_doaj
-    t['in_road'] = is_road
+    t['in_doaj'] = in_doaj
+    t['in_road'] = in_road
     t['in_doi'] = in_doi
     t['in_sherpa_romeo'] = in_sherpa_romeo
-    t['is_oa'] = in_doaj or in_road or is_longtail_oa or ia_oa
+    t['is_oa'] = in_doaj or in_road or is_longtail_oa or is_oa
     t['is_longtail_oa'] = is_longtail_oa
     t['any_kbart'] = any_ia_sim
     t['any_jstor'] = any_ia_sim
     t['any_ia_sim'] = bool(any_ia_sim)
     return t
+
+
+def changelog_to_elasticsearch(entity):
+
+    editgroup = entity.editgroup
+    t = dict(
+        index=entity.index,
+        editgroup_id=entity.editgroup_id,
+        timestamp=entity.timestamp,
+        editor_id=editgroup.editor_id,
+    )
+
+    extra = editgroup.extra or dict()
+    if extra.get('agent'):
+        t['agent'] = extra['agent']
+
+    t['containers'] = len(editgroup.edits.containers)
+    t['creators'] = len(editgroup.edits.containers)
+    t['files'] = len(editgroup.edits.containers)
+    t['filesets'] = len(editgroup.edits.containers)
+    t['webcaptures'] = len(editgroup.edits.containers)
+    t['releases'] = len(editgroup.edits.containers)
+    t['works'] = len(editgroup.edits.containers)
+
+    # TODO: parse and pull out counts
+    #created = 0
+    #updated = 0
+    #deleted = 0
+    #t['created'] = created
+    #t['updated'] = updated
+    #t['deleted'] = deleted
+    #t['total'] = created + updated + deleted
+    return t
--
cgit v1.2.3
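
A minimal usage sketch of the container transform this patch exports (not part of the commit itself); the API host URL and the container ident below are placeholders, and it assumes a fatcat API client obtained via public_api():

    from fatcat_tools import public_api, container_to_elasticsearch

    # placeholder host and ident, for illustration only
    api = public_api('https://api.fatcat.wiki/v0')
    container = api.get_container('some-container-ident')

    # returns a plain dict suitable for indexing into an Elasticsearch container index
    es_doc = container_to_elasticsearch(container)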