about summary refs log tree commit diff stats
path: root/python/fatcat
diff options
context:
space:
mode:
authorBryan Newbold <bnewbold@robocracy.org>2018-09-11 16:16:39 -0700
committerBryan Newbold <bnewbold@robocracy.org>2018-09-11 16:16:39 -0700
commitf5812c8c3b062b5efb34e45702ee7df507f71e16 (patch)
tree0eb409c0da5d212c82788c5a34d58ac864e9a9c8 /python/fatcat
parent91c080a2e82ec4e8908cb8e3916a543519151847 (diff)
downloadfatcat-f5812c8c3b062b5efb34e45702ee7df507f71e16.tar.gz
fatcat-f5812c8c3b062b5efb34e45702ee7df507f71e16.zip
python implementation of new editgroup param
Diffstat (limited to 'python/fatcat')
-rw-r--r--python/fatcat/crossref_importer.py16
-rw-r--r--python/fatcat/importer_common.py4
-rw-r--r--python/fatcat/issn_importer.py11
-rw-r--r--python/fatcat/manifest_importer.py7
-rw-r--r--python/fatcat/orcid_importer.py11
5 files changed, 19 insertions, 30 deletions
diff --git a/python/fatcat/crossref_importer.py b/python/fatcat/crossref_importer.py
index 54a3e84f..a59d0a45 100644
--- a/python/fatcat/crossref_importer.py
+++ b/python/fatcat/crossref_importer.py
@@ -119,22 +119,20 @@ class FatcatCrossrefImporter(FatcatImporter):
extra=extra)
return (re, ce)
- def create_row(self, row, editgroup_id=None):
+ def create_row(self, row, editgroup=None):
if row is None:
return
obj = json.loads(row)
entities = self.parse_crossref_dict(obj)
if entities is not None:
(re, ce) = entities
- re.editgroup_id = editgroup_id
if ce is not None:
- ce.editgroup_id = editgroup_id
- container = self.api.create_container(ce)
+ container = self.api.create_container(ce, editgroup=editgroup)
re.container_id = container.ident
self._issnl_id_map[ce.issnl] = container.ident
- self.api.create_release(re)
+ self.api.create_release(re, editgroup=editgroup)
- def create_batch(self, batch, editgroup_id=None):
+ def create_batch(self, batch, editgroup=None):
"""Current work/release pairing disallows batch creation of releases.
Could do batch work creation and then match against releases, but meh."""
release_batch = []
@@ -145,11 +143,9 @@ class FatcatCrossrefImporter(FatcatImporter):
entities = self.parse_crossref_dict(obj)
if entities is not None:
(re, ce) = entities
- re.editgroup_id = editgroup_id
if ce is not None:
- ce.editgroup_id = editgroup_id
- container = self.api.create_container(ce)
+ container = self.api.create_container(ce, editgroup=editgroup)
re.container_id = container.ident
self._issnl_id_map[ce.issnl] = container.ident
release_batch.append(re)
- self.api.create_release_batch(release_batch, autoaccept="true", editgroup=editgroup_id)
+ self.api.create_release_batch(release_batch, autoaccept="true", editgroup=editgroup)
diff --git a/python/fatcat/importer_common.py b/python/fatcat/importer_common.py
index 74a57ac1..ff0c8a27 100644
--- a/python/fatcat/importer_common.py
+++ b/python/fatcat/importer_common.py
@@ -32,7 +32,7 @@ class FatcatImporter:
eg = self.api.create_editgroup(
fatcat_client.Editgroup(editor_id='aaaaaaaaaaaabkvkaaaaaaaaae'))
for i, row in enumerate(source):
- self.create_row(row, editgroup_id=eg.id)
+ self.create_row(row, editgroup=eg.id)
if i > 0 and (i % group_size) == 0:
self.api.accept_editgroup(eg)
eg = self.api.create_editgroup(
@@ -45,7 +45,7 @@ class FatcatImporter:
for rows in grouper(source, size):
eg = self.api.create_editgroup(
fatcat_client.Editgroup(editor_id='aaaaaaaaaaaabkvkaaaaaaaaae'))
- self.create_batch(rows, editgroup_id=eg.id)
+ self.create_batch(rows, editgroup=eg.id)
def process_csv_source(self, source, group_size=100, delimiter=','):
reader = csv.DictReader(source, delimiter=delimiter)
diff --git a/python/fatcat/issn_importer.py b/python/fatcat/issn_importer.py
index eb8a50ba..ad2cad78 100644
--- a/python/fatcat/issn_importer.py
+++ b/python/fatcat/issn_importer.py
@@ -57,17 +57,14 @@ class FatcatIssnImporter(FatcatImporter):
extra=extra)
return ce
- def create_row(self, row, editgroup_id=None):
+ def create_row(self, row, editgroup=None):
ce = self.parse_issn_row(row)
if ce is not None:
- ce.editgroup_id = editgroup_id
- self.api.create_container(ce)
+ self.api.create_container(ce, editgroup=editgroup)
- def create_batch(self, batch, editgroup_id=None):
+ def create_batch(self, batch, editgroup=None):
"""Reads and processes in batches (not API-call-per-line)"""
objects = [self.parse_issn_row(l)
for l in batch if l != None]
objects = [o for o in objects if o != None]
- for o in objects:
- o.editgroup_id = editgroup_id
- self.api.create_container_batch(objects, autoaccept="true", editgroup=editgroup_id)
+ self.api.create_container_batch(objects, autoaccept="true", editgroup=editgroup)
diff --git a/python/fatcat/manifest_importer.py b/python/fatcat/manifest_importer.py
index 2965d0ef..3b0b3815 100644
--- a/python/fatcat/manifest_importer.py
+++ b/python/fatcat/manifest_importer.py
@@ -50,10 +50,9 @@ class FatcatManifestImporter(FatcatImporter):
extra=extra)
return fe
- def create_entity(self, entity, editgroup_id=None):
+ def create_entity(self, entity, editgroup=None):
if entity is not None:
- entity.editgroup_id = editgroup_id
- self.api.create_file(entity)
+ self.api.create_file(entity, editgroup=editgroup)
def process_db(self, db_path, size=100):
# TODO: multiple DOIs per sha1
@@ -78,7 +77,7 @@ class FatcatManifestImporter(FatcatImporter):
fe = self.parse_manifest_row(row)
if fe is None:
continue
- self.create_entity(fe, editgroup_id=eg.id)
+ self.create_entity(fe, editgroup=eg.id)
if i > 0 and (i % size) == 0:
self.api.accept_editgroup(eg.id)
eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
diff --git a/python/fatcat/orcid_importer.py b/python/fatcat/orcid_importer.py
index fe76b02c..2eeac122 100644
--- a/python/fatcat/orcid_importer.py
+++ b/python/fatcat/orcid_importer.py
@@ -57,18 +57,15 @@ class FatcatOrcidImporter(FatcatImporter):
extra=extra)
return ce
- def create_row(self, row, editgroup_id=None):
+ def create_row(self, row, editgroup=None):
obj = json.loads(row)
ce = self.parse_orcid_dict(obj)
if ce is not None:
- ce.editgroup_id = editgroup_id
- self.api.create_creator(ce)
+ self.api.create_creator(ce, editgroup=editgroup)
- def create_batch(self, batch, editgroup_id=None):
+ def create_batch(self, batch, editgroup=None):
"""Reads and processes in batches (not API-call-per-line)"""
objects = [self.parse_orcid_dict(json.loads(l))
for l in batch if l != None]
objects = [o for o in objects if o != None]
- for o in objects:
- o.editgroup_id = editgroup_id
- self.api.create_creator_batch(objects, autoaccept="true", editgroup=editgroup_id)
+ self.api.create_creator_batch(objects, autoaccept="true", editgroup=editgroup)