author     Bryan Newbold <bnewbold@robocracy.org>  2018-09-09 10:10:42 -0700
committer  Bryan Newbold <bnewbold@robocracy.org>  2018-09-09 10:10:42 -0700
commit     b15eff77fdb7974ce2bf3c2e44c8edc354f9f452 (patch)
tree       5bccb9ff2633eb35dc00babc0b2dd1842f02e49b /python
parent     419bddcb0377e82e7177356350d35bf84b3e80d8 (diff)
parent     a29beab0683d77086cc1b431779d0540dc5a9b49 (diff)
download   fatcat-b15eff77fdb7974ce2bf3c2e44c8edc354f9f452.tar.gz
           fatcat-b15eff77fdb7974ce2bf3c2e44c8edc354f9f452.zip
Merge branch 'http-verbs' into cockroach
Manually merged conflicts:
    rust/migrations/2018-05-12-001226_init/up.sql
    rust/src/api_server.rs
    rust/src/database_schema.rs
Diffstat (limited to 'python')
-rw-r--r--  python/README_codegen.md                                    |   10
-rw-r--r--  python/fatcat/crossref_importer.py                          |    2
-rw-r--r--  python/fatcat/fcid.py                                       |   17
-rw-r--r--  python/fatcat/importer_common.py                            |   20
-rw-r--r--  python/fatcat/issn_importer.py                              |    2
-rw-r--r--  python/fatcat/manifest_importer.py                          |    4
-rw-r--r--  python/fatcat/orcid_importer.py                             |    9
-rw-r--r--  python/fatcat/templates/file_view.html                      |    2
-rw-r--r--  python/fatcat/templates/release_view.html                   |    6
-rw-r--r--  python/fatcat_client/api/default_api.py                     | 1084
-rw-r--r--  python/fatcat_client/models/creator_entity.py               |    4
-rw-r--r--  python/fatcat_client/models/release_entity.py               |   28
-rwxr-xr-x  python/fatcat_export.py                                     |   72
-rw-r--r--  python/tests/codegen_tests/test_default_api.py              |   60
-rw-r--r--  python/tests/files/0000-0001-8254-710X.json                 |    1
-rw-r--r--  python/tests/files/crossref-works.2018-01-21.badsample.json |    1
-rw-r--r--  python/tests/importer.py                                    |   20
-rw-r--r--  python/tests/orcid.py                                       |    4
18 files changed, 1320 insertions(+), 26 deletions(-)
diff --git a/python/README_codegen.md b/python/README_codegen.md
index 393fae32..0d072dde 100644
--- a/python/README_codegen.md
+++ b/python/README_codegen.md
@@ -80,6 +80,11 @@ Class | Method | HTTP request | Description
*DefaultApi* | [**create_release_batch**](docs/DefaultApi.md#create_release_batch) | **POST** /release/batch |
*DefaultApi* | [**create_work**](docs/DefaultApi.md#create_work) | **POST** /work |
*DefaultApi* | [**create_work_batch**](docs/DefaultApi.md#create_work_batch) | **POST** /work/batch |
+*DefaultApi* | [**delete_container**](docs/DefaultApi.md#delete_container) | **DELETE** /container/{id} |
+*DefaultApi* | [**delete_creator**](docs/DefaultApi.md#delete_creator) | **DELETE** /creator/{id} |
+*DefaultApi* | [**delete_file**](docs/DefaultApi.md#delete_file) | **DELETE** /file/{id} |
+*DefaultApi* | [**delete_release**](docs/DefaultApi.md#delete_release) | **DELETE** /release/{id} |
+*DefaultApi* | [**delete_work**](docs/DefaultApi.md#delete_work) | **DELETE** /work/{id} |
*DefaultApi* | [**get_changelog**](docs/DefaultApi.md#get_changelog) | **GET** /changelog |
*DefaultApi* | [**get_changelog_entry**](docs/DefaultApi.md#get_changelog_entry) | **GET** /changelog/{id} |
*DefaultApi* | [**get_container**](docs/DefaultApi.md#get_container) | **GET** /container/{id} |
@@ -103,6 +108,11 @@ Class | Method | HTTP request | Description
*DefaultApi* | [**lookup_creator**](docs/DefaultApi.md#lookup_creator) | **GET** /creator/lookup |
*DefaultApi* | [**lookup_file**](docs/DefaultApi.md#lookup_file) | **GET** /file/lookup |
*DefaultApi* | [**lookup_release**](docs/DefaultApi.md#lookup_release) | **GET** /release/lookup |
+*DefaultApi* | [**update_container**](docs/DefaultApi.md#update_container) | **PUT** /container/{id} |
+*DefaultApi* | [**update_creator**](docs/DefaultApi.md#update_creator) | **PUT** /creator/{id} |
+*DefaultApi* | [**update_file**](docs/DefaultApi.md#update_file) | **PUT** /file/{id} |
+*DefaultApi* | [**update_release**](docs/DefaultApi.md#update_release) | **PUT** /release/{id} |
+*DefaultApi* | [**update_work**](docs/DefaultApi.md#update_work) | **PUT** /work/{id} |
## Documentation For Models
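The new rows in this table document the entity deletion (DELETE) and update (PUT) endpoints now exposed by the generated client. A minimal sketch of an update call against a local API, assuming the default host used elsewhere in this repo and a placeholder release ident; editgroup handling is omitted:

    import fatcat_client

    conf = fatcat_client.Configuration()
    conf.host = "http://localhost:9411/v0"
    api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))

    # PUT /release/{id}: fetch an existing release, tweak a field, send it back
    release = api.get_release(id="aaaaaaaaaaaaarceaaaaaaaaai")  # placeholder ident
    release.title = "Corrected Title"
    edit = api.update_release(release.ident, release)
    print(edit)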
diff --git a/python/fatcat/crossref_importer.py b/python/fatcat/crossref_importer.py
index d3e525a4..54a3e84f 100644
--- a/python/fatcat/crossref_importer.py
+++ b/python/fatcat/crossref_importer.py
@@ -152,4 +152,4 @@ class FatcatCrossrefImporter(FatcatImporter):
re.container_id = container.ident
self._issnl_id_map[ce.issnl] = container.ident
release_batch.append(re)
- self.api.create_release_batch(release_batch)
+ self.api.create_release_batch(release_batch, autoaccept="true", editgroup=editgroup_id)
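With this change the importer passes `autoaccept` and `editgroup` straight through to the batch-create endpoint, so each batch is created and accepted in a single request instead of needing a separate `accept_editgroup` call. A rough sketch of the same pattern outside the importer, assuming a default client configuration and reusing the placeholder editor ident these importers already use:

    import fatcat_client

    api = fatcat_client.DefaultApi(fatcat_client.ApiClient(fatcat_client.Configuration()))

    eg = api.create_editgroup(
        fatcat_client.Editgroup(editor_id='aaaaaaaaaaaabkvkaaaaaaaaae'))
    batch = [fatcat_client.ReleaseEntity(title="Example Release {}".format(i))
             for i in range(3)]
    # one round trip: create all edits and accept the editgroup at once
    api.create_release_batch(batch, autoaccept="true", editgroup=eg.id)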
diff --git a/python/fatcat/fcid.py b/python/fatcat/fcid.py
new file mode 100644
index 00000000..dd72b242
--- /dev/null
+++ b/python/fatcat/fcid.py
@@ -0,0 +1,17 @@
+
+import base64
+import uuid
+
+def fcid2uuid(s):
+ s = s.split('_')[-1].upper().encode('utf-8')
+ assert len(s) == 26
+ raw = base64.b32decode(s + b"======")
+ return str(uuid.UUID(bytes=raw)).lower()
+
+def uuid2fcid(s):
+ raw = uuid.UUID(s).bytes
+ return base64.b32encode(raw)[:26].lower().decode('utf-8')
+
+def test_fcid():
+ test_uuid = '00000000-0000-0000-3333-000000000001'
+ assert test_uuid == fcid2uuid(uuid2fcid(test_uuid))
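These helpers convert between fatcat's 26-character lowercase base32 identifiers and standard UUID strings: uuid2fcid() drops the six characters of base32 padding, and fcid2uuid() adds them back before decoding (the split('_') also tolerates prefixed idents like "release_..."). A quick round trip with an arbitrary example UUID:

    from fatcat.fcid import fcid2uuid, uuid2fcid

    fcid = uuid2fcid('86daea5b-1b6b-432a-bb67-ea97795f80fe')
    print(fcid)             # 26-character lowercase base32 string
    print(fcid2uuid(fcid))  # back to '86daea5b-1b6b-432a-bb67-ea97795f80fe'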
diff --git a/python/fatcat/importer_common.py b/python/fatcat/importer_common.py
index 9d495aa7..0b02d175 100644
--- a/python/fatcat/importer_common.py
+++ b/python/fatcat/importer_common.py
@@ -1,4 +1,5 @@
+import re
import sys
import csv
import json
@@ -22,6 +23,7 @@ class FatcatImporter:
self._orcid_id_map = dict()
self._doi_id_map = dict()
self._issn_issnl_map = None
+ self._orcid_regex = re.compile("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$")
if issn_map_file:
self.read_issn_map_file(issn_map_file)
@@ -43,8 +45,7 @@ class FatcatImporter:
for rows in grouper(source, size):
eg = self.api.create_editgroup(
fatcat_client.Editgroup(editor_id='aaaaaaaaaaaabkvkaaaaaaaaae'))
- self.create_batch(rows, eg.id)
- self.api.accept_editgroup(eg.id)
+ self.create_batch(rows, editgroup_id=eg.id)
def process_csv_source(self, source, group_size=100, delimiter=','):
reader = csv.DictReader(source, delimiter=delimiter)
@@ -54,9 +55,11 @@ class FatcatImporter:
reader = csv.DictReader(source, delimiter=delimiter)
self.process_batch(reader, size)
+ def is_issnl(self, issnl):
+ return len(issnl) == 9 and issnl[4] == '-'
+
def lookup_issnl(self, issnl):
"""Caches calls to the ISSN-L lookup API endpoint in a local dict"""
- assert len(issnl) == 9 and issnl[4] == '-'
if issnl in self._issnl_id_map:
return self._issnl_id_map[issnl]
container_id = None
@@ -69,9 +72,13 @@ class FatcatImporter:
self._issnl_id_map[issnl] = container_id # might be None
return container_id
+ def is_orcid(self, orcid):
+ return self._orcid_regex.match(orcid) != None
+
def lookup_orcid(self, orcid):
"""Caches calls to the Orcid lookup API endpoint in a local dict"""
- assert len(orcid) == 19 and orcid[4] == '-'
+ if not self.is_orcid(orcid):
+ return None
if orcid in self._orcid_id_map:
return self._orcid_id_map[orcid]
creator_id = None
@@ -84,9 +91,12 @@ class FatcatImporter:
self._orcid_id_map[orcid] = creator_id # might be None
return creator_id
+ def is_doi(self, doi):
+ return doi.startswith("10.") and doi.count("/") >= 1
+
def lookup_doi(self, doi):
"""Caches calls to the doi lookup API endpoint in a local dict"""
- assert doi.startswith('10.')
+ assert self.is_doi(doi)
doi = doi.lower()
if doi in self._doi_id_map:
return self._doi_id_map[doi]
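The new is_issnl(), is_orcid() and is_doi() helpers replace hard assertions, so malformed identifiers can be skipped instead of crashing an import run. Note that the ORCID regex compiled above matches only four groups of digits, so identifiers ending in the 'X' check digit (like the 0000-0001-8254-710X test fixture added later in this commit) are treated as invalid by the importers. A small illustration; the constructor arguments are an assumption, since they are not shown in this hunk:

    from fatcat.importer_common import FatcatImporter

    imp = FatcatImporter("http://localhost:9411/v0")  # assumed constructor signature

    assert imp.is_issnl("1234-5678")
    assert not imp.is_issnl("12345678")

    assert imp.is_orcid("0000-0002-1825-0097")
    assert not imp.is_orcid("0000-0001-8254-710X")  # 'X' check digit rejected by this regex

    assert imp.is_doi("10.1000/182")
    assert not imp.is_doi("doi:10.1000/182")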
diff --git a/python/fatcat/issn_importer.py b/python/fatcat/issn_importer.py
index 181137ac..eb8a50ba 100644
--- a/python/fatcat/issn_importer.py
+++ b/python/fatcat/issn_importer.py
@@ -70,4 +70,4 @@ class FatcatIssnImporter(FatcatImporter):
objects = [o for o in objects if o != None]
for o in objects:
o.editgroup_id = editgroup_id
- self.api.create_container_batch(objects)
+ self.api.create_container_batch(objects, autoaccept="true", editgroup=editgroup_id)
diff --git a/python/fatcat/manifest_importer.py b/python/fatcat/manifest_importer.py
index 7762d132..2965d0ef 100644
--- a/python/fatcat/manifest_importer.py
+++ b/python/fatcat/manifest_importer.py
@@ -66,7 +66,7 @@ class FatcatManifestImporter(FatcatImporter):
total_count = int(list(db.execute("SELECT COUNT(*) FROM files_metadata;"))[0][0])
print("{} rows to process".format(total_count))
- eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id=1))
+ eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
i = 0
j = -1
for row in db.execute(QUERY):
@@ -81,7 +81,7 @@ class FatcatManifestImporter(FatcatImporter):
self.create_entity(fe, editgroup_id=eg.id)
if i > 0 and (i % size) == 0:
self.api.accept_editgroup(eg.id)
- eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id=1))
+ eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
print("Finished a batch; row {} of {} ({:.2f}%).\tTotal inserted: {}".format(
j, total_count, 100.0*j/total_count, i))
i = i + 1
diff --git a/python/fatcat/orcid_importer.py b/python/fatcat/orcid_importer.py
index 69b184d5..fe76b02c 100644
--- a/python/fatcat/orcid_importer.py
+++ b/python/fatcat/orcid_importer.py
@@ -5,7 +5,6 @@ import itertools
import fatcat_client
from fatcat.importer_common import FatcatImporter
-
def value_or_none(e):
if type(e) == dict:
e = e.get('value')
@@ -46,8 +45,12 @@ class FatcatOrcidImporter(FatcatImporter):
else:
# must have *some* name
return None
+ orcid = obj['orcid-identifier']['path']
+ if not self.is_orcid(orcid):
+ sys.stderr.write("Bad ORCID: {}\n".format(orcid))
+ return None
ce = fatcat_client.CreatorEntity(
- orcid=obj['orcid-identifier']['path'],
+ orcid=orcid,
given_name=given,
surname=sur,
display_name=display,
@@ -68,4 +71,4 @@ class FatcatOrcidImporter(FatcatImporter):
objects = [o for o in objects if o != None]
for o in objects:
o.editgroup_id = editgroup_id
- self.api.create_creator_batch(objects)
+ self.api.create_creator_batch(objects, autoaccept="true", editgroup=editgroup_id)
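The ORCID record parser now validates the identifier before building a CreatorEntity, writing bad records to stderr and skipping them, and the batch call auto-accepts like the other importers. A rough sketch of pulling the same fields out of a record shaped like the 0000-0001-8254-710X fixture added at the bottom of this diff (path relative to python/); the field access follows the fixture's structure, everything else is illustrative:

    import json
    import fatcat_client

    with open('tests/files/0000-0001-8254-710X.json') as f:
        obj = json.loads(f.readline())

    orcid = obj['orcid-identifier']['path']   # "0000-0001-8254-710X"
    name = obj['person']['name']
    given = name['given-names']['value']      # "Man-Hui"
    sur = name['family-name']['value']        # "Li"

    ce = fatcat_client.CreatorEntity(
        orcid=orcid,
        given_name=given,
        surname=sur,
        display_name="{} {}".format(given, sur))

Note that the importer's own is_orcid() check would currently reject this particular identifier because of its 'X' check digit, even though the client model (after the creator_entity.py change below) accepts it.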
diff --git a/python/fatcat/templates/file_view.html b/python/fatcat/templates/file_view.html
index 2934224d..febc2b19 100644
--- a/python/fatcat/templates/file_view.html
+++ b/python/fatcat/templates/file_view.html
@@ -45,7 +45,7 @@ No known public URL, mirror, or archive for this file.
{% endif %}
<h3>Checksums</h3>
-<table class="ui table">
+<table class="ui compact table">
<thead>
<tr><th>Algorithm
<th>Value
diff --git a/python/fatcat/templates/release_view.html b/python/fatcat/templates/release_view.html
index dd92f611..9be312e1 100644
--- a/python/fatcat/templates/release_view.html
+++ b/python/fatcat/templates/release_view.html
@@ -147,11 +147,11 @@ Believed to represent this release...
This release citing other releases.
<ol>
{% for ref in release.refs %}
- <li>{% if ref.extra != None %}{{ ref.extra }}{% else %}<i>unknown</i>{% endif %}
+ <li>{% if ref.extra != None and ref.extra.unstructured != None %}{{ ref.extra.unstructured }}{% else %}<i>unknown</i>{% endif %}
{% if ref.target_release_id != None %}
(<a href="/release/{{ ref.target_release_id }}">fatcat release</a>)
- {% elif ref.extra != None and ref.extra.doi != None %}
- (DOI: <a href="/release/lookup?doi={{ ref.exta.doi }}">{{ ref.extra.doi }}</a>)
+{# {% elif ref.extra != None and ref.extra.doi != None %}
+ (DOI: <a href="/release/lookup?doi={{ ref.exta.get('doi') }}">{{ ref.extra.get('doi') }}</a>) #}
{% endif %}
{% endfor %}
</ol>
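The reference list now prefers the unstructured citation string under ref.extra, and the DOI lookup link (which was reading a misspelled ref.exta) is commented out rather than fixed. For orientation, a sketch of the per-reference data the template reads; only the fields referenced above are grounded, the sample values are made up:

    # one entry of release.refs, as consumed by the template above
    ref_extra = {
        "unstructured": "Smith J. (2001) An Example Paper. Journal of Examples 1(2), 3-4.",
        # "doi": "10.1000/182",   # sometimes present; the DOI link above is disabled for now
    }
    target_release_id = None       # set when the citation has been matched to a fatcat release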
diff --git a/python/fatcat_client/api/default_api.py b/python/fatcat_client/api/default_api.py
index b7f23e25..23c8d7ca 100644
--- a/python/fatcat_client/api/default_api.py
+++ b/python/fatcat_client/api/default_api.py
@@ -245,6 +245,8 @@ class DefaultApi(object):
:param async bool
:param list[ContainerEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
@@ -266,12 +268,14 @@ class DefaultApi(object):
:param async bool
:param list[ContainerEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
"""
- all_params = ['entity_list'] # noqa: E501
+ all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
@@ -296,6 +300,10 @@ class DefaultApi(object):
path_params = {}
query_params = []
+ if 'autoaccept' in params:
+ query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
header_params = {}
@@ -439,6 +447,8 @@ class DefaultApi(object):
:param async bool
:param list[CreatorEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
@@ -460,12 +470,14 @@ class DefaultApi(object):
:param async bool
:param list[CreatorEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
"""
- all_params = ['entity_list'] # noqa: E501
+ all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
@@ -490,6 +502,10 @@ class DefaultApi(object):
path_params = {}
query_params = []
+ if 'autoaccept' in params:
+ query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
header_params = {}
@@ -730,6 +746,8 @@ class DefaultApi(object):
:param async bool
:param list[FileEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
@@ -751,12 +769,14 @@ class DefaultApi(object):
:param async bool
:param list[FileEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
"""
- all_params = ['entity_list'] # noqa: E501
+ all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
@@ -781,6 +801,10 @@ class DefaultApi(object):
path_params = {}
query_params = []
+ if 'autoaccept' in params:
+ query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
header_params = {}
@@ -924,6 +948,8 @@ class DefaultApi(object):
:param async bool
:param list[ReleaseEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
@@ -945,12 +971,14 @@ class DefaultApi(object):
:param async bool
:param list[ReleaseEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
"""
- all_params = ['entity_list'] # noqa: E501
+ all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
@@ -975,6 +1003,10 @@ class DefaultApi(object):
path_params = {}
query_params = []
+ if 'autoaccept' in params:
+ query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
header_params = {}
@@ -1118,6 +1150,8 @@ class DefaultApi(object):
:param async bool
:param list[WorkEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
@@ -1139,12 +1173,14 @@ class DefaultApi(object):
:param async bool
:param list[WorkEntity] entity_list: (required)
+ :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+ :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
:return: list[EntityEdit]
If the method is called asynchronously,
returns the request thread.
"""
- all_params = ['entity_list'] # noqa: E501
+ all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
@@ -1169,6 +1205,10 @@ class DefaultApi(object):
path_params = {}
query_params = []
+ if 'autoaccept' in params:
+ query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
header_params = {}
@@ -1205,6 +1245,511 @@ class DefaultApi(object):
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
+ def delete_container(self, id, **kwargs): # noqa: E501
+ """delete_container # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_container(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.delete_container_with_http_info(id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_container_with_http_info(id, **kwargs) # noqa: E501
+ return data
+
+ def delete_container_with_http_info(self, id, **kwargs): # noqa: E501
+ """delete_container # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_container_with_http_info(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'editgroup'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method delete_container" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `delete_container`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/container/{id}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def delete_creator(self, id, **kwargs): # noqa: E501
+ """delete_creator # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_creator(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.delete_creator_with_http_info(id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_creator_with_http_info(id, **kwargs) # noqa: E501
+ return data
+
+ def delete_creator_with_http_info(self, id, **kwargs): # noqa: E501
+ """delete_creator # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_creator_with_http_info(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'editgroup'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method delete_creator" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `delete_creator`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/creator/{id}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def delete_file(self, id, **kwargs): # noqa: E501
+ """delete_file # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_file(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.delete_file_with_http_info(id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_file_with_http_info(id, **kwargs) # noqa: E501
+ return data
+
+ def delete_file_with_http_info(self, id, **kwargs): # noqa: E501
+ """delete_file # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_file_with_http_info(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'editgroup'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method delete_file" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `delete_file`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/file/{id}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def delete_release(self, id, **kwargs): # noqa: E501
+ """delete_release # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_release(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.delete_release_with_http_info(id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_release_with_http_info(id, **kwargs) # noqa: E501
+ return data
+
+ def delete_release_with_http_info(self, id, **kwargs): # noqa: E501
+ """delete_release # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_release_with_http_info(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'editgroup'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method delete_release" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `delete_release`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/release/{id}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def delete_work(self, id, **kwargs): # noqa: E501
+ """delete_work # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_work(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.delete_work_with_http_info(id, **kwargs) # noqa: E501
+ else:
+ (data) = self.delete_work_with_http_info(id, **kwargs) # noqa: E501
+ return data
+
+ def delete_work_with_http_info(self, id, **kwargs): # noqa: E501
+ """delete_work # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.delete_work_with_http_info(id, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param str editgroup:
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'editgroup'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method delete_work" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `delete_work`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+ if 'editgroup' in params:
+ query_params.append(('editgroup', params['editgroup'])) # noqa: E501
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/work/{id}', 'DELETE',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
def get_changelog(self, **kwargs): # noqa: E501
"""get_changelog # noqa: E501
@@ -3255,8 +3800,8 @@ class DefaultApi(object):
if ('orcid' in params and
len(params['orcid']) < 19):
raise ValueError("Invalid value for parameter `orcid` when calling `lookup_creator`, length must be greater than or equal to `19`") # noqa: E501
- if 'orcid' in params and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{4}', params['orcid']): # noqa: E501
- raise ValueError("Invalid value for parameter `orcid` when calling `lookup_creator`, must conform to the pattern `/\\d{4}-\\d{4}-\\d{4}-\\d{4}/`") # noqa: E501
+ if 'orcid' in params and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]', params['orcid']): # noqa: E501
+ raise ValueError("Invalid value for parameter `orcid` when calling `lookup_creator`, must conform to the pattern `/\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]/`") # noqa: E501
collection_formats = {}
path_params = {}
@@ -3491,3 +4036,528 @@ class DefaultApi(object):
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
+
+ def update_container(self, id, entity, **kwargs): # noqa: E501
+ """update_container # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_container(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param ContainerEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.update_container_with_http_info(id, entity, **kwargs) # noqa: E501
+ else:
+ (data) = self.update_container_with_http_info(id, entity, **kwargs) # noqa: E501
+ return data
+
+ def update_container_with_http_info(self, id, entity, **kwargs): # noqa: E501
+ """update_container # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_container_with_http_info(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param ContainerEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'entity'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method update_container" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `update_container`") # noqa: E501
+ # verify the required parameter 'entity' is set
+ if ('entity' not in params or
+ params['entity'] is None):
+ raise ValueError("Missing the required parameter `entity` when calling `update_container`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ if 'entity' in params:
+ body_params = params['entity']
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/container/{id}', 'PUT',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def update_creator(self, id, entity, **kwargs): # noqa: E501
+ """update_creator # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_creator(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param CreatorEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.update_creator_with_http_info(id, entity, **kwargs) # noqa: E501
+ else:
+ (data) = self.update_creator_with_http_info(id, entity, **kwargs) # noqa: E501
+ return data
+
+ def update_creator_with_http_info(self, id, entity, **kwargs): # noqa: E501
+ """update_creator # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_creator_with_http_info(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param CreatorEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'entity'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method update_creator" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `update_creator`") # noqa: E501
+ # verify the required parameter 'entity' is set
+ if ('entity' not in params or
+ params['entity'] is None):
+ raise ValueError("Missing the required parameter `entity` when calling `update_creator`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ if 'entity' in params:
+ body_params = params['entity']
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/creator/{id}', 'PUT',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def update_file(self, id, entity, **kwargs): # noqa: E501
+ """update_file # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_file(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param FileEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.update_file_with_http_info(id, entity, **kwargs) # noqa: E501
+ else:
+ (data) = self.update_file_with_http_info(id, entity, **kwargs) # noqa: E501
+ return data
+
+ def update_file_with_http_info(self, id, entity, **kwargs): # noqa: E501
+ """update_file # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_file_with_http_info(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param FileEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'entity'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method update_file" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `update_file`") # noqa: E501
+ # verify the required parameter 'entity' is set
+ if ('entity' not in params or
+ params['entity'] is None):
+ raise ValueError("Missing the required parameter `entity` when calling `update_file`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ if 'entity' in params:
+ body_params = params['entity']
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/file/{id}', 'PUT',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def update_release(self, id, entity, **kwargs): # noqa: E501
+ """update_release # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_release(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param ReleaseEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.update_release_with_http_info(id, entity, **kwargs) # noqa: E501
+ else:
+ (data) = self.update_release_with_http_info(id, entity, **kwargs) # noqa: E501
+ return data
+
+ def update_release_with_http_info(self, id, entity, **kwargs): # noqa: E501
+ """update_release # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_release_with_http_info(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param ReleaseEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'entity'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method update_release" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `update_release`") # noqa: E501
+ # verify the required parameter 'entity' is set
+ if ('entity' not in params or
+ params['entity'] is None):
+ raise ValueError("Missing the required parameter `entity` when calling `update_release`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ if 'entity' in params:
+ body_params = params['entity']
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/release/{id}', 'PUT',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
+
+ def update_work(self, id, entity, **kwargs): # noqa: E501
+ """update_work # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_work(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param WorkEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+ kwargs['_return_http_data_only'] = True
+ if kwargs.get('async'):
+ return self.update_work_with_http_info(id, entity, **kwargs) # noqa: E501
+ else:
+ (data) = self.update_work_with_http_info(id, entity, **kwargs) # noqa: E501
+ return data
+
+ def update_work_with_http_info(self, id, entity, **kwargs): # noqa: E501
+ """update_work # noqa: E501
+
+ This method makes a synchronous HTTP request by default. To make an
+ asynchronous HTTP request, please pass async=True
+ >>> thread = api.update_work_with_http_info(id, entity, async=True)
+ >>> result = thread.get()
+
+ :param async bool
+ :param str id: (required)
+ :param WorkEntity entity: (required)
+ :return: EntityEdit
+ If the method is called asynchronously,
+ returns the request thread.
+ """
+
+ all_params = ['id', 'entity'] # noqa: E501
+ all_params.append('async')
+ all_params.append('_return_http_data_only')
+ all_params.append('_preload_content')
+ all_params.append('_request_timeout')
+
+ params = locals()
+ for key, val in six.iteritems(params['kwargs']):
+ if key not in all_params:
+ raise TypeError(
+ "Got an unexpected keyword argument '%s'"
+ " to method update_work" % key
+ )
+ params[key] = val
+ del params['kwargs']
+ # verify the required parameter 'id' is set
+ if ('id' not in params or
+ params['id'] is None):
+ raise ValueError("Missing the required parameter `id` when calling `update_work`") # noqa: E501
+ # verify the required parameter 'entity' is set
+ if ('entity' not in params or
+ params['entity'] is None):
+ raise ValueError("Missing the required parameter `entity` when calling `update_work`") # noqa: E501
+
+ collection_formats = {}
+
+ path_params = {}
+ if 'id' in params:
+ path_params['id'] = params['id'] # noqa: E501
+
+ query_params = []
+
+ header_params = {}
+
+ form_params = []
+ local_var_files = {}
+
+ body_params = None
+ if 'entity' in params:
+ body_params = params['entity']
+ # HTTP header `Accept`
+ header_params['Accept'] = self.api_client.select_header_accept(
+ ['application/json']) # noqa: E501
+
+ # HTTP header `Content-Type`
+ header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
+ ['application/json']) # noqa: E501
+
+ # Authentication setting
+ auth_settings = [] # noqa: E501
+
+ return self.api_client.call_api(
+ '/work/{id}', 'PUT',
+ path_params,
+ query_params,
+ header_params,
+ body=body_params,
+ post_params=form_params,
+ files=local_var_files,
+ response_type='EntityEdit', # noqa: E501
+ auth_settings=auth_settings,
+ async=params.get('async'),
+ _return_http_data_only=params.get('_return_http_data_only'),
+ _preload_content=params.get('_preload_content', True),
+ _request_timeout=params.get('_request_timeout'),
+ collection_formats=collection_formats)
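Summing up the generated-client changes above: the create_*_batch methods gain the autoaccept/editgroup query parameters, the new delete_* methods accept an optional editgroup query parameter, and the new update_* methods send the entity as the PUT body. A minimal sketch of a deletion attached to an explicit editgroup, with placeholder idents:

    import fatcat_client

    api = fatcat_client.DefaultApi(fatcat_client.ApiClient(fatcat_client.Configuration()))

    eg = api.create_editgroup(
        fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
    # the deletion edit lands in this editgroup, which is then accepted explicitly
    edit = api.delete_container("aaaaaaaaaaaaaeiraaaaaaaaai", editgroup=eg.id)
    api.accept_editgroup(eg.id)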
diff --git a/python/fatcat_client/models/creator_entity.py b/python/fatcat_client/models/creator_entity.py
index a9e459ee..de096281 100644
--- a/python/fatcat_client/models/creator_entity.py
+++ b/python/fatcat_client/models/creator_entity.py
@@ -139,8 +139,8 @@ class CreatorEntity(object):
raise ValueError("Invalid value for `orcid`, length must be less than or equal to `19`") # noqa: E501
if orcid is not None and len(orcid) < 19:
raise ValueError("Invalid value for `orcid`, length must be greater than or equal to `19`") # noqa: E501
- if orcid is not None and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{4}', orcid): # noqa: E501
- raise ValueError("Invalid value for `orcid`, must be a follow pattern or equal to `/\\d{4}-\\d{4}-\\d{4}-\\d{4}/`") # noqa: E501
+ if orcid is not None and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]', orcid): # noqa: E501
+ raise ValueError("Invalid value for `orcid`, must be a follow pattern or equal to `/\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]/`") # noqa: E501
self._orcid = orcid
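The relaxed pattern accepts the 'X' check digit that valid ORCID identifiers may end with; the old all-digits pattern rejected them at the client model layer. A quick check against the setter:

    import fatcat_client

    # accepted after this change; raised ValueError with the previous pattern
    c = fatcat_client.CreatorEntity(orcid="0000-0001-8254-710X")

    # still rejected: too short for the 19-character ORCID format
    try:
        fatcat_client.CreatorEntity(orcid="0000-0001-8254")
    except ValueError as err:
        print(err)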
diff --git a/python/fatcat_client/models/release_entity.py b/python/fatcat_client/models/release_entity.py
index 3412ca92..79e2c64f 100644
--- a/python/fatcat_client/models/release_entity.py
+++ b/python/fatcat_client/models/release_entity.py
@@ -48,6 +48,7 @@ class ReleaseEntity(object):
'wikidata_qid': 'str',
'pmcid': 'str',
'pmid': 'str',
+ 'core_id': 'str',
'isbn13': 'str',
'doi': 'str',
'release_date': 'date',
@@ -78,6 +79,7 @@ class ReleaseEntity(object):
'wikidata_qid': 'wikidata_qid',
'pmcid': 'pmcid',
'pmid': 'pmid',
+ 'core_id': 'core_id',
'isbn13': 'isbn13',
'doi': 'doi',
'release_date': 'release_date',
@@ -96,7 +98,7 @@ class ReleaseEntity(object):
'extra': 'extra'
}
- def __init__(self, abstracts=None, refs=None, contribs=None, language=None, publisher=None, pages=None, issue=None, volume=None, wikidata_qid=None, pmcid=None, pmid=None, isbn13=None, doi=None, release_date=None, release_status=None, release_type=None, container_id=None, files=None, container=None, work_id=None, title=None, state=None, ident=None, revision=None, redirect=None, editgroup_id=None, extra=None): # noqa: E501
+ def __init__(self, abstracts=None, refs=None, contribs=None, language=None, publisher=None, pages=None, issue=None, volume=None, wikidata_qid=None, pmcid=None, pmid=None, core_id=None, isbn13=None, doi=None, release_date=None, release_status=None, release_type=None, container_id=None, files=None, container=None, work_id=None, title=None, state=None, ident=None, revision=None, redirect=None, editgroup_id=None, extra=None): # noqa: E501
"""ReleaseEntity - a model defined in Swagger""" # noqa: E501
self._abstracts = None
@@ -110,6 +112,7 @@ class ReleaseEntity(object):
self._wikidata_qid = None
self._pmcid = None
self._pmid = None
+ self._core_id = None
self._isbn13 = None
self._doi = None
self._release_date = None
@@ -150,6 +153,8 @@ class ReleaseEntity(object):
self.pmcid = pmcid
if pmid is not None:
self.pmid = pmid
+ if core_id is not None:
+ self.core_id = core_id
if isbn13 is not None:
self.isbn13 = isbn13
if doi is not None:
@@ -416,6 +421,27 @@ class ReleaseEntity(object):
self._pmid = pmid
@property
+ def core_id(self):
+ """Gets the core_id of this ReleaseEntity. # noqa: E501
+
+
+ :return: The core_id of this ReleaseEntity. # noqa: E501
+ :rtype: str
+ """
+ return self._core_id
+
+ @core_id.setter
+ def core_id(self, core_id):
+ """Sets the core_id of this ReleaseEntity.
+
+
+ :param core_id: The core_id of this ReleaseEntity. # noqa: E501
+ :type: str
+ """
+
+ self._core_id = core_id
+
+ @property
def isbn13(self):
"""Gets the isbn13 of this ReleaseEntity. # noqa: E501
diff --git a/python/fatcat_export.py b/python/fatcat_export.py
new file mode 100755
index 00000000..6c4502af
--- /dev/null
+++ b/python/fatcat_export.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+
+import sys
+import json
+import argparse
+import fatcat_client
+from fatcat_client.rest import ApiException
+from fatcat.fcid import uuid2fcid
+
+def run_export_releases(args):
+ conf = fatcat_client.Configuration()
+ conf.host = args.host_url
+ api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))
+
+ for line in args.ident_file:
+ ident = uuid2fcid(line.split()[0])
+ release = api.get_release(id=ident, expand="all")
+ args.json_output.write(json.dumps(release.to_dict()) + "\n")
+
+def run_export_changelog(args):
+ conf = fatcat_client.Configuration()
+ conf.host = args.host_url
+ api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))
+
+ end = args.end
+ if end is None:
+ latest = api.get_changelog(limit=1)[0]
+ end = latest.index
+
+ for i in range(args.start, end):
+ entry = api.get_changelog_entry(id=i)
+ args.json_output.write(json.dumps(entry.to_dict()) + "\n")
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--debug',
+ action='store_true',
+ help="enable debugging interface")
+ parser.add_argument('--host-url',
+ default="http://localhost:9411/v0",
+ help="connect to this host/port")
+ subparsers = parser.add_subparsers()
+
+ sub_releases = subparsers.add_parser('releases')
+ sub_releases.set_defaults(func=run_export_releases)
+ sub_releases.add_argument('ident_file',
+ help="TSV list of fatcat release idents to dump",
+ default=sys.stdin, type=argparse.FileType('r'))
+ sub_releases.add_argument('json_output',
+ help="where to send output",
+ default=sys.stdout, type=argparse.FileType('w'))
+
+ sub_changelog = subparsers.add_parser('changelog')
+ sub_changelog.set_defaults(func=run_export_changelog)
+ sub_changelog.add_argument('--start',
+ help="index to start dumping at",
+ default=1, type=int)
+ sub_changelog.add_argument('--end',
+ help="index to stop dumping at (else detect most recent)",
+ default=None, type=int)
+ sub_changelog.add_argument('json_output',
+ help="where to send output",
+ default=sys.stdout, type=argparse.FileType('w'))
+
+ args = parser.parse_args()
+ if not args.__dict__.get("func"):
+ print("tell me what to do!")
+ sys.exit(-1)
+ args.func(args)
+
+if __name__ == '__main__':
+ main()
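The new export script writes one JSON object per line: `./fatcat_export.py releases idents.tsv releases.json` dumps releases listed by ident, and `./fatcat_export.py changelog --start 1 changelog.json` walks changelog entries up to the most recent one (subcommand and argument names as defined in the parser above; the file names are placeholders). A small sketch of consuming such a newline-delimited dump downstream:

    import json

    # read back a dump written by the changelog subcommand
    with open("changelog.json") as f:
        entries = [json.loads(line) for line in f]

    for entry in entries:
        # 'index' is the changelog sequence number; other keys mirror ChangelogEntry.to_dict()
        print(entry["index"])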
diff --git a/python/tests/codegen_tests/test_default_api.py b/python/tests/codegen_tests/test_default_api.py
index e3008898..ec36887b 100644
--- a/python/tests/codegen_tests/test_default_api.py
+++ b/python/tests/codegen_tests/test_default_api.py
@@ -101,6 +101,36 @@ class TestDefaultApi(unittest.TestCase):
"""
pass
+ def test_delete_container(self):
+ """Test case for delete_container
+
+ """
+ pass
+
+ def test_delete_creator(self):
+ """Test case for delete_creator
+
+ """
+ pass
+
+ def test_delete_file(self):
+ """Test case for delete_file
+
+ """
+ pass
+
+ def test_delete_release(self):
+ """Test case for delete_release
+
+ """
+ pass
+
+ def test_delete_work(self):
+ """Test case for delete_work
+
+ """
+ pass
+
def test_get_changelog(self):
"""Test case for get_changelog
@@ -239,6 +269,36 @@ class TestDefaultApi(unittest.TestCase):
"""
pass
+ def test_update_container(self):
+ """Test case for update_container
+
+ """
+ pass
+
+ def test_update_creator(self):
+ """Test case for update_creator
+
+ """
+ pass
+
+ def test_update_file(self):
+ """Test case for update_file
+
+ """
+ pass
+
+ def test_update_release(self):
+ """Test case for update_release
+
+ """
+ pass
+
+ def test_update_work(self):
+ """Test case for update_work
+
+ """
+ pass
+
if __name__ == '__main__':
unittest.main()
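(Note: the generated stubs above are placeholders that only pass; exercising one of the new delete/update endpoints requires a running API. A hedged sketch, where the keyword argument mirrors the other generated methods and the container ident is hypothetical:

import fatcat_client

conf = fatcat_client.Configuration()
conf.host = "http://localhost:9411/v0"
api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))
api.delete_container(id="aaaaaaaaaaaaaeiraaaaaaaaai")  # hypothetical container ident
)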
diff --git a/python/tests/files/0000-0001-8254-710X.json b/python/tests/files/0000-0001-8254-710X.json
new file mode 100644
index 00000000..094cae67
--- /dev/null
+++ b/python/tests/files/0000-0001-8254-710X.json
@@ -0,0 +1 @@
+{"orcid-identifier":{"uri":"http://orcid.org/0000-0001-8254-710X","path":"0000-0001-8254-710X","host":"orcid.org"},"preferences":{"locale":"en"},"history":{"creation-method":"Member-referred","completion-date":null,"submission-date":{"value":1407501041999},"last-modified-date":{"value":1465949566770},"claimed":true,"source":null,"deactivation-date":null,"verified-email":true,"verified-primary-email":true},"person":{"last-modified-date":null,"name":{"created-date":{"value":1460755375159},"last-modified-date":{"value":1460755375159},"given-names":{"value":"Man-Hui"},"family-name":{"value":"Li"},"credit-name":null,"source":null,"visibility":"public","path":"0000-0001-8254-710X"},"other-names":{"last-modified-date":null,"other-name":null,"path":"/0000-0001-8254-710X/other-names"},"biography":{"created-date":{"value":1460755375161},"last-modified-date":{"value":1460755375161},"content":null,"visibility":"public","path":"/0000-0001-8254-710X/biography"},"researcher-urls":{"last-modified-date":null,"researcher-url":null,"path":"/0000-0001-8254-710X/researcher-urls"},"emails":{"last-modified-date":null,"email":null,"path":"/0000-0001-8254-710X/email"},"addresses":{"last-modified-date":null,"address":null,"path":"/0000-0001-8254-710X/address"},"keywords":{"last-modified-date":null,"keyword":null,"path":"/0000-0001-8254-710X/keywords"},"external-identifiers":{"last-modified-date":null,"external-identifier":null,"path":"/0000-0001-8254-710X/external-identifiers"},"path":"/0000-0001-8254-710X/person"},"activities-summary":{"last-modified-date":null,"educations":{"last-modified-date":null,"education-summary":null,"path":"/0000-0001-8254-710X/educations"},"employments":{"last-modified-date":null,"employment-summary":null,"path":"/0000-0001-8254-710X/employments"},"fundings":{"last-modified-date":null,"group":null,"path":"/0000-0001-8254-710X/fundings"},"peer-reviews":{"last-modified-date":null,"group":null,"path":"/0000-0001-8254-710X/peer-reviews"},"works":{"last-modified-date":null,"group":null,"path":"/0000-0001-8254-710X/works"},"path":"/0000-0001-8254-710X/activities"},"path":"/0000-0001-8254-710X"}
diff --git a/python/tests/files/crossref-works.2018-01-21.badsample.json b/python/tests/files/crossref-works.2018-01-21.badsample.json
index d0ce191f..931da7a7 100644
--- a/python/tests/files/crossref-works.2018-01-21.badsample.json
+++ b/python/tests/files/crossref-works.2018-01-21.badsample.json
@@ -9,5 +9,6 @@
{ "_id" : { "$oid" : "5a55196988a035a45bda0cb1" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:41:48Z", "timestamp" : { "$numberLong" : "1508769708308" } }, "reference-count" : 44, "publisher" : "Elsevier BV", "issue" : "1", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 1998, 11, 1 ] ], "date-time" : "1998-11-01T00:00:00Z", "timestamp" : { "$numberLong" : "909878400000" } }, "delay-in-days" : 0, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Toxicology and Applied Pharmacology" ], "published-print" : { "date-parts" : [ [ 1998, 11 ] ] }, "DOI" : "10.1006/taap.1998.8543", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 9, 18 ] ], "date-time" : "2002-09-18T22:01:25Z", "timestamp" : { "$numberLong" : "1032386485000" } }, "page" : "102-108", "source" : "Crossref", "is-referenced-by-count" : 44, "title" : [ "Role of CYP1A2 in the Hepatotoxicity of Acetaminophen: Investigations UsingCyp1a2Null Mice" ], "prefix" : "10.1006", "volume" : "153", "author" : [ { "given" : "Robert P.", "family" : "Tonge", "affiliation" : [] }, { "given" : "Edward J.", "family" : "Kelly", "affiliation" : [] }, { "given" : "Sam A.", "family" : "Bruschi", "affiliation" : [] }, { "given" : "Tom", "family" : "Kalhorn", "affiliation" : [] }, { "given" : "David L.", "family" : "Eaton", "affiliation" : [] }, { "given" : "Daniel W.", "family" : "Nebert", "affiliation" : [] }, { "given" : "Sidney D.", "family" : "Nelson", "affiliation" : [] } ], "member" : "78", "container-title" : [ "Toxicology and Applied Pharmacology" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S0041008X9898543X?httpAccept=text/xml", "content-type" : "text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S0041008X9898543X?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T16:51:33Z", "timestamp" : { "$numberLong" : "1497459093000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 1998, 11 ] ] }, "references-count" : 44, "alternative-id" : [ "S0041008X9898543X" ], "URL" : "http://dx.doi.org/10.1006/taap.1998.8543", "ISSN" : [ "0041-008X" ], "issn-type" : [ { "value" : "0041-008X", "type" : "print" } ], "subject" : [ "Toxicology", "Pharmacology" ] }
{ "_id" : { "$oid" : "5a55170088a035a45bd8490d" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:30:12Z", "timestamp" : { "$numberLong" : "1508769012416" } }, "reference-count" : 37, "publisher" : "Wiley-Blackwell", "issue" : "2", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 5356, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Am. J. Ind. Med." ], "published-print" : { "date-parts" : [ [ 2001, 2 ] ] }, "DOI" : "10.1002/1097-0274(200102)39:2<218::aid-ajim1009>3.0.co;2-4", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T20:41:50Z", "timestamp" : { "$numberLong" : "1030308110000" } }, "page" : "218-226", "source" : "Crossref", "is-referenced-by-count" : 10, "title" : [ "The work environment impact assessment: A methodologic framework for evaluating health-based interventions" ], "prefix" : "10.1002", "volume" : "39", "author" : [ { "given" : "Beth J.", "family" : "Rosenberg", "affiliation" : [] }, { "given" : "Elizabeth M.", "family" : "Barbeau", "affiliation" : [] }, { "given" : "Rafael", "family" : "Moure-Eraso", "affiliation" : [] }, { "given" : "Charles", "family" : "Levenstein", "affiliation" : [] } ], "member" : "311", "published-online" : { "date-parts" : [ [ 2001 ] ] }, "reference" : [ { "key" : "BIB1", "author" : "Barbeau", "year" : "1998", "unstructured" : "1998. Displaced tobacco workers, public health, and tobacco policy: moving beyond jobs versus health. Doctoral thesis, Department of Work Environment, University of Massachusetts, Lowell." }, { "key" : "BIB2", "author" : "Berberian", "volume" : "37", "first-page" : "126", "year" : "1987", "journal-title" : "J Occup Environ Med" }, { "key" : "BIB3", "author" : "Bignami", "volume" : "80", "first-page" : "265", "year" : "1981", "journal-title" : "Mutat Res", "DOI" : "10.1016/0027-5107(81)90099-3", "doi-asserted-by" : "crossref" }, { "key" : "BIB4", "author" : "Britton", "year" : "1989", "unstructured" : "1989. The post-Alar era dawns chilly for apple growers. Boston Globe. Oct. 25, p. 34." }, { "key" : "BIB5", "author" : "Brusick", "year" : "1976", "unstructured" : "1976. Mutagen and oncogen Study on 1,1-dimethylhydrazine. Prepared for the Aerospace Med. Res. Lab., Aeropsace Med. Div. Airforce Systems Command, Wright- Patterson A.F.B., Dayton OH Litton Bionetics, Inc., Kensington, MD. NTIS AD-A035475." }, { "key" : "BIB6", "author" : "Chemical Marketing Reporter", "year" : "1984", "unstructured" : "Chemical Marketing Reporter. 1984. Uniroyal pesticide to be reviewed by EPA: regulatory action prompted by its toxicity. July 23." }, { "key" : "BIB7", "author" : "Chemical Marketing Reporter", "year" : "1989", "unstructured" : "Chemical Marketing Reporter. 1989. Uniroyal pulls apple pesticide from market, citing controversy. June 5." }, { "key" : "BIB8", "year" : "1990", "unstructured" : "Du Pont Chemical Company. 1990. MSDS No. M0000057, p. 2." }, { "key" : "BIB9", "year" : "1993", "unstructured" : "Farm Chemicals Handbook '93. 1993. Willoughby, OH: Meister.", "volume-title" : "Farm Chemicals Handbook '93" }, { "key" : "BIB10", "year" : "1985", "unstructured" : "Farm Chemicals Handbook '85. 1985. 
Willoughby, OH: Meister.", "volume-title" : "Farm Chemicals Handbook '85" }, { "key" : "BIB11", "author" : "Federal Register", "year" : "1989", "unstructured" : "Federal Register. 1989. Daminozide: termination of special review of food uses. Vol. 54, No. 216, p. 47482, November 14." }, { "key" : "BIB12", "author" : "Fenske", "first-page" : "729", "year" : "2000", "unstructured" : "2000. Agricultural workers. In: editors. Occupational health: recognizing and preventing work-related disease and injury. 4th ed. Philadelphia: Lippincott Williams and Wilkins, p. 729-748.", "volume-title" : "Occupational health: recognizing and preventing work-related disease and injury" }, { "key" : "BIB13", "author" : "Gibson", "volume" : "5", "first-page" : "24", "year" : "1994", "journal-title" : "New Solutions", "DOI" : "10.2190/NS5.1.g", "doi-asserted-by" : "crossref" }, { "key" : "BIB14", "author" : "Goldenhar", "volume" : "29", "first-page" : "289", "year" : "1996", "journal-title" : "Am J Ind Med", "DOI" : "10.1002/(SICI)1097-0274(199604)29:4<289::AID-AJIM2>3.0.CO;2-K", "doi-asserted-by" : "crossref" }, { "key" : "BIB15", "author" : "Haun", "year" : "1984", "unstructured" : "1984. Inhalation studies of UDMH. Air Force Aerospace Medical Res Lab, TR-85-020." }, { "key" : "BIB16", "author" : "International Agency for Research on Cancer (IARC)", "year" : "1997", "unstructured" : "International Agency for Research on Cancer (IARC). 1997. Evaluation of carcinogen risks to humans: man-made mineral fibres and radon. Lyons, France." }, { "key" : "BIB17", "author" : "Lord", "year" : "1969", "unstructured" : "1969 (May-June). Thoughts on the apple harvest problem. Fruit Notes. U. S. Department of Agriculture, Massachusetts Extension Service." }, { "key" : "BIB18", "author" : "Manning", "first-page" : "34", "year" : "1989", "unstructured" : "Sales Agent for J. P. Sullivan and Co., of Ayer, MA, an apple commission house. In 1989. The post-Alar era dawns chilly for apple growers. Boston Globe Oct. 25 p. 34.", "volume-title" : "The post-Alar era dawns chilly for apple growers" }, { "key" : "BIB19", "author" : "National Cancer Institute", "year" : "1978", "unstructured" : "National Cancer Institute. 1978. Bioassay of daminozide for possible carcinogenicity. Washington, D.C., United State Department of Health, Education and Welfare, Public Health Service (NIC Carcinogenesis Technical Report Series No. 83; DHEW Publication No (NIH 78-1333)." }, { "key" : "BIB20", "author" : "Rogers", "volume" : "89", "first-page" : "321", "year" : "1981", "journal-title" : "Mutat Res", "DOI" : "10.1016/0165-1218(81)90113-0", "doi-asserted-by" : "crossref" }, { "key" : "BIB21", "author" : "Rosenberg", "year" : "1995", "unstructured" : "1995. The best laid bans: the impact of pesticide bans on workers. Doctoral thesis, Department of Work Environment, University of Massachusetts Lowell." 
}, { "key" : "BIB22", "author" : "Rosenberg", "volume" : "6", "first-page" : "34", "year" : "1996", "journal-title" : "New Solutions: A Journal of Environmental and Occupational Health Policy", "DOI" : "10.2190/NS6.2.d", "doi-asserted-by" : "crossref" }, { "key" : "BIB23", "author" : "Rosenberg", "volume" : "8", "first-page" : "365", "year" : "1998", "journal-title" : "New Solutions Environmental Health Policy", "DOI" : "10.2190/A2A1-CT1X-RY6D-RR3M", "doi-asserted-by" : "crossref" }, { "key" : "BIB24", "author" : "Saunders", "volume" : "29", "first-page" : "409", "year" : "1987", "journal-title" : "J Occup Environ Med" }, { "key" : "BIB25", "author" : "Toth", "volume" : "50", "first-page" : "181", "year" : "1973", "journal-title" : "J Natl Cancer Inst", "DOI" : "10.1093/jnci/50.1.181", "doi-asserted-by" : "crossref" }, { "key" : "BIB26", "author" : "Toth", "volume" : "40", "first-page" : "2427", "year" : "1977a", "journal-title" : "Cancer", "DOI" : "10.1002/1097-0142(197711)40:5+<2427::AID-CNCR2820400906>3.0.CO;2-Y", "doi-asserted-by" : "crossref" }, { "key" : "BIB27", "author" : "Toth", "volume" : "37", "first-page" : "3497", "year" : "1977b", "journal-title" : "Cancer Res" }, { "key" : "BIB28", "author" : "U.S. Environmental Protection Agency", "year" : "1986", "unstructured" : "U.S. Environmental Protection Agency. 1986. Integrated Risk Information System (IRIS). Oxamyl. December 9." }, { "key" : "BIB29", "author" : "U.S. Environmental Protection Agency", "year" : "1986", "unstructured" : "U.S. Environmental Protection Agency. 1986. Chemical Fact Sheet Number 26: Daminozide. Office of Pesticides and Toxic Substances, Washington, DC. 10-169." }, { "key" : "BIB30", "author" : "U.S. Environmental Protection Agency", "year" : "1989", "unstructured" : "U.S. Environmental Protection Agency, Office of Pesticide Programs, Office of Pesticides and Toxic Substances. 1989. Daminozide special review technical support document: Preliminary determination to cancel the food uses of Daminozide. Washington, DC: May." }, { "key" : "BIB31", "author" : "U.S. Environmental Protection Agency", "volume" : "54", "first-page" : "10", "year" : "1989", "journal-title" : "Fed Regist." }, { "key" : "BIB32", "author" : "U.S. Environmental Protection Agency", "year" : "1990", "unstructured" : "U.S. Environmental Protection Agency. 1990. Integrated Risk Information System (IRIS). Propargite. May 1." }, { "key" : "BIB33", "author" : "U.S. Environmental Protection Agency", "volume" : "57", "first-page" : "10", "year" : "1992", "journal-title" : "Fed. Regist." }, { "key" : "BIB34", "author" : "U.S. Environmental Protection Agency", "year" : "1993", "unstructured" : "U.S. Environmental Protection Agency, Office of Prevention, Pesticides and Toxic Substances. 1993. R.E.D. Facts, Document number EPA-738-F-93-007. September." }, { "key" : "BIB35", "author" : "U.S. Department of Agriculture", "year" : "1993", "journal-title" : "New England Agricultural Statistics" }, { "key" : "BIB36", "author" : "Warren", "year" : "1992", "unstructured" : "1992. Unanticipated consequences of banning a chemical: the case of Alar. Unpublished manuscript, Department of Work Environment, University of Massachusetts Lowell." }, { "key" : "BIB37", "author" : "Wood", "year" : "1990", "unstructured" : "1990. Memo to Poverty Lane, West Lebanon, New Hampshire, to members of the Risk Assessment/Risk Management Work Group, Keystone National Policy Dialogue on Food Safety, Oct. 26, 1990, cited in Rosenberg, B. 1996." 
} ], "container-title" : [ "American Journal of Industrial Medicine" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F1097-0274(200102)39:2%3C218::AID-AJIM1009%3E3.0.CO;2-4", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 4 ] ], "date-time" : "2017-08-04T20:22:16Z", "timestamp" : { "$numberLong" : "1501878136000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001 ] ] }, "references-count" : 37, "URL" : "http://dx.doi.org/10.1002/1097-0274(200102)39:2<218::aid-ajim1009>3.0.co;2-4", "relation" : { "cites" : [] }, "ISSN" : [ "0271-3586", "1097-0274" ], "issn-type" : [ { "value" : "0271-3586", "type" : "print" }, { "value" : "1097-0274", "type" : "electronic" } ], "subject" : [ "Public Health, Environmental and Occupational Health" ] }
{ "_id" : { "$oid" : "5a553b4388a035a45bf39150" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T17:10:15Z", "timestamp" : { "$numberLong" : "1508778615346" } }, "reference-count" : 22, "publisher" : "Elsevier BV", "issue" : "4", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 2001, 12, 1 ] ], "date-time" : "2001-12-01T00:00:00Z", "timestamp" : { "$numberLong" : "1007164800000" } }, "delay-in-days" : 0, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "International Journal of Hospitality Management" ], "published-print" : { "date-parts" : [ [ 2001, 12 ] ] }, "DOI" : "10.1016/s0278-4319(01)00020-2", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 7, 25 ] ], "date-time" : "2002-07-25T14:28:16Z", "timestamp" : { "$numberLong" : "1027607296000" } }, "page" : "325-338", "source" : "Crossref", "is-referenced-by-count" : 14, "title" : [ "Hotel management style: a study of employee perceptions and preferences" ], "prefix" : "10.1016", "volume" : "20", "author" : [ { "given" : "Margaret", "family" : "Deery", "affiliation" : [] }, { "given" : "Leo K", "family" : "Jago", "affiliation" : [] } ], "member" : "78", "container-title" : [ "International Journal of Hospitality Management" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S0278431901000202?httpAccept=text/xml", "content-type" : "text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S0278431901000202?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T21:24:09Z", "timestamp" : { "$numberLong" : "1497475449000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001, 12 ] ] }, "references-count" : 22, "alternative-id" : [ "S0278431901000202" ], "URL" : "http://dx.doi.org/10.1016/s0278-4319(01)00020-2", "ISSN" : [ "0278-4319" ], "issn-type" : [ { "value" : "0278-4319", "type" : "print" } ], "subject" : [ "Tourism, Leisure and Hospitality Management", "Strategy and Management" ] }
+{ "_id" : { "$oid" : "5a55176088a035a45bd8802c" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:31:47Z", "timestamp" : { "$numberLong" : "1508769107897" } }, "reference-count" : 1, "publisher" : "Hindawi Limited", "issue" : "2", "license" : [ { "URL" : "http://creativecommons.org/licenses/by/3.0/", "start" : { "date-parts" : [ [ 2002, 1, 1 ] ], "date-time" : "2002-01-01T00:00:00Z", "timestamp" : { "$numberLong" : "1009843200000" } }, "delay-in-days" : 0, "content-version" : "vor" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Comparative and Functional Genomics" ], "published-print" : { "date-parts" : [ [ 2002 ] ] }, "abstract" : "<jats:p>This brief meeting review summarizes the recommendations of NSF and NPGI funded bioinformaticians concerning the future requirements for plant bioinformatics systems and databases.</jats:p>", "DOI" : "10.1002/cfg.158", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T23:45:33Z", "timestamp" : { "$numberLong" : "1030319133000" } }, "page" : "176-176", "source" : "Crossref", "is-referenced-by-count" : 4, "title" : [ "Meeting Review: Plant Bioinformatics at the NSF and NPGI (PAMGX Satellite) Meetings" ], "prefix" : "10.1155", "volume" : "3", "author" : [ { "ORCID" : "http://orcid.org/0000-0002-4447-597X", "authenticated-orcid" : true, "given" : "Richard", "family" : "Bruskiewich", "affiliation" : [ { "name" : "International Rice Research Institute (IRRI), Metro Manila DAPO 7777, Philippines" } ] } ], "member" : "98", "reference" : [ { "key" : "10.1002/cfg.158-BIB1", "author" : "Brazma", "volume" : "29", "first-page" : "365", "year" : "2001", "journal-title" : "Nature Genetics", "DOI" : "10.1038/ng1201-365", "doi-asserted-by" : "crossref" } ], "container-title" : [ "Comparative and Functional Genomics" ], "link" : [ { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "application/pdf", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "similarity-checking" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 5 ] ], "date-time" : "2017-08-05T06:32:00Z", "timestamp" : { "$numberLong" : "1501914720000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002 ] ] }, "references-count" : 1, "alternative-id" : [ "250628" ], "URL" : "http://dx.doi.org/10.1002/cfg.158", "relation" : { "cites" : [] }, "ISSN" : [ "1531-6912", "1532-6268" ], "issn-type" : [ { "value" : "1531-6912", "type" : "print" }, { "value" : "1532-6268", "type" : "electronic" } ], "subject" : [ "Biotechnology", "Genetics", "Molecular Biology" ] }
{ "_id" : { "$oid" : "5a55176088a035a45bd8802c" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:31:47Z", "timestamp" : { "$numberLong" : "1508769107897" } }, "reference-count" : 1, "publisher" : "Hindawi Limited", "issue" : "2", "license" : [ { "URL" : "http://creativecommons.org/licenses/by/3.0/", "start" : { "date-parts" : [ [ 2002, 1, 1 ] ], "date-time" : "2002-01-01T00:00:00Z", "timestamp" : { "$numberLong" : "1009843200000" } }, "delay-in-days" : 0, "content-version" : "vor" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Comparative and Functional Genomics" ], "published-print" : { "date-parts" : [ [ 2002 ] ] }, "abstract" : "<jats:p>This brief meeting review summarizes the recommendations of NSF and NPGI funded bioinformaticians concerning the future requirements for plant bioinformatics systems and databases.</jats:p>", "DOI" : "10.1002/cfg.158", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T23:45:33Z", "timestamp" : { "$numberLong" : "1030319133000" } }, "page" : "176-176", "source" : "Crossref", "is-referenced-by-count" : 4, "title" : [ "Meeting Review: Plant Bioinformatics at the NSF and NPGI (PAMGX Satellite) Meetings" ], "prefix" : "10.1155", "volume" : "3", "author" : [ { "ORCID" : "http://orcid.org/0000-0002-4447-5978", "authenticated-orcid" : true, "given" : "Richard", "family" : "Bruskiewich", "affiliation" : [ { "name" : "International Rice Research Institute (IRRI), Metro Manila DAPO 7777, Philippines" } ] } ], "member" : "98", "reference" : [ { "key" : "10.1002/cfg.158-BIB1", "author" : "Brazma", "volume" : "29", "first-page" : "365", "year" : "2001", "journal-title" : "Nature Genetics", "DOI" : "10.1038/ng1201-365", "doi-asserted-by" : "crossref" } ], "container-title" : [ "Comparative and Functional Genomics" ], "link" : [ { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "application/pdf", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "similarity-checking" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 5 ] ], "date-time" : "2017-08-05T06:32:00Z", "timestamp" : { "$numberLong" : "1501914720000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002 ] ] }, "references-count" : 1, "alternative-id" : [ "250628" ], "URL" : "http://dx.doi.org/10.1002/cfg.158", "relation" : { "cites" : [] }, "ISSN" : [ "1531-6912", "1532-6268" ], "issn-type" : [ { "value" : "1531-6912", "type" : "print" }, { "value" : "1532-6268", "type" : "electronic" } ], "subject" : [ "Biotechnology", "Genetics", "Molecular Biology" ] }
{ "_id" : { "$oid" : "5a551fbe88a035a45bdf19fd" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T15:12:12Z", "timestamp" : { "$numberLong" : "1508771532055" } }, "reference-count" : 0, "publisher" : "Springer Nature", "issue" : "11", "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Skeletal Radiol" ], "published-print" : { "date-parts" : [ [ 2001, 11 ] ] }, "DOI" : "10.1007/s002560100423", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 10, 6 ] ], "date-time" : "2002-10-06T13:44:04Z", "timestamp" : { "$numberLong" : "1033911844000" } }, "page" : "643-647", "source" : "Crossref", "is-referenced-by-count" : 2, "title" : [ "Unilateral osteonecrosis in a patient with bilateral os centrale carpi: imaging findings" ], "prefix" : "10.1007", "volume" : "30", "author" : [ { "given" : "F.", "family" : "Abascal", "affiliation" : [] }, { "given" : "L.", "family" : "Cerezal", "affiliation" : [] }, { "given" : "F.", "family" : "del Piñal", "affiliation" : [] }, { "given" : "R.", "family" : "García-Valtuille", "affiliation" : [] }, { "given" : "A.", "family" : "García-Valtuille", "affiliation" : [] }, { "given" : "A.", "family" : "Canga", "affiliation" : [] }, { "given" : "J.", "family" : "Torcida", "affiliation" : [] } ], "member" : "297", "published-online" : { "date-parts" : [ [ 2001, 9, 14 ] ] }, "container-title" : [ "Skeletal Radiology" ], "link" : [ { "URL" : "http://link.springer.com/content/pdf/10.1007/s002560100423", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "similarity-checking" } ], "deposited" : { "date-parts" : [ [ 2014, 4, 9 ] ], "date-time" : "2014-04-09T05:21:52Z", "timestamp" : { "$numberLong" : "1397020912000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001, 9, 14 ] ] }, "references-count" : 0, "alternative-id" : [ "423" ], "URL" : "http://dx.doi.org/10.1007/s002560100423", "ISSN" : [ "0364-2348", "1432-2161" ], "issn-type" : [ { "value" : "0364-2348", "type" : "print" }, { "value" : "1432-2161", "type" : "electronic" } ], "subject" : [ "Radiology Nuclear Medicine and imaging" ] }
diff --git a/python/tests/importer.py b/python/tests/importer.py
index 190acbed..4d49e794 100644
--- a/python/tests/importer.py
+++ b/python/tests/importer.py
@@ -13,3 +13,23 @@ def test_issnl_mapping_lookup():
assert fi.issn2issnl('9999-0027') == None
assert fi.lookup_issnl('9999-9999') == None
+
+def test_identifiers():
+
+ with open('tests/files/ISSN-to-ISSN-L.snip.txt', 'r') as issn_file:
+ fi = FatcatImporter("http://localhost:9411/v0", issn_file)
+
+ assert fi.is_issnl("1234-5678") == True
+ assert fi.is_issnl("1234-5678.") == False
+ assert fi.is_issnl("12345678") == False
+ assert fi.is_issnl("1-2345678") == False
+
+ assert fi.is_doi("10.1234/56789") == True
+ assert fi.is_doi("101234/56789") == False
+ assert fi.is_doi("10.1234_56789") == False
+
+ assert fi.is_orcid("0000-0003-3118-6591") == True
+ assert fi.is_orcid("0000-00x3-3118-659") == False
+ assert fi.is_orcid("0000-00033118-659") == False
+ assert fi.is_orcid("0000-0003-3118-659.") == False
+
diff --git a/python/tests/orcid.py b/python/tests/orcid.py
index 00748972..e07583ac 100644
--- a/python/tests/orcid.py
+++ b/python/tests/orcid.py
@@ -13,6 +13,10 @@ def test_orcid_importer_batch(orcid_importer):
with open('tests/files/0000-0001-8254-7103.json', 'r') as f:
orcid_importer.process_batch(f)
+def test_orcid_importer_badid(orcid_importer):
+ with open('tests/files/0000-0001-8254-710X.json', 'r') as f:
+ orcid_importer.process_batch(f)
+
def test_orcid_importer(orcid_importer):
with open('tests/files/0000-0001-8254-7103.json', 'r') as f:
orcid_importer.process_source(f)