Diffstat (limited to 'python/fatcat_tools')
21 files changed, 121 insertions, 121 deletions
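Every hunk below is the same mechanical substitution: the generated API client package is renamed from `fatcat_client` to `fatcat_openapi_client`, and all imports and attribute references under `python/fatcat_tools` are updated to match. For orientation, this is the resulting usage pattern — a minimal sketch; the host URI and DOI are placeholders (assumptions), not values from this patch:

    import fatcat_openapi_client
    from fatcat_openapi_client.rest import ApiException

    conf = fatcat_openapi_client.Configuration()
    conf.host = "https://api.fatcat.wiki/v0"  # placeholder host, not from this patch
    api = fatcat_openapi_client.DefaultApi(fatcat_openapi_client.ApiClient(conf))
    try:
        release = api.lookup_release(doi="10.1234/example")  # hypothetical DOI
    except ApiException as err:
        if err.status != 404:
            raise
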
diff --git a/python/fatcat_tools/api_auth.py b/python/fatcat_tools/api_auth.py
index 3c643cea..add03399 100644
--- a/python/fatcat_tools/api_auth.py
+++ b/python/fatcat_tools/api_auth.py
@@ -1,7 +1,7 @@
 import os, sys

-import fatcat_client
-from fatcat_client.rest import ApiException
+import fatcat_openapi_client
+from fatcat_openapi_client.rest import ApiException


 def public_api(host_uri):
@@ -10,9 +10,9 @@ def public_api(host_uri):
     if the API isn't going to be used, so it's important that it doesn't try
     to actually connect to the API host or something.
     """
-    conf = fatcat_client.Configuration()
+    conf = fatcat_openapi_client.Configuration()
     conf.host = host_uri
-    return fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))
+    return fatcat_openapi_client.DefaultApi(fatcat_openapi_client.ApiClient(conf))

 def authenticated_api(host_uri, token=None):
     """
@@ -20,7 +20,7 @@ def authenticated_api(host_uri, token=None):
     is needed, so it does try to connect and verify credentials.
     """
-    conf = fatcat_client.Configuration()
+    conf = fatcat_openapi_client.Configuration()
     conf.host = host_uri
     if not token:
         token = os.environ['FATCAT_API_AUTH_TOKEN']
@@ -31,7 +31,7 @@ def authenticated_api(host_uri, token=None):
     conf.api_key["Authorization"] = token
     conf.api_key_prefix["Authorization"] = "Bearer"
-    api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))
+    api = fatcat_openapi_client.DefaultApi(fatcat_openapi_client.ApiClient(conf))

     # verify up front that auth is working
     api.auth_check()
diff --git a/python/fatcat_tools/importers/arabesque.py b/python/fatcat_tools/importers/arabesque.py
index e9376d96..65597d5c 100644
--- a/python/fatcat_tools/importers/arabesque.py
+++ b/python/fatcat_tools/importers/arabesque.py
@@ -4,7 +4,7 @@ import json
 import base64
 import sqlite3
 import itertools
-import fatcat_client
+import fatcat_openapi_client

 from .common import EntityImporter, clean, make_rel_url, SANE_MAX_RELEASES, SANE_MAX_URLS

@@ -105,7 +105,7 @@ class ArabesqueMatchImporter(EntityImporter):
         # lookup extid
         try:
             re = self.api.lookup_release(**{self.extid_type: extid})
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status == 404:
                 # bail on 404 (release not in DB)
                 self.counts['skip-extid-not-found'] += 1
@@ -128,13 +128,13 @@ class ArabesqueMatchImporter(EntityImporter):
                 row['final_url'])
             urls = [url, ("webarchive", wayback)]

-        urls = [fatcat_client.FileUrl(rel=rel, url=url) for (rel, url) in urls]
+        urls = [fatcat_openapi_client.FileUrl(rel=rel, url=url) for (rel, url) in urls]

         if len(urls) > SANE_MAX_URLS:
             self.counts['skip-too-many-url'] += 1
             return None

-        fe = fatcat_client.FileEntity(
+        fe = fatcat_openapi_client.FileEntity(
             sha1=b32_hex(row['final_sha1']),
             mimetype=row['final_mimetype'] or self.default_mimetype,
             release_ids=[re.ident],
@@ -147,7 +147,7 @@ class ArabesqueMatchImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_file(sha1=fe.sha1)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -177,7 +177,7 @@ class ArabesqueMatchImporter(EntityImporter):
         # merge the existing into this one and update
         existing.urls = list(set([(u.rel, u.url) for u in fe.urls + existing.urls]))
-        existing.urls = [fatcat_client.FileUrl(rel=rel, url=url) for (rel, url) in existing.urls]
+        existing.urls = [fatcat_openapi_client.FileUrl(rel=rel, url=url) for (rel, url) in existing.urls]

         if len(existing.urls) > SANE_MAX_URLS:
             self.counts['skip-update-too-many-url'] += 1
             return None

@@ -192,8 +192,8 @@ class ArabesqueMatchImporter(EntityImporter):
         return False

     def insert_batch(self, batch):
-        self.api.create_file_auto_batch(fatcat_client.FileAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_file_auto_batch(fatcat_openapi_client.FileAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/arxiv.py b/python/fatcat_tools/importers/arxiv.py
index 71b2d134..c69ee16a 100644
--- a/python/fatcat_tools/importers/arxiv.py
+++ b/python/fatcat_tools/importers/arxiv.py
@@ -6,7 +6,7 @@ import datetime
 from bs4 import BeautifulSoup
 from pylatexenc.latex2text import LatexNodes2Text

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean
 from .crossref import lookup_license_slug

@@ -120,7 +120,7 @@ class ArxivRawImporter(EntityImporter):
         doi = None
         title = latex_to_text(metadata.title.string)
         authors = parse_arxiv_authors(metadata.authors.string)
-        contribs = [fatcat_client.ReleaseContrib(index=i, raw_name=a, role='author') for i, a in enumerate(authors)]
+        contribs = [fatcat_openapi_client.ReleaseContrib(index=i, raw_name=a, role='author') for i, a in enumerate(authors)]

         lang = "en"     # the vast majority in english
         if metadata.comments and metadata.comments.string:
@@ -178,12 +178,12 @@ class ArxivRawImporter(EntityImporter):
             if '$' in abst or '{' in abst:
                 mime = "application/x-latex"
                 abst_plain = latex_to_text(abst)
-                abstracts.append(fatcat_client.ReleaseAbstract(content=abst_plain, mimetype="text/plain", lang="en"))
+                abstracts.append(fatcat_openapi_client.ReleaseAbstract(content=abst_plain, mimetype="text/plain", lang="en"))
             else:
                 mime = "text/plain"
-            abstracts.append(fatcat_client.ReleaseAbstract(content=abst, mimetype=mime, lang="en"))
+            abstracts.append(fatcat_openapi_client.ReleaseAbstract(content=abst, mimetype=mime, lang="en"))
             if orig:
-                abstracts.append(fatcat_client.ReleaseAbstract(content=orig, mimetype=mime))
+                abstracts.append(fatcat_openapi_client.ReleaseAbstract(content=orig, mimetype=mime))
                 # indicates that fulltext probably isn't english either
                 if lang == 'en':
                     lang = None
@@ -207,7 +207,7 @@ class ArxivRawImporter(EntityImporter):
             release_date = version.date.string.strip()
             release_date = datetime.datetime.strptime(release_date, "%a, %d %b %Y %H:%M:%S %Z").date()
             # TODO: source_type?
-            versions.append(fatcat_client.ReleaseEntity(
+            versions.append(fatcat_openapi_client.ReleaseEntity(
                 work_id=None,
                 title=title,
                 #original_title
@@ -216,7 +216,7 @@ class ArxivRawImporter(EntityImporter):
                 release_stage='submitted',
                 release_date=release_date.isoformat(),
                 release_year=release_date.year,
-                ext_ids=fatcat_client.ReleaseExtIds(
+                ext_ids=fatcat_openapi_client.ReleaseExtIds(
                     arxiv=arxiv_id,
                 ),
                 number=number,
@@ -270,7 +270,7 @@ class ArxivRawImporter(EntityImporter):
         existing_doi = None
         try:
             existing = self.api.lookup_release(arxiv=v.ext_ids.arxiv)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -281,7 +281,7 @@ class ArxivRawImporter(EntityImporter):
         if v.ext_ids.doi:
             try:
                 existing_doi = self.api.lookup_release(doi=v.ext_ids.doi)
-            except fatcat_client.rest.ApiException as err:
+            except fatcat_openapi_client.rest.ApiException as err:
                 if err.status != 404:
                     raise err
             if existing_doi:
@@ -325,8 +325,8 @@ class ArxivRawImporter(EntityImporter):
         # there is no batch/bezerk mode for arxiv importer, except for testing
         if self._test_override:
             for batch in batch_batch:
-                self.api.create_release_auto_batch(fatcat_client.ReleaseAutoBatch(
-                    editgroup=fatcat_client.Editgroup(
+                self.api.create_release_auto_batch(fatcat_openapi_client.ReleaseAutoBatch(
+                    editgroup=fatcat_openapi_client.Editgroup(
                         description=self.editgroup_description,
                         extra=self.editgroup_extra),
                     entity_list=batch))
diff --git a/python/fatcat_tools/importers/cdl_dash_dat.py b/python/fatcat_tools/importers/cdl_dash_dat.py
index aa7fab2b..536c013b 100755
--- a/python/fatcat_tools/importers/cdl_dash_dat.py
+++ b/python/fatcat_tools/importers/cdl_dash_dat.py
@@ -9,8 +9,8 @@ import hashlib
 import mimetypes
 import subprocess

-import fatcat_client
-from fatcat_client import *
+import fatcat_openapi_client
+from fatcat_openapi_client import *
 from .common import clean
 from .crossref import lookup_license_slug

@@ -176,7 +176,7 @@ def auto_cdl_dash_dat(api, dat_path, release_id=None, editgroup_id=None):
     try:
         r = api.lookup_release(doi=release.ext_ids.doi)
         release_id = r.ident
-    except fatcat_client.rest.ApiException:
+    except fatcat_openapi_client.rest.ApiException:
         pass
     if not release_id:
         edit = api.create_release(eg.editgroup_id, release)
diff --git a/python/fatcat_tools/importers/chocula.py b/python/fatcat_tools/importers/chocula.py
index d7044ff4..6915ba98 100644
--- a/python/fatcat_tools/importers/chocula.py
+++ b/python/fatcat_tools/importers/chocula.py
@@ -2,7 +2,7 @@ import sys
 import json
 import itertools

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean

@@ -66,7 +66,7 @@ class ChoculaImporter(EntityImporter):
         elif 'journal ' in name.lower():
             container_type = 'journal'

-        ce = fatcat_client.ContainerEntity(
+        ce = fatcat_openapi_client.ContainerEntity(
             issnl=row['issnl'],
             ident=row['ident'],
             name=name,
@@ -82,7 +82,7 @@ class ChoculaImporter(EntityImporter):
         if ce.ident:
             try:
                 existing = self.api.get_container(ce.ident)
-            except fatcat_client.rest.ApiException as err:
+            except fatcat_openapi_client.rest.ApiException as err:
                 if err.status != 404:
                     raise err
                 self.counts['exists'] += 1
@@ -97,7 +97,7 @@ class ChoculaImporter(EntityImporter):
             # check if existing by ISSN-L
             try:
                 existing = self.api.lookup_container(issnl=ce.issnl)
-            except fatcat_client.rest.ApiException as err:
+            except fatcat_openapi_client.rest.ApiException as err:
                 if err.status != 404:
                     raise err
             if existing:
@@ -157,8 +157,8 @@ class ChoculaImporter(EntityImporter):
         raise NotImplementedError

     def insert_batch(self, batch):
-        self.api.create_container_auto_batch(fatcat_client.ContainerAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_container_auto_batch(fatcat_openapi_client.ContainerAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/common.py b/python/fatcat_tools/importers/common.py
index 2f677be4..093569e1 100644
--- a/python/fatcat_tools/importers/common.py
+++ b/python/fatcat_tools/importers/common.py
@@ -13,8 +13,8 @@ import xml.etree.ElementTree as ET
 import pykafka
 from bs4 import BeautifulSoup

-import fatcat_client
-from fatcat_client.rest import ApiException
+import fatcat_openapi_client
+from fatcat_openapi_client.rest import ApiException

 DATE_FMT = "%Y-%m-%d"
@@ -337,7 +337,7 @@ class EntityImporter:
         if not self._editgroup_id:
             eg = self.api.create_editgroup(
-                fatcat_client.Editgroup(
+                fatcat_openapi_client.Editgroup(
                     description=self.editgroup_description,
                     extra=self.editgroup_extra))
             self._editgroup_id = eg.editgroup_id
diff --git a/python/fatcat_tools/importers/crossref.py b/python/fatcat_tools/importers/crossref.py
index 4074dfb8..faee6aac 100644
--- a/python/fatcat_tools/importers/crossref.py
+++ b/python/fatcat_tools/importers/crossref.py
@@ -5,7 +5,7 @@ import sqlite3
 import datetime
 import itertools
 import subprocess
-import fatcat_client
+import fatcat_openapi_client

 from .common import EntityImporter, clean

@@ -215,7 +215,7 @@ class CrossrefImporter(EntityImporter):
                 extra = None
             assert ctype in ("author", "editor", "translator")
             raw_name = clean(raw_name)
-            contribs.append(fatcat_client.ReleaseContrib(
+            contribs.append(fatcat_openapi_client.ReleaseContrib(
                 creator_id=creator_id,
                 index=index,
                 raw_name=raw_name,
@@ -244,7 +244,7 @@ class CrossrefImporter(EntityImporter):
             container_name = None
         if (container_id is None and self.create_containers and (issnl is not None)
                 and container_name):
-            ce = fatcat_client.ContainerEntity(
+            ce = fatcat_openapi_client.ContainerEntity(
                 issnl=issnl,
                 publisher=publisher,
                 container_type=self.map_container_type(release_type),
@@ -300,7 +300,7 @@ class CrossrefImporter(EntityImporter):
                     ref_extra[k] = clean(rm[k])
             if not ref_extra:
                 ref_extra = None
-            refs.append(fatcat_client.ReleaseRef(
+            refs.append(fatcat_openapi_client.ReleaseRef(
                 index=i,
                 # doing lookups would be a second import pass
                 target_release_id=None,
@@ -316,7 +316,7 @@ class CrossrefImporter(EntityImporter):
         abstracts = []
         abstract = clean(obj.get('abstract'))
         if abstract and len(abstract) > 10:
-            abstracts.append(fatcat_client.ReleaseAbstract(
+            abstracts.append(fatcat_openapi_client.ReleaseAbstract(
                 mimetype="application/xml+jats",
                 content=abstract))

@@ -420,7 +420,7 @@ class CrossrefImporter(EntityImporter):
         if not extra:
             extra = None

-        re = fatcat_client.ReleaseEntity(
+        re = fatcat_openapi_client.ReleaseEntity(
             work_id=None,
             container_id=container_id,
             title=title,
@@ -431,7 +431,7 @@ class CrossrefImporter(EntityImporter):
             release_date=release_date,
             release_year=release_year,
             publisher=publisher,
-            ext_ids=fatcat_client.ReleaseExtIds(
+            ext_ids=fatcat_openapi_client.ReleaseExtIds(
                 doi=obj['DOI'].lower(),
                 pmid=extids['pmid'],
                 pmcid=extids['pmcid'],
@@ -459,7 +459,7 @@ class CrossrefImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_release(doi=re.ext_ids.doi)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err
         # doesn't exist, need to update
@@ -474,8 +474,8 @@ class CrossrefImporter(EntityImporter):
         return True

     def insert_batch(self, batch):
-        self.api.create_release_auto_batch(fatcat_client.ReleaseAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_release_auto_batch(fatcat_openapi_client.ReleaseAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/grobid_metadata.py b/python/fatcat_tools/importers/grobid_metadata.py
index ea627dd6..2077eae4 100644
--- a/python/fatcat_tools/importers/grobid_metadata.py
+++ b/python/fatcat_tools/importers/grobid_metadata.py
@@ -4,7 +4,7 @@ import sys
 import json
 import base64
 import datetime
-import fatcat_client
+import fatcat_openapi_client

 from .common import EntityImporter, clean, make_rel_url

 MAX_ABSTRACT_BYTES=4096
@@ -56,7 +56,7 @@ class GrobidMetadataImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_file(sha1=fe.sha1)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -82,7 +82,7 @@ class GrobidMetadataImporter(EntityImporter):
         abstract = obj.get('abstract')
         if abstract and len(abstract) < MAX_ABSTRACT_BYTES and len(abstract) > 10:
-            abobj = fatcat_client.ReleaseAbstract(
+            abobj = fatcat_openapi_client.ReleaseAbstract(
                 mimetype="text/plain",
                 content=clean(obj.get('abstract')))
             abstracts = [abobj]
@@ -91,7 +91,7 @@ class GrobidMetadataImporter(EntityImporter):
         contribs = []
         for i, a in enumerate(obj.get('authors', [])):
-            contribs.append(fatcat_client.ReleaseContrib(
+            contribs.append(fatcat_openapi_client.ReleaseContrib(
                 index=i,
                 raw_name=clean(a['name']),
                 given_name=clean(a.get('given_name')),
@@ -116,7 +116,7 @@ class GrobidMetadataImporter(EntityImporter):
             if not cite_extra:
                 cite_extra = None
-            refs.append(fatcat_client.ReleaseRef(
+            refs.append(fatcat_openapi_client.ReleaseRef(
                 key=clean(raw.get('id')),
                 year=year,
                 title=clean(raw['title']),
@@ -147,7 +147,7 @@ class GrobidMetadataImporter(EntityImporter):
         if not title or len(title) < 2:
             return None

-        re = fatcat_client.ReleaseEntity(
+        re = fatcat_openapi_client.ReleaseEntity(
             title=title,
             release_type="article-journal",
             release_date=release_date,
@@ -158,7 +158,7 @@ class GrobidMetadataImporter(EntityImporter):
             volume=clean(obj['journal'].get('volume')),
             issue=clean(obj['journal'].get('issue')),
             abstracts=abstracts,
-            ext_ids=fatcat_client.ReleaseExtIds(),
+            ext_ids=fatcat_openapi_client.ReleaseExtIds(),
             extra=extra)
         return re

@@ -166,7 +166,7 @@ class GrobidMetadataImporter(EntityImporter):
         sha1 = base64.b16encode(base64.b32decode(sha1_key.replace('sha1:', ''))).decode('ascii').lower()

-        fe = fatcat_client.FileEntity(
+        fe = fatcat_openapi_client.FileEntity(
             sha1=sha1,
             size=int(file_size),
             mimetype=mimetype,
@@ -181,10 +181,10 @@ class GrobidMetadataImporter(EntityImporter):
                 cdx['dt'],
                 original)
             fe.urls.append(
-                fatcat_client.FileUrl(url=wayback, rel="webarchive"))
+                fatcat_openapi_client.FileUrl(url=wayback, rel="webarchive"))
         original_url = make_rel_url(original, default_link_rel=self.default_link_rel)
         if original_url is not None:
-            fe.urls.append(fatcat_client.FileUrl(rel=original_url[0], url=original_url[1]))
+            fe.urls.append(fatcat_openapi_client.FileUrl(rel=original_url[0], url=original_url[1]))
         return fe

@@ -193,8 +193,8 @@ class GrobidMetadataImporter(EntityImporter):
         return True

     def insert_batch(self, batch):
-        self.api.create_file_auto_batch(fatcat_client.FileAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_file_auto_batch(fatcat_openapi_client.FileAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/jalc.py b/python/fatcat_tools/importers/jalc.py
index c1c13bb7..a0e0086b 100644
--- a/python/fatcat_tools/importers/jalc.py
+++ b/python/fatcat_tools/importers/jalc.py
@@ -7,7 +7,7 @@ import itertools
 import subprocess
 from bs4 import BeautifulSoup

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean, is_cjk, DATE_FMT

@@ -48,7 +48,7 @@ def parse_jalc_persons(raw_persons):
         if lang == 'en' and surname and given_name:
             # english names order is flipped
             name = "{} {}".format(given_name, surname)
-        rc = fatcat_client.ReleaseContrib(
+        rc = fatcat_openapi_client.ReleaseContrib(
             raw_name=name,
             surname=surname,
             given_name=given_name,
@@ -269,7 +269,7 @@ class JalcImporter(EntityImporter):
             # extra: issnp, issne, original_name, languages, country
             container_extra['country'] = 'jp'
             container_extra['languages'] = ['ja']
-            ce = fatcat_client.ContainerEntity(
+            ce = fatcat_openapi_client.ContainerEntity(
                 name=container_name,
                 container_type='journal',
                 publisher=publisher,
@@ -303,7 +303,7 @@ class JalcImporter(EntityImporter):
         if not title:
             return None

-        re = fatcat_client.ReleaseEntity(
+        re = fatcat_openapi_client.ReleaseEntity(
             work_id=None,
             title=title,
             original_title=clean(original_title),
@@ -311,7 +311,7 @@ class JalcImporter(EntityImporter):
             release_stage='published',
             release_date=release_date,
             release_year=release_year,
-            ext_ids=fatcat_client.ReleaseExtIds(
+            ext_ids=fatcat_openapi_client.ReleaseExtIds(
                 doi=doi,
                 pmid=extids['pmid'],
                 pmcid=extids['pmcid'],
@@ -338,7 +338,7 @@ class JalcImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_release(doi=re.ext_ids.doi)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err
         # doesn't exist, need to insert
@@ -353,8 +353,8 @@ class JalcImporter(EntityImporter):
         return True

     def insert_batch(self, batch):
-        self.api.create_release_auto_batch(fatcat_client.ReleaseAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_release_auto_batch(fatcat_openapi_client.ReleaseAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/journal_metadata.py b/python/fatcat_tools/importers/journal_metadata.py
index d24b4103..d439c80a 100644
--- a/python/fatcat_tools/importers/journal_metadata.py
+++ b/python/fatcat_tools/importers/journal_metadata.py
@@ -2,7 +2,7 @@ import sys
 import json
 import itertools

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean

@@ -93,7 +93,7 @@ class JournalMetadataImporter(EntityImporter):
         if not name:
             return None

-        ce = fatcat_client.ContainerEntity(
+        ce = fatcat_openapi_client.ContainerEntity(
             issnl=row['issnl'],
             container_type=None, # TODO
             name=name,
@@ -107,7 +107,7 @@ class JournalMetadataImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_container(issnl=ce.issnl)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -133,8 +133,8 @@ class JournalMetadataImporter(EntityImporter):
         raise NotImplementedError

     def insert_batch(self, batch):
-        self.api.create_container_auto_batch(fatcat_client.ContainerAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_container_auto_batch(fatcat_openapi_client.ContainerAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/jstor.py b/python/fatcat_tools/importers/jstor.py
index 5016e813..5ff1ecd9 100644
--- a/python/fatcat_tools/importers/jstor.py
+++ b/python/fatcat_tools/importers/jstor.py
@@ -5,7 +5,7 @@ import datetime
 import warnings
 from bs4 import BeautifulSoup

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean, LANG_MAP_MARC
 from .crossref import CONTAINER_TYPE_MAP

@@ -114,7 +114,7 @@ class JstorImporter(EntityImporter):
         # create container if it doesn't exist
         if (container_id is None and self.create_containers and (issnl is not None)
                 and journal_title):
-            ce = fatcat_client.ContainerEntity(
+            ce = fatcat_openapi_client.ContainerEntity(
                 issnl=issnl,
                 publisher=publisher,
                 container_type=self.map_container_type(release_type),
@@ -158,7 +158,7 @@ class JstorImporter(EntityImporter):
             role = JSTOR_CONTRIB_MAP.get(c.get('contrib-type', 'author'))
             if not role and c.get('contrib-type'):
                 sys.stderr.write("NOT IN JSTOR_CONTRIB_MAP: {}\n".format(c['contrib-type']))
-            contribs.append(fatcat_client.ReleaseContrib(
+            contribs.append(fatcat_openapi_client.ReleaseContrib(
                 role=role,
                 raw_name=raw_name,
                 given_name=given,
@@ -228,7 +228,7 @@ class JstorImporter(EntityImporter):
         if not extra:
             extra = None

-        re = fatcat_client.ReleaseEntity(
+        re = fatcat_openapi_client.ReleaseEntity(
             #work_id
             title=title,
             #original_title
@@ -236,7 +236,7 @@ class JstorImporter(EntityImporter):
             release_stage=release_stage,
             release_date=release_date,
             release_year=release_year,
-            ext_ids=fatcat_client.ReleaseExtIds(
+            ext_ids=fatcat_openapi_client.ReleaseExtIds(
                 doi=doi,
                 jstor=jstor_id,
             ),
@@ -270,7 +270,7 @@ class JstorImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_release(jstor=re.ext_ids.jstor)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -282,7 +282,7 @@ class JstorImporter(EntityImporter):
             doi = "10.2307/{}".format(re.ext_ids.jstor)
             try:
                 existing = self.api.lookup_release(doi=doi)
-            except fatcat_client.rest.ApiException as err:
+            except fatcat_openapi_client.rest.ApiException as err:
                 if err.status != 404:
                     raise err

@@ -309,8 +309,8 @@ class JstorImporter(EntityImporter):
         return True

     def insert_batch(self, batch):
-        self.api.create_release_auto_batch(fatcat_client.ReleaseAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_release_auto_batch(fatcat_openapi_client.ReleaseAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/matched.py b/python/fatcat_tools/importers/matched.py
index a94584cd..dbb78ec9 100644
--- a/python/fatcat_tools/importers/matched.py
+++ b/python/fatcat_tools/importers/matched.py
@@ -3,7 +3,7 @@ import sys
 import json
 import sqlite3
 import itertools
-import fatcat_client
+import fatcat_openapi_client

 from fatcat_tools.normal import *
 from .common import EntityImporter, clean, make_rel_url, SANE_MAX_RELEASES, SANE_MAX_URLS

@@ -61,7 +61,7 @@ class MatchedImporter(EntityImporter):
             return None
         try:
             re = self.api.lookup_release(doi=doi)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err
             re = None
@@ -77,7 +77,7 @@ class MatchedImporter(EntityImporter):
             if extid:
                 try:
                     re = self.api.lookup_release(**{extid_type: extid})
-                except fatcat_client.rest.ApiException as err:
+                except fatcat_openapi_client.rest.ApiException as err:
                     if err.status != 404:
                         raise err
                     re = None
@@ -110,7 +110,7 @@ class MatchedImporter(EntityImporter):
             url = make_rel_url(original, default_link_rel=self.default_link_rel)
             if url != None:
                 urls.add(url)
-        urls = [fatcat_client.FileUrl(rel=rel, url=url) for (rel, url) in urls]
+        urls = [fatcat_openapi_client.FileUrl(rel=rel, url=url) for (rel, url) in urls]
         if len(urls) == 0:
             self.counts['skip-no-urls'] += 1
             return None
@@ -127,7 +127,7 @@ class MatchedImporter(EntityImporter):
         if urls[0].url.endswith('.pdf'):
             mimetype = 'application/pdf'

-        fe = fatcat_client.FileEntity(
+        fe = fatcat_openapi_client.FileEntity(
             md5=obj.get('md5'),
             sha1=obj['sha1'],
             sha256=obj.get('sha256'),
@@ -143,7 +143,7 @@ class MatchedImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_file(sha1=fe.sha1)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -183,7 +183,7 @@ class MatchedImporter(EntityImporter):
         # merge the existing into this one and update
         existing.urls = list(set([(u.rel, u.url) for u in fe.urls + existing.urls]))
-        existing.urls = [fatcat_client.FileUrl(rel=rel, url=url) for (rel, url) in existing.urls]
+        existing.urls = [fatcat_openapi_client.FileUrl(rel=rel, url=url) for (rel, url) in existing.urls]

         if len(existing.urls) > SANE_MAX_URLS:
             self.counts['skip-update-too-many-url'] += 1
@@ -203,8 +203,8 @@ class MatchedImporter(EntityImporter):
         return False

     def insert_batch(self, batch):
-        self.api.create_file_auto_batch(fatcat_client.FileAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_file_auto_batch(fatcat_openapi_client.FileAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/orcid.py b/python/fatcat_tools/importers/orcid.py
index 0bdd5d19..0a2c8610 100644
--- a/python/fatcat_tools/importers/orcid.py
+++ b/python/fatcat_tools/importers/orcid.py
@@ -2,7 +2,7 @@ import sys
 import json
 import itertools

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean

 def value_or_none(e):
@@ -64,7 +64,7 @@ class OrcidImporter(EntityImporter):
         if not display:
             # must have *some* name
             return None
-        ce = fatcat_client.CreatorEntity(
+        ce = fatcat_openapi_client.CreatorEntity(
             orcid=orcid,
             given_name=clean(given),
             surname=clean(sur),
@@ -76,7 +76,7 @@ class OrcidImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_creator(orcid=raw_record.orcid)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -89,8 +89,8 @@ class OrcidImporter(EntityImporter):
         return True

     def insert_batch(self, batch):
-        self.api.create_creator_auto_batch(fatcat_client.CreatorAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_creator_auto_batch(fatcat_openapi_client.CreatorAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/pubmed.py b/python/fatcat_tools/importers/pubmed.py
index 4bfbbc79..80cf986c 100644
--- a/python/fatcat_tools/importers/pubmed.py
+++ b/python/fatcat_tools/importers/pubmed.py
@@ -7,7 +7,7 @@ import warnings
 from bs4 import BeautifulSoup
 from bs4.element import NavigableString

-import fatcat_client
+import fatcat_openapi_client
 from .common import EntityImporter, clean, LANG_MAP_MARC

 # from: https://www.ncbi.nlm.nih.gov/books/NBK3827/table/pubmedhelp.T.publication_types/?report=objectonly
@@ -508,7 +508,7 @@ class PubmedImporter(EntityImporter):
                 and container_name):
             # name, type, publisher, issnl
             # extra: issnp, issne, original_name, languages, country
-            ce = fatcat_client.ContainerEntity(
+            ce = fatcat_openapi_client.ContainerEntity(
                 name=container_name,
                 container_type='journal',
                 #NOTE: publisher not included
@@ -532,7 +532,7 @@ class PubmedImporter(EntityImporter):
         primary_abstract = medline.find("Abstract")
         if primary_abstract and primary_abstract.AbstractText.get('NlmCategory'):
             joined = "\n".join([m.get_text() for m in primary_abstract.find_all("AbstractText")])
-            abst = fatcat_client.ReleaseAbstract(
+            abst = fatcat_openapi_client.ReleaseAbstract(
                 content=joined,
                 mimetype="text/plain",
                 lang="en",
@@ -541,7 +541,7 @@ class PubmedImporter(EntityImporter):
                 abstracts.append(abst)
         elif primary_abstract:
             for abstract in primary_abstract.find_all("AbstractText"):
-                abst = fatcat_client.ReleaseAbstract(
+                abst = fatcat_openapi_client.ReleaseAbstract(
                     content=abstract.get_text().strip(),
                     mimetype="text/plain",
                     lang="en",
@@ -549,7 +549,7 @@ class PubmedImporter(EntityImporter):
                 if abst.content:
                     abstracts.append(abst)
                 if abstract.find('math'):
-                    abst = fatcat_client.ReleaseAbstract(
+                    abst = fatcat_openapi_client.ReleaseAbstract(
                         # strip the <AbstractText> tags
                         content=str(abstract)[14:-15],
                         mimetype="application/mathml+xml",
@@ -562,7 +562,7 @@ class PubmedImporter(EntityImporter):
             lang = "en"
             if other.get('Language'):
                 lang = LANG_MAP_MARC.get(other['Language'])
-            abst = fatcat_client.ReleaseAbstract(
+            abst = fatcat_openapi_client.ReleaseAbstract(
                 content=other.AbstractText.get_text().strip(),
                 mimetype="text/plain",
                 lang=lang,
@@ -617,7 +617,7 @@ class PubmedImporter(EntityImporter):
                 if author.find("EqualContrib"):
                     # TODO: schema for this?
                     contrib_extra['equal'] = True
-                contribs.append(fatcat_client.ReleaseContrib(
+                contribs.append(fatcat_openapi_client.ReleaseContrib(
                     raw_name=raw_name,
                     given_name=given_name,
                     surname=surname,
@@ -628,7 +628,7 @@ class PubmedImporter(EntityImporter):
                 ))

         if medline.AuthorList['CompleteYN'] == 'N':
-            contribs.append(fatcat_client.ReleaseContrib(raw_name="et al."))
+            contribs.append(fatcat_openapi_client.ReleaseContrib(raw_name="et al."))

         for i, contrib in enumerate(contribs):
             if contrib.raw_name != "et al.":
@@ -659,7 +659,7 @@ class PubmedImporter(EntityImporter):
                     ref_extra['unstructured'] = ref_raw.string
                 if not ref_extra:
                     ref_extra = None
-                refs.append(fatcat_client.ReleaseRef(
+                refs.append(fatcat_openapi_client.ReleaseRef(
                     target_release_id=ref_release_id,
                     extra=ref_extra,
                 ))
@@ -682,7 +682,7 @@ class PubmedImporter(EntityImporter):
         if not title:
             return None

-        re = fatcat_client.ReleaseEntity(
+        re = fatcat_openapi_client.ReleaseEntity(
             work_id=None,
             title=title,
             original_title=clean(original_title),
@@ -691,7 +691,7 @@ class PubmedImporter(EntityImporter):
             release_date=release_date,
             release_year=release_year,
             withdrawn_status=withdrawn_status,
-            ext_ids=fatcat_client.ReleaseExtIds(
+            ext_ids=fatcat_openapi_client.ReleaseExtIds(
                 doi=doi,
                 pmid=pmid,
                 pmcid=pmcid,
@@ -717,7 +717,7 @@ class PubmedImporter(EntityImporter):
         existing = None
         try:
             existing = self.api.lookup_release(pmid=re.ext_ids.pmid)
-        except fatcat_client.rest.ApiException as err:
+        except fatcat_openapi_client.rest.ApiException as err:
             if err.status != 404:
                 raise err

@@ -725,7 +725,7 @@ class PubmedImporter(EntityImporter):
         if not existing and re.ext_ids.doi:
             try:
                 existing = self.api.lookup_release(doi=re.ext_ids.doi)
-            except fatcat_client.rest.ApiException as err:
+            except fatcat_openapi_client.rest.ApiException as err:
                 if err.status != 404:
                     raise err
             if existing and existing.ext_ids.pmid and existing.ext_ids.pmid != re.ext_ids.pmid:
@@ -751,7 +751,7 @@ class PubmedImporter(EntityImporter):
             try:
                 self.api.update_release(self.get_editgroup_id(), existing.ident, existing)
                 self.counts['update'] += 1
-            except fatcat_client.rest.ApiException as err:
+            except fatcat_openapi_client.rest.ApiException as err:
                 # there is a code path where we try to update the same release
                 # twice in a row; if that happens, just skip
                 # NOTE: API behavior might change in the future?
@@ -765,8 +765,8 @@ class PubmedImporter(EntityImporter):
         return True

     def insert_batch(self, batch):
-        self.api.create_release_auto_batch(fatcat_client.ReleaseAutoBatch(
-            editgroup=fatcat_client.Editgroup(
+        self.api.create_release_auto_batch(fatcat_openapi_client.ReleaseAutoBatch(
+            editgroup=fatcat_openapi_client.Editgroup(
                 description=self.editgroup_description,
                 extra=self.editgroup_extra),
             entity_list=batch))
diff --git a/python/fatcat_tools/importers/wayback_static.py b/python/fatcat_tools/importers/wayback_static.py
index 2125017a..fa0bd4ef 100755
--- a/python/fatcat_tools/importers/wayback_static.py
+++ b/python/fatcat_tools/importers/wayback_static.py
@@ -16,7 +16,7 @@ import argparse
 import subprocess
 from bs4 import BeautifulSoup

-from fatcat_client import *
+from fatcat_openapi_client import *

 CDX_API_BASE = "https://web.archive.org/cdx/search/cdx"
 GWB_URL_BASE = "https://web.archive.org/web"
diff --git a/python/fatcat_tools/reviewers/review_common.py b/python/fatcat_tools/reviewers/review_common.py
index bb36d4e3..ecf7da8f 100644
--- a/python/fatcat_tools/reviewers/review_common.py
+++ b/python/fatcat_tools/reviewers/review_common.py
@@ -5,8 +5,8 @@ import datetime
 import subprocess
 from collections import Counter

-import fatcat_client
-from fatcat_client.rest import ApiException
+import fatcat_openapi_client
+from fatcat_openapi_client.rest import ApiException

 """
 checks should return:
@@ -146,7 +146,7 @@ class ReviewBot:
             "checks": [check.name for check in self.checks],
             "result_counts": dict(result_counts),
         })
-        annotation = fatcat_client.EditgroupAnnotation(
+        annotation = fatcat_openapi_client.EditgroupAnnotation(
             comment_markdown=comment,
             editgroup_id=editgroup.editgroup_id,
             extra=extra,
diff --git a/python/fatcat_tools/transforms/csl.py b/python/fatcat_tools/transforms/csl.py
index f94fa372..7ab94cac 100644
--- a/python/fatcat_tools/transforms/csl.py
+++ b/python/fatcat_tools/transforms/csl.py
@@ -8,7 +8,7 @@ from citeproc import formatter
 from citeproc.source.json import CiteProcJSON
 from citeproc_styles import get_style_filepath

-from fatcat_client import ApiClient
+from fatcat_openapi_client import ApiClient

 def contribs_by_role(contribs, role):
diff --git a/python/fatcat_tools/transforms/elasticsearch.py b/python/fatcat_tools/transforms/elasticsearch.py
index 303d56a9..3a53db4d 100644
--- a/python/fatcat_tools/transforms/elasticsearch.py
+++ b/python/fatcat_tools/transforms/elasticsearch.py
@@ -1,7 +1,7 @@
 import collections

-from fatcat_client import ApiClient
+from fatcat_openapi_client import ApiClient

 def check_kbart(year, archive):
diff --git a/python/fatcat_tools/transforms/entities.py b/python/fatcat_tools/transforms/entities.py
index b67df12d..8e5de286 100644
--- a/python/fatcat_tools/transforms/entities.py
+++ b/python/fatcat_tools/transforms/entities.py
@@ -1,7 +1,7 @@
 import collections

-from fatcat_client import ApiClient
+from fatcat_openapi_client import ApiClient

 def entity_to_dict(entity, api_client=None):
     """
diff --git a/python/fatcat_tools/workers/elasticsearch.py b/python/fatcat_tools/workers/elasticsearch.py
index a91b5098..91224d98 100644
--- a/python/fatcat_tools/workers/elasticsearch.py
+++ b/python/fatcat_tools/workers/elasticsearch.py
@@ -4,7 +4,7 @@ import time
 import requests
 from pykafka.common import OffsetType

-from fatcat_client import ReleaseEntity, ContainerEntity, ApiClient
+from fatcat_openapi_client import ReleaseEntity, ContainerEntity, ApiClient
 from fatcat_tools import *
 from .worker_common import FatcatWorker
diff --git a/python/fatcat_tools/workers/worker_common.py b/python/fatcat_tools/workers/worker_common.py
index cb4e5dab..9ffbe5fd 100644
--- a/python/fatcat_tools/workers/worker_common.py
+++ b/python/fatcat_tools/workers/worker_common.py
@@ -8,8 +8,8 @@ from itertools import islice
 from pykafka import KafkaClient
 from pykafka.common import OffsetType

-import fatcat_client
-from fatcat_client.rest import ApiException
+import fatcat_openapi_client
+from fatcat_openapi_client.rest import ApiException

 def most_recent_message(topic):
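Because the rename is purely mechanical, any straggling callers outside this tree fail at import time rather than at runtime. If a transitional period were needed, a module alias could bridge it — a sketch under the assumption that no real `fatcat_client` package remains installed; this shim is not part of this patch:

    # Transitional alias (an assumption, NOT included in this change): lets
    # legacy "import fatcat_client" keep working until call sites are updated.
    import sys

    import fatcat_openapi_client
    import fatcat_openapi_client.rest

    sys.modules['fatcat_client'] = fatcat_openapi_client
    sys.modules['fatcat_client.rest'] = fatcat_openapi_client.rest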