author     Martin Czygan <martin@archive.org>   2021-05-28 20:18:46 +0000
committer  Martin Czygan <martin@archive.org>   2021-05-28 20:18:46 +0000
commit     6a0a25e219f39530efeefd136c7abc298c89388d (patch)
tree       1e318f5251429df0d2d7a8649beb57bfb0fe81b3
parent     a7a3cc1f8de8f38d98cf878282bd26ec4d76771f (diff)
parent     064b05a2f49138a1708416b9fc1ac25c29a858ef (diff)
download   fatcat-6a0a25e219f39530efeefd136c7abc298c89388d.tar.gz
           fatcat-6a0a25e219f39530efeefd136c7abc298c89388d.zip
Merge branch 'bnewbold-lint-fixes' into 'master'
various lint fixes; should un-break CI
See merge request webgroup/fatcat!106
-rw-r--r--  python/Makefile                                 | 1
-rw-r--r--  python/fatcat_tools/harvest/doi_registrars.py   | 2
-rw-r--r--  python/fatcat_tools/importers/common.py         | 2
-rw-r--r--  python/fatcat_tools/transforms/csl.py           | 2
-rw-r--r--  python/fatcat_tools/workers/changelog.py        | 2
-rw-r--r--  python/fatcat_web/auth.py                       | 6
-rw-r--r--  python/fatcat_web/routes.py                     | 2
7 files changed, 8 insertions, 9 deletions
diff --git a/python/Makefile b/python/Makefile
index 2ac7bc5f..954fdc38 100644
--- a/python/Makefile
+++ b/python/Makefile
@@ -14,6 +14,7 @@ dep: ## Create local virtualenv using pipenv
 .PHONY: lint
 lint: ## Run lints (eg, flake8)
+	pipenv run pylint -E fatcat*.py fatcat_tools fatcat_web tests/*.py
 	pipenv run flake8 *.py tests/ fatcat_web/ fatcat_tools/ --select=E9,F63,F7,F82
 	pipenv run flake8 *.py tests/ fatcat_web/ fatcat_tools/ --exit-zero
diff --git a/python/fatcat_tools/harvest/doi_registrars.py b/python/fatcat_tools/harvest/doi_registrars.py
index 03773c15..553f4e7a 100644
--- a/python/fatcat_tools/harvest/doi_registrars.py
+++ b/python/fatcat_tools/harvest/doi_registrars.py
@@ -143,7 +143,7 @@ class HarvestCrossrefWorker:
                 # Datacite API returned HTTP 200, but JSON seemed unparseable.
                 # It might be a glitch, so we retry.
                 print("failed to decode body from {}: {}".format(http_resp.url, resp_body), file=sys.stderr)
-                raise
+                raise exc
             items = self.extract_items(resp)
             count += len(items)
             print("... got {} ({} of {}), HTTP fetch took {}".format(len(items), count,
diff --git a/python/fatcat_tools/importers/common.py b/python/fatcat_tools/importers/common.py
index fcbe9ad2..6815a155 100644
--- a/python/fatcat_tools/importers/common.py
+++ b/python/fatcat_tools/importers/common.py
@@ -8,9 +8,7 @@ import datetime
 import subprocess
 from collections import Counter
 from typing import Optional, Tuple
-from confluent_kafka import Consumer, KafkaException
 import lxml
-import xml.parsers.expat
 import xml.etree.ElementTree as ET
 import elasticsearch
diff --git a/python/fatcat_tools/transforms/csl.py b/python/fatcat_tools/transforms/csl.py
index 15bb369f..0556f4fe 100644
--- a/python/fatcat_tools/transforms/csl.py
+++ b/python/fatcat_tools/transforms/csl.py
@@ -33,7 +33,7 @@ def release_to_csl(entity):
         if contrib.creator:
             # Default to "local" (publication-specific) metadata; fall back to
             # creator-level
-            family = contrib.creator.surname or contrib.surname or (contrib.raw_name and contrib.raw_name.split()[-1])
+            family = contrib.creator.surname or contrib.surname or (contrib.raw_name and contrib.raw_name.split()[-1])
             if not family:
                 # CSL requires some surname (family name)
                 continue
diff --git a/python/fatcat_tools/workers/changelog.py b/python/fatcat_tools/workers/changelog.py
index 94791770..982ee3ea 100644
--- a/python/fatcat_tools/workers/changelog.py
+++ b/python/fatcat_tools/workers/changelog.py
@@ -371,7 +371,7 @@ class EntityUpdatesWorker(FatcatWorker):
         for ident in set(fileset_ids):
             fileset_entity = self.api.get_fileset(ident, expand=None)
             # update release when a fileset changes
-            release_ids.extend(file_entity.release_ids or [])
+            release_ids.extend(fileset_entity.release_ids or [])
         # TODO: topic for webcapture updates
         for ident in set(webcapture_ids):
diff --git a/python/fatcat_web/auth.py b/python/fatcat_web/auth.py
index 74b8e2d6..73869544 100644
--- a/python/fatcat_web/auth.py
+++ b/python/fatcat_web/auth.py
@@ -36,7 +36,7 @@ def handle_token_login(token):
         abort(400)
     # fetch editor info
     editor = api.get_editor(editor_id)
-    session.permanent = True
+    session.permanent = True  # pylint: disable=assigning-non-slot
     session['api_token'] = token
     session['editor'] = editor.to_dict()
     login_user(load_user(editor.editor_id))
@@ -76,7 +76,7 @@ def handle_oauth(remote, token, user_info):
         api_token = resp.token
     # write token and username to session
-    session.permanent = True
+    session.permanent = True  # pylint: disable=assigning-non-slot
     session['api_token'] = api_token
     session['editor'] = editor.to_dict()
@@ -104,7 +104,7 @@ def handle_ia_xauth(email, password):
     if resp.status_code == 401 or (not resp.json().get('success')):
         try:
             flash("Internet Archive email/password didn't match: {}".format(resp.json()['values']['reason']))
-        except:
+        except Exception:
             app.log.warning("IA XAuth fail: {}".format(resp.content))
         return render_template('auth_ia_login.html', email=email), resp.status_code
     elif resp.status_code != 200:
diff --git a/python/fatcat_web/routes.py b/python/fatcat_web/routes.py
index 7cf1f854..1a573006 100644
--- a/python/fatcat_web/routes.py
+++ b/python/fatcat_web/routes.py
@@ -658,7 +658,7 @@ def release_save(ident):
                 Config.KAFKA_SAVEPAPERNOW_TOPIC,
                 json.dumps(msg, sort_keys=True),
             )
-        except:
+        except Exception:
             return render_template('release_save.html', entity=release, form=form, spn_status='kafka-error'), 500
         return render_template('release_save.html', entity=release, form=form, spn_status='success'), 200
     elif form.errors:
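
Taken together, the fixes above converge on two exception-handling patterns that pylint and flake8 can verify statically: re-raising the caught exception by name instead of relying on a bare raise, and catching Exception rather than using a bare except: clause. The sketch below is illustrative only, not fatcat code; the function names and the JSON-decoding scenario are assumptions made for the example.

```python
# Minimal illustrative sketch (not fatcat code) of the exception-handling
# patterns the lint fixes above settle on. Names and scenario are hypothetical.
import json
import sys


def parse_body(resp_body: str) -> dict:
    try:
        return json.loads(resp_body)
    except json.JSONDecodeError as exc:
        # Log context first, then re-raise the *named* exception so static
        # checkers can see exactly which error propagates (cf. `raise exc`
        # in the doi_registrars.py hunk above).
        print("failed to decode body: {}".format(resp_body), file=sys.stderr)
        raise exc


def safe_parse(resp_body: str) -> dict:
    try:
        return parse_body(resp_body)
    except Exception:
        # `except Exception:` still handles ordinary runtime errors but,
        # unlike a bare `except:`, lets KeyboardInterrupt/SystemExit propagate.
        return {}
```

The Makefile hunk wires these checks into CI with `pipenv run pylint -E ...`; the `-E` flag runs pylint in errors-only mode, so style warnings do not break the build, and the `# pylint: disable=assigning-non-slot` comments suppress pylint's check on the Flask `session.permanent` assignments (a common false positive with Flask's proxied session object).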