-rw-r--r--  python/fatcat_web/__init__.py       |   19
-rw-r--r--  python/fatcat_web/auth.py           |  130
-rw-r--r--  python/fatcat_web/cors.py           |   33
-rw-r--r--  python/fatcat_web/editing_routes.py |  592
-rw-r--r--  python/fatcat_web/entity_helpers.py |  148
-rw-r--r--  python/fatcat_web/forms.py          |  455
-rw-r--r--  python/fatcat_web/graphics.py       |  135
-rw-r--r--  python/fatcat_web/hacks.py          |   12
-rw-r--r--  python/fatcat_web/kafka.py          |    7
-rw-r--r--  python/fatcat_web/ref_routes.py     |  175
-rw-r--r--  python/fatcat_web/routes.py         | 1003
-rw-r--r--  python/fatcat_web/search.py         |  518
-rw-r--r--  python/fatcat_web/web_config.py     |   62
13 files changed, 1992 insertions, 1297 deletions
diff --git a/python/fatcat_web/__init__.py b/python/fatcat_web/__init__.py
index 336b4133..c0d0e0c8 100644
--- a/python/fatcat_web/__init__.py
+++ b/python/fatcat_web/__init__.py
@@ -1,4 +1,3 @@
-
import sys
import elasticsearch
@@ -18,7 +17,7 @@ from raven.contrib.flask import Sentry
from fatcat_web.web_config import Config
toolbar = DebugToolbarExtension()
-app = Flask(__name__, static_url_path='/static')
+app = Flask(__name__, static_url_path="/static")
app.config.from_object(Config)
toolbar = DebugToolbarExtension(app)
FlaskUUID(app)
@@ -26,7 +25,8 @@ app.csrf = CSRFProtect(app)
app.log = create_logger(app)
# This is the Markdown processor; setting default here
-Misaka(app,
+Misaka(
+ app,
autolink=True,
no_intra_emphasis=True,
strikethrough=True,
@@ -49,6 +49,7 @@ api = fatcat_openapi_client.DefaultApi(fatcat_openapi_client.ApiClient(conf))
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
+
def auth_api(token):
conf = fatcat_openapi_client.Configuration()
conf.api_key["Authorization"] = token
@@ -56,6 +57,7 @@ def auth_api(token):
conf.host = Config.FATCAT_API_HOST
return fatcat_openapi_client.DefaultApi(fatcat_openapi_client.ApiClient(conf))
+
if Config.FATCAT_API_AUTH_TOKEN:
print("Found and using privileged token (eg, for account signup)", file=sys.stderr)
priv_api = auth_api(Config.FATCAT_API_AUTH_TOKEN)
@@ -69,9 +71,10 @@ else:
mwoauth = MWOAuth(
consumer_key=Config.WIKIPEDIA_CLIENT_ID or "dummy",
consumer_secret=Config.WIKIPEDIA_CLIENT_SECRET or "dummy",
- default_return_to='wp_oauth_finish_login')
+ default_return_to="wp_oauth_finish_login",
+)
mwoauth.handshaker.user_agent = "fatcat.wiki;python_web_interface"
-app.register_blueprint(mwoauth.bp, url_prefix='/auth/wikipedia')
+app.register_blueprint(mwoauth.bp, url_prefix="/auth/wikipedia")
app.es_client = elasticsearch.Elasticsearch(Config.ELASTICSEARCH_BACKEND)
@@ -80,12 +83,12 @@ from fatcat_web import auth, cors, editing_routes, forms, ref_routes, routes
# TODO: blocking on ORCID support in loginpass
if Config.ORCID_CLIENT_ID:
orcid_bp = create_flask_blueprint(ORCiD, oauth, auth.handle_oauth)
- app.register_blueprint(orcid_bp, url_prefix='/auth/orcid')
+ app.register_blueprint(orcid_bp, url_prefix="/auth/orcid")
if Config.GITLAB_CLIENT_ID:
gitlab_bp = create_flask_blueprint(Gitlab, oauth, auth.handle_oauth)
- app.register_blueprint(gitlab_bp, url_prefix='/auth/gitlab')
+ app.register_blueprint(gitlab_bp, url_prefix="/auth/gitlab")
if Config.GITHUB_CLIENT_ID:
github_bp = create_flask_blueprint(GitHub, oauth, auth.handle_oauth)
- app.register_blueprint(github_bp, url_prefix='/auth/github')
+ app.register_blueprint(github_bp, url_prefix="/auth/github")
diff --git a/python/fatcat_web/auth.py b/python/fatcat_web/auth.py
index 137bc2bb..4fe85770 100644
--- a/python/fatcat_web/auth.py
+++ b/python/fatcat_web/auth.py
@@ -1,4 +1,3 @@
-
from collections import namedtuple
import fatcat_openapi_client
@@ -12,11 +11,12 @@ from fatcat_web import Config, api, app, login_manager, priv_api
def handle_logout():
logout_user()
- for k in ('editor', 'api_token'):
+ for k in ("editor", "api_token"):
if k in session:
session.pop(k)
session.clear()
+
def handle_token_login(token):
try:
m = pymacaroons.Macaroon.deserialize(token)
@@ -33,22 +33,23 @@ def handle_token_login(token):
for caveat in m.first_party_caveats():
caveat = caveat.caveat_id
if caveat.startswith(b"editor_id = "):
- editor_id = caveat[12:].decode('utf-8')
+ editor_id = caveat[12:].decode("utf-8")
if not editor_id:
app.log.warning("auth fail: editor_id missing in macaroon")
abort(400)
# fetch editor info
editor = api.get_editor(editor_id)
session.permanent = True # pylint: disable=assigning-non-slot
- session['api_token'] = token
- session['editor'] = editor.to_dict()
+ session["api_token"] = token
+ session["editor"] = editor.to_dict()
login_user(load_user(editor.editor_id))
rp = "/auth/account"
- if session.get('next'):
- rp = session['next']
- session.pop('next')
+ if session.get("next"):
+ rp = session["next"]
+ session.pop("next")
return redirect(rp)
+
# This will need to login/signup via fatcatd API, then set token in session
def handle_oauth(remote, token, user_info):
if user_info:
@@ -57,22 +58,24 @@ def handle_oauth(remote, token, user_info):
# SUB is the stable internal identifier for the user (not usually the username itself)
# TODO: should have the real sub here
# TODO: would be nicer to pass preferred_username for account creation
- iss = remote.OAUTH_CONFIG['api_base_url']
+ iss = remote.OAUTH_CONFIG["api_base_url"]
# we reuse 'preferred_username' for account name auto-creation (but
# don't store it otherwise in the backend, at least currently). But i'm
# not sure all loginpass backends will set it
- if user_info.get('preferred_username'):
- preferred_username = user_info['preferred_username']
- elif 'orcid.org' in iss:
+ if user_info.get("preferred_username"):
+ preferred_username = user_info["preferred_username"]
+ elif "orcid.org" in iss:
# as a special case, prefix ORCiD identifier so it can be used as a
# username. If we instead used the human name, we could have
# collisions. Not a great user experience either way.
- preferred_username = 'i' + user_info['sub'].replace('-', '')
+ preferred_username = "i" + user_info["sub"].replace("-", "")
else:
- preferred_username = user_info['sub']
+ preferred_username = user_info["sub"]
- params = fatcat_openapi_client.AuthOidc(remote.name, user_info['sub'], iss, preferred_username)
+ params = fatcat_openapi_client.AuthOidc(
+ remote.name, user_info["sub"], iss, preferred_username
+ )
# this call requires admin privs
(resp, http_status, http_headers) = priv_api.auth_oidc_with_http_info(params)
editor = resp.editor
@@ -80,90 +83,103 @@ def handle_oauth(remote, token, user_info):
# write token and username to session
session.permanent = True # pylint: disable=assigning-non-slot
- session['api_token'] = api_token
- session['editor'] = editor.to_dict()
+ session["api_token"] = api_token
+ session["editor"] = editor.to_dict()
# call login_user(load_user(editor_id))
login_user(load_user(editor.editor_id))
rp = "/auth/account"
- if session.get('next'):
- rp = session['next']
- session.pop('next')
+ if session.get("next"):
+ rp = session["next"]
+ session.pop("next")
return redirect(rp)
# XXX: what should this actually be?
raise Exception("didn't receive OAuth user_info")
+
def handle_ia_xauth(email, password):
- resp = requests.post(Config.IA_XAUTH_URI,
- params={'op': 'authenticate'},
+ resp = requests.post(
+ Config.IA_XAUTH_URI,
+ params={"op": "authenticate"},
json={
- 'version': '1',
- 'email': email,
- 'password': password,
- 'access': Config.IA_XAUTH_CLIENT_ID,
- 'secret': Config.IA_XAUTH_CLIENT_SECRET,
- })
- if resp.status_code == 401 or (not resp.json().get('success')):
+ "version": "1",
+ "email": email,
+ "password": password,
+ "access": Config.IA_XAUTH_CLIENT_ID,
+ "secret": Config.IA_XAUTH_CLIENT_SECRET,
+ },
+ )
+ if resp.status_code == 401 or (not resp.json().get("success")):
try:
- flash("Internet Archive email/password didn't match: {}".format(resp.json()['values']['reason']))
+ flash(
+ "Internet Archive email/password didn't match: {}".format(
+ resp.json()["values"]["reason"]
+ )
+ )
except Exception:
app.log.warning("IA XAuth fail: {}".format(resp.content))
- return render_template('auth_ia_login.html', email=email), resp.status_code
+ return render_template("auth_ia_login.html", email=email), resp.status_code
elif resp.status_code != 200:
flash("Internet Archive login failed (internal error?)")
app.log.warning("IA XAuth fail: {}".format(resp.content))
- return render_template('auth_ia_login.html', email=email), resp.status_code
+ return render_template("auth_ia_login.html", email=email), resp.status_code
# Successful login; now fetch info...
- resp = requests.post(Config.IA_XAUTH_URI,
- params={'op': 'info'},
+ resp = requests.post(
+ Config.IA_XAUTH_URI,
+ params={"op": "info"},
json={
- 'version': '1',
- 'email': email,
- 'access': Config.IA_XAUTH_CLIENT_ID,
- 'secret': Config.IA_XAUTH_CLIENT_SECRET,
- })
+ "version": "1",
+ "email": email,
+ "access": Config.IA_XAUTH_CLIENT_ID,
+ "secret": Config.IA_XAUTH_CLIENT_SECRET,
+ },
+ )
if resp.status_code != 200:
flash("Internet Archive login failed (internal error?)")
app.log.warning("IA XAuth fail: {}".format(resp.content))
- return render_template('auth_ia_login.html', email=email), resp.status_code
- ia_info = resp.json()['values']
+ return render_template("auth_ia_login.html", email=email), resp.status_code
+ ia_info = resp.json()["values"]
# and pass off "as if" we did OAuth successfully
- FakeOAuthRemote = namedtuple('FakeOAuthRemote', ['name', 'OAUTH_CONFIG'])
- remote = FakeOAuthRemote(name='archive', OAUTH_CONFIG={'api_base_url': Config.IA_XAUTH_URI})
+ FakeOAuthRemote = namedtuple("FakeOAuthRemote", ["name", "OAUTH_CONFIG"])
+ remote = FakeOAuthRemote(name="archive", OAUTH_CONFIG={"api_base_url": Config.IA_XAUTH_URI})
oauth_info = {
- 'preferred_username': ia_info['itemname'].replace('@', ''),
- 'iss': Config.IA_XAUTH_URI,
- 'sub': ia_info['itemname'],
+ "preferred_username": ia_info["itemname"].replace("@", ""),
+ "iss": Config.IA_XAUTH_URI,
+ "sub": ia_info["itemname"],
}
return handle_oauth(remote, None, oauth_info)
+
def handle_wmoauth(username):
# pass off "as if" we did OAuth successfully
- FakeOAuthRemote = namedtuple('FakeOAuthRemote', ['name', 'OAUTH_CONFIG'])
- remote = FakeOAuthRemote(name='wikipedia', OAUTH_CONFIG={'api_base_url': "https://www.mediawiki.org/w"})
- conservative_username = ''.join(filter(str.isalnum, username))
+ FakeOAuthRemote = namedtuple("FakeOAuthRemote", ["name", "OAUTH_CONFIG"])
+ remote = FakeOAuthRemote(
+ name="wikipedia", OAUTH_CONFIG={"api_base_url": "https://www.mediawiki.org/w"}
+ )
+ conservative_username = "".join(filter(str.isalnum, username))
oauth_info = {
- 'preferred_username': conservative_username,
- 'iss': "https://www.mediawiki.org/w",
- 'sub': username,
+ "preferred_username": conservative_username,
+ "iss": "https://www.mediawiki.org/w",
+ "sub": username,
}
return handle_oauth(remote, None, oauth_info)
+
@login_manager.user_loader
def load_user(editor_id):
# looks for extra info in session, and updates the user object with that.
# If session isn't loaded/valid, should return None
- if (not session.get('editor')) or (not session.get('api_token')):
+ if (not session.get("editor")) or (not session.get("api_token")):
return None
- editor = session['editor']
- token = session['api_token']
+ editor = session["editor"]
+ token = session["api_token"]
user = UserMixin()
user.id = editor_id
user.editor_id = editor_id
- user.username = editor['username']
- user.is_admin = editor['is_admin']
+ user.username = editor["username"]
+ user.is_admin = editor["is_admin"]
user.token = token
return user
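
For context on the token handling above: handle_token_login() expects a serialized pymacaroons macaroon whose first-party caveats carry the editor identity. The following is a minimal sketch, not code from this repository: the location, key, and editor_id values are made-up placeholders, and real fatcat tokens are issued and verified by fatcatd with additional caveats.

    import pymacaroons

    # Mint an illustrative token (placeholder location/key/editor_id;
    # real tokens come from the fatcat API server).
    m = pymacaroons.Macaroon(location="fatcat.wiki", identifier="dummy", key="dummy-secret")
    m.add_first_party_caveat("editor_id = aaaaaaaaaaaabkvkaaaaaaaaae")
    token = m.serialize()

    # Mirror of the caveat parsing in handle_token_login() above.
    parsed = pymacaroons.Macaroon.deserialize(token)
    editor_id = None
    for caveat in parsed.first_party_caveats():
        caveat_id = caveat.caveat_id
        # depending on the pymacaroons version, caveat_id may be bytes or str
        if isinstance(caveat_id, bytes):
            caveat_id = caveat_id.decode("utf-8")
        if caveat_id.startswith("editor_id = "):
            editor_id = caveat_id[len("editor_id = "):]
    print(editor_id)  # -> "aaaaaaaaaaaabkvkaaaaaaaaae"
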
diff --git a/python/fatcat_web/cors.py b/python/fatcat_web/cors.py
index cb2054b2..bb32f7c2 100644
--- a/python/fatcat_web/cors.py
+++ b/python/fatcat_web/cors.py
@@ -1,4 +1,3 @@
-
"""
This snippet from: http://flask.pocoo.org/snippets/56/
"Posted by Armin Ronacher on 2011-07-14"
@@ -10,15 +9,20 @@ from functools import update_wrapper
from flask import current_app, make_response, request
-def crossdomain(origin=None, methods=None, headers=None,
- max_age=21600, attach_to_all=True,
- automatic_options=True):
+def crossdomain(
+ origin=None,
+ methods=None,
+ headers=None,
+ max_age=21600,
+ attach_to_all=True,
+ automatic_options=True,
+):
if methods is not None:
- methods = ', '.join(sorted(x.upper() for x in methods))
+ methods = ", ".join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, str):
- headers = ', '.join(x.upper() for x in headers)
+ headers = ", ".join(x.upper() for x in headers)
if not isinstance(origin, str):
- origin = ', '.join(origin)
+ origin = ", ".join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
@@ -27,26 +31,27 @@ def crossdomain(origin=None, methods=None, headers=None,
return methods
options_resp = current_app.make_default_options_response()
- return options_resp.headers['allow']
+ return options_resp.headers["allow"]
def decorator(f):
def wrapped_function(*args, **kwargs):
- if automatic_options and request.method == 'OPTIONS':
+ if automatic_options and request.method == "OPTIONS":
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
- if not attach_to_all and request.method != 'OPTIONS':
+ if not attach_to_all and request.method != "OPTIONS":
return resp
h = resp.headers
- h['Access-Control-Allow-Origin'] = origin
- h['Access-Control-Allow-Methods'] = get_methods()
- h['Access-Control-Max-Age'] = str(max_age)
+ h["Access-Control-Allow-Origin"] = origin
+ h["Access-Control-Allow-Methods"] = get_methods()
+ h["Access-Control-Max-Age"] = str(max_age)
if headers is not None:
- h['Access-Control-Allow-Headers'] = headers
+ h["Access-Control-Allow-Headers"] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
+
return decorator
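
For illustration only (this route is hypothetical and not part of the diff): the crossdomain() decorator above is applied to a Flask view alongside app.route, adding Access-Control-* headers to the view's responses and answering preflight OPTIONS requests automatically.

    # Hypothetical usage sketch, in the style of fatcat_web/routes.py.
    from flask import jsonify

    from fatcat_web import app
    from fatcat_web.cors import crossdomain

    @app.route("/example/ping", methods=["GET", "OPTIONS"])
    @crossdomain(origin="*", headers=["Content-Type"])
    def example_ping():
        # the wrapped response gains Access-Control-Allow-Origin,
        # Access-Control-Allow-Methods, and Access-Control-Max-Age headers
        return jsonify({"ok": True})
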
diff --git a/python/fatcat_web/editing_routes.py b/python/fatcat_web/editing_routes.py
index 5a97dfc4..6dafd2f1 100644
--- a/python/fatcat_web/editing_routes.py
+++ b/python/fatcat_web/editing_routes.py
@@ -1,4 +1,3 @@
-
from typing import Optional
from fatcat_openapi_client import (
@@ -29,47 +28,53 @@ from fatcat_web.forms import (
### Helper Methods ##########################################################
-def generic_entity_create_from_toml(user_api, entity_type: str, editgroup_id: str, toml_str: str) -> EntityEdit:
- if entity_type == 'container':
+
+def generic_entity_create_from_toml(
+ user_api, entity_type: str, editgroup_id: str, toml_str: str
+) -> EntityEdit:
+ if entity_type == "container":
entity = entity_from_toml(toml_str, ContainerEntity)
edit = user_api.create_container(editgroup_id, entity)
- elif entity_type == 'creator':
+ elif entity_type == "creator":
entity = entity_from_toml(toml_str, CreatorEntity)
edit = user_api.create_creator(editgroup_id, entity)
- elif entity_type == 'file':
+ elif entity_type == "file":
entity = entity_from_toml(toml_str, FileEntity)
edit = user_api.create_file(editgroup_id, entity)
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
entity = entity_from_toml(toml_str, FilesetEntity)
edit = user_api.create_fileset(editgroup_id, entity)
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
entity = entity_from_toml(toml_str, WebcaptureEntity)
edit = user_api.create_webcapture(editgroup_id, entity)
- elif entity_type == 'release':
+ elif entity_type == "release":
entity = entity_from_toml(toml_str, ReleaseEntity)
edit = user_api.create_release(editgroup_id, entity)
- elif entity_type == 'work':
+ elif entity_type == "work":
entity = entity_from_toml(toml_str, WorkEntity)
edit = user_api.create_work(editgroup_id, entity)
else:
raise NotImplementedError
return edit
-def generic_entity_delete_edit(user_api, entity_type: str, editgroup_id: str, edit_id: str) -> None:
+
+def generic_entity_delete_edit(
+ user_api, entity_type: str, editgroup_id: str, edit_id: str
+) -> None:
try:
- if entity_type == 'container':
+ if entity_type == "container":
user_api.delete_container_edit(editgroup_id, edit_id)
- elif entity_type == 'creator':
+ elif entity_type == "creator":
user_api.delete_creator_edit(editgroup_id, edit_id)
- elif entity_type == 'file':
+ elif entity_type == "file":
user_api.delete_file_edit(editgroup_id, edit_id)
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
user_api.delete_fileset_edit(editgroup_id, edit_id)
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
user_api.delete_webcapture_edit(editgroup_id, edit_id)
- elif entity_type == 'release':
+ elif entity_type == "release":
user_api.delete_release_edit(editgroup_id, edit_id)
- elif entity_type == 'work':
+ elif entity_type == "work":
user_api.delete_work_edit(editgroup_id, edit_id)
else:
raise NotImplementedError
@@ -79,21 +84,24 @@ def generic_entity_delete_edit(user_api, entity_type: str, editgroup_id: str, ed
else:
raise ae
-def generic_entity_delete_entity(user_api, entity_type: str, editgroup_id: str, entity_ident: str) -> None:
+
+def generic_entity_delete_entity(
+ user_api, entity_type: str, editgroup_id: str, entity_ident: str
+) -> None:
try:
- if entity_type == 'container':
+ if entity_type == "container":
edit = user_api.delete_container(editgroup_id, entity_ident)
- elif entity_type == 'creator':
+ elif entity_type == "creator":
edit = user_api.delete_creator(editgroup_id, entity_ident)
- elif entity_type == 'file':
+ elif entity_type == "file":
edit = user_api.delete_file(editgroup_id, entity_ident)
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
edit = user_api.delete_fileset(editgroup_id, entity_ident)
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
edit = user_api.delete_webcapture(editgroup_id, entity_ident)
- elif entity_type == 'release':
+ elif entity_type == "release":
edit = user_api.delete_release(editgroup_id, entity_ident)
- elif entity_type == 'work':
+ elif entity_type == "work":
edit = user_api.delete_work(editgroup_id, entity_ident)
else:
raise NotImplementedError
@@ -101,32 +109,36 @@ def generic_entity_delete_entity(user_api, entity_type: str, editgroup_id: str,
raise ae
return edit
-def generic_entity_update_from_toml(user_api, entity_type: str, editgroup_id: str, existing_ident, toml_str: str) -> EntityEdit:
- if entity_type == 'container':
+
+def generic_entity_update_from_toml(
+ user_api, entity_type: str, editgroup_id: str, existing_ident, toml_str: str
+) -> EntityEdit:
+ if entity_type == "container":
entity = entity_from_toml(toml_str, ContainerEntity)
edit = user_api.update_container(editgroup_id, existing_ident, entity)
- elif entity_type == 'creator':
+ elif entity_type == "creator":
entity = entity_from_toml(toml_str, CreatorEntity)
edit = user_api.update_creator(editgroup_id, existing_ident, entity)
- elif entity_type == 'file':
+ elif entity_type == "file":
entity = entity_from_toml(toml_str, FileEntity)
edit = user_api.update_file(editgroup_id, existing_ident, entity)
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
entity = entity_from_toml(toml_str, FilesetEntity)
edit = user_api.update_fileset(editgroup_id, existing_ident, entity)
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
entity = entity_from_toml(toml_str, WebcaptureEntity)
edit = user_api.update_webcapture(editgroup_id, existing_ident, entity)
- elif entity_type == 'release':
+ elif entity_type == "release":
entity = entity_from_toml(toml_str, ReleaseEntity)
edit = user_api.update_release(editgroup_id, existing_ident, entity)
- elif entity_type == 'work':
+ elif entity_type == "work":
entity = entity_from_toml(toml_str, WorkEntity)
edit = user_api.update_work(editgroup_id, existing_ident, entity)
else:
raise NotImplementedError
return edit
+
def form_editgroup_get_or_create(api, edit_form):
"""
This function expects a submitted, validated edit form
@@ -147,13 +159,15 @@ def form_editgroup_get_or_create(api, edit_form):
# if no editgroup, create one from description
try:
eg = api.create_editgroup(
- Editgroup(description=edit_form.editgroup_description.data or None))
+ Editgroup(description=edit_form.editgroup_description.data or None)
+ )
except ApiException as ae:
app.log.warning(ae)
raise ae
# set this session editgroup_id (TODO)
return eg
+
def generic_entity_edit(editgroup_id, entity_type, existing_ident, edit_template):
"""
@@ -195,17 +209,19 @@ def generic_entity_edit(editgroup_id, entity_type, existing_ident, edit_template
existing = None
existing_edit = None
if editgroup and existing_ident:
- existing, existing_edit = generic_get_editgroup_entity(editgroup, entity_type, existing_ident)
+ existing, existing_edit = generic_get_editgroup_entity(
+ editgroup, entity_type, existing_ident
+ )
elif existing_ident:
existing = generic_get_entity(entity_type, existing_ident)
# parse form (if submitted)
status = 200
- if entity_type == 'container':
+ if entity_type == "container":
form = ContainerEntityForm()
- elif entity_type == 'file':
+ elif entity_type == "file":
form = FileEntityForm()
- elif entity_type == 'release':
+ elif entity_type == "release":
form = ReleaseEntityForm()
else:
raise NotImplementedError
@@ -213,28 +229,32 @@ def generic_entity_edit(editgroup_id, entity_type, existing_ident, edit_template
if form.is_submitted():
if form.validate_on_submit():
# API on behalf of user
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
if not editgroup:
editgroup = form_editgroup_get_or_create(user_api, form)
if editgroup:
- if not existing_ident: # it's a create
+ if not existing_ident: # it's a create
entity = form.to_entity()
try:
- if entity_type == 'container':
+ if entity_type == "container":
edit = user_api.create_container(editgroup.editgroup_id, entity)
- elif entity_type == 'file':
+ elif entity_type == "file":
edit = user_api.create_file(editgroup.editgroup_id, entity)
- elif entity_type == 'release':
+ elif entity_type == "release":
edit = user_api.create_release(editgroup.editgroup_id, entity)
else:
raise NotImplementedError
except ApiException as ae:
app.log.warning(ae)
raise ae
- return redirect('/editgroup/{}/{}/{}'.format(editgroup.editgroup_id, entity_type, edit.ident))
- else: # it's an update
+ return redirect(
+ "/editgroup/{}/{}/{}".format(
+ editgroup.editgroup_id, entity_type, edit.ident
+ )
+ )
+ else: # it's an update
# all the tricky logic is in the update method
form.update_entity(existing)
# do we need to try to delete the current in-progress edit first?
@@ -248,44 +268,61 @@ def generic_entity_edit(editgroup_id, entity_type, existing_ident, edit_template
# a "update pointer" edit
existing.revision = None
try:
- generic_entity_delete_edit(user_api, entity_type, editgroup.editgroup_id, existing_edit.edit_id)
+ generic_entity_delete_edit(
+ user_api,
+ entity_type,
+ editgroup.editgroup_id,
+ existing_edit.edit_id,
+ )
except ApiException as ae:
if ae.status == 404:
pass
else:
raise ae
try:
- if entity_type == 'container':
- edit = user_api.update_container(editgroup.editgroup_id, existing.ident, existing)
- elif entity_type == 'file':
- edit = user_api.update_file(editgroup.editgroup_id, existing.ident, existing)
- elif entity_type == 'release':
- edit = user_api.update_release(editgroup.editgroup_id, existing.ident, existing)
+ if entity_type == "container":
+ edit = user_api.update_container(
+ editgroup.editgroup_id, existing.ident, existing
+ )
+ elif entity_type == "file":
+ edit = user_api.update_file(
+ editgroup.editgroup_id, existing.ident, existing
+ )
+ elif entity_type == "release":
+ edit = user_api.update_release(
+ editgroup.editgroup_id, existing.ident, existing
+ )
else:
raise NotImplementedError
except ApiException as ae:
app.log.warning(ae)
raise ae
- return redirect('/editgroup/{}/{}/{}'.format(editgroup.editgroup_id, entity_type, edit.ident))
+ return redirect(
+ "/editgroup/{}/{}/{}".format(
+ editgroup.editgroup_id, entity_type, edit.ident
+ )
+ )
else:
status = 400
elif form.errors:
status = 400
app.log.info("form errors (did not validate): {}".format(form.errors))
- else: # form is not submitted
+ else: # form is not submitted
if existing:
- if entity_type == 'container':
+ if entity_type == "container":
form = ContainerEntityForm.from_entity(existing)
- elif entity_type == 'file':
+ elif entity_type == "file":
form = FileEntityForm.from_entity(existing)
- elif entity_type == 'release':
+ elif entity_type == "release":
form = ReleaseEntityForm.from_entity(existing)
else:
raise NotImplementedError
- editor_editgroups = api.get_editor_editgroups(session['editor']['editor_id'], limit=20)
- potential_editgroups = [e for e in editor_editgroups if e.changelog_index is None and e.submitted is None]
+ editor_editgroups = api.get_editor_editgroups(session["editor"]["editor_id"], limit=20)
+ potential_editgroups = [
+ e for e in editor_editgroups if e.changelog_index is None and e.submitted is None
+ ]
if not form.is_submitted():
# default to most recent not submitted, fallback to "create new"
@@ -293,9 +330,17 @@ def generic_entity_edit(editgroup_id, entity_type, existing_ident, edit_template
if potential_editgroups:
form.editgroup_id.data = potential_editgroups[0].editgroup_id
- return render_template(edit_template, form=form,
- existing_ident=existing_ident, editgroup=editgroup,
- potential_editgroups=potential_editgroups), status
+ return (
+ render_template(
+ edit_template,
+ form=form,
+ existing_ident=existing_ident,
+ editgroup=editgroup,
+ potential_editgroups=potential_editgroups,
+ ),
+ status,
+ )
+
def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_template):
"""
@@ -321,7 +366,9 @@ def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_tem
existing = None
existing_edit = None
if editgroup and existing_ident:
- existing, existing_edit = generic_get_editgroup_entity(editgroup, entity_type, existing_ident)
+ existing, existing_edit = generic_get_editgroup_entity(
+ editgroup, entity_type, existing_ident
+ )
elif existing_ident:
existing = generic_get_entity(entity_type, existing_ident)
@@ -332,15 +379,17 @@ def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_tem
if form.is_submitted():
if form.validate_on_submit():
# API on behalf of user
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
if not editgroup:
editgroup = form_editgroup_get_or_create(user_api, form)
if editgroup:
- if not existing_ident: # it's a create
+ if not existing_ident: # it's a create
try:
- edit = generic_entity_create_from_toml(user_api, entity_type, editgroup.editgroup_id, form.toml.data)
+ edit = generic_entity_create_from_toml(
+ user_api, entity_type, editgroup.editgroup_id, form.toml.data
+ )
except ValueError as ve:
form.toml.errors = [ve]
status = 400
@@ -348,8 +397,12 @@ def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_tem
app.log.warning(ae)
raise ae
if status == 200:
- return redirect('/editgroup/{}/{}/{}'.format(editgroup.editgroup_id, entity_type, edit.ident))
- else: # it's an update
+ return redirect(
+ "/editgroup/{}/{}/{}".format(
+ editgroup.editgroup_id, entity_type, edit.ident
+ )
+ )
+ else: # it's an update
# TODO: some danger of wiping database state here is
# "updated edit" causes, eg, a 4xx error. Better to allow
# this in the API itself. For now, form validation *should*
@@ -359,9 +412,17 @@ def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_tem
# need to clear revision on object or this becomes just
# a "update pointer" edit
existing.revision = None
- generic_entity_delete_edit(user_api, entity_type, editgroup.editgroup_id, existing_edit.edit_id)
+ generic_entity_delete_edit(
+ user_api, entity_type, editgroup.editgroup_id, existing_edit.edit_id
+ )
try:
- edit = generic_entity_update_from_toml(user_api, entity_type, editgroup.editgroup_id, existing.ident, form.toml.data)
+ edit = generic_entity_update_from_toml(
+ user_api,
+ entity_type,
+ editgroup.editgroup_id,
+ existing.ident,
+ form.toml.data,
+ )
except ValueError as ve:
form.toml.errors = [ve]
status = 400
@@ -369,19 +430,25 @@ def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_tem
app.log.warning(ae)
raise ae
if status == 200:
- return redirect('/editgroup/{}/{}/{}'.format(editgroup.editgroup_id, entity_type, edit.ident))
+ return redirect(
+ "/editgroup/{}/{}/{}".format(
+ editgroup.editgroup_id, entity_type, edit.ident
+ )
+ )
else:
status = 400
elif form.errors:
status = 400
app.log.info("form errors (did not validate): {}".format(form.errors))
- else: # form is not submitted
+ else: # form is not submitted
if existing:
form = EntityTomlForm.from_entity(existing)
- editor_editgroups = api.get_editor_editgroups(session['editor']['editor_id'], limit=20)
- potential_editgroups = [e for e in editor_editgroups if e.changelog_index is None and e.submitted is None]
+ editor_editgroups = api.get_editor_editgroups(session["editor"]["editor_id"], limit=20)
+ potential_editgroups = [
+ e for e in editor_editgroups if e.changelog_index is None and e.submitted is None
+ ]
if not form.is_submitted():
# default to most recent not submitted, fallback to "create new"
@@ -389,9 +456,18 @@ def generic_entity_toml_edit(editgroup_id, entity_type, existing_ident, edit_tem
if potential_editgroups:
form.editgroup_id.data = potential_editgroups[0].editgroup_id
- return render_template(edit_template, form=form, entity_type=entity_type,
- existing_ident=existing_ident, editgroup=editgroup,
- potential_editgroups=potential_editgroups), status
+ return (
+ render_template(
+ edit_template,
+ form=form,
+ entity_type=entity_type,
+ existing_ident=existing_ident,
+ editgroup=editgroup,
+ potential_editgroups=potential_editgroups,
+ ),
+ status,
+ )
+
def generic_entity_delete(editgroup_id: Optional[str], entity_type: str, existing_ident: str):
"""
@@ -418,7 +494,9 @@ def generic_entity_delete(editgroup_id: Optional[str], entity_type: str, existin
existing = None
existing_edit = None
if editgroup and existing_ident:
- existing, existing_edit = generic_get_editgroup_entity(editgroup, entity_type, existing_ident)
+ existing, existing_edit = generic_get_editgroup_entity(
+ editgroup, entity_type, existing_ident
+ )
elif existing_ident:
existing = generic_get_entity(entity_type, existing_ident)
@@ -429,7 +507,7 @@ def generic_entity_delete(editgroup_id: Optional[str], entity_type: str, existin
if form.is_submitted():
if form.validate_on_submit():
# API on behalf of user
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
if not editgroup:
editgroup = form_editgroup_get_or_create(user_api, form)
@@ -443,26 +521,36 @@ def generic_entity_delete(editgroup_id: Optional[str], entity_type: str, existin
# need to clear revision on object or this becomes just
# a "update pointer" edit
existing.revision = None
- generic_entity_delete_edit(user_api, entity_type, editgroup.editgroup_id, existing_edit.edit_id)
+ generic_entity_delete_edit(
+ user_api, entity_type, editgroup.editgroup_id, existing_edit.edit_id
+ )
try:
- edit = generic_entity_delete_entity(user_api, entity_type, editgroup.editgroup_id, existing.ident)
+ edit = generic_entity_delete_entity(
+ user_api, entity_type, editgroup.editgroup_id, existing.ident
+ )
except ApiException as ae:
app.log.warning(ae)
raise ae
if status == 200:
- return redirect('/editgroup/{}/{}/{}'.format(editgroup.editgroup_id, entity_type, edit.ident))
+ return redirect(
+ "/editgroup/{}/{}/{}".format(
+ editgroup.editgroup_id, entity_type, edit.ident
+ )
+ )
else:
status = 400
elif form.errors:
status = 400
app.log.info("form errors (did not validate): {}".format(form.errors))
- else: # form is not submitted
+ else: # form is not submitted
if existing:
form = EntityTomlForm.from_entity(existing)
- editor_editgroups = api.get_editor_editgroups(session['editor']['editor_id'], limit=20)
- potential_editgroups = [e for e in editor_editgroups if e.changelog_index is None and e.submitted is None]
+ editor_editgroups = api.get_editor_editgroups(session["editor"]["editor_id"], limit=20)
+ potential_editgroups = [
+ e for e in editor_editgroups if e.changelog_index is None and e.submitted is None
+ ]
if not form.is_submitted():
# default to most recent not submitted, fallback to "create new"
@@ -470,9 +558,18 @@ def generic_entity_delete(editgroup_id: Optional[str], entity_type: str, existin
if potential_editgroups:
form.editgroup_id.data = potential_editgroups[0].editgroup_id
- return render_template("entity_delete.html", form=form, entity_type=entity_type,
- existing_ident=existing_ident, editgroup=editgroup,
- potential_editgroups=potential_editgroups), status
+ return (
+ render_template(
+ "entity_delete.html",
+ form=form,
+ entity_type=entity_type,
+ existing_ident=existing_ident,
+ editgroup=editgroup,
+ potential_editgroups=potential_editgroups,
+ ),
+ status,
+ )
+
def generic_edit_delete(editgroup_id, entity_type, edit_id):
# fetch editgroup (if set) or 404
@@ -489,7 +586,7 @@ def generic_edit_delete(editgroup_id, entity_type, edit_id):
abort(400)
# API on behalf of user
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
# do the deletion
generic_entity_delete_edit(user_api, entity_type, editgroup.editgroup_id, edit_id)
@@ -498,317 +595,382 @@ def generic_edit_delete(editgroup_id, entity_type, edit_id):
### Views ###################################################################
-@app.route('/container/create', methods=['GET', 'POST'])
+
+@app.route("/container/create", methods=["GET", "POST"])
@login_required
def container_create_view():
- return generic_entity_edit(None, 'container', None, 'container_create.html')
+ return generic_entity_edit(None, "container", None, "container_create.html")
+
-@app.route('/container/<ident>/edit', methods=['GET', 'POST'])
+@app.route("/container/<ident>/edit", methods=["GET", "POST"])
@login_required
def container_edit_view(ident):
- return generic_entity_edit(None, 'container', ident, 'container_edit.html')
+ return generic_entity_edit(None, "container", ident, "container_edit.html")
+
-@app.route('/container/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/container/<ident>/delete", methods=["GET", "POST"])
@login_required
def container_delete_view(ident):
- return generic_entity_delete(None, 'container', ident)
+ return generic_entity_delete(None, "container", ident)
-@app.route('/editgroup/<editgroup_id>/container/<ident>/edit', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/container/<ident>/edit", methods=["GET", "POST"])
@login_required
def container_editgroup_edit_view(editgroup_id, ident):
- return generic_entity_edit(editgroup_id, 'container', ident, 'container_edit.html')
+ return generic_entity_edit(editgroup_id, "container", ident, "container_edit.html")
+
-@app.route('/editgroup/<editgroup_id>/container/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/container/<ident>/delete", methods=["GET", "POST"])
@login_required
def container_editgroup_delete_view(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'container', ident)
+ return generic_entity_delete(editgroup_id, "container", ident)
-@app.route('/editgroup/<editgroup_id>/container/edit/<edit_id>/delete', methods=['POST'])
+
+@app.route("/editgroup/<editgroup_id>/container/edit/<edit_id>/delete", methods=["POST"])
@login_required
def container_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'container', edit_id)
+ return generic_edit_delete(editgroup_id, "container", edit_id)
+
-@app.route('/creator/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/creator/<ident>/delete", methods=["GET", "POST"])
@login_required
def creator_delete_view(ident):
- return generic_entity_delete(None, 'creator', ident)
+ return generic_entity_delete(None, "creator", ident)
-@app.route('/editgroup/<editgroup_id>/creator/edit/<edit_id>/delete', methods=['POST'])
+
+@app.route("/editgroup/<editgroup_id>/creator/edit/<edit_id>/delete", methods=["POST"])
def creator_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'creator', edit_id)
+ return generic_edit_delete(editgroup_id, "creator", edit_id)
+
-@app.route('/editgroup/<editgroup_id>/creator/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/creator/<ident>/delete", methods=["GET", "POST"])
@login_required
def creator_editgroup_delete(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'creator', ident)
+ return generic_entity_delete(editgroup_id, "creator", ident)
-@app.route('/file/create', methods=['GET', 'POST'])
+
+@app.route("/file/create", methods=["GET", "POST"])
@login_required
def file_create_view():
- return generic_entity_edit(None, 'file', None, 'file_create.html')
+ return generic_entity_edit(None, "file", None, "file_create.html")
+
-@app.route('/file/<ident>/edit', methods=['GET', 'POST'])
+@app.route("/file/<ident>/edit", methods=["GET", "POST"])
@login_required
def file_edit_view(ident):
- return generic_entity_edit(None, 'file', ident, 'file_edit.html')
+ return generic_entity_edit(None, "file", ident, "file_edit.html")
+
-@app.route('/file/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/file/<ident>/delete", methods=["GET", "POST"])
@login_required
def file_delete_view(ident):
- return generic_entity_delete(None, 'file', ident)
+ return generic_entity_delete(None, "file", ident)
-@app.route('/editgroup/<editgroup_id>/file/<ident>/edit', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/file/<ident>/edit", methods=["GET", "POST"])
@login_required
def file_editgroup_edit_view(editgroup_id, ident):
- return generic_entity_edit(editgroup_id, 'file', ident, 'file_edit.html')
+ return generic_entity_edit(editgroup_id, "file", ident, "file_edit.html")
+
-@app.route('/editgroup/<editgroup_id>/file/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/file/<ident>/delete", methods=["GET", "POST"])
@login_required
def file_editgroup_delete_view(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'file', ident)
+ return generic_entity_delete(editgroup_id, "file", ident)
-@app.route('/editgroup/<editgroup_id>/file/edit/<edit_id>/delete', methods=['POST'])
+
+@app.route("/editgroup/<editgroup_id>/file/edit/<edit_id>/delete", methods=["POST"])
@login_required
def file_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'file', edit_id)
+ return generic_edit_delete(editgroup_id, "file", edit_id)
+
-@app.route('/fileset/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/fileset/<ident>/delete", methods=["GET", "POST"])
@login_required
def fileset_delete_view(ident):
- return generic_entity_delete(None, 'fileset', ident)
+ return generic_entity_delete(None, "fileset", ident)
+
-@app.route('/editgroup/<editgroup_id>/fileset/edit/<edit_id>/delete', methods=['POST'])
+@app.route("/editgroup/<editgroup_id>/fileset/edit/<edit_id>/delete", methods=["POST"])
def fileset_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'fileset', edit_id)
+ return generic_edit_delete(editgroup_id, "fileset", edit_id)
-@app.route('/editgroup/<editgroup_id>/fileset/<ident>/delete', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/fileset/<ident>/delete", methods=["GET", "POST"])
@login_required
def fileset_editgroup_delete(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'fileset', ident)
+ return generic_entity_delete(editgroup_id, "fileset", ident)
+
-@app.route('/webcapture/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/webcapture/<ident>/delete", methods=["GET", "POST"])
@login_required
def webcapture_delete_view(ident):
- return generic_entity_delete(None, 'webcapture', ident)
+ return generic_entity_delete(None, "webcapture", ident)
-@app.route('/editgroup/<editgroup_id>/webcapture/edit/<edit_id>/delete', methods=['POST'])
+
+@app.route("/editgroup/<editgroup_id>/webcapture/edit/<edit_id>/delete", methods=["POST"])
def webcapture_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'webcapture', edit_id)
+ return generic_edit_delete(editgroup_id, "webcapture", edit_id)
+
-@app.route('/editgroup/<editgroup_id>/webcapture/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/webcapture/<ident>/delete", methods=["GET", "POST"])
@login_required
def webcapture_editgroup_delete(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'webcapture', ident)
+ return generic_entity_delete(editgroup_id, "webcapture", ident)
+
-@app.route('/release/create', methods=['GET', 'POST'])
+@app.route("/release/create", methods=["GET", "POST"])
@login_required
def release_create_view():
- return generic_entity_edit(None, 'release', None, 'release_create.html')
+ return generic_entity_edit(None, "release", None, "release_create.html")
-@app.route('/release/<ident>/edit', methods=['GET', 'POST'])
+
+@app.route("/release/<ident>/edit", methods=["GET", "POST"])
@login_required
def release_edit_view(ident):
- return generic_entity_edit(None, 'release', ident, 'release_edit.html')
+ return generic_entity_edit(None, "release", ident, "release_edit.html")
+
-@app.route('/release/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/release/<ident>/delete", methods=["GET", "POST"])
@login_required
def release_delete_view(ident):
- return generic_entity_delete(None, 'release', ident)
+ return generic_entity_delete(None, "release", ident)
-@app.route('/editgroup/<editgroup_id>/release/<ident>/edit', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/release/<ident>/edit", methods=["GET", "POST"])
@login_required
def release_editgroup_edit(editgroup_id, ident):
- return generic_entity_edit(editgroup_id, 'release', ident, 'release_edit.html')
+ return generic_entity_edit(editgroup_id, "release", ident, "release_edit.html")
+
-@app.route('/editgroup/<editgroup_id>/release/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/release/<ident>/delete", methods=["GET", "POST"])
@login_required
def release_editgroup_delete(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'release', ident)
+ return generic_entity_delete(editgroup_id, "release", ident)
-@app.route('/editgroup/<editgroup_id>/release/edit/<edit_id>/delete', methods=['POST'])
+
+@app.route("/editgroup/<editgroup_id>/release/edit/<edit_id>/delete", methods=["POST"])
@login_required
def release_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'release', edit_id)
+ return generic_edit_delete(editgroup_id, "release", edit_id)
+
-@app.route('/work/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/work/<ident>/delete", methods=["GET", "POST"])
@login_required
def work_delete_view(ident):
- return generic_entity_delete(None, 'work', ident)
+ return generic_entity_delete(None, "work", ident)
-@app.route('/editgroup/<editgroup_id>/work/edit/<edit_id>/delete', methods=['POST'])
+
+@app.route("/editgroup/<editgroup_id>/work/edit/<edit_id>/delete", methods=["POST"])
def work_edit_delete(editgroup_id, edit_id):
- return generic_edit_delete(editgroup_id, 'work', edit_id)
+ return generic_edit_delete(editgroup_id, "work", edit_id)
+
-@app.route('/editgroup/<editgroup_id>/work/<ident>/delete', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/work/<ident>/delete", methods=["GET", "POST"])
@login_required
def work_editgroup_delete(editgroup_id, ident):
- return generic_entity_delete(editgroup_id, 'work', ident)
+ return generic_entity_delete(editgroup_id, "work", ident)
+
### TOML Views ##############################################################
-@app.route('/container/create/toml', methods=['GET', 'POST'])
+
+@app.route("/container/create/toml", methods=["GET", "POST"])
@login_required
def container_create_toml_view():
- return generic_entity_toml_edit(None, 'container', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "container", None, "entity_create_toml.html")
+
-@app.route('/container/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/container/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def container_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'container', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "container", ident, "entity_edit_toml.html")
-@app.route('/editgroup/<editgroup_id>/container/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/container/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def container_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'container', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "container", ident, "entity_edit_toml.html")
+
-@app.route('/creator/create/toml', methods=['GET', 'POST'])
+@app.route("/creator/create/toml", methods=["GET", "POST"])
@login_required
def creator_create_toml_view():
- return generic_entity_toml_edit(None, 'creator', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "creator", None, "entity_create_toml.html")
+
-@app.route('/creator/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/creator/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def creator_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'creator', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "creator", ident, "entity_edit_toml.html")
-@app.route('/editgroup/<editgroup_id>/creator/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/creator/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def creator_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'creator', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "creator", ident, "entity_edit_toml.html")
+
-@app.route('/file/create/toml', methods=['GET', 'POST'])
+@app.route("/file/create/toml", methods=["GET", "POST"])
@login_required
def file_create_toml_view():
- return generic_entity_toml_edit(None, 'file', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "file", None, "entity_create_toml.html")
-@app.route('/file/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/file/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def file_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'file', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "file", ident, "entity_edit_toml.html")
+
-@app.route('/editgroup/<editgroup_id>/file/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/file/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def file_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'file', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "file", ident, "entity_edit_toml.html")
+
-@app.route('/fileset/create/toml', methods=['GET', 'POST'])
+@app.route("/fileset/create/toml", methods=["GET", "POST"])
@login_required
def fileset_create_toml_view():
- return generic_entity_toml_edit(None, 'fileset', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "fileset", None, "entity_create_toml.html")
-@app.route('/fileset/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/fileset/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def fileset_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'fileset', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "fileset", ident, "entity_edit_toml.html")
+
-@app.route('/editgroup/<editgroup_id>/fileset/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/fileset/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def fileset_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'fileset', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "fileset", ident, "entity_edit_toml.html")
-@app.route('/webcapture/create/toml', methods=['GET', 'POST'])
+
+@app.route("/webcapture/create/toml", methods=["GET", "POST"])
@login_required
def webcapture_create_toml_view():
- return generic_entity_toml_edit(None, 'webcapture', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "webcapture", None, "entity_create_toml.html")
+
-@app.route('/webcapture/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/webcapture/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def webcapture_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'webcapture', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "webcapture", ident, "entity_edit_toml.html")
-@app.route('/editgroup/<editgroup_id>/webcapture/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/webcapture/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def webcapture_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'webcapture', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "webcapture", ident, "entity_edit_toml.html")
+
-@app.route('/release/create/toml', methods=['GET', 'POST'])
+@app.route("/release/create/toml", methods=["GET", "POST"])
@login_required
def release_create_toml_view():
- return generic_entity_toml_edit(None, 'release', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "release", None, "entity_create_toml.html")
-@app.route('/release/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/release/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def release_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'release', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "release", ident, "entity_edit_toml.html")
+
-@app.route('/editgroup/<editgroup_id>/release/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/release/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def release_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'release', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "release", ident, "entity_edit_toml.html")
+
-@app.route('/work/create/toml', methods=['GET', 'POST'])
+@app.route("/work/create/toml", methods=["GET", "POST"])
@login_required
def work_create_toml_view():
- return generic_entity_toml_edit(None, 'work', None, 'entity_create_toml.html')
+ return generic_entity_toml_edit(None, "work", None, "entity_create_toml.html")
-@app.route('/work/<ident>/edit/toml', methods=['GET', 'POST'])
+
+@app.route("/work/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def work_edit_toml_view(ident):
- return generic_entity_toml_edit(None, 'work', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(None, "work", ident, "entity_edit_toml.html")
+
-@app.route('/editgroup/<editgroup_id>/work/<ident>/edit/toml', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/work/<ident>/edit/toml", methods=["GET", "POST"])
@login_required
def work_editgroup_edit_toml(editgroup_id, ident):
- return generic_entity_toml_edit(editgroup_id, 'work', ident, 'entity_edit_toml.html')
+ return generic_entity_toml_edit(editgroup_id, "work", ident, "entity_edit_toml.html")
+
### TOML-Only Editing Redirects ################################################
-@app.route('/creator/create', methods=['GET'])
+
+@app.route("/creator/create", methods=["GET"])
@login_required
def creator_create_view():
- return redirect('/creator/create/toml')
+ return redirect("/creator/create/toml")
-@app.route('/creator/<ident>/edit', methods=['GET'])
+
+@app.route("/creator/<ident>/edit", methods=["GET"])
@login_required
def creator_edit_view(ident):
- return redirect(f'/creator/{ident}/edit/toml')
+ return redirect(f"/creator/{ident}/edit/toml")
+
-@app.route('/editgroup/<editgroup_id>/creator/<ident>/edit', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/creator/<ident>/edit", methods=["GET", "POST"])
@login_required
def creator_editgroup_edit(editgroup_id, ident):
- return redirect(f'/editgroup/{editgroup_id}/creator/{ident}/edit/toml')
+ return redirect(f"/editgroup/{editgroup_id}/creator/{ident}/edit/toml")
-@app.route('/fileset/create', methods=['GET'])
+
+@app.route("/fileset/create", methods=["GET"])
@login_required
def fileset_create_view():
- return redirect('/fileset/create/toml')
+ return redirect("/fileset/create/toml")
+
-@app.route('/fileset/<ident>/edit', methods=['GET'])
+@app.route("/fileset/<ident>/edit", methods=["GET"])
@login_required
def fileset_edit_view(ident):
- return redirect(f'/fileset/{ident}/edit/toml')
+ return redirect(f"/fileset/{ident}/edit/toml")
+
-@app.route('/editgroup/<editgroup_id>/fileset/<ident>/edit', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/fileset/<ident>/edit", methods=["GET", "POST"])
@login_required
def fileset_editgroup_edit(editgroup_id, ident):
- return redirect(f'/editgroup/{editgroup_id}/fileset/{ident}/edit/toml')
+ return redirect(f"/editgroup/{editgroup_id}/fileset/{ident}/edit/toml")
-@app.route('/webcapture/create', methods=['GET'])
+
+@app.route("/webcapture/create", methods=["GET"])
@login_required
def webcapture_create_view():
- return redirect('/webcapture/create/toml')
+ return redirect("/webcapture/create/toml")
+
-@app.route('/webcapture/<ident>/edit', methods=['GET'])
+@app.route("/webcapture/<ident>/edit", methods=["GET"])
@login_required
def webcapture_edit_view(ident):
- return redirect(f'/webcapture/{ident}/edit/toml')
+ return redirect(f"/webcapture/{ident}/edit/toml")
-@app.route('/editgroup/<editgroup_id>/webcapture/<ident>/edit', methods=['GET', 'POST'])
+
+@app.route("/editgroup/<editgroup_id>/webcapture/<ident>/edit", methods=["GET", "POST"])
@login_required
def webcapture_editgroup_edit(editgroup_id, ident):
- return redirect(f'/editgroup/{editgroup_id}/webcapture/{ident}/edit/toml')
+ return redirect(f"/editgroup/{editgroup_id}/webcapture/{ident}/edit/toml")
+
-@app.route('/work/create', methods=['GET'])
+@app.route("/work/create", methods=["GET"])
@login_required
def work_create_view():
- return redirect('/work/create/toml')
+ return redirect("/work/create/toml")
-@app.route('/work/<ident>/edit', methods=['GET'])
+
+@app.route("/work/<ident>/edit", methods=["GET"])
@login_required
def work_edit_view(ident):
- return redirect(f'/work/{ident}/edit/toml')
+ return redirect(f"/work/{ident}/edit/toml")
+
-@app.route('/editgroup/<editgroup_id>/work/<ident>/edit', methods=['GET', 'POST'])
+@app.route("/editgroup/<editgroup_id>/work/<ident>/edit", methods=["GET", "POST"])
@login_required
def work_editgroup_edit(editgroup_id, ident):
- return redirect(f'/editgroup/{editgroup_id}/work/{ident}/edit/toml')
+ return redirect(f"/editgroup/{editgroup_id}/work/{ident}/edit/toml")
diff --git a/python/fatcat_web/entity_helpers.py b/python/fatcat_web/entity_helpers.py
index 5522f3b5..dbe11cb4 100644
--- a/python/fatcat_web/entity_helpers.py
+++ b/python/fatcat_web/entity_helpers.py
@@ -1,4 +1,3 @@
-
from fatcat_openapi_client import (
ContainerEntity,
CreatorEntity,
@@ -22,41 +21,46 @@ from fatcat_web.hacks import strip_extlink_xml, wayback_suffix
def enrich_container_entity(entity):
- if entity.state in ('redirect', 'deleted'):
+ if entity.state in ("redirect", "deleted"):
return entity
if entity.state == "active":
entity._es = container_to_elasticsearch(entity, force_bool=False)
return entity
+
def enrich_creator_entity(entity):
- if entity.state in ('redirect', 'deleted'):
+ if entity.state in ("redirect", "deleted"):
return entity
entity._releases = None
- if entity.state in ('active', 'wip'):
+ if entity.state in ("active", "wip"):
entity._releases = api.get_creator_releases(entity.ident)
return entity
+
def enrich_file_entity(entity):
if entity.state == "active":
entity._es = file_to_elasticsearch(entity)
return entity
+
def enrich_fileset_entity(entity):
- if entity.state in ('redirect', 'deleted'):
+ if entity.state in ("redirect", "deleted"):
return entity
entity._total_size = None
if entity.manifest is not None:
entity._total_size = sum([f.size for f in entity.manifest]) or 0
return entity
+
def enrich_webcapture_entity(entity):
- if entity.state in ('redirect', 'deleted'):
+ if entity.state in ("redirect", "deleted"):
return entity
entity._wayback_suffix = wayback_suffix(entity)
return entity
+
def enrich_release_entity(entity):
- if entity.state in ('redirect', 'deleted'):
+ if entity.state in ("redirect", "deleted"):
return entity
if entity.state == "active":
entity._es = release_to_elasticsearch(entity, force_bool=False)
@@ -64,8 +68,9 @@ def enrich_release_entity(entity):
entity.container._es = container_to_elasticsearch(entity.container, force_bool=False)
if entity.files:
# remove shadows-only files with no URLs
- entity.files = [f for f in entity.files
- if not (f.extra and f.extra.get('shadows') and not f.urls)]
+ entity.files = [
+ f for f in entity.files if not (f.extra and f.extra.get("shadows") and not f.urls)
+ ]
if entity.filesets:
for fs in entity.filesets:
fs._total_size = sum([f.size for f in fs.manifest])
@@ -79,60 +84,74 @@ def enrich_release_entity(entity):
# xlink:href="http://lockss.org/"
# xlink:type="simple">http://lockss.org/</ext-link>. Accessed: 2014
# November 1.
- if ref.extra and ref.extra.get('unstructured'):
- ref.extra['unstructured'] = strip_extlink_xml(ref.extra['unstructured'])
+ if ref.extra and ref.extra.get("unstructured"):
+ ref.extra["unstructured"] = strip_extlink_xml(ref.extra["unstructured"])
     # for backwards compatibility, copy extra['subtitle'] to subtitle
- if not entity.subtitle and entity.extra and entity.extra.get('subtitle'):
- if isinstance(entity.extra['subtitle'], str):
- entity.subtitle = entity.extra['subtitle']
- elif isinstance(entity.extra['subtitle'], list):
- entity.subtitle = entity.extra['subtitle'][0] or None
+ if not entity.subtitle and entity.extra and entity.extra.get("subtitle"):
+ if isinstance(entity.extra["subtitle"], str):
+ entity.subtitle = entity.extra["subtitle"]
+ elif isinstance(entity.extra["subtitle"], list):
+ entity.subtitle = entity.extra["subtitle"][0] or None
     # author list to display; ensure it's sorted by index (any others with
# index=None go to end of list)
- authors = [c for c in entity.contribs if
- c.role in ('author', None) and
- (c.surname or c.raw_name or (c.creator and c.creator.surname))
+ authors = [
+ c
+ for c in entity.contribs
+ if c.role in ("author", None)
+ and (c.surname or c.raw_name or (c.creator and c.creator.surname))
]
entity._authors = sorted(authors, key=lambda c: (c.index is None and 99999999) or c.index)
# need authors, title for citeproc to work
entity._can_citeproc = bool(entity._authors) and bool(entity.title)
if entity.abstracts:
# hack to show plain text instead of latex abstracts
- if 'latex' in entity.abstracts[0].mimetype:
+ if "latex" in entity.abstracts[0].mimetype:
entity.abstracts.reverse()
# hack to (partially) clean up common JATS abstract display case
- if entity.abstracts[0].mimetype == 'application/xml+jats':
- for tag in ('p', 'jats', 'jats:p'):
- entity.abstracts[0].content = entity.abstracts[0].content.replace('<{}>'.format(tag), '')
- entity.abstracts[0].content = entity.abstracts[0].content.replace('</{}>'.format(tag), '')
+ if entity.abstracts[0].mimetype == "application/xml+jats":
+ for tag in ("p", "jats", "jats:p"):
+ entity.abstracts[0].content = entity.abstracts[0].content.replace(
+ "<{}>".format(tag), ""
+ )
+ entity.abstracts[0].content = entity.abstracts[0].content.replace(
+ "</{}>".format(tag), ""
+ )
# ugh, double encoding happens
- entity.abstracts[0].content = entity.abstracts[0].content.replace('&lt;/{}&gt;'.format(tag), '')
- entity.abstracts[0].content = entity.abstracts[0].content.replace('&lt;{}&gt;'.format(tag), '')
+ entity.abstracts[0].content = entity.abstracts[0].content.replace(
+ "&lt;/{}&gt;".format(tag), ""
+ )
+ entity.abstracts[0].content = entity.abstracts[0].content.replace(
+ "&lt;{}&gt;".format(tag), ""
+ )
return entity
+
def enrich_work_entity(entity):
- if entity.state in ('redirect', 'deleted'):
+ if entity.state in ("redirect", "deleted"):
return entity
entity._releases = None
- if entity.state in ('active', 'wip'):
+ if entity.state in ("active", "wip"):
entity._releases = api.get_work_releases(entity.ident)
return entity
+
def generic_get_entity(entity_type, ident):
try:
- if entity_type == 'container':
+ if entity_type == "container":
return enrich_container_entity(api.get_container(ident))
- elif entity_type == 'creator':
+ elif entity_type == "creator":
return enrich_creator_entity(api.get_creator(ident))
- elif entity_type == 'file':
+ elif entity_type == "file":
return enrich_file_entity(api.get_file(ident, expand="releases"))
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
return enrich_fileset_entity(api.get_fileset(ident, expand="releases"))
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
return enrich_webcapture_entity(api.get_webcapture(ident, expand="releases"))
- elif entity_type == 'release':
- return enrich_release_entity(api.get_release(ident, expand="container,creators,files,filesets,webcaptures"))
- elif entity_type == 'work':
+ elif entity_type == "release":
+ return enrich_release_entity(
+ api.get_release(ident, expand="container,creators,files,filesets,webcaptures")
+ )
+ elif entity_type == "work":
return enrich_work_entity(api.get_work(ident))
else:
raise NotImplementedError
@@ -141,21 +160,28 @@ def generic_get_entity(entity_type, ident):
except ApiValueError:
abort(400)
+
def generic_get_entity_revision(entity_type, revision_id):
try:
- if entity_type == 'container':
+ if entity_type == "container":
return enrich_container_entity(api.get_container_revision(revision_id))
- elif entity_type == 'creator':
+ elif entity_type == "creator":
return enrich_creator_entity(api.get_creator_revision(revision_id))
- elif entity_type == 'file':
+ elif entity_type == "file":
return enrich_file_entity(api.get_file_revision(revision_id, expand="releases"))
- elif entity_type == 'fileset':
- return enrich_fileset_entity(api.get_fileset_revision(revision_id, expand="releases"))
- elif entity_type == 'webcapture':
- return enrich_webcapture_entity(api.get_webcapture_revision(revision_id, expand="releases"))
- elif entity_type == 'release':
- return enrich_release_entity(api.get_release_revision(revision_id, expand="container"))
- elif entity_type == 'work':
+ elif entity_type == "fileset":
+ return enrich_fileset_entity(
+ api.get_fileset_revision(revision_id, expand="releases")
+ )
+ elif entity_type == "webcapture":
+ return enrich_webcapture_entity(
+ api.get_webcapture_revision(revision_id, expand="releases")
+ )
+ elif entity_type == "release":
+ return enrich_release_entity(
+ api.get_release_revision(revision_id, expand="container")
+ )
+ elif entity_type == "work":
return enrich_work_entity(api.get_work_revision(revision_id))
else:
raise NotImplementedError
@@ -164,40 +190,42 @@ def generic_get_entity_revision(entity_type, revision_id):
except ApiValueError:
abort(400)
+
def generic_deleted_entity(entity_type, ident):
- if entity_type == 'container':
+ if entity_type == "container":
entity = ContainerEntity()
- elif entity_type == 'creator':
+ elif entity_type == "creator":
entity = CreatorEntity()
- elif entity_type == 'file':
+ elif entity_type == "file":
entity = FileEntity()
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
entity = FilesetEntity()
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
entity = WebcaptureEntity()
- elif entity_type == 'release':
+ elif entity_type == "release":
entity = ReleaseEntity(ext_ids=ReleaseExtIds())
- elif entity_type == 'work':
+ elif entity_type == "work":
entity = WorkEntity()
else:
raise NotImplementedError
entity.ident = ident
return entity
+
def generic_get_editgroup_entity(editgroup, entity_type, ident):
- if entity_type == 'container':
+ if entity_type == "container":
edits = editgroup.edits.containers
- elif entity_type == 'creator':
+ elif entity_type == "creator":
edits = editgroup.edits.creators
- elif entity_type == 'file':
+ elif entity_type == "file":
edits = editgroup.edits.files
- elif entity_type == 'fileset':
+ elif entity_type == "fileset":
edits = editgroup.edits.filesets
- elif entity_type == 'webcapture':
+ elif entity_type == "webcapture":
edits = editgroup.edits.webcaptures
- elif entity_type == 'release':
+ elif entity_type == "release":
edits = editgroup.edits.releases
- elif entity_type == 'work':
+ elif entity_type == "work":
edits = editgroup.edits.works
else:
raise NotImplementedError
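
generic_get_entity() and its revision/editgroup variants all dispatch on the same entity-type strings. The sketch below expresses that dispatch as a lookup table rather than an if/elif chain; it is only an illustration, with the API client and enrichment callables passed in as parameters so the snippet stands alone:

    # Illustrative sketch of the type-string dispatch in generic_get_entity().
    # `api` is any client exposing get_container/get_creator/etc.; `enrichers`
    # maps the same type strings to the enrich_* helpers defined above.
    def sketch_get_entity(api, enrichers, entity_type, ident):
        getters = {
            "container": lambda: api.get_container(ident),
            "creator": lambda: api.get_creator(ident),
            "file": lambda: api.get_file(ident, expand="releases"),
            "work": lambda: api.get_work(ident),
        }
        if entity_type not in getters:
            raise NotImplementedError(entity_type)
        return enrichers[entity_type](getters[entity_type]())
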
diff --git a/python/fatcat_web/forms.py b/python/fatcat_web/forms.py
index b432ac16..25bfbb90 100644
--- a/python/fatcat_web/forms.py
+++ b/python/fatcat_web/forms.py
@@ -1,4 +1,3 @@
-
"""
 Note: in theory could use, eg, https://github.com/christabor/swagger_wtforms,
but can't find one that is actually maintained.
@@ -32,84 +31,99 @@ from wtforms import (
from fatcat_tools.transforms import entity_to_toml
release_type_options = [
- ('', 'Unknown (blank)'),
- ('article-journal', 'Journal Article'),
- ('paper-conference', 'Conference Proceeding'),
- ('article', 'Article (non-journal)'),
- ('book', 'Book'),
- ('chapter', 'Book Chapter'),
- ('dataset', 'Dataset'),
- ('stub', 'Invalid/Stub'),
+ ("", "Unknown (blank)"),
+ ("article-journal", "Journal Article"),
+ ("paper-conference", "Conference Proceeding"),
+ ("article", "Article (non-journal)"),
+ ("book", "Book"),
+ ("chapter", "Book Chapter"),
+ ("dataset", "Dataset"),
+ ("stub", "Invalid/Stub"),
]
release_stage_options = [
- ('', 'Unknown (blank)'),
- ('draft', 'Draft'),
- ('submitted', 'Submitted'),
- ('accepted', 'Accepted'),
- ('published', 'Published'),
- ('updated', 'Updated'),
+ ("", "Unknown (blank)"),
+ ("draft", "Draft"),
+ ("submitted", "Submitted"),
+ ("accepted", "Accepted"),
+ ("published", "Published"),
+ ("updated", "Updated"),
]
withdrawn_status_options = [
- ('', 'Not Withdrawn (blank)'),
- ('retracted', 'Retracted'),
- ('withdrawn', 'Withdrawn'),
- ('concern', 'Concern Noted'),
- ('spam', 'Spam'),
-    ('legal', 'Legal Takedown'),
- ('safety', 'Public Safety'),
- ('national-security', 'National Security'),
+ ("", "Not Withdrawn (blank)"),
+ ("retracted", "Retracted"),
+ ("withdrawn", "Withdrawn"),
+ ("concern", "Concern Noted"),
+ ("spam", "Spam"),
+    ("legal", "Legal Takedown"),
+ ("safety", "Public Safety"),
+ ("national-security", "National Security"),
]
role_type_options = [
- ('author', 'Author'),
- ('editor', 'Editor'),
- ('translator', 'Translator'),
+ ("author", "Author"),
+ ("editor", "Editor"),
+ ("translator", "Translator"),
]
+
class EntityEditForm(FlaskForm):
- editgroup_id = StringField('Editgroup ID',
- [validators.Optional(True),
- validators.Length(min=26, max=26)])
- editgroup_description = StringField('Editgroup Description',
- [validators.Optional(True)])
- edit_description = StringField('Description of Changes',
- [validators.Optional(True)])
+ editgroup_id = StringField(
+ "Editgroup ID", [validators.Optional(True), validators.Length(min=26, max=26)]
+ )
+ editgroup_description = StringField("Editgroup Description", [validators.Optional(True)])
+ edit_description = StringField("Description of Changes", [validators.Optional(True)])
+
class ReleaseContribForm(FlaskForm):
class Meta:
# this is a sub-form, so disable CSRF
csrf = False
- #surname
- #given_name
- #creator_id (?)
- #orcid (for match?)
- prev_index = HiddenField('prev_revision index', default=None)
- raw_name = StringField('Display Name',
- [validators.DataRequired()])
- role = SelectField(
- [validators.DataRequired()],
- choices=role_type_options,
- default='author')
+ # surname
+ # given_name
+ # creator_id (?)
+ # orcid (for match?)
+ prev_index = HiddenField("prev_revision index", default=None)
+ raw_name = StringField("Display Name", [validators.DataRequired()])
+ role = SelectField([validators.DataRequired()], choices=role_type_options, default="author")
+
+
+RELEASE_SIMPLE_ATTRS = [
+ "title",
+ "original_title",
+ "work_id",
+ "container_id",
+ "release_type",
+ "release_stage",
+ "withdrawn_status",
+ "release_date",
+ "release_year",
+ "volume",
+ "issue",
+ "pages",
+ "publisher",
+ "language",
+ "license_slug",
+]
-RELEASE_SIMPLE_ATTRS = ['title', 'original_title', 'work_id', 'container_id',
- 'release_type', 'release_stage', 'withdrawn_status', 'release_date',
- 'release_year', 'volume', 'issue', 'pages', 'publisher', 'language',
- 'license_slug']
+RELEASE_EXTID_ATTRS = ["doi", "wikidata_qid", "isbn13", "pmid", "pmcid"]
-RELEASE_EXTID_ATTRS = ['doi', 'wikidata_qid', 'isbn13', 'pmid', 'pmcid']
def valid_year(form, field):
if field.data > datetime.date.today().year + 5:
- raise ValidationError(
- f"Year is too far in the future: {field.data}")
+ raise ValidationError(f"Year is too far in the future: {field.data}")
if field.data < 10:
- raise ValidationError(
- f"Year is too far in the past: {field.data}")
+ raise ValidationError(f"Year is too far in the past: {field.data}")
+
def valid_2char_ascii(form, field):
- if len(field.data) != 2 or len(field.data.encode('utf-8')) != 2 or not field.data.isalpha() or field.data != field.data.lower():
- raise ValidationError(
- f"Must be 2-character ISO format, lower case: {field.data}")
+ if (
+ len(field.data) != 2
+ or len(field.data.encode("utf-8")) != 2
+ or not field.data.isalpha()
+ or field.data != field.data.lower()
+ ):
+ raise ValidationError(f"Must be 2-character ISO format, lower case: {field.data}")
+
class ReleaseEntityForm(EntityEditForm):
"""
@@ -117,50 +131,52 @@ class ReleaseEntityForm(EntityEditForm):
- field types: fatcat id
- date
"""
- title = StringField('Title',
- [validators.DataRequired()])
- original_title = StringField('Title in Original Language (if different)')
- work_id = StringField('Work FCID',
- [validators.Optional(True),
- validators.Length(min=26, max=26)])
- container_id = StringField('Container FCID',
- [validators.Optional(True),
- validators.Length(min=26, max=26)])
- release_type = SelectField('Release Type',
- [validators.DataRequired()],
- choices=release_type_options,
- default='')
+
+ title = StringField("Title", [validators.DataRequired()])
+ original_title = StringField("Title in Original Language (if different)")
+ work_id = StringField(
+ "Work FCID", [validators.Optional(True), validators.Length(min=26, max=26)]
+ )
+ container_id = StringField(
+ "Container FCID", [validators.Optional(True), validators.Length(min=26, max=26)]
+ )
+ release_type = SelectField(
+ "Release Type", [validators.DataRequired()], choices=release_type_options, default=""
+ )
release_stage = SelectField(choices=release_stage_options)
- withdrawn_status = SelectField("Withdrawn Status",
+ withdrawn_status = SelectField(
+ "Withdrawn Status",
[validators.Optional(True)],
choices=withdrawn_status_options,
- default='')
- release_date = DateField('Release Date',
- [validators.Optional(True)])
- release_year = IntegerField('Release Year',
- [validators.Optional(True), valid_year])
- doi = StringField('DOI',
- [validators.Regexp(r'^10\..*\/.*', message="DOI must be valid"),
- validators.Optional(True)])
- wikidata_qid = StringField('Wikidata QID')
- isbn13 = StringField('ISBN-13')
- pmid = StringField('PubMed Id')
- pmcid = StringField('PubMed Central Id')
- #core_id
- #arxiv_id
- #jstor_id
- #oai
- #hdl
- volume = StringField('Volume')
- issue = StringField('Issue')
- pages = StringField('Pages')
- publisher = StringField('Publisher (optional)')
- language = StringField('Language (code)',
- [validators.Optional(True), valid_2char_ascii])
- license_slug = StringField('License (slug)')
+ default="",
+ )
+ release_date = DateField("Release Date", [validators.Optional(True)])
+ release_year = IntegerField("Release Year", [validators.Optional(True), valid_year])
+ doi = StringField(
+ "DOI",
+ [
+ validators.Regexp(r"^10\..*\/.*", message="DOI must be valid"),
+ validators.Optional(True),
+ ],
+ )
+ wikidata_qid = StringField("Wikidata QID")
+ isbn13 = StringField("ISBN-13")
+ pmid = StringField("PubMed Id")
+ pmcid = StringField("PubMed Central Id")
+ # core_id
+ # arxiv_id
+ # jstor_id
+ # oai
+ # hdl
+ volume = StringField("Volume")
+ issue = StringField("Issue")
+ pages = StringField("Pages")
+ publisher = StringField("Publisher (optional)")
+ language = StringField("Language (code)", [validators.Optional(True), valid_2char_ascii])
+ license_slug = StringField("License (slug)")
contribs = FieldList(FormField(ReleaseContribForm))
- #refs
- #abstracts
+ # refs
+ # abstracts
@staticmethod
def from_entity(re):
@@ -183,7 +199,7 @@ class ReleaseEntityForm(EntityEditForm):
return ref
def to_entity(self):
- assert(self.title.data)
+ assert self.title.data
entity = ReleaseEntity(title=self.title.data, ext_ids=ReleaseExtIds())
self.update_entity(entity)
return entity
@@ -198,13 +214,13 @@ class ReleaseEntityForm(EntityEditForm):
for simple_attr in RELEASE_SIMPLE_ATTRS:
a = getattr(self, simple_attr).data
# special case blank strings
- if a == '':
+ if a == "":
a = None
setattr(re, simple_attr, a)
for extid_attr in RELEASE_EXTID_ATTRS:
a = getattr(self, extid_attr).data
# special case blank strings
- if a == '':
+ if a == "":
a = None
setattr(re.ext_ids, extid_attr, a)
if self.release_date.data:
@@ -219,7 +235,7 @@ class ReleaseEntityForm(EntityEditForm):
old_contribs = []
re.contribs = []
for c in self.contribs:
- if c.prev_index.data not in ('', None):
+ if c.prev_index.data not in ("", None):
rc = old_contribs[int(c.prev_index.data)]
rc.role = c.role.data or None
rc.raw_name = c.raw_name.data or None
@@ -232,40 +248,52 @@ class ReleaseEntityForm(EntityEditForm):
if self.edit_description.data:
re.edit_extra = dict(description=self.edit_description.data)
+
container_type_options = (
- ('', 'Unknown (blank)'),
- ('journal', 'Scholarly Journal'),
- ('proceedings', 'Proceedings'),
- ('book-series', 'Book Series'),
- ('blog', 'Blog'),
- ('magazine', 'Magazine'),
- ('trade', 'Trade Magazine'),
- ('test', 'Test / Dummy'),
+ ("", "Unknown (blank)"),
+ ("journal", "Scholarly Journal"),
+ ("proceedings", "Proceedings"),
+ ("book-series", "Book Series"),
+ ("blog", "Blog"),
+ ("magazine", "Magazine"),
+ ("trade", "Trade Magazine"),
+ ("test", "Test / Dummy"),
)
-CONTAINER_SIMPLE_ATTRS = ['name', 'container_type', 'publisher', 'issnl',
- 'wikidata_qid', 'issne', 'issnp']
-CONTAINER_EXTRA_ATTRS = ['original_name', 'country']
+CONTAINER_SIMPLE_ATTRS = [
+ "name",
+ "container_type",
+ "publisher",
+ "issnl",
+ "wikidata_qid",
+ "issne",
+ "issnp",
+]
+CONTAINER_EXTRA_ATTRS = ["original_name", "country"]
+
class ContainerEntityForm(EntityEditForm):
- name = StringField('Name/Title',
- [validators.DataRequired()])
- container_type = SelectField('Container Type',
+ name = StringField("Name/Title", [validators.DataRequired()])
+ container_type = SelectField(
+ "Container Type",
[validators.Optional(True)],
choices=container_type_options,
- default='')
+ default="",
+ )
publisher = StringField("Publisher")
issnl = StringField("ISSN-L (linking)")
issne = StringField("ISSN (electronic)")
issnp = StringField("ISSN (print)")
original_name = StringField("Name in Original Language (if different)")
- country = StringField("Country of Publication (ISO code)",
- [validators.Optional(True), valid_2char_ascii])
- wikidata_qid = StringField('Wikidata QID')
+ country = StringField(
+ "Country of Publication (ISO code)", [validators.Optional(True), valid_2char_ascii]
+ )
+ wikidata_qid = StringField("Wikidata QID")
urls = FieldList(
- StringField("Container URLs",
- [validators.DataRequired(),
- validators.URL(require_tld=False)]))
+ StringField(
+ "Container URLs", [validators.DataRequired(), validators.URL(require_tld=False)]
+ )
+ )
@staticmethod
def from_entity(ce):
@@ -281,13 +309,13 @@ class ContainerEntityForm(EntityEditForm):
if ce.extra.get(k):
a = getattr(cef, k)
a.data = ce.extra[k]
- if ce.extra.get('urls'):
- for url in ce.extra['urls']:
+ if ce.extra.get("urls"):
+ for url in ce.extra["urls"]:
cef.urls.append_entry(url)
return cef
def to_entity(self):
- assert(self.name.data)
+ assert self.name.data
entity = ContainerEntity(name=self.name.data)
self.update_entity(entity)
return entity
@@ -302,70 +330,66 @@ class ContainerEntityForm(EntityEditForm):
for simple_attr in CONTAINER_SIMPLE_ATTRS:
a = getattr(self, simple_attr).data
# special case blank strings
- if a == '':
+ if a == "":
a = None
setattr(ce, simple_attr, a)
if not ce.extra:
ce.extra = dict()
for extra_attr in CONTAINER_EXTRA_ATTRS:
a = getattr(self, extra_attr).data
- if a and a != '':
+ if a and a != "":
ce.extra[extra_attr] = a
extra_urls = []
for url in self.urls:
extra_urls.append(url.data)
if extra_urls:
- ce.extra['urls'] = extra_urls
+ ce.extra["urls"] = extra_urls
if self.edit_description.data:
ce.edit_extra = dict(description=self.edit_description.data)
if not ce.extra:
ce.extra = None
+
url_rel_options = [
- ('web', 'Public Web'),
- ('webarchive', 'Web Archive'),
- ('repository', 'Repository'),
- ('archive', 'Preservation Archive'),
- ('academicsocial', 'Academic Social Network'),
- ('publisher', 'Publisher'),
- ('dweb', 'Decentralized Web'),
- ('aggregator', 'Aggregator'),
+ ("web", "Public Web"),
+ ("webarchive", "Web Archive"),
+ ("repository", "Repository"),
+ ("archive", "Preservation Archive"),
+ ("academicsocial", "Academic Social Network"),
+ ("publisher", "Publisher"),
+ ("dweb", "Decentralized Web"),
+ ("aggregator", "Aggregator"),
]
-FILE_SIMPLE_ATTRS = ['size', 'md5', 'sha1', 'sha256', 'mimetype']
+FILE_SIMPLE_ATTRS = ["size", "md5", "sha1", "sha256", "mimetype"]
+
class FileUrlForm(FlaskForm):
class Meta:
# this is a sub-form, so disable CSRF
csrf = False
- url = StringField('Display Name',
- [validators.DataRequired(),
- validators.URL(require_tld=False)])
- rel = SelectField(
- [validators.DataRequired()],
- choices=url_rel_options,
- default='web')
+ url = StringField(
+ "Display Name", [validators.DataRequired(), validators.URL(require_tld=False)]
+ )
+ rel = SelectField([validators.DataRequired()], choices=url_rel_options, default="web")
+
class FileEntityForm(EntityEditForm):
# TODO: positive definite
- size = IntegerField('Size (bytes)',
- [validators.DataRequired()])
- md5 = StringField("MD5",
- [validators.Optional(True),
- validators.Length(min=32, max=32)])
- sha1 = StringField("SHA-1",
- [validators.DataRequired(),
- validators.Length(min=40, max=40)])
- sha256 = StringField("SHA-256",
- [validators.Optional(True),
- validators.Length(min=64, max=64)])
+ size = IntegerField("Size (bytes)", [validators.DataRequired()])
+ md5 = StringField("MD5", [validators.Optional(True), validators.Length(min=32, max=32)])
+ sha1 = StringField("SHA-1", [validators.DataRequired(), validators.Length(min=40, max=40)])
+ sha256 = StringField(
+ "SHA-256", [validators.Optional(True), validators.Length(min=64, max=64)]
+ )
urls = FieldList(FormField(FileUrlForm))
mimetype = StringField("Mimetype")
release_ids = FieldList(
- StringField("Release FCID",
- [validators.DataRequired(),
- validators.Length(min=26, max=26)]))
+ StringField(
+ "Release FCID", [validators.DataRequired(), validators.Length(min=26, max=26)]
+ )
+ )
@staticmethod
def from_entity(fe):
@@ -386,7 +410,7 @@ class FileEntityForm(EntityEditForm):
return ref
def to_entity(self):
- assert(self.sha1.data)
+ assert self.sha1.data
entity = FileEntity()
self.update_entity(entity)
return entity
@@ -400,87 +424,92 @@ class FileEntityForm(EntityEditForm):
for simple_attr in FILE_SIMPLE_ATTRS:
a = getattr(self, simple_attr).data
# be flexible about hash capitalization
- if simple_attr in ('md5', 'sha1', 'sha256'):
+ if simple_attr in ("md5", "sha1", "sha256"):
a = a.lower()
# special case blank strings
- if a == '':
+ if a == "":
a = None
setattr(fe, simple_attr, a)
fe.urls = []
for u in self.urls:
- fe.urls.append(FileUrl(
- rel=u.rel.data or None,
- url=u.url.data or None,
- ))
+ fe.urls.append(
+ FileUrl(
+ rel=u.rel.data or None,
+ url=u.url.data or None,
+ )
+ )
fe.release_ids = []
for ri in self.release_ids:
fe.release_ids.append(ri.data)
if self.edit_description.data:
fe.edit_extra = dict(description=self.edit_description.data)
+
INGEST_TYPE_OPTIONS = [
- ('pdf', 'PDF Fulltext'),
- ('html', 'HTML Fulltext'),
- ('xml', 'XML Fulltext'),
+ ("pdf", "PDF Fulltext"),
+ ("html", "HTML Fulltext"),
+ ("xml", "XML Fulltext"),
]
+
class SavePaperNowForm(FlaskForm):
- base_url = StringField(
- "URL",
- [validators.DataRequired(),
- validators.URL()])
+ base_url = StringField("URL", [validators.DataRequired(), validators.URL()])
ingest_type = SelectField(
- "Content Type",
- [validators.DataRequired()],
- choices=INGEST_TYPE_OPTIONS,
- default='pdf')
+ "Content Type", [validators.DataRequired()], choices=INGEST_TYPE_OPTIONS, default="pdf"
+ )
release_stage = SelectField(
"Publication Stage",
[validators.DataRequired()],
choices=release_stage_options,
- default='')
+ default="",
+ )
- def to_ingest_request(self, release, ingest_request_source='savepapernow'):
+ def to_ingest_request(self, release, ingest_request_source="savepapernow"):
base_url = self.base_url.data
ext_ids = release.ext_ids.to_dict()
# by default this dict has a bunch of empty values
ext_ids = dict([(k, v) for (k, v) in ext_ids.items() if v])
ingest_request = {
- 'ingest_type': self.ingest_type.data,
- 'ingest_request_source': ingest_request_source,
- 'link_source': 'spn',
- 'link_source_id': release.ident,
- 'base_url': base_url,
- 'fatcat': {
- 'release_ident': release.ident,
- 'work_ident': release.work_id,
+ "ingest_type": self.ingest_type.data,
+ "ingest_request_source": ingest_request_source,
+ "link_source": "spn",
+ "link_source_id": release.ident,
+ "base_url": base_url,
+ "fatcat": {
+ "release_ident": release.ident,
+ "work_ident": release.work_id,
},
- 'ext_ids': ext_ids,
+ "ext_ids": ext_ids,
}
if self.release_stage.data:
- ingest_request['release_stage'] = self.release_stage.data
+ ingest_request["release_stage"] = self.release_stage.data
if release.ext_ids.doi and base_url == "https://doi.org/{}".format(release.ext_ids.doi):
- ingest_request['link_source'] = 'doi'
- ingest_request['link_source_id'] = release.ext_ids.doi
- elif release.ext_ids.arxiv and base_url == "https://arxiv.org/pdf/{}.pdf".format(release.ext_ids.arxiv):
- ingest_request['link_source'] = 'arxiv'
- ingest_request['link_source_id'] = release.ext_ids.arxiv
+ ingest_request["link_source"] = "doi"
+ ingest_request["link_source_id"] = release.ext_ids.doi
+ elif release.ext_ids.arxiv and base_url == "https://arxiv.org/pdf/{}.pdf".format(
+ release.ext_ids.arxiv
+ ):
+ ingest_request["link_source"] = "arxiv"
+ ingest_request["link_source_id"] = release.ext_ids.arxiv
return ingest_request
+
def valid_toml(form, field):
try:
toml.loads(field.data)
except toml.TomlDecodeError as tpe:
raise ValidationError(tpe)
+
class EntityTomlForm(EntityEditForm):
toml = TextAreaField(
"TOML",
- [validators.DataRequired(),
- valid_toml,
+ [
+ validators.DataRequired(),
+ valid_toml,
],
)
@@ -490,34 +519,42 @@ class EntityTomlForm(EntityEditForm):
Initializes form with TOML version of existing entity
"""
etf = EntityTomlForm()
- if entity.state == 'active':
- pop_fields = ['ident', 'state', 'revision', 'redirect']
+ if entity.state == "active":
+ pop_fields = ["ident", "state", "revision", "redirect"]
else:
- pop_fields = ['ident', 'state']
+ pop_fields = ["ident", "state"]
# remove "expand" fields
- pop_fields += ['releases', 'container', 'work', 'creators', 'files', 'filesets', 'webcaptures']
+ pop_fields += [
+ "releases",
+ "container",
+ "work",
+ "creators",
+ "files",
+ "filesets",
+ "webcaptures",
+ ]
etf.toml.data = entity_to_toml(entity, pop_fields=pop_fields)
return etf
-class ReferenceMatchForm(FlaskForm):
+class ReferenceMatchForm(FlaskForm):
class Meta:
# this is an API, so disable CSRF
csrf = False
- submit_type = SelectField('submit_type',
- [validators.DataRequired()],
- choices=['parse', 'match'])
+ submit_type = SelectField(
+ "submit_type", [validators.DataRequired()], choices=["parse", "match"]
+ )
- raw_citation = TextAreaField("Citation String", render_kw={'rows':'3'})
+ raw_citation = TextAreaField("Citation String", render_kw={"rows": "3"})
title = StringField("Title")
journal = StringField("Journal or Conference")
first_author = StringField("First Author")
- #author_names = StringField("Author Names")
- #year = IntegerField('Year Released',
+ # author_names = StringField("Author Names")
+ # year = IntegerField('Year Released',
# [validators.Optional(True), valid_year])
year = StringField("Year Released")
date = StringField("Date Released")
@@ -539,17 +576,17 @@ class ReferenceMatchForm(FlaskForm):
rmf = ReferenceMatchForm()
rmf.raw_citation.data = raw_citation
- direct_fields = ['title', 'journal', 'volume', 'issue', 'pages']
+ direct_fields = ["title", "journal", "volume", "issue", "pages"]
for k in direct_fields:
if parse_dict.get(k):
a = getattr(rmf, k)
a.data = parse_dict[k]
- date = parse_dict.get('date')
+ date = parse_dict.get("date")
if date and len(date) >= 4 and date[0:4].isdigit():
rmf.year.data = int(date[0:4])
- if parse_dict.get('authors'):
- rmf.first_author.data = parse_dict['authors'][0].get('name')
+ if parse_dict.get("authors"):
+ rmf.first_author.data = parse_dict["authors"][0].get("name")
return rmf
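
The custom validators used by these forms (valid_year, valid_2char_ascii) encode small, easily testable rules. A self-contained sketch of the same checks without the WTForms wrapper, with the bounds taken from the code above:

    import datetime

    def year_is_plausible(year: int) -> bool:
        # Mirrors valid_year(): at most five years in the future, and not before year 10.
        return 10 <= year <= datetime.date.today().year + 5

    def is_lower_2char_code(code: str) -> bool:
        # Mirrors valid_2char_ascii(): exactly two ASCII letters, all lower case.
        return (
            len(code) == 2
            and len(code.encode("utf-8")) == 2
            and code.isalpha()
            and code == code.lower()
        )

    assert year_is_plausible(2021) and not year_is_plausible(3000)
    assert is_lower_2char_code("en") and not is_lower_2char_code("EN")
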
diff --git a/python/fatcat_web/graphics.py b/python/fatcat_web/graphics.py
index b5a83f6c..c76408cd 100644
--- a/python/fatcat_web/graphics.py
+++ b/python/fatcat_web/graphics.py
@@ -1,4 +1,3 @@
-
from typing import Dict, List, Tuple
import pygal
@@ -15,32 +14,40 @@ def ia_coverage_histogram(rows: List[Tuple]) -> pygal.Graph:
raw_years = [int(r[0]) for r in rows]
years = dict()
if raw_years:
- for y in range(min(raw_years), max(raw_years)+1):
+ for y in range(min(raw_years), max(raw_years) + 1):
years[int(y)] = dict(year=int(y), available=0, missing=0)
for r in rows:
if r[1]:
- years[int(r[0])]['available'] = r[2]
+ years[int(r[0])]["available"] = r[2]
else:
- years[int(r[0])]['missing'] = r[2]
+ years[int(r[0])]["missing"] = r[2]
- years = sorted(years.values(), key=lambda x: x['year'])
+ years = sorted(years.values(), key=lambda x: x["year"])
CleanStyle.colors = ("green", "purple")
label_count = len(years)
if len(years) > 20:
label_count = 10
- chart = pygal.StackedBar(dynamic_print_values=True, style=CleanStyle,
- width=1000, height=500, x_labels_major_count=label_count,
- show_minor_x_labels=False)
- #chart.title = "Perpetual Access Coverage"
+ chart = pygal.StackedBar(
+ dynamic_print_values=True,
+ style=CleanStyle,
+ width=1000,
+ height=500,
+ x_labels_major_count=label_count,
+ show_minor_x_labels=False,
+ )
+ # chart.title = "Perpetual Access Coverage"
chart.x_title = "Year"
- #chart.y_title = "Releases"
- chart.x_labels = [str(y['year']) for y in years]
- chart.add('via Fatcat', [y['available'] for y in years])
- chart.add('Missing', [y['missing'] for y in years])
+ # chart.y_title = "Releases"
+ chart.x_labels = [str(y["year"]) for y in years]
+ chart.add("via Fatcat", [y["available"] for y in years])
+ chart.add("Missing", [y["missing"] for y in years])
return chart
-def preservation_by_year_histogram(rows: List[Dict], merge_shadows: bool = False) -> pygal.Graph:
+
+def preservation_by_year_histogram(
+ rows: List[Dict], merge_shadows: bool = False
+) -> pygal.Graph:
"""
Note: this returns a raw pygal chart; it does not render it to SVG/PNG
@@ -48,7 +55,7 @@ def preservation_by_year_histogram(rows: List[Dict], merge_shadows: bool = False
There is also a 'year' key with float/int value.
"""
- years = sorted(rows, key=lambda x: x['year'])
+ years = sorted(rows, key=lambda x: x["year"])
if merge_shadows:
CleanStyle.colors = ("red", "darkolivegreen", "limegreen")
@@ -57,23 +64,32 @@ def preservation_by_year_histogram(rows: List[Dict], merge_shadows: bool = False
label_count = len(years)
if len(years) > 30:
label_count = 10
- chart = pygal.StackedBar(dynamic_print_values=True, style=CleanStyle,
- width=1000, height=500, x_labels_major_count=label_count,
- show_minor_x_labels=False, x_label_rotation=20)
- #chart.title = "Preservation by Year"
+ chart = pygal.StackedBar(
+ dynamic_print_values=True,
+ style=CleanStyle,
+ width=1000,
+ height=500,
+ x_labels_major_count=label_count,
+ show_minor_x_labels=False,
+ x_label_rotation=20,
+ )
+ # chart.title = "Preservation by Year"
chart.x_title = "Year"
- #chart.y_title = "Count"
- chart.x_labels = [str(y['year']) for y in years]
+ # chart.y_title = "Count"
+ chart.x_labels = [str(y["year"]) for y in years]
if merge_shadows:
- chart.add('None', [y['none'] + y['shadows_only'] for y in years])
+ chart.add("None", [y["none"] + y["shadows_only"] for y in years])
else:
- chart.add('None', [y['none'] for y in years])
- chart.add('Shadow', [y['shadows_only'] for y in years])
- chart.add('Dark', [y['dark'] for y in years])
- chart.add('Bright', [y['bright'] for y in years])
+ chart.add("None", [y["none"] for y in years])
+ chart.add("Shadow", [y["shadows_only"] for y in years])
+ chart.add("Dark", [y["dark"] for y in years])
+ chart.add("Bright", [y["bright"] for y in years])
return chart
-def preservation_by_date_histogram(rows: List[Dict], merge_shadows: bool = False) -> pygal.Graph:
+
+def preservation_by_date_histogram(
+ rows: List[Dict], merge_shadows: bool = False
+) -> pygal.Graph:
"""
Note: this returns a raw pygal chart; it does not render it to SVG/PNG
@@ -81,7 +97,7 @@ def preservation_by_date_histogram(rows: List[Dict], merge_shadows: bool = False
There is also a 'date' key with str value.
"""
- dates = sorted(rows, key=lambda x: x['date'])
+ dates = sorted(rows, key=lambda x: x["date"])
if merge_shadows:
CleanStyle.colors = ("red", "darkolivegreen", "limegreen")
@@ -90,23 +106,32 @@ def preservation_by_date_histogram(rows: List[Dict], merge_shadows: bool = False
label_count = len(dates)
if len(dates) > 30:
label_count = 10
- chart = pygal.StackedBar(dynamic_print_values=True, style=CleanStyle,
- width=1000, height=500, x_labels_major_count=label_count,
- show_minor_x_labels=False, x_label_rotation=20)
- #chart.title = "Preservation by Date"
+ chart = pygal.StackedBar(
+ dynamic_print_values=True,
+ style=CleanStyle,
+ width=1000,
+ height=500,
+ x_labels_major_count=label_count,
+ show_minor_x_labels=False,
+ x_label_rotation=20,
+ )
+ # chart.title = "Preservation by Date"
chart.x_title = "Date"
- #chart.y_title = "Count"
- chart.x_labels = [str(y['date']) for y in dates]
+ # chart.y_title = "Count"
+ chart.x_labels = [str(y["date"]) for y in dates]
if merge_shadows:
- chart.add('None', [y['none'] + y['shadows_only'] for y in dates])
+ chart.add("None", [y["none"] + y["shadows_only"] for y in dates])
else:
- chart.add('None', [y['none'] for y in dates])
- chart.add('Shadow', [y['shadows_only'] for y in dates])
- chart.add('Dark', [y['dark'] for y in dates])
- chart.add('Bright', [y['bright'] for y in dates])
+ chart.add("None", [y["none"] for y in dates])
+ chart.add("Shadow", [y["shadows_only"] for y in dates])
+ chart.add("Dark", [y["dark"] for y in dates])
+ chart.add("Bright", [y["bright"] for y in dates])
return chart
-def preservation_by_volume_histogram(rows: List[Dict], merge_shadows: bool = False) -> pygal.Graph:
+
+def preservation_by_volume_histogram(
+ rows: List[Dict], merge_shadows: bool = False
+) -> pygal.Graph:
"""
Note: this returns a raw pygal chart; it does not render it to SVG/PNG
@@ -114,7 +139,7 @@ def preservation_by_volume_histogram(rows: List[Dict], merge_shadows: bool = Fal
There is also a 'volume' key with str value.
"""
- volumes = sorted(rows, key=lambda x: x['volume'])
+ volumes = sorted(rows, key=lambda x: x["volume"])
if merge_shadows:
CleanStyle.colors = ("red", "darkolivegreen", "limegreen")
@@ -123,18 +148,24 @@ def preservation_by_volume_histogram(rows: List[Dict], merge_shadows: bool = Fal
label_count = len(volumes)
if len(volumes) >= 30:
label_count = 10
- chart = pygal.StackedBar(dynamic_print_values=True, style=CleanStyle,
- width=1000, height=500, x_labels_major_count=label_count,
- show_minor_x_labels=False, x_label_rotation=20)
- #chart.title = "Preservation by Volume"
+ chart = pygal.StackedBar(
+ dynamic_print_values=True,
+ style=CleanStyle,
+ width=1000,
+ height=500,
+ x_labels_major_count=label_count,
+ show_minor_x_labels=False,
+ x_label_rotation=20,
+ )
+ # chart.title = "Preservation by Volume"
chart.x_title = "Volume"
- #chart.y_title = "Count"
- chart.x_labels = [str(y['volume']) for y in volumes]
+ # chart.y_title = "Count"
+ chart.x_labels = [str(y["volume"]) for y in volumes]
if merge_shadows:
- chart.add('None', [y['none'] + y['shadows_only'] for y in volumes])
+ chart.add("None", [y["none"] + y["shadows_only"] for y in volumes])
else:
- chart.add('None', [y['none'] for y in volumes])
- chart.add('Shadow', [y['shadows_only'] for y in volumes])
- chart.add('Dark', [y['dark'] for y in volumes])
- chart.add('Bright', [y['bright'] for y in volumes])
+ chart.add("None", [y["none"] for y in volumes])
+ chart.add("Shadow", [y["shadows_only"] for y in volumes])
+ chart.add("Dark", [y["dark"] for y in volumes])
+ chart.add("Bright", [y["bright"] for y in volumes])
return chart
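
As each docstring notes, these helpers return a raw pygal chart and leave rendering to the caller. A minimal, self-contained sketch of how such a chart is typically rendered (plain pygal with made-up numbers, not fatcat data):

    import pygal

    # Tiny stacked-bar chart analogous to the preservation histograms above.
    chart = pygal.StackedBar(width=1000, height=500)
    chart.x_title = "Year"
    chart.x_labels = ["2018", "2019", "2020"]
    chart.add("Bright", [120, 150, 170])
    chart.add("None", [30, 20, 10])

    svg_markup = chart.render()            # SVG document returned in memory
    chart.render_to_file("coverage.svg")   # or written straight to a file
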
diff --git a/python/fatcat_web/hacks.py b/python/fatcat_web/hacks.py
index 9e6f6ab5..06350b41 100644
--- a/python/fatcat_web/hacks.py
+++ b/python/fatcat_web/hacks.py
@@ -1,17 +1,23 @@
-
import re
STRIP_EXTLINK_XML_RE = re.compile(r"<ext-link.*xlink:type=\"simple\">")
+
def strip_extlink_xml(unstr):
unstr = unstr.replace("</ext-link>", "")
unstr = STRIP_EXTLINK_XML_RE.sub("", unstr)
return unstr
+
def test_strip_extlink_xml():
assert strip_extlink_xml("asdf") == "asdf"
- assert strip_extlink_xml("""LOCKSS (2014) Available: <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:href="http://lockss.org/" xlink:type="simple">http://lockss.org/</ext-link>. Accessed: 2014 November 1.""") == \
- """LOCKSS (2014) Available: http://lockss.org/. Accessed: 2014 November 1."""
+ assert (
+ strip_extlink_xml(
+ """LOCKSS (2014) Available: <ext-link xmlns:xlink="http://www.w3.org/1999/xlink" ext-link-type="uri" xlink:href="http://lockss.org/" xlink:type="simple">http://lockss.org/</ext-link>. Accessed: 2014 November 1."""
+ )
+ == """LOCKSS (2014) Available: http://lockss.org/. Accessed: 2014 November 1."""
+ )
+
def wayback_suffix(entity):
"""
diff --git a/python/fatcat_web/kafka.py b/python/fatcat_web/kafka.py
index 1d7288af..36dafade 100644
--- a/python/fatcat_web/kafka.py
+++ b/python/fatcat_web/kafka.py
@@ -1,4 +1,3 @@
-
import requests
from fatcat_web import Config
@@ -20,9 +19,9 @@ def kafka_pixy_produce(topic, msg, key=None, sync=True, timeout=25):
params = dict()
if key:
- params['key'] = key
+ params["key"] = key
if sync:
- params['sync'] = True
+ params["sync"] = True
resp = requests.post(
"{}/topics/{}/messages".format(Config.KAFKA_PIXY_ENDPOINT, topic),
params=params,
@@ -31,4 +30,4 @@ def kafka_pixy_produce(topic, msg, key=None, sync=True, timeout=25):
timeout=timeout,
)
resp.raise_for_status()
- #print(resp.json())
+ # print(resp.json())
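
kafka_pixy_produce() is a thin wrapper around an HTTP POST to a kafka-pixy proxy. A rough, self-contained sketch of the equivalent request follows; the endpoint address, topic name, and JSON content type are placeholders and assumptions, not values from fatcat's configuration:

    import json
    import requests

    PIXY_ENDPOINT = "http://localhost:19092"   # placeholder kafka-pixy address
    topic = "example-topic"                    # placeholder topic name

    resp = requests.post(
        f"{PIXY_ENDPOINT}/topics/{topic}/messages",
        params={"sync": True},                 # block until the broker acknowledges
        data=json.dumps({"hello": "world"}),   # message payload
        headers={"Content-Type": "application/json"},  # assumed content type
        timeout=25,
    )
    resp.raise_for_status()
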
diff --git a/python/fatcat_web/ref_routes.py b/python/fatcat_web/ref_routes.py
index eed3f1df..6a5eb064 100644
--- a/python/fatcat_web/ref_routes.py
+++ b/python/fatcat_web/ref_routes.py
@@ -28,10 +28,12 @@ from fatcat_web.entity_helpers import generic_get_entity
from fatcat_web.forms import ReferenceMatchForm
-def _refs_web(direction, release_ident=None, work_ident=None, openlibrary_id=None, wikipedia_article=None) -> RefHits:
- offset = request.args.get('offset', '0')
+def _refs_web(
+ direction, release_ident=None, work_ident=None, openlibrary_id=None, wikipedia_article=None
+) -> RefHits:
+ offset = request.args.get("offset", "0")
offset = max(0, int(offset)) if offset.isnumeric() else 0
- limit = request.args.get('limit', '30')
+ limit = request.args.get("limit", "30")
limit = min(max(0, int(limit)), 100) if limit.isnumeric() else 30
if direction == "in":
hits = get_inbound_refs(
@@ -66,144 +68,227 @@ def _refs_web(direction, release_ident=None, work_ident=None, openlibrary_id=Non
return hits
-@app.route('/release/<string(length=26):ident>/refs-in', methods=['GET'])
+@app.route("/release/<string(length=26):ident>/refs-in", methods=["GET"])
def release_view_refs_inbound(ident):
if request.accept_mimetypes.best == "application/json":
return release_view_refs_inbound_json(ident)
release = generic_get_entity("release", ident)
hits = _refs_web("in", release_ident=ident)
- return render_template('release_view_fuzzy_refs.html', direction="in", entity=release, hits=hits), 200
+ return (
+ render_template(
+ "release_view_fuzzy_refs.html", direction="in", entity=release, hits=hits
+ ),
+ 200,
+ )
-@app.route('/release/<string(length=26):ident>/refs-out', methods=['GET'])
+@app.route("/release/<string(length=26):ident>/refs-out", methods=["GET"])
def release_view_refs_outbound(ident):
if request.accept_mimetypes.best == "application/json":
return release_view_refs_outbound_json(ident)
release = generic_get_entity("release", ident)
hits = _refs_web("out", release_ident=ident)
- return render_template('release_view_fuzzy_refs.html', direction="out", entity=release, hits=hits), 200
+ return (
+ render_template(
+ "release_view_fuzzy_refs.html", direction="out", entity=release, hits=hits
+ ),
+ 200,
+ )
-@app.route('/openlibrary/OL<int:id_num>W/refs-in', methods=['GET'])
+
+@app.route("/openlibrary/OL<int:id_num>W/refs-in", methods=["GET"])
def openlibrary_view_refs_inbound(id_num):
if request.accept_mimetypes.best == "application/json":
return openlibrary_view_refs_inbound_json(id_num)
openlibrary_id = f"OL{id_num}W"
hits = _refs_web("in", openlibrary_id=openlibrary_id)
- return render_template('openlibrary_view_fuzzy_refs.html', openlibrary_id=openlibrary_id, direction="in", hits=hits), 200
-
-@app.route('/wikipedia/<string(length=2):wiki_lang>:<string:wiki_article>/refs-out', methods=['GET'])
+ return (
+ render_template(
+ "openlibrary_view_fuzzy_refs.html",
+ openlibrary_id=openlibrary_id,
+ direction="in",
+ hits=hits,
+ ),
+ 200,
+ )
+
+
+@app.route(
+ "/wikipedia/<string(length=2):wiki_lang>:<string:wiki_article>/refs-out", methods=["GET"]
+)
def wikipedia_view_refs_outbound(wiki_lang: str, wiki_article: str):
if request.accept_mimetypes.best == "application/json":
return wikipedia_view_refs_outbound_json(wiki_lang, wiki_article)
wiki_url = f"https://{wiki_lang}.wikipedia.org/wiki/{wiki_article}"
- wiki_article = wiki_article.replace('_', ' ')
+ wiki_article = wiki_article.replace("_", " ")
wikipedia_article = wiki_lang + ":" + wiki_article
hits = _refs_web("out", wikipedia_article=wikipedia_article)
- return render_template('wikipedia_view_fuzzy_refs.html', wiki_article=wiki_article, wiki_lang=wiki_lang, wiki_url=wiki_url, direction="out", hits=hits), 200
-
-@app.route('/reference/match', methods=['GET', 'POST'])
+ return (
+ render_template(
+ "wikipedia_view_fuzzy_refs.html",
+ wiki_article=wiki_article,
+ wiki_lang=wiki_lang,
+ wiki_url=wiki_url,
+ direction="out",
+ hits=hits,
+ ),
+ 200,
+ )
+
+
+@app.route("/reference/match", methods=["GET", "POST"])
def reference_match():
grobid_status = None
grobid_dict = None
form = ReferenceMatchForm()
- if not form.is_submitted() and request.args.get('submit_type'):
+ if not form.is_submitted() and request.args.get("submit_type"):
form = ReferenceMatchForm(request.args)
- if form.is_submitted() or request.args.get('title'):
+ if form.is_submitted() or request.args.get("title"):
if form.validate():
- if form.submit_type.data == 'parse':
+ if form.submit_type.data == "parse":
resp_xml = grobid_api_process_citation(form.raw_citation.data)
if not resp_xml:
grobid_status = "failed"
- return render_template('reference_match.html', form=form, grobid_status=grobid_status), 400
+ return (
+ render_template(
+ "reference_match.html", form=form, grobid_status=grobid_status
+ ),
+ 400,
+ )
grobid_dict = transform_grobid_ref_xml(resp_xml)
if not grobid_dict:
grobid_status = "empty"
- return render_template('reference_match.html', form=form, grobid_status=grobid_status), 200
- #print(grobid_dict)
+ return (
+ render_template(
+ "reference_match.html", form=form, grobid_status=grobid_status
+ ),
+ 200,
+ )
+ # print(grobid_dict)
release_stub = grobid_ref_to_release(grobid_dict)
# remove empty values from GROBID parsed dict
grobid_dict = {k: v for k, v in grobid_dict.items() if v is not None}
form = ReferenceMatchForm.from_grobid_parse(grobid_dict, form.raw_citation.data)
grobid_status = "success"
- matches = close_fuzzy_release_matches(es_client=app.es_client, release=release_stub, match_limit=10) or []
- elif form.submit_type.data == 'match':
- matches = close_fuzzy_biblio_matches(es_client=app.es_client, biblio=form.data, match_limit=10) or []
+ matches = (
+ close_fuzzy_release_matches(
+ es_client=app.es_client, release=release_stub, match_limit=10
+ )
+ or []
+ )
+ elif form.submit_type.data == "match":
+ matches = (
+ close_fuzzy_biblio_matches(
+ es_client=app.es_client, biblio=form.data, match_limit=10
+ )
+ or []
+ )
else:
raise NotImplementedError()
for m in matches:
# expand releases more completely
- m.release = api.get_release(m.release.ident, expand="container,files,filesets,webcaptures", hide="abstract,refs")
+ m.release = api.get_release(
+ m.release.ident,
+ expand="container,files,filesets,webcaptures",
+ hide="abstract,refs",
+ )
# hack in access options
m.access_options = release_access_options(m.release)
- return render_template('reference_match.html', form=form, grobid_dict=grobid_dict, grobid_status=grobid_status, matches=matches), 200
+ return (
+ render_template(
+ "reference_match.html",
+ form=form,
+ grobid_dict=grobid_dict,
+ grobid_status=grobid_status,
+ matches=matches,
+ ),
+ 200,
+ )
elif form.errors:
- return render_template('reference_match.html', form=form), 400
+ return render_template("reference_match.html", form=form), 400
- return render_template('reference_match.html', form=form), 200
+ return render_template("reference_match.html", form=form), 200
### Pseudo-APIs #############################################################
-@app.route('/release/<string(length=26):ident>/refs-out.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route("/release/<string(length=26):ident>/refs-out.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def release_view_refs_outbound_json(ident):
hits = _refs_web("out", release_ident=ident)
return Response(hits.json(exclude_unset=True), mimetype="application/json")
-@app.route('/release/<string(length=26):ident>/refs-in.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+@app.route("/release/<string(length=26):ident>/refs-in.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def release_view_refs_inbound_json(ident):
hits = _refs_web("in", release_ident=ident)
return Response(hits.json(exclude_unset=True), mimetype="application/json")
-@app.route('/openlibrary/OL<int:id_num>W/refs-in.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route("/openlibrary/OL<int:id_num>W/refs-in.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def openlibrary_view_refs_inbound_json(id_num):
openlibrary_id = f"OL{id_num}W"
hits = _refs_web("in", openlibrary_id=openlibrary_id)
return Response(hits.json(exclude_unset=True), mimetype="application/json")
-@app.route('/wikipedia/<string(length=2):wiki_lang>:<string:wiki_article>/refs-out.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route(
+ "/wikipedia/<string(length=2):wiki_lang>:<string:wiki_article>/refs-out.json",
+ methods=["GET", "OPTIONS"],
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def wikipedia_view_refs_outbound_json(wiki_lang: str, wiki_article: str):
- wiki_article = wiki_article.replace('_', ' ')
+ wiki_article = wiki_article.replace("_", " ")
wikipedia_article = wiki_lang + ":" + wiki_article
hits = _refs_web("out", wikipedia_article=wikipedia_article)
return Response(hits.json(exclude_unset=True), mimetype="application/json")
-@app.route('/reference/match.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+@app.route("/reference/match.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def reference_match_json():
form = ReferenceMatchForm(request.args)
if form.validate():
- if form.submit_type.data == 'match':
- matches = close_fuzzy_biblio_matches(es_client=app.es_client, biblio=form.data, match_limit=10) or []
+ if form.submit_type.data == "match":
+ matches = (
+ close_fuzzy_biblio_matches(
+ es_client=app.es_client, biblio=form.data, match_limit=10
+ )
+ or []
+ )
else:
raise NotImplementedError()
resp = []
for m in matches:
# expand releases more completely
- m.release = api.get_release(m.release.ident, expand="container,files,filesets,webcaptures", hide="abstract,refs")
+ m.release = api.get_release(
+ m.release.ident,
+ expand="container,files,filesets,webcaptures",
+ hide="abstract,refs",
+ )
# hack in access options
m.access_options = release_access_options(m.release)
# and manually convert to dict (for jsonify)
info = m.__dict__
- info['release'] = entity_to_dict(m.release)
- info['access_options'] = [o.dict() for o in m.access_options]
+ info["release"] = entity_to_dict(m.release)
+ info["access_options"] = [o.dict() for o in m.access_options]
resp.append(info)
return jsonify(resp), 200
else:
- return Response(json.dumps(dict(errors=form.errors)), mimetype="application/json", status=400)
+ return Response(
+ json.dumps(dict(errors=form.errors)), mimetype="application/json", status=400
+ )
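
The .json routes above are CORS-enabled pseudo-APIs that mirror the HTML views. A sketch of how a client might fetch inbound references for a release, assuming the app is served at fatcat.wiki; the identifier below is a placeholder, and offset/limit follow the bounds clamped in _refs_web():

    import requests

    ident = "aaaaaaaaaaaaaaaaaaaaaaaaaa"   # placeholder release ident (must be 26 characters)
    resp = requests.get(
        f"https://fatcat.wiki/release/{ident}/refs-in.json",
        params={"offset": 0, "limit": 30},  # limit is clamped to at most 100 server-side
        timeout=10,
    )
    resp.raise_for_status()
    hits = resp.json()
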
diff --git a/python/fatcat_web/routes.py b/python/fatcat_web/routes.py
index fc94da66..e6963dbc 100644
--- a/python/fatcat_web/routes.py
+++ b/python/fatcat_web/routes.py
@@ -1,4 +1,3 @@
-
import json
import os
@@ -72,7 +71,8 @@ from fatcat_web.search import (
### Generic Entity Views ####################################################
-@app.route('/container/<string(length=26):ident>/history', methods=['GET'])
+
+@app.route("/container/<string(length=26):ident>/history", methods=["GET"])
def container_history(ident):
try:
entity = api.get_container(ident)
@@ -80,82 +80,82 @@ def container_history(ident):
except ApiException as ae:
app.log.info(ae)
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="container",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="container", entity=entity, history=history
+ )
-@app.route('/creator/<string(length=26):ident>/history', methods=['GET'])
+
+@app.route("/creator/<string(length=26):ident>/history", methods=["GET"])
def creator_history(ident):
try:
entity = api.get_creator(ident)
history = api.get_creator_history(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="creator",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="creator", entity=entity, history=history
+ )
-@app.route('/file/<string(length=26):ident>/history', methods=['GET'])
+
+@app.route("/file/<string(length=26):ident>/history", methods=["GET"])
def file_history(ident):
try:
entity = api.get_file(ident)
history = api.get_file_history(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="file",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="file", entity=entity, history=history
+ )
-@app.route('/fileset/<string(length=26):ident>/history', methods=['GET'])
+
+@app.route("/fileset/<string(length=26):ident>/history", methods=["GET"])
def fileset_history(ident):
try:
entity = api.get_fileset(ident)
history = api.get_fileset_history(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="fileset",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="fileset", entity=entity, history=history
+ )
-@app.route('/webcapture/<string(length=26):ident>/history', methods=['GET'])
+
+@app.route("/webcapture/<string(length=26):ident>/history", methods=["GET"])
def webcapture_history(ident):
try:
entity = api.get_webcapture(ident)
history = api.get_webcapture_history(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="webcapture",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="webcapture", entity=entity, history=history
+ )
+
-@app.route('/release/<string(length=26):ident>/history', methods=['GET'])
+@app.route("/release/<string(length=26):ident>/history", methods=["GET"])
def release_history(ident):
try:
entity = api.get_release(ident)
history = api.get_release_history(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="release",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="release", entity=entity, history=history
+ )
+
-@app.route('/work/<string(length=26):ident>/history', methods=['GET'])
+@app.route("/work/<string(length=26):ident>/history", methods=["GET"])
def work_history(ident):
try:
entity = api.get_work(ident)
history = api.get_work_history(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('entity_history.html',
- entity_type="work",
- entity=entity,
- history=history)
+ return render_template(
+ "entity_history.html", entity_type="work", entity=entity, history=history
+ )
+
def generic_lookup_view(entity_type, lookup_template, extid_types, lookup_lambda):
extid = None
@@ -172,81 +172,106 @@ def generic_lookup_view(entity_type, lookup_template, extid_types, lookup_lambda
resp = lookup_lambda({extid: extid_value})
except ValueError:
return make_response(
- render_template(lookup_template,
- lookup_key=extid,
- lookup_value=extid_value,
- lookup_error=400),
- 400)
+ render_template(
+ lookup_template, lookup_key=extid, lookup_value=extid_value, lookup_error=400
+ ),
+ 400,
+ )
except ApiException as ae:
if ae.status == 404 or ae.status == 400:
return make_response(
- render_template(lookup_template,
+ render_template(
+ lookup_template,
lookup_key=extid,
lookup_value=extid_value,
- lookup_error=ae.status),
- ae.status)
+ lookup_error=ae.status,
+ ),
+ ae.status,
+ )
else:
app.log.info(ae)
raise ae
- return redirect('/{}/{}'.format(entity_type, resp.ident))
+ return redirect("/{}/{}".format(entity_type, resp.ident))
+
-@app.route('/container/lookup', methods=['GET'])
+@app.route("/container/lookup", methods=["GET"])
def container_lookup():
return generic_lookup_view(
- 'container',
- 'container_lookup.html',
- ('issn', 'issne', 'issnp', 'issnl', 'wikidata_qid'),
- lambda p: api.lookup_container(**p))
+ "container",
+ "container_lookup.html",
+ ("issn", "issne", "issnp", "issnl", "wikidata_qid"),
+ lambda p: api.lookup_container(**p),
+ )
-@app.route('/creator/lookup', methods=['GET'])
+
+@app.route("/creator/lookup", methods=["GET"])
def creator_lookup():
return generic_lookup_view(
- 'creator',
- 'creator_lookup.html',
- ('orcid', 'wikidata_qid'),
- lambda p: api.lookup_creator(**p))
+ "creator",
+ "creator_lookup.html",
+ ("orcid", "wikidata_qid"),
+ lambda p: api.lookup_creator(**p),
+ )
-@app.route('/file/lookup', methods=['GET'])
+
+@app.route("/file/lookup", methods=["GET"])
def file_lookup():
return generic_lookup_view(
- 'file',
- 'file_lookup.html',
- ('md5', 'sha1', 'sha256'),
- lambda p: api.lookup_file(**p))
+ "file", "file_lookup.html", ("md5", "sha1", "sha256"), lambda p: api.lookup_file(**p)
+ )
-@app.route('/fileset/lookup', methods=['GET'])
+
+@app.route("/fileset/lookup", methods=["GET"])
def fileset_lookup():
abort(404)
-@app.route('/webcapture/lookup', methods=['GET'])
+
+@app.route("/webcapture/lookup", methods=["GET"])
def webcapture_lookup():
abort(404)
-@app.route('/release/lookup', methods=['GET'])
+
+@app.route("/release/lookup", methods=["GET"])
def release_lookup():
return generic_lookup_view(
- 'release',
- 'release_lookup.html',
- ('doi', 'wikidata_qid', 'pmid', 'pmcid', 'isbn13', 'jstor', 'arxiv',
- 'core', 'ark', 'mag', 'oai', 'hdl'),
- lambda p: api.lookup_release(**p))
+ "release",
+ "release_lookup.html",
+ (
+ "doi",
+ "wikidata_qid",
+ "pmid",
+ "pmcid",
+ "isbn13",
+ "jstor",
+ "arxiv",
+ "core",
+ "ark",
+ "mag",
+ "oai",
+ "hdl",
+ ),
+ lambda p: api.lookup_release(**p),
+ )
+
-@app.route('/work/lookup', methods=['GET'])
+@app.route("/work/lookup", methods=["GET"])
def work_lookup():
abort(404)
+
### More Generic Entity Views ###############################################
+
def generic_entity_view(entity_type, ident, view_template):
entity = generic_get_entity(entity_type, ident)
if entity.state == "redirect":
- return redirect('/{}/{}'.format(entity_type, entity.redirect))
+ return redirect("/{}/{}".format(entity_type, entity.redirect))
elif entity.state == "deleted":
- return render_template('deleted_entity.html', entity_type=entity_type, entity=entity)
+ return render_template("deleted_entity.html", entity_type=entity_type, entity=entity)
metadata = entity.to_dict()
- metadata.pop('extra')
+ metadata.pop("extra")
entity._metadata = metadata
if view_template == "container_view.html":
@@ -258,16 +283,22 @@ def generic_entity_view(entity_type, ident, view_template):
ReleaseQuery(container_id=ident),
)
- return render_template(view_template, entity_type=entity_type, entity=entity, editgroup_id=None)
+ return render_template(
+ view_template, entity_type=entity_type, entity=entity, editgroup_id=None
+ )
+
def generic_entity_revision_view(entity_type, revision_id, view_template):
entity = generic_get_entity_revision(entity_type, revision_id)
metadata = entity.to_dict()
- metadata.pop('extra')
+ metadata.pop("extra")
entity._metadata = metadata
- return render_template(view_template, entity_type=entity_type, entity=entity, editgroup_id=None)
+ return render_template(
+ view_template, entity_type=entity_type, entity=entity, editgroup_id=None
+ )
+
def generic_editgroup_entity_view(editgroup_id, entity_type, ident, view_template):
try:
@@ -278,251 +309,354 @@ def generic_editgroup_entity_view(editgroup_id, entity_type, ident, view_templat
entity, edit = generic_get_editgroup_entity(editgroup, entity_type, ident)
if entity.revision is None or entity.state == "deleted":
- return render_template('deleted_entity.html', entity=entity,
- entity_type=entity_type, editgroup=editgroup)
+ return render_template(
+ "deleted_entity.html", entity=entity, entity_type=entity_type, editgroup=editgroup
+ )
metadata = entity.to_dict()
- metadata.pop('extra')
+ metadata.pop("extra")
entity._metadata = metadata
- return render_template(view_template, entity_type=entity_type, entity=entity, editgroup=editgroup)
+ return render_template(
+ view_template, entity_type=entity_type, entity=entity, editgroup=editgroup
+ )
-@app.route('/container/<string(length=26):ident>', methods=['GET'])
+@app.route("/container/<string(length=26):ident>", methods=["GET"])
def container_view(ident):
- return generic_entity_view('container', ident, 'container_view.html')
+ return generic_entity_view("container", ident, "container_view.html")
+
-@app.route('/container_<string(length=26):ident>', methods=['GET'])
+@app.route("/container_<string(length=26):ident>", methods=["GET"])
def container_underscore_view(ident):
- return redirect('/container/{}'.format(ident))
+ return redirect("/container/{}".format(ident))
-@app.route('/container/<string(length=26):ident>/coverage', methods=['GET'])
+
+@app.route("/container/<string(length=26):ident>/coverage", methods=["GET"])
def container_view_coverage(ident):
# note: there is a special hack to add entity._type_preservation for this endpoint
- return generic_entity_view('container', ident, 'container_view_coverage.html')
+ return generic_entity_view("container", ident, "container_view_coverage.html")
+
-@app.route('/container/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/container/<string(length=26):ident>/metadata", methods=["GET"])
def container_view_metadata(ident):
- return generic_entity_view('container', ident, 'entity_view_metadata.html')
+ return generic_entity_view("container", ident, "entity_view_metadata.html")
+
-@app.route('/container/rev/<uuid:revision_id>', methods=['GET'])
+@app.route("/container/rev/<uuid:revision_id>", methods=["GET"])
def container_revision_view(revision_id):
- return generic_entity_revision_view('container', str(revision_id), 'container_view.html')
+ return generic_entity_revision_view("container", str(revision_id), "container_view.html")
-@app.route('/container/rev/<uuid:revision_id>/metadata', methods=['GET'])
+
+@app.route("/container/rev/<uuid:revision_id>/metadata", methods=["GET"])
def container_revision_view_metadata(revision_id):
- return generic_entity_revision_view('container', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view(
+ "container", str(revision_id), "entity_view_metadata.html"
+ )
-@app.route('/editgroup/<editgroup_id>/container/<string(length=26):ident>', methods=['GET'])
+
+@app.route("/editgroup/<editgroup_id>/container/<string(length=26):ident>", methods=["GET"])
def container_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'container', ident, 'container_view.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "container", ident, "container_view.html"
+ )
-@app.route('/editgroup/<editgroup_id>/container/<string(length=26):ident>/metadata', methods=['GET'])
+
+@app.route(
+ "/editgroup/<editgroup_id>/container/<string(length=26):ident>/metadata", methods=["GET"]
+)
def container_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'container', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "container", ident, "entity_view_metadata.html"
+ )
-@app.route('/creator/<string(length=26):ident>', methods=['GET'])
+@app.route("/creator/<string(length=26):ident>", methods=["GET"])
def creator_view(ident):
- return generic_entity_view('creator', ident, 'creator_view.html')
+ return generic_entity_view("creator", ident, "creator_view.html")
-@app.route('/creator_<string(length=26):ident>', methods=['GET'])
+
+@app.route("/creator_<string(length=26):ident>", methods=["GET"])
def creator_underscore_view(ident):
- return redirect('/creator/{}'.format(ident))
+ return redirect("/creator/{}".format(ident))
+
-@app.route('/creator/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/creator/<string(length=26):ident>/metadata", methods=["GET"])
def creator_view_metadata(ident):
- return generic_entity_view('creator', ident, 'entity_view_metadata.html')
+ return generic_entity_view("creator", ident, "entity_view_metadata.html")
+
-@app.route('/creator/rev/<uuid:revision_id>', methods=['GET'])
+@app.route("/creator/rev/<uuid:revision_id>", methods=["GET"])
def creator_revision_view(revision_id):
- return generic_entity_revision_view('creator', str(revision_id), 'creator_view.html')
+ return generic_entity_revision_view("creator", str(revision_id), "creator_view.html")
-@app.route('/creator/rev/<uuid:revision_id>/metadata', methods=['GET'])
+
+@app.route("/creator/rev/<uuid:revision_id>/metadata", methods=["GET"])
def creator_revision_view_metadata(revision_id):
- return generic_entity_revision_view('creator', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view(
+ "creator", str(revision_id), "entity_view_metadata.html"
+ )
+
-@app.route('/editgroup/<editgroup_id>/creator/<string(length=26):ident>', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/creator/<string(length=26):ident>", methods=["GET"])
def creator_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'creator', ident, 'creator_view.html')
+ return generic_editgroup_entity_view(editgroup_id, "creator", ident, "creator_view.html")
-@app.route('/editgroup/<editgroup_id>/creator/<string(length=26):ident>/metadata', methods=['GET'])
+
+@app.route(
+ "/editgroup/<editgroup_id>/creator/<string(length=26):ident>/metadata", methods=["GET"]
+)
def creator_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'creator', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "creator", ident, "entity_view_metadata.html"
+ )
-@app.route('/file/<string(length=26):ident>', methods=['GET'])
+@app.route("/file/<string(length=26):ident>", methods=["GET"])
def file_view(ident):
- return generic_entity_view('file', ident, 'file_view.html')
+ return generic_entity_view("file", ident, "file_view.html")
-@app.route('/file_<string(length=26):ident>', methods=['GET'])
+
+@app.route("/file_<string(length=26):ident>", methods=["GET"])
def file_underscore_view(ident):
- return redirect('/file/{}'.format(ident))
+ return redirect("/file/{}".format(ident))
+
-@app.route('/file/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/file/<string(length=26):ident>/metadata", methods=["GET"])
def file_view_metadata(ident):
- return generic_entity_view('file', ident, 'entity_view_metadata.html')
+ return generic_entity_view("file", ident, "entity_view_metadata.html")
+
-@app.route('/file/rev/<uuid:revision_id>', methods=['GET'])
+@app.route("/file/rev/<uuid:revision_id>", methods=["GET"])
def file_revision_view(revision_id):
- return generic_entity_revision_view('file', str(revision_id), 'file_view.html')
+ return generic_entity_revision_view("file", str(revision_id), "file_view.html")
-@app.route('/file/rev/<uuid:revision_id>/metadata', methods=['GET'])
+
+@app.route("/file/rev/<uuid:revision_id>/metadata", methods=["GET"])
def file_revision_view_metadata(revision_id):
- return generic_entity_revision_view('file', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view("file", str(revision_id), "entity_view_metadata.html")
+
-@app.route('/editgroup/<editgroup_id>/file/<string(length=26):ident>', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/file/<string(length=26):ident>", methods=["GET"])
def file_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'file', ident, 'file_view.html')
+ return generic_editgroup_entity_view(editgroup_id, "file", ident, "file_view.html")
+
-@app.route('/editgroup/<editgroup_id>/file/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/file/<string(length=26):ident>/metadata", methods=["GET"])
def file_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'file', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "file", ident, "entity_view_metadata.html"
+ )
-@app.route('/fileset/<string(length=26):ident>', methods=['GET'])
+@app.route("/fileset/<string(length=26):ident>", methods=["GET"])
def fileset_view(ident):
- return generic_entity_view('fileset', ident, 'fileset_view.html')
+ return generic_entity_view("fileset", ident, "fileset_view.html")
+
-@app.route('/fileset_<string(length=26):ident>', methods=['GET'])
+@app.route("/fileset_<string(length=26):ident>", methods=["GET"])
def fileset_underscore_view(ident):
- return redirect('/fileset/{}'.format(ident))
+ return redirect("/fileset/{}".format(ident))
+
-@app.route('/fileset/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/fileset/<string(length=26):ident>/metadata", methods=["GET"])
def fileset_view_metadata(ident):
- return generic_entity_view('fileset', ident, 'entity_view_metadata.html')
+ return generic_entity_view("fileset", ident, "entity_view_metadata.html")
-@app.route('/fileset/rev/<uuid:revision_id>', methods=['GET'])
+
+@app.route("/fileset/rev/<uuid:revision_id>", methods=["GET"])
def fileset_revision_view(revision_id):
- return generic_entity_revision_view('fileset', str(revision_id), 'fileset_view.html')
+ return generic_entity_revision_view("fileset", str(revision_id), "fileset_view.html")
+
-@app.route('/fileset/rev/<uuid:revision_id>/metadata', methods=['GET'])
+@app.route("/fileset/rev/<uuid:revision_id>/metadata", methods=["GET"])
def fileset_revision_view_metadata(revision_id):
- return generic_entity_revision_view('fileset', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view(
+ "fileset", str(revision_id), "entity_view_metadata.html"
+ )
+
-@app.route('/editgroup/<editgroup_id>/fileset/<string(length=26):ident>', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/fileset/<string(length=26):ident>", methods=["GET"])
def fileset_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'fileset', ident, 'fileset_view.html')
+ return generic_editgroup_entity_view(editgroup_id, "fileset", ident, "fileset_view.html")
-@app.route('/editgroup/<editgroup_id>/fileset/<string(length=26):ident>/metadata', methods=['GET'])
+
+@app.route(
+ "/editgroup/<editgroup_id>/fileset/<string(length=26):ident>/metadata", methods=["GET"]
+)
def fileset_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'fileset', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "fileset", ident, "entity_view_metadata.html"
+ )
-@app.route('/webcapture/<string(length=26):ident>', methods=['GET'])
+@app.route("/webcapture/<string(length=26):ident>", methods=["GET"])
def webcapture_view(ident):
- return generic_entity_view('webcapture', ident, 'webcapture_view.html')
+ return generic_entity_view("webcapture", ident, "webcapture_view.html")
-@app.route('/webcapture_<string(length=26):ident>', methods=['GET'])
+
+@app.route("/webcapture_<string(length=26):ident>", methods=["GET"])
def webcapture_underscore_view(ident):
- return redirect('/webcapture/{}'.format(ident))
+ return redirect("/webcapture/{}".format(ident))
+
-@app.route('/webcapture/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/webcapture/<string(length=26):ident>/metadata", methods=["GET"])
def webcapture_view_metadata(ident):
- return generic_entity_view('webcapture', ident, 'entity_view_metadata.html')
+ return generic_entity_view("webcapture", ident, "entity_view_metadata.html")
+
-@app.route('/webcapture/rev/<uuid:revision_id>', methods=['GET'])
+@app.route("/webcapture/rev/<uuid:revision_id>", methods=["GET"])
def webcapture_revision_view(revision_id):
- return generic_entity_revision_view('webcapture', str(revision_id), 'webcapture_view.html')
+ return generic_entity_revision_view("webcapture", str(revision_id), "webcapture_view.html")
-@app.route('/webcapture/rev/<uuid:revision_id>/metadata', methods=['GET'])
+
+@app.route("/webcapture/rev/<uuid:revision_id>/metadata", methods=["GET"])
def webcapture_revision_view_metadata(revision_id):
- return generic_entity_revision_view('webcapture', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view(
+ "webcapture", str(revision_id), "entity_view_metadata.html"
+ )
+
-@app.route('/editgroup/<editgroup_id>/webcapture/<string(length=26):ident>', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/webcapture/<string(length=26):ident>", methods=["GET"])
def webcapture_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'webcapture', ident, 'webcapture_view.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "webcapture", ident, "webcapture_view.html"
+ )
+
-@app.route('/editgroup/<editgroup_id>/webcapture/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route(
+ "/editgroup/<editgroup_id>/webcapture/<string(length=26):ident>/metadata", methods=["GET"]
+)
def webcapture_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'webcapture', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "webcapture", ident, "entity_view_metadata.html"
+ )
-@app.route('/release/<string(length=26):ident>', methods=['GET'])
+@app.route("/release/<string(length=26):ident>", methods=["GET"])
def release_view(ident):
- return generic_entity_view('release', ident, 'release_view.html')
+ return generic_entity_view("release", ident, "release_view.html")
+
-@app.route('/release_<string(length=26):ident>', methods=['GET'])
+@app.route("/release_<string(length=26):ident>", methods=["GET"])
def release_underscore_view(ident):
- return redirect('/release/{}'.format(ident))
+ return redirect("/release/{}".format(ident))
+
-@app.route('/release/<string(length=26):ident>/contribs', methods=['GET'])
+@app.route("/release/<string(length=26):ident>/contribs", methods=["GET"])
def release_view_contribs(ident):
- return generic_entity_view('release', ident, 'release_view_contribs.html')
+ return generic_entity_view("release", ident, "release_view_contribs.html")
-@app.route('/release/<string(length=26):ident>/references', methods=['GET'])
+
+@app.route("/release/<string(length=26):ident>/references", methods=["GET"])
def release_view_references(ident):
- return generic_entity_view('release', ident, 'release_view_references.html')
+ return generic_entity_view("release", ident, "release_view_references.html")
+
-@app.route('/release/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/release/<string(length=26):ident>/metadata", methods=["GET"])
def release_view_metadata(ident):
- return generic_entity_view('release', ident, 'entity_view_metadata.html')
+ return generic_entity_view("release", ident, "entity_view_metadata.html")
+
-@app.route('/release/rev/<uuid:revision_id>', methods=['GET'])
+@app.route("/release/rev/<uuid:revision_id>", methods=["GET"])
def release_revision_view(revision_id):
- return generic_entity_revision_view('release', str(revision_id), 'release_view.html')
+ return generic_entity_revision_view("release", str(revision_id), "release_view.html")
-@app.route('/release/rev/<uuid:revision_id>/contribs', methods=['GET'])
+
+@app.route("/release/rev/<uuid:revision_id>/contribs", methods=["GET"])
def release_revision_view_contribs(revision_id):
- return generic_entity_revision_view('release', str(revision_id), 'release_view_contribs.html')
+ return generic_entity_revision_view(
+ "release", str(revision_id), "release_view_contribs.html"
+ )
+
-@app.route('/release/rev/<uuid:revision_id>/references', methods=['GET'])
+@app.route("/release/rev/<uuid:revision_id>/references", methods=["GET"])
def release_revision_view_references(revision_id):
- return generic_entity_revision_view('release', str(revision_id), 'release_view_references.html')
+ return generic_entity_revision_view(
+ "release", str(revision_id), "release_view_references.html"
+ )
+
-@app.route('/release/rev/<uuid:revision_id>/metadata', methods=['GET'])
+@app.route("/release/rev/<uuid:revision_id>/metadata", methods=["GET"])
def release_revision_view_metadata(revision_id):
- return generic_entity_revision_view('release', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view(
+ "release", str(revision_id), "entity_view_metadata.html"
+ )
+
-@app.route('/editgroup/<editgroup_id>/release/<string(length=26):ident>', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/release/<string(length=26):ident>", methods=["GET"])
def release_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'release', ident, 'release_view.html')
+ return generic_editgroup_entity_view(editgroup_id, "release", ident, "release_view.html")
-@app.route('/editgroup/<editgroup_id>/release/<string(length=26):ident>/contribs', methods=['GET'])
+
+@app.route(
+ "/editgroup/<editgroup_id>/release/<string(length=26):ident>/contribs", methods=["GET"]
+)
def release_editgroup_view_contribs(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'release', ident, 'release_view_contribs.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "release", ident, "release_view_contribs.html"
+ )
-@app.route('/editgroup/<editgroup_id>/release/<string(length=26):ident>/references', methods=['GET'])
+
+@app.route(
+ "/editgroup/<editgroup_id>/release/<string(length=26):ident>/references", methods=["GET"]
+)
def release_editgroup_view_references(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'release', ident, 'release_view_references.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "release", ident, "release_view_references.html"
+ )
-@app.route('/editgroup/<editgroup_id>/release/<string(length=26):ident>/metadata', methods=['GET'])
+
+@app.route(
+ "/editgroup/<editgroup_id>/release/<string(length=26):ident>/metadata", methods=["GET"]
+)
def release_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'release', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "release", ident, "entity_view_metadata.html"
+ )
-@app.route('/work/<string(length=26):ident>', methods=['GET'])
+@app.route("/work/<string(length=26):ident>", methods=["GET"])
def work_view(ident):
- return generic_entity_view('work', ident, 'work_view.html')
+ return generic_entity_view("work", ident, "work_view.html")
-@app.route('/work_<string(length=26):ident>', methods=['GET'])
+
+@app.route("/work_<string(length=26):ident>", methods=["GET"])
def work_underscore_view(ident):
- return redirect('/work/{}'.format(ident))
+ return redirect("/work/{}".format(ident))
+
-@app.route('/work/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/work/<string(length=26):ident>/metadata", methods=["GET"])
def work_view_metadata(ident):
- return generic_entity_view('work', ident, 'entity_view_metadata.html')
+ return generic_entity_view("work", ident, "entity_view_metadata.html")
+
-@app.route('/work/rev/<uuid:revision_id>', methods=['GET'])
+@app.route("/work/rev/<uuid:revision_id>", methods=["GET"])
def work_revision_view(revision_id):
- return generic_entity_revision_view('work', str(revision_id), 'work_view.html')
+ return generic_entity_revision_view("work", str(revision_id), "work_view.html")
-@app.route('/work/rev/<uuid:revision_id>/metadata', methods=['GET'])
+
+@app.route("/work/rev/<uuid:revision_id>/metadata", methods=["GET"])
def work_revision_view_metadata(revision_id):
- return generic_entity_revision_view('work', str(revision_id), 'entity_view_metadata.html')
+ return generic_entity_revision_view("work", str(revision_id), "entity_view_metadata.html")
+
-@app.route('/editgroup/<editgroup_id>/work/<string(length=26):ident>', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/work/<string(length=26):ident>", methods=["GET"])
def work_editgroup_view(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'work', ident, 'work_view.html')
+ return generic_editgroup_entity_view(editgroup_id, "work", ident, "work_view.html")
+
-@app.route('/editgroup/<editgroup_id>/work/<string(length=26):ident>/metadata', methods=['GET'])
+@app.route("/editgroup/<editgroup_id>/work/<string(length=26):ident>/metadata", methods=["GET"])
def work_editgroup_view_metadata(editgroup_id, ident):
- return generic_editgroup_entity_view(editgroup_id, 'work', ident, 'entity_view_metadata.html')
+ return generic_editgroup_entity_view(
+ editgroup_id, "work", ident, "entity_view_metadata.html"
+ )
### Views ###################################################################
-@app.route('/editgroup/<string(length=26):ident>', methods=['GET'])
+
+@app.route("/editgroup/<string(length=26):ident>", methods=["GET"])
def editgroup_view(ident):
try:
eg = api.get_editgroup(str(ident))
@@ -537,28 +671,28 @@ def editgroup_view(ident):
edit=False,
annotate=False,
)
- if session.get('editor'):
- user = load_user(session['editor']['editor_id'])
- auth_to['annotate'] = True
+ if session.get("editor"):
+ user = load_user(session["editor"]["editor_id"])
+ auth_to["annotate"] = True
if user.is_admin or user.editor_id == eg.editor_id:
- auth_to['submit'] = True
- auth_to['edit'] = True
+ auth_to["submit"] = True
+ auth_to["edit"] = True
if user.is_admin:
- auth_to['accept'] = True
- return render_template('editgroup_view.html', editgroup=eg,
- auth_to=auth_to)
+ auth_to["accept"] = True
+ return render_template("editgroup_view.html", editgroup=eg, auth_to=auth_to)
+
-@app.route('/editgroup/<string(length=26):ident>/annotation', methods=['POST'])
+@app.route("/editgroup/<string(length=26):ident>/annotation", methods=["POST"])
@login_required
def editgroup_create_annotation(ident):
if not app.testing:
app.csrf.protect()
- comment_markdown = request.form.get('comment_markdown')
+ comment_markdown = request.form.get("comment_markdown")
if not comment_markdown:
app.log.info("empty comment field")
abort(400)
# on behalf of user...
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
try:
eg = user_api.get_editgroup(str(ident))
if eg.changelog_index:
@@ -571,15 +705,16 @@ def editgroup_create_annotation(ident):
except ApiException as ae:
app.log.info(ae)
raise ae
- return redirect('/editgroup/{}'.format(ident))
+ return redirect("/editgroup/{}".format(ident))
-@app.route('/editgroup/<string(length=26):ident>/accept', methods=['POST'])
+
+@app.route("/editgroup/<string(length=26):ident>/accept", methods=["POST"])
@login_required
def editgroup_accept(ident):
if not app.testing:
app.csrf.protect()
# on behalf of user...
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
try:
eg = user_api.get_editgroup(str(ident))
if eg.changelog_index:
@@ -588,15 +723,16 @@ def editgroup_accept(ident):
except ApiException as ae:
app.log.info(ae)
abort(ae.status)
- return redirect('/editgroup/{}'.format(ident))
+ return redirect("/editgroup/{}".format(ident))
+
-@app.route('/editgroup/<string(length=26):ident>/unsubmit', methods=['POST'])
+@app.route("/editgroup/<string(length=26):ident>/unsubmit", methods=["POST"])
@login_required
def editgroup_unsubmit(ident):
if not app.testing:
app.csrf.protect()
# on behalf of user...
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
try:
eg = user_api.get_editgroup(str(ident))
if eg.changelog_index:
@@ -605,15 +741,16 @@ def editgroup_unsubmit(ident):
except ApiException as ae:
app.log.info(ae)
abort(ae.status)
- return redirect('/editgroup/{}'.format(ident))
+ return redirect("/editgroup/{}".format(ident))
+
-@app.route('/editgroup/<string(length=26):ident>/submit', methods=['POST'])
+@app.route("/editgroup/<string(length=26):ident>/submit", methods=["POST"])
@login_required
def editgroup_submit(ident):
if not app.testing:
app.csrf.protect()
# on behalf of user...
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
try:
eg = user_api.get_editgroup(str(ident))
if eg.changelog_index:
@@ -622,17 +759,19 @@ def editgroup_submit(ident):
except ApiException as ae:
app.log.info(ae)
abort(ae.status)
- return redirect('/editgroup/{}'.format(ident))
+ return redirect("/editgroup/{}".format(ident))
-@app.route('/editor/<string(length=26):ident>', methods=['GET'])
+
+@app.route("/editor/<string(length=26):ident>", methods=["GET"])
def editor_view(ident):
try:
entity = api.get_editor(ident)
except ApiException as ae:
abort(ae.status)
- return render_template('editor_view.html', editor=entity)
+ return render_template("editor_view.html", editor=entity)
+
-@app.route('/editor/<string(length=26):ident>/editgroups', methods=['GET'])
+@app.route("/editor/<string(length=26):ident>/editgroups", methods=["GET"])
def editor_editgroups(ident):
try:
editor = api.get_editor(ident)
@@ -642,57 +781,62 @@ def editor_editgroups(ident):
eg.editor = editor
except ApiException as ae:
abort(ae.status)
- return render_template('editor_editgroups.html', editor=editor,
- editgroups=editgroups)
+ return render_template("editor_editgroups.html", editor=editor, editgroups=editgroups)
-@app.route('/editor/<string(length=26):ident>/annotations', methods=['GET'])
+
+@app.route("/editor/<string(length=26):ident>/annotations", methods=["GET"])
def editor_annotations(ident):
try:
editor = api.get_editor(ident)
annotations = api.get_editor_annotations(ident, limit=50)
except ApiException as ae:
abort(ae.status)
- return render_template('editor_annotations.html', editor=editor,
- annotations=annotations)
+ return render_template("editor_annotations.html", editor=editor, annotations=annotations)
+
-@app.route('/u/<string:username>', methods=['GET', 'HEAD'])
+@app.route("/u/<string:username>", methods=["GET", "HEAD"])
def editor_username_redirect(username):
try:
editor = api.lookup_editor(username=username)
except ApiException as ae:
abort(ae.status)
- return redirect(f'/editor/{editor.editor_id}')
+ return redirect(f"/editor/{editor.editor_id}")
-@app.route('/changelog', methods=['GET'])
+
+@app.route("/changelog", methods=["GET"])
def changelog_view():
try:
- #limit = int(request.args.get('limit', 10))
- entries = api.get_changelog() # TODO: expand="editors"
+ # limit = int(request.args.get('limit', 10))
+ entries = api.get_changelog() # TODO: expand="editors"
except ApiException as ae:
abort(ae.status)
- return render_template('changelog.html', entries=entries)
+ return render_template("changelog.html", entries=entries)
+
-@app.route('/changelog/<int:index>', methods=['GET'])
+@app.route("/changelog/<int:index>", methods=["GET"])
def changelog_entry_view(index):
try:
entry = api.get_changelog_entry(int(index))
entry.editgroup.editor = api.get_editor(entry.editgroup.editor_id)
- entry.editgroup.annotations = \
- api.get_editgroup_annotations(entry.editgroup_id, expand="editors")
+ entry.editgroup.annotations = api.get_editgroup_annotations(
+ entry.editgroup_id, expand="editors"
+ )
except ApiException as ae:
abort(ae.status)
- return render_template('changelog_view.html', entry=entry, editgroup=entry.editgroup)
+ return render_template("changelog_view.html", entry=entry, editgroup=entry.editgroup)
-@app.route('/reviewable', methods=['GET'])
+
+@app.route("/reviewable", methods=["GET"])
def reviewable_view():
try:
- #limit = int(request.args.get('limit', 10))
+ # limit = int(request.args.get('limit', 10))
entries = api.get_editgroups_reviewable(expand="editors")
except ApiException as ae:
abort(ae.status)
- return render_template('editgroup_reviewable.html', entries=entries)
+ return render_template("editgroup_reviewable.html", entries=entries)
+
-@app.route('/release/<string(length=26):ident>/save', methods=['GET', 'POST'])
+@app.route("/release/<string(length=26):ident>/save", methods=["GET", "POST"])
def release_save(ident):
form = SavePaperNowForm()
@@ -704,7 +848,12 @@ def release_save(ident):
abort(ae.status)
if not Config.KAFKA_PIXY_ENDPOINT:
- return render_template('release_save.html', entity=release, form=form, spn_status='not-configured'), 501
+ return (
+ render_template(
+ "release_save.html", entity=release, form=form, spn_status="not-configured"
+ ),
+ 501,
+ )
if form.is_submitted():
if form.validate_on_submit():
@@ -716,10 +865,20 @@ def release_save(ident):
json.dumps(msg, sort_keys=True),
)
except Exception:
- return render_template('release_save.html', entity=release, form=form, spn_status='kafka-error'), 500
- return render_template('release_save.html', entity=release, form=form, spn_status='success'), 200
+ return (
+ render_template(
+ "release_save.html", entity=release, form=form, spn_status="kafka-error"
+ ),
+ 500,
+ )
+ return (
+ render_template(
+ "release_save.html", entity=release, form=form, spn_status="success"
+ ),
+ 200,
+ )
elif form.errors:
- return render_template('release_save.html', entity=release, form=form), 400
+ return render_template("release_save.html", entity=release, form=form), 400
# form was not submitted; populate defaults
if release.release_stage:
@@ -729,50 +888,57 @@ def release_save(ident):
elif release.ext_ids.arxiv:
form.base_url.data = "https://arxiv.org/pdf/{}.pdf".format(release.ext_ids.arxiv)
elif release.ext_ids.pmcid:
- form.base_url.data = "https://europepmc.org/backend/ptpmcrender.fcgi?accid={}&blobtype=pdf".format(release.ext_ids.pmcid)
+ form.base_url.data = (
+ "https://europepmc.org/backend/ptpmcrender.fcgi?accid={}&blobtype=pdf".format(
+ release.ext_ids.pmcid
+ )
+ )
elif release.ext_ids.hdl:
form.base_url.data = "https://hdl.handle.net/{}".format(release.ext_ids.hdl)
- return render_template('release_save.html', entity=release, form=form), 200
+ return render_template("release_save.html", entity=release, form=form), 200
+
### Search ##################################################################
-@app.route('/search', methods=['GET', 'POST'])
+
+@app.route("/search", methods=["GET", "POST"])
def generic_search():
- if 'q' not in request.args.keys():
- return redirect('/release/search')
- query = request.args.get('q').strip()
+ if "q" not in request.args.keys():
+ return redirect("/release/search")
+ query = request.args.get("q").strip()
if len(query.split()) != 1:
# multi-term? must be a real search
- return redirect(url_for('release_search', q=query, generic=1))
+ return redirect(url_for("release_search", q=query, generic=1))
if clean_doi(query):
- return redirect(url_for('release_lookup', doi=clean_doi(query)))
+ return redirect(url_for("release_lookup", doi=clean_doi(query)))
if clean_pmcid(query):
- return redirect(url_for('release_lookup', pmcid=clean_pmcid(query)))
+ return redirect(url_for("release_lookup", pmcid=clean_pmcid(query)))
if clean_sha1(query):
- return redirect(url_for('file_lookup', sha1=clean_sha1(query)))
+ return redirect(url_for("file_lookup", sha1=clean_sha1(query)))
if clean_sha256(query):
- return redirect(url_for('file_lookup', sha256=clean_sha256(query)))
+ return redirect(url_for("file_lookup", sha256=clean_sha256(query)))
if clean_issn(query):
- return redirect(url_for('container_lookup', issnl=clean_issn(query)))
+ return redirect(url_for("container_lookup", issnl=clean_issn(query)))
if clean_isbn13(query):
- return redirect(url_for('release_lookup', isbn13=clean_isbn13(query)))
+ return redirect(url_for("release_lookup", isbn13=clean_isbn13(query)))
if clean_arxiv_id(query):
- return redirect(url_for('release_lookup', arxiv=clean_arxiv_id(query)))
+ return redirect(url_for("release_lookup", arxiv=clean_arxiv_id(query)))
if clean_orcid(query):
- return redirect(url_for('creator_lookup', orcid=clean_orcid(query)))
+ return redirect(url_for("creator_lookup", orcid=clean_orcid(query)))
+
+ return redirect(url_for("release_search", q=query, generic=1))
- return redirect(url_for('release_search', q=query, generic=1))
-@app.route('/release/search', methods=['GET', 'POST'])
+@app.route("/release/search", methods=["GET", "POST"])
def release_search():
- if 'q' not in request.args.keys():
- return render_template('release_search.html', query=ReleaseQuery(), found=None)
+ if "q" not in request.args.keys():
+ return render_template("release_search.html", query=ReleaseQuery(), found=None)
container_found = None
- if request.args.get('generic'):
+ if request.args.get("generic"):
container_query = GenericQuery.from_args(request.args)
container_query.limit = 1
try:
@@ -784,28 +950,38 @@ def release_search():
try:
found = do_release_search(query)
except FatcatSearchError as fse:
- return render_template('release_search.html', query=query, es_error=fse), fse.status_code
- return render_template('release_search.html', query=query, found=found, container_found=container_found)
+ return (
+ render_template("release_search.html", query=query, es_error=fse),
+ fse.status_code,
+ )
+ return render_template(
+ "release_search.html", query=query, found=found, container_found=container_found
+ )
+
-@app.route('/container/search', methods=['GET', 'POST'])
+@app.route("/container/search", methods=["GET", "POST"])
def container_search():
- if 'q' not in request.args.keys():
- return render_template('container_search.html', query=GenericQuery(), found=None)
+ if "q" not in request.args.keys():
+ return render_template("container_search.html", query=GenericQuery(), found=None)
query = GenericQuery.from_args(request.args)
try:
found = do_container_search(query)
except FatcatSearchError as fse:
- return render_template('container_search.html', query=query, es_error=fse), fse.status_code
- return render_template('container_search.html', query=query, found=found)
+ return (
+ render_template("container_search.html", query=query, es_error=fse),
+ fse.status_code,
+ )
+ return render_template("container_search.html", query=query, found=found)
+
-@app.route('/coverage/search', methods=['GET', 'POST'])
+@app.route("/coverage/search", methods=["GET", "POST"])
def coverage_search():
- if 'q' not in request.args.keys():
+ if "q" not in request.args.keys():
return render_template(
- 'coverage_search.html',
+ "coverage_search.html",
query=ReleaseQuery(),
coverage_stats=None,
coverage_type_preservation=None,
@@ -817,19 +993,22 @@ def coverage_search():
try:
coverage_stats = get_elastic_search_coverage(query)
except FatcatSearchError as fse:
- return render_template(
- 'coverage_search.html',
- query=query,
- coverage_stats=None,
- coverage_type_preservation=None,
- year_histogram_svg=None,
- date_histogram_svg=None,
- es_error=fse,
- ), fse.status_code
+ return (
+ render_template(
+ "coverage_search.html",
+ query=query,
+ coverage_stats=None,
+ coverage_type_preservation=None,
+ year_histogram_svg=None,
+ date_histogram_svg=None,
+ es_error=fse,
+ ),
+ fse.status_code,
+ )
year_histogram_svg = None
date_histogram_svg = None
coverage_type_preservation = None
- if coverage_stats['total'] > 1:
+ if coverage_stats["total"] > 1:
coverage_type_preservation = get_elastic_preservation_by_type(query)
if query.recent:
date_histogram = get_elastic_preservation_by_date(query)
@@ -844,7 +1023,7 @@ def coverage_search():
merge_shadows=Config.FATCAT_MERGE_SHADOW_PRESERVATION,
).render_data_uri()
return render_template(
- 'coverage_search.html',
+ "coverage_search.html",
query=query,
coverage_stats=coverage_stats,
coverage_type_preservation=coverage_type_preservation,
@@ -852,16 +1031,20 @@ def coverage_search():
date_histogram_svg=date_histogram_svg,
)
+
def get_changelog_stats():
stats = {}
latest_changelog = api.get_changelog(limit=1)[0]
- stats['changelog'] = {"latest": {
- "index": latest_changelog.index,
- "timestamp": latest_changelog.timestamp.isoformat(),
- }}
+ stats["changelog"] = {
+ "latest": {
+ "index": latest_changelog.index,
+ "timestamp": latest_changelog.timestamp.isoformat(),
+ }
+ }
return stats
-@app.route('/stats', methods=['GET'])
+
+@app.route("/stats", methods=["GET"])
def stats_page():
try:
stats = get_elastic_entity_stats()
@@ -869,12 +1052,14 @@ def stats_page():
except Exception as ae:
app.log.error(ae)
abort(503)
- return render_template('stats.html', stats=stats)
+ return render_template("stats.html", stats=stats)
+
### Pseudo-APIs #############################################################
-@app.route('/stats.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route("/stats.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def stats_json():
try:
stats = get_elastic_entity_stats()
@@ -884,10 +1069,11 @@ def stats_json():
abort(503)
return jsonify(stats)
-@app.route('/container/issnl/<issnl>/stats.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route("/container/issnl/<issnl>/stats.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_issnl_stats(issnl):
- if not (len(issnl) == 9 and issnl[4] == '-'):
+ if not (len(issnl) == 9 and issnl[4] == "-"):
abort(400, "Not a valid ISSN-L: {}".format(issnl))
try:
container = api.lookup_container(issnl=issnl)
@@ -900,8 +1086,9 @@ def container_issnl_stats(issnl):
abort(503)
return jsonify(stats)
-@app.route('/container/<string(length=26):ident>/stats.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route("/container/<string(length=26):ident>/stats.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_stats(ident):
try:
container = api.get_container(ident)
@@ -914,8 +1101,11 @@ def container_ident_stats(ident):
abort(503)
return jsonify(stats)
-@app.route('/container/<string(length=26):ident>/ia_coverage_years.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route(
+ "/container/<string(length=26):ident>/ia_coverage_years.json", methods=["GET", "OPTIONS"]
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_ia_coverage_years_json(ident):
try:
container = api.get_container(ident)
@@ -927,10 +1117,13 @@ def container_ident_ia_coverage_years_json(ident):
app.log.error(ae)
abort(503)
histogram = [dict(year=h[0], in_ia=h[1], count=h[2]) for h in histogram]
- return jsonify({'container_id': ident, "histogram": histogram})
+ return jsonify({"container_id": ident, "histogram": histogram})
-@app.route('/container/<string(length=26):ident>/ia_coverage_years.svg', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route(
+ "/container/<string(length=26):ident>/ia_coverage_years.svg", methods=["GET", "OPTIONS"]
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_ia_coverage_years_svg(ident):
try:
container = api.get_container(ident)
@@ -943,8 +1136,11 @@ def container_ident_ia_coverage_years_svg(ident):
abort(503)
return ia_coverage_histogram(histogram).render_response()
-@app.route('/container/<string(length=26):ident>/preservation_by_year.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route(
+ "/container/<string(length=26):ident>/preservation_by_year.json", methods=["GET", "OPTIONS"]
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_preservation_by_year_json(ident):
try:
container = api.get_container(ident)
@@ -956,10 +1152,13 @@ def container_ident_preservation_by_year_json(ident):
except Exception as ae:
app.log.error(ae)
abort(503)
- return jsonify({'container_id': ident, "histogram": histogram})
+ return jsonify({"container_id": ident, "histogram": histogram})
+
-@app.route('/container/<string(length=26):ident>/preservation_by_year.svg', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+@app.route(
+ "/container/<string(length=26):ident>/preservation_by_year.svg", methods=["GET", "OPTIONS"]
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_preservation_by_year_svg(ident):
try:
container = api.get_container(ident)
@@ -976,8 +1175,12 @@ def container_ident_preservation_by_year_svg(ident):
merge_shadows=Config.FATCAT_MERGE_SHADOW_PRESERVATION,
).render_response()
-@app.route('/container/<string(length=26):ident>/preservation_by_volume.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route(
+ "/container/<string(length=26):ident>/preservation_by_volume.json",
+ methods=["GET", "OPTIONS"],
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_preservation_by_volume_json(ident):
try:
container = api.get_container(ident)
@@ -988,10 +1191,14 @@ def container_ident_preservation_by_volume_json(ident):
except Exception as ae:
app.log.error(ae)
abort(503)
- return jsonify({'container_id': ident, "histogram": histogram})
+ return jsonify({"container_id": ident, "histogram": histogram})
+
-@app.route('/container/<string(length=26):ident>/preservation_by_volume.svg', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+@app.route(
+ "/container/<string(length=26):ident>/preservation_by_volume.svg",
+ methods=["GET", "OPTIONS"],
+)
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def container_ident_preservation_by_volume_svg(ident):
try:
container = api.get_container(ident)
@@ -1007,21 +1214,23 @@ def container_ident_preservation_by_volume_svg(ident):
merge_shadows=Config.FATCAT_MERGE_SHADOW_PRESERVATION,
).render_response()
-@app.route('/release/<string(length=26):ident>.bib', methods=['GET'])
+
+@app.route("/release/<string(length=26):ident>.bib", methods=["GET"])
def release_bibtex(ident):
try:
entity = api.get_release(ident)
except ApiException as ae:
raise ae
csl = release_to_csl(entity)
- bibtex = citeproc_csl(csl, 'bibtex')
+ bibtex = citeproc_csl(csl, "bibtex")
return Response(bibtex, mimetype="text/plain")
-@app.route('/release/<string(length=26):ident>/citeproc', methods=['GET'])
+
+@app.route("/release/<string(length=26):ident>/citeproc", methods=["GET"])
def release_citeproc(ident):
- style = request.args.get('style', 'harvard1')
- is_html = request.args.get('html', False)
- if is_html and is_html.lower() in ('yes', '1', 'true', 'y', 't'):
+ style = request.args.get("style", "harvard1")
+ is_html = request.args.get("html", False)
+ if is_html and is_html.lower() in ("yes", "1", "true", "y", "t"):
is_html = True
else:
is_html = False
@@ -1042,67 +1251,75 @@ def release_citeproc(ident):
else:
return Response(cite, mimetype="text/plain")
-@app.route('/health.json', methods=['GET', 'OPTIONS'])
-@crossdomain(origin='*',headers=['access-control-allow-origin','Content-Type'])
+
+@app.route("/health.json", methods=["GET", "OPTIONS"])
+@crossdomain(origin="*", headers=["access-control-allow-origin", "Content-Type"])
def health_json():
- return jsonify({'ok': True})
+ return jsonify({"ok": True})
### Auth ####################################################################
-@app.route('/auth/login')
+
+@app.route("/auth/login")
def login():
# show the user a list of login options
if not priv_api:
- app.log.warn("This web interface is not configured with credentials to actually allow login (other than via token)")
- return render_template('auth_login.html')
+ app.log.warn(
+ "This web interface not configured with credentials to actually allow login (other than via token)"
+ )
+ return render_template("auth_login.html")
-@app.route('/auth/ia/login', methods=['GET', 'POST'])
+
+@app.route("/auth/ia/login", methods=["GET", "POST"])
def ia_xauth_login():
- if 'email' in request.form:
+ if "email" in request.form:
# if a login attempt...
- return handle_ia_xauth(request.form.get('email'), request.form.get('password'))
+ return handle_ia_xauth(request.form.get("email"), request.form.get("password"))
# else show form
- return render_template('auth_ia_login.html')
+ return render_template("auth_ia_login.html")
+
-@app.route('/auth/token_login', methods=['GET', 'POST'])
+@app.route("/auth/token_login", methods=["GET", "POST"])
def token_login():
# show the user a list of login options
- if 'token' in request.args:
- return handle_token_login(request.args.get('token'))
- if 'token' in request.form:
- return handle_token_login(request.form.get('token'))
- return render_template('auth_token_login.html')
+ if "token" in request.args:
+ return handle_token_login(request.args.get("token"))
+ if "token" in request.form:
+ return handle_token_login(request.form.get("token"))
+ return render_template("auth_token_login.html")
-@app.route('/auth/change_username', methods=['POST'])
+
+@app.route("/auth/change_username", methods=["POST"])
@login_required
def change_username():
if not app.testing:
app.csrf.protect()
# show the user a list of login options
- if 'username' not in request.form:
+ if "username" not in request.form:
abort(400)
# on behalf of user...
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
try:
- editor = user_api.get_editor(session['editor']['editor_id'])
- editor.username = request.form['username']
+ editor = user_api.get_editor(session["editor"]["editor_id"])
+ editor.username = request.form["username"]
editor = user_api.update_editor(editor.editor_id, editor)
except ApiException as ae:
app.log.info(ae)
raise ae
# update our session
- session['editor'] = editor.to_dict()
+ session["editor"] = editor.to_dict()
load_user(editor.editor_id)
- return redirect('/auth/account')
+ return redirect("/auth/account")
+
-@app.route('/auth/create_token', methods=['POST'])
+@app.route("/auth/create_token", methods=["POST"])
@login_required
def create_auth_token():
if not app.testing:
app.csrf.protect()
- duration_seconds = request.form.get('duration_seconds', None)
+ duration_seconds = request.form.get("duration_seconds", None)
if duration_seconds:
try:
duration_seconds = int(duration_seconds)
@@ -1114,88 +1331,99 @@ def create_auth_token():
# cookie, so if api_token is valid editor_id is assumed to match. If that
# wasn't true, users could manipulate session cookies and create tokens for
# any user
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
resp = user_api.auth_check()
- assert(resp.success)
+ assert resp.success
# generate token using *superuser* privs
- editor_id = session['editor']['editor_id']
+ editor_id = session["editor"]["editor_id"]
try:
- resp = priv_api.create_auth_token(editor_id,
- duration_seconds=duration_seconds)
+ resp = priv_api.create_auth_token(editor_id, duration_seconds=duration_seconds)
except ApiException as ae:
app.log.info(ae)
raise ae
- return render_template('auth_token.html', auth_token=resp.token)
+ return render_template("auth_token.html", auth_token=resp.token)
-@app.route('/auth/logout')
+
+@app.route("/auth/logout")
def logout():
handle_logout()
- return render_template('auth_logout.html')
+ return render_template("auth_logout.html")
+
-@app.route('/auth/account')
+@app.route("/auth/account")
@login_required
def auth_account():
# auth check on account page
- user_api = auth_api(session['api_token'])
+ user_api = auth_api(session["api_token"])
resp = user_api.auth_check()
- assert(resp.success)
- editor = user_api.get_editor(session['editor']['editor_id'])
- session['editor'] = editor.to_dict()
+ assert resp.success
+ editor = user_api.get_editor(session["editor"]["editor_id"])
+ session["editor"] = editor.to_dict()
load_user(editor.editor_id)
- return render_template('auth_account.html')
+ return render_template("auth_account.html")
+
-@app.route('/auth/wikipedia/auth')
+@app.route("/auth/wikipedia/auth")
def wp_oauth_rewrite():
"""
This is a dirty hack to rewrite '/auth/wikipedia/auth' to '/auth/wikipedia/oauth-callback'
"""
return redirect(
- (b"/auth/wikipedia/oauth-callback?" + request.query_string).decode('utf-8'),
+ (b"/auth/wikipedia/oauth-callback?" + request.query_string).decode("utf-8"),
307,
)
-@app.route('/auth/wikipedia/finish-login')
+
+@app.route("/auth/wikipedia/finish-login")
def wp_oauth_finish_login():
wp_username = mwoauth.get_current_user(cached=True)
- assert(wp_username)
+ assert wp_username
return handle_wmoauth(wp_username)
### Static Routes ###########################################################
+
@app.errorhandler(404)
def page_not_found(e):
- return render_template('404.html'), 404
+ return render_template("404.html"), 404
+
@app.errorhandler(401)
@app.errorhandler(403)
def page_not_authorized(e):
- return render_template('403.html'), 403
+ return render_template("403.html"), 403
+
@app.errorhandler(405)
def page_method_not_allowed(e):
- return render_template('405.html'), 405
+ return render_template("405.html"), 405
+
@app.errorhandler(400)
def page_bad_request(e):
- return render_template('400.html', err=e), 400
+ return render_template("400.html", err=e), 400
+
@app.errorhandler(409)
def page_edit_conflict(e):
- return render_template('409.html'), 409
+ return render_template("409.html"), 409
+
@app.errorhandler(500)
def page_server_error(e):
app.log.error(e)
- return render_template('500.html'), 500
+ return render_template("500.html"), 500
+
@app.errorhandler(502)
@app.errorhandler(503)
@app.errorhandler(504)
def page_server_down(e):
app.log.error(e)
- return render_template('503.html'), 503
+ return render_template("503.html"), 503
+
@app.errorhandler(ApiException)
def page_fatcat_api_error(ae):
@@ -1214,47 +1442,54 @@ def page_fatcat_api_error(ae):
return page_edit_conflict(ae)
try:
json_body = json.loads(ae.body)
- ae.error_name = json_body.get('error')
- ae.message = json_body.get('message')
+ ae.error_name = json_body.get("error")
+ ae.message = json_body.get("message")
except ValueError:
pass
- return render_template('api_error.html', api_error=ae), ae.status
+ return render_template("api_error.html", api_error=ae), ae.status
+
@app.errorhandler(ApiValueError)
def page_fatcat_api_value_error(ae):
ae.status = 400
ae.error_name = "ValueError"
ae.message = str(ae)
- return render_template('api_error.html', api_error=ae), 400
+ return render_template("api_error.html", api_error=ae), 400
+
@app.errorhandler(CSRFError)
def page_csrf_error(e):
- return render_template('csrf_error.html', reason=e.description), 400
+ return render_template("csrf_error.html", reason=e.description), 400
+
-@app.route('/', methods=['GET'])
+@app.route("/", methods=["GET"])
def page_home():
- return render_template('home.html')
+ return render_template("home.html")
-@app.route('/about', methods=['GET'])
+
+@app.route("/about", methods=["GET"])
def page_about():
- return render_template('about.html')
+ return render_template("about.html")
+
-@app.route('/rfc', methods=['GET'])
+@app.route("/rfc", methods=["GET"])
def page_rfc():
- return render_template('rfc.html')
+ return render_template("rfc.html")
+
-@app.route('/robots.txt', methods=['GET'])
+@app.route("/robots.txt", methods=["GET"])
def page_robots_txt():
- if app.config['FATCAT_DOMAIN'] == "fatcat.wiki":
+ if app.config["FATCAT_DOMAIN"] == "fatcat.wiki":
robots_path = "robots.txt"
else:
robots_path = "robots.deny_all.txt"
- return send_from_directory(os.path.join(app.root_path, 'static'),
- robots_path,
- mimetype='text/plain')
+ return send_from_directory(
+ os.path.join(app.root_path, "static"), robots_path, mimetype="text/plain"
+ )
+
-@app.route('/sitemap.xml', methods=['GET'])
+@app.route("/sitemap.xml", methods=["GET"])
def page_sitemap_xml():
- return send_from_directory(os.path.join(app.root_path, 'static'),
- "sitemap.xml",
- mimetype='text/xml')
+ return send_from_directory(
+ os.path.join(app.root_path, "static"), "sitemap.xml", mimetype="text/xml"
+ )
diff --git a/python/fatcat_web/search.py b/python/fatcat_web/search.py
index 73781016..5fc3f614 100644
--- a/python/fatcat_web/search.py
+++ b/python/fatcat_web/search.py
@@ -1,4 +1,3 @@
-
"""
Helpers for doing elasticsearch queries (used in the web interface; not part of
the formal API)
@@ -17,7 +16,6 @@ from fatcat_web import app
class FatcatSearchError(Exception):
-
def __init__(self, status_code: int, name: str, description: str = None):
if status_code == "N/A":
status_code = 503
@@ -25,6 +23,7 @@ class FatcatSearchError(Exception):
self.name = name
self.description = description
+
@dataclass
class ReleaseQuery:
q: Optional[str] = None
@@ -35,31 +34,32 @@ class ReleaseQuery:
recent: bool = False
@classmethod
- def from_args(cls, args) -> 'ReleaseQuery':
+ def from_args(cls, args) -> "ReleaseQuery":
- query_str = args.get('q') or '*'
+ query_str = args.get("q") or "*"
- container_id = args.get('container_id')
+ container_id = args.get("container_id")
# TODO: as filter, not in query string
if container_id:
query_str += ' container_id:"{}"'.format(container_id)
# TODO: where are container_issnl queries actually used?
- issnl = args.get('container_issnl')
+ issnl = args.get("container_issnl")
if issnl and query_str:
query_str += ' container_issnl:"{}"'.format(issnl)
- offset = args.get('offset', '0')
+ offset = args.get("offset", "0")
offset = max(0, int(offset)) if offset.isnumeric() else 0
return ReleaseQuery(
q=query_str,
offset=offset,
- fulltext_only=bool(args.get('fulltext_only')),
+ fulltext_only=bool(args.get("fulltext_only")),
container_id=container_id,
- recent=bool(args.get('recent')),
+ recent=bool(args.get("recent")),
)
+
@dataclass
class GenericQuery:
q: Optional[str] = None
@@ -67,11 +67,11 @@ class GenericQuery:
offset: Optional[int] = None
@classmethod
- def from_args(cls, args) -> 'GenericQuery':
- query_str = args.get('q')
+ def from_args(cls, args) -> "GenericQuery":
+ query_str = args.get("q")
if not query_str:
- query_str = '*'
- offset = args.get('offset', '0')
+ query_str = "*"
+ offset = args.get("offset", "0")
offset = max(0, int(offset)) if offset.isnumeric() else 0
return GenericQuery(
@@ -79,6 +79,7 @@ class GenericQuery:
offset=offset,
)
+
@dataclass
class SearchHits:
count_returned: int
@@ -89,6 +90,7 @@ class SearchHits:
query_time_ms: int
results: List[Any]
+
def _hits_total_int(val: Any) -> int:
"""
Compatibility hack between ES 6.x and 7.x. In ES 6x, total is returned as
@@ -97,7 +99,7 @@ def _hits_total_int(val: Any) -> int:
if isinstance(val, int):
return val
else:
- return int(val['value'])
+ return int(val["value"])
def results_to_dict(response: elasticsearch_dsl.response.Response) -> List[dict]:
@@ -121,6 +123,7 @@ def results_to_dict(response: elasticsearch_dsl.response.Response) -> List[dict]
h[key] = h[key].encode("utf8", "ignore").decode("utf8")
return results
+
def wrap_es_execution(search: Search) -> Any:
"""
Executes a Search object, and converts various ES error types into
@@ -146,6 +149,7 @@ def wrap_es_execution(search: Search) -> Any:
raise FatcatSearchError(e.status_code, str(e.error), description)
return resp
+
def agg_to_dict(agg) -> dict:
"""
Takes a simple term aggregation result (with buckets) and returns a simple
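
And a quick sketch of what agg_to_dict produces, using SimpleNamespace objects to stand in for an elasticsearch_dsl terms-aggregation response (bucket contents are made up):

    from types import SimpleNamespace

    from fatcat_web.search import agg_to_dict

    fake_agg = SimpleNamespace(
        buckets=[
            SimpleNamespace(key="bright", doc_count=40),
            SimpleNamespace(key="dark", doc_count=15),
        ],
        sum_other_doc_count=3,
    )

    assert agg_to_dict(fake_agg) == {"bright": 40, "dark": 15, "_other": 3}
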
@@ -157,14 +161,13 @@ def agg_to_dict(agg) -> dict:
for bucket in agg.buckets:
result[bucket.key] = bucket.doc_count
if agg.sum_other_doc_count:
- result['_other'] = agg.sum_other_doc_count
+ result["_other"] = agg.sum_other_doc_count
return result
-def do_container_search(
- query: GenericQuery, deep_page_limit: int = 2000
-) -> SearchHits:
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_CONTAINER_INDEX'])
+def do_container_search(query: GenericQuery, deep_page_limit: int = 2000) -> SearchHits:
+
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_CONTAINER_INDEX"])
search = search.query(
"query_string",
@@ -199,11 +202,10 @@ def do_container_search(
results=results,
)
-def do_release_search(
- query: ReleaseQuery, deep_page_limit: int = 2000
-) -> SearchHits:
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+def do_release_search(query: ReleaseQuery, deep_page_limit: int = 2000) -> SearchHits:
+
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
# availability filters
if query.fulltext_only:
@@ -240,7 +242,11 @@ def do_release_search(
search = search.query(
"boosting",
- positive=Q("bool", must=basic_biblio, should=[has_fulltext],),
+ positive=Q(
+ "bool",
+ must=basic_biblio,
+ should=[has_fulltext],
+ ),
negative=poor_metadata,
negative_boost=0.5,
)
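
The boosting query being reflowed here is a standard Elasticsearch construct: only documents matching the positive clause are returned, and documents that also match the negative clause keep their score multiplied by negative_boost. A standalone sketch with elasticsearch_dsl (the individual clauses are illustrative, not fatcat's exact definitions):

    from elasticsearch_dsl import Q, Search

    basic_biblio = Q("query_string", query="coffee consumption", fields=["biblio"])
    has_fulltext = Q("terms", in_ia=[True])
    poor_metadata = Q("bool", must_not=[Q("exists", field="title")])

    search = Search(index="fatcat_release").query(
        "boosting",
        positive=Q("bool", must=basic_biblio, should=[has_fulltext]),
        negative=poor_metadata,
        negative_boost=0.5,
    )
    print(search.to_dict())  # inspect the generated query body without a cluster
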
@@ -260,9 +266,13 @@ def do_release_search(
for h in results:
# Ensure 'contrib_names' is a list, not a single string
- if type(h['contrib_names']) is not list:
- h['contrib_names'] = [h['contrib_names'], ]
- h['contrib_names'] = [name.encode('utf8', 'ignore').decode('utf8') for name in h['contrib_names']]
+ if type(h["contrib_names"]) is not list:
+ h["contrib_names"] = [
+ h["contrib_names"],
+ ]
+ h["contrib_names"] = [
+ name.encode("utf8", "ignore").decode("utf8") for name in h["contrib_names"]
+ ]
return SearchHits(
count_returned=len(results),
@@ -274,6 +284,7 @@ def do_release_search(
results=results,
)
+
def get_elastic_container_random_releases(ident: str, limit=5) -> dict:
"""
Returns a list of releases from the container.
@@ -281,16 +292,16 @@ def get_elastic_container_random_releases(ident: str, limit=5) -> dict:
assert limit > 0 and limit <= 100
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
search = search.query(
- 'bool',
+ "bool",
must=[
- Q('term', container_id=ident),
- Q('range', release_year={ "lte": datetime.datetime.today().year }),
- ]
+ Q("term", container_id=ident),
+ Q("range", release_year={"lte": datetime.datetime.today().year}),
+ ],
)
- search = search.sort('-in_web', '-release_date')
- search = search[:int(limit)]
+ search = search.sort("-in_web", "-release_date")
+ search = search[: int(limit)]
search = search.params(request_cache=True)
# not needed: search = search.params(track_total_hits=True)
@@ -299,6 +310,7 @@ def get_elastic_container_random_releases(ident: str, limit=5) -> dict:
return results
+
def get_elastic_entity_stats() -> dict:
"""
TODO: files, filesets, webcaptures (no schema yet)
@@ -312,11 +324,11 @@ def get_elastic_entity_stats() -> dict:
stats = {}
# release totals
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
search.aggs.bucket(
- 'release_ref_count',
- 'sum',
- field='ref_count',
+ "release_ref_count",
+ "sum",
+ field="ref_count",
)
search = search[:0] # pylint: disable=unsubscriptable-object
@@ -324,15 +336,15 @@ def get_elastic_entity_stats() -> dict:
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
- stats['release'] = {
+ stats["release"] = {
"total": _hits_total_int(resp.hits.total),
"refs_total": int(resp.aggregations.release_ref_count.value),
}
# paper counts
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
search = search.query(
- 'terms',
+ "terms",
release_type=[
"article-journal",
"paper-conference",
@@ -341,17 +353,21 @@ def get_elastic_entity_stats() -> dict:
],
)
search.aggs.bucket(
- 'paper_like',
- 'filters',
+ "paper_like",
+ "filters",
filters={
- "in_web": { "term": { "in_web": "true" } },
- "is_oa": { "term": { "is_oa": "true" } },
- "in_kbart": { "term": { "in_kbart": "true" } },
- "in_web_not_kbart": { "bool": { "filter": [
- { "term": { "in_web": "true" } },
- { "term": { "in_kbart": "false" } },
- ]}},
- }
+ "in_web": {"term": {"in_web": "true"}},
+ "is_oa": {"term": {"is_oa": "true"}},
+ "in_kbart": {"term": {"in_kbart": "true"}},
+ "in_web_not_kbart": {
+ "bool": {
+ "filter": [
+ {"term": {"in_web": "true"}},
+ {"term": {"in_kbart": "false"}},
+ ]
+ }
+ },
+ },
)
search = search[:0]
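
The filters aggregation above computes several named counts in a single request; each entry in filters becomes a bucket with its own doc_count. A trimmed-down sketch of the pattern (placeholder client and index; needs a live Elasticsearch to execute):

    import elasticsearch
    from elasticsearch_dsl import Search

    es_client = elasticsearch.Elasticsearch("http://localhost:9200")  # placeholder

    search = Search(using=es_client, index="fatcat_release")
    search.aggs.bucket(
        "paper_like",
        "filters",
        filters={
            "in_web": {"term": {"in_web": "true"}},
            "is_oa": {"term": {"is_oa": "true"}},
        },
    )
    search = search[:0]  # aggregations only, no hits
    resp = search.execute()
    buckets = resp.aggregations.paper_like.buckets
    print(buckets.in_web.doc_count, buckets.is_oa.doc_count)
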
@@ -359,35 +375,36 @@ def get_elastic_entity_stats() -> dict:
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
buckets = resp.aggregations.paper_like.buckets
- stats['papers'] = {
- 'total': _hits_total_int(resp.hits.total),
- 'in_web': buckets.in_web.doc_count,
- 'is_oa': buckets.is_oa.doc_count,
- 'in_kbart': buckets.in_kbart.doc_count,
- 'in_web_not_kbart': buckets.in_web_not_kbart.doc_count,
+ stats["papers"] = {
+ "total": _hits_total_int(resp.hits.total),
+ "in_web": buckets.in_web.doc_count,
+ "is_oa": buckets.is_oa.doc_count,
+ "in_kbart": buckets.in_kbart.doc_count,
+ "in_web_not_kbart": buckets.in_web_not_kbart.doc_count,
}
# container counts
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_CONTAINER_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_CONTAINER_INDEX"])
search.aggs.bucket(
- 'release_ref_count',
- 'sum',
- field='ref_count',
+ "release_ref_count",
+ "sum",
+ field="ref_count",
)
search = search[:0] # pylint: disable=unsubscriptable-object
search = search.params(request_cache=True)
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
- stats['container'] = {
+ stats["container"] = {
"total": _hits_total_int(resp.hits.total),
}
return stats
+
def get_elastic_search_coverage(query: ReleaseQuery) -> dict:
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
search = search.query(
"query_string",
query=query.q,
@@ -398,10 +415,10 @@ def get_elastic_search_coverage(query: ReleaseQuery) -> dict:
fields=["biblio"],
)
search.aggs.bucket(
- 'preservation',
- 'terms',
- field='preservation',
- missing='_unknown',
+ "preservation",
+ "terms",
+ field="preservation",
+ missing="_unknown",
)
if query.recent:
date_today = datetime.date.today()
@@ -416,21 +433,24 @@ def get_elastic_search_coverage(query: ReleaseQuery) -> dict:
resp = wrap_es_execution(search)
preservation_bucket = agg_to_dict(resp.aggregations.preservation)
- preservation_bucket['total'] = _hits_total_int(resp.hits.total)
- for k in ('bright', 'dark', 'shadows_only', 'none'):
+ preservation_bucket["total"] = _hits_total_int(resp.hits.total)
+ for k in ("bright", "dark", "shadows_only", "none"):
if k not in preservation_bucket:
preservation_bucket[k] = 0
- if app.config['FATCAT_MERGE_SHADOW_PRESERVATION']:
- preservation_bucket['none'] += preservation_bucket['shadows_only']
- preservation_bucket['shadows_only'] = 0
+ if app.config["FATCAT_MERGE_SHADOW_PRESERVATION"]:
+ preservation_bucket["none"] += preservation_bucket["shadows_only"]
+ preservation_bucket["shadows_only"] = 0
stats = {
- 'total': _hits_total_int(resp.hits.total),
- 'preservation': preservation_bucket,
+ "total": _hits_total_int(resp.hits.total),
+ "preservation": preservation_bucket,
}
return stats
-def get_elastic_container_stats(ident, issnl=None, es_client=None, es_index=None, merge_shadows=None):
+
+def get_elastic_container_stats(
+ ident, issnl=None, es_client=None, es_index=None, merge_shadows=None
+):
"""
Returns dict:
ident
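
The preservation handling shared by get_elastic_search_coverage and get_elastic_container_stats is plain post-processing of a terms aggregation (with missing="_unknown" catching documents that lack the field): make sure all four categories exist, then optionally fold shadows_only into none. The same normalization as an isolated, hypothetical helper:

    from typing import Dict


    def normalize_preservation(bucket: Dict[str, int], merge_shadows: bool) -> Dict[str, int]:
        """Ensure all preservation categories exist; optionally fold shadows_only into none."""
        out = dict(bucket)
        for k in ("bright", "dark", "shadows_only", "none"):
            out.setdefault(k, 0)
        if merge_shadows:
            out["none"] += out["shadows_only"]
            out["shadows_only"] = 0
        return out


    assert normalize_preservation({"bright": 10, "shadows_only": 2}, merge_shadows=True) == {
        "bright": 10,
        "dark": 0,
        "shadows_only": 0,
        "none": 2,
    }
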
@@ -444,41 +464,41 @@ def get_elastic_container_stats(ident, issnl=None, es_client=None, es_index=None
if not es_client:
es_client = app.es_client
if not es_index:
- es_index = app.config['ELASTICSEARCH_RELEASE_INDEX']
+ es_index = app.config["ELASTICSEARCH_RELEASE_INDEX"]
if merge_shadows is None:
- merge_shadows = app.config['FATCAT_MERGE_SHADOW_PRESERVATION']
+ merge_shadows = app.config["FATCAT_MERGE_SHADOW_PRESERVATION"]
search = Search(using=es_client, index=es_index)
search = search.query(
- 'term',
+ "term",
container_id=ident,
)
search.aggs.bucket(
- 'container_stats',
- 'filters',
+ "container_stats",
+ "filters",
filters={
"in_web": {
- "term": { "in_web": True },
+ "term": {"in_web": True},
},
"in_kbart": {
- "term": { "in_kbart": True },
+ "term": {"in_kbart": True},
},
"is_preserved": {
- "term": { "is_preserved": True },
+ "term": {"is_preserved": True},
},
},
)
search.aggs.bucket(
- 'preservation',
- 'terms',
- field='preservation',
- missing='_unknown',
+ "preservation",
+ "terms",
+ field="preservation",
+ missing="_unknown",
)
search.aggs.bucket(
- 'release_type',
- 'terms',
- field='release_type',
- missing='_unknown',
+ "release_type",
+ "terms",
+ field="release_type",
+ missing="_unknown",
)
search = search[:0]
@@ -489,27 +509,28 @@ def get_elastic_container_stats(ident, issnl=None, es_client=None, es_index=None
container_stats = resp.aggregations.container_stats.buckets
preservation_bucket = agg_to_dict(resp.aggregations.preservation)
- preservation_bucket['total'] = _hits_total_int(resp.hits.total)
- for k in ('bright', 'dark', 'shadows_only', 'none'):
+ preservation_bucket["total"] = _hits_total_int(resp.hits.total)
+ for k in ("bright", "dark", "shadows_only", "none"):
if k not in preservation_bucket:
preservation_bucket[k] = 0
if merge_shadows:
- preservation_bucket['none'] += preservation_bucket['shadows_only']
- preservation_bucket['shadows_only'] = 0
+ preservation_bucket["none"] += preservation_bucket["shadows_only"]
+ preservation_bucket["shadows_only"] = 0
release_type_bucket = agg_to_dict(resp.aggregations.release_type)
stats = {
- 'ident': ident,
- 'issnl': issnl,
- 'total': _hits_total_int(resp.hits.total),
- 'in_web': container_stats['in_web']['doc_count'],
- 'in_kbart': container_stats['in_kbart']['doc_count'],
- 'is_preserved': container_stats['is_preserved']['doc_count'],
- 'preservation': preservation_bucket,
- 'release_type': release_type_bucket,
+ "ident": ident,
+ "issnl": issnl,
+ "total": _hits_total_int(resp.hits.total),
+ "in_web": container_stats["in_web"]["doc_count"],
+ "in_kbart": container_stats["in_kbart"]["doc_count"],
+ "is_preserved": container_stats["is_preserved"]["doc_count"],
+ "preservation": preservation_bucket,
+ "release_type": release_type_bucket,
}
return stats
+
def get_elastic_container_histogram_legacy(ident) -> List:
"""
Fetches a stacked histogram of {year, in_ia}. This is for the older style
@@ -522,48 +543,58 @@ def get_elastic_container_histogram_legacy(ident) -> List:
(year, in_ia, count)
"""
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
search = search.query(
- 'bool',
+ "bool",
must=[
- Q("range", release_year={
- "gte": datetime.datetime.today().year - 499,
- "lte": datetime.datetime.today().year,
- }),
+ Q(
+ "range",
+ release_year={
+ "gte": datetime.datetime.today().year - 499,
+ "lte": datetime.datetime.today().year,
+ },
+ ),
],
filter=[
- Q("bool", minimum_should_match=1, should=[
- Q("match", container_id=ident),
- ]),
+ Q(
+ "bool",
+ minimum_should_match=1,
+ should=[
+ Q("match", container_id=ident),
+ ],
+ ),
],
)
search.aggs.bucket(
- 'year_in_ia',
- 'composite',
+ "year_in_ia",
+ "composite",
size=1000,
sources=[
- {"year": {
- "histogram": {
- "field": "release_year",
- "interval": 1,
- },
- }},
- {"in_ia": {
- "terms": {
- "field": "in_ia",
- },
- }},
+ {
+ "year": {
+ "histogram": {
+ "field": "release_year",
+ "interval": 1,
+ },
+ }
+ },
+ {
+ "in_ia": {
+ "terms": {
+ "field": "in_ia",
+ },
+ }
+ },
],
)
search = search[:0]
- search = search.params(request_cache='true')
+ search = search.params(request_cache="true")
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
buckets = resp.aggregations.year_in_ia.buckets
- vals = [(int(h['key']['year']), h['key']['in_ia'], h['doc_count'])
- for h in buckets]
+ vals = [(int(h["key"]["year"]), h["key"]["in_ia"], h["doc_count"]) for h in buckets]
vals = sorted(vals)
return vals
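
For orientation, a composite aggregation returns buckets whose key combines one value from each source, and the comprehension above flattens them into (year, in_ia, count) tuples. A sketch of the shapes involved (bucket contents are made up; plain dicts stand in for the response objects, which also support item access):

    buckets = [
        {"key": {"year": 2019.0, "in_ia": False}, "doc_count": 40},
        {"key": {"year": 2019.0, "in_ia": True}, "doc_count": 60},
        {"key": {"year": 2020.0, "in_ia": True}, "doc_count": 75},
    ]

    vals = [(int(h["key"]["year"]), h["key"]["in_ia"], h["doc_count"]) for h in buckets]
    vals = sorted(vals)
    assert vals == [(2019, False, 40), (2019, True, 60), (2020, True, 75)]
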
@@ -580,7 +611,7 @@ def get_elastic_preservation_by_year(query) -> List[dict]:
{year (int), bright (int), dark (int), shadows_only (int), none (int)}
"""
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
if query.q not in [None, "*"]:
search = search.query(
"query_string",
@@ -607,41 +638,47 @@ def get_elastic_preservation_by_year(query) -> List[dict]:
)
search.aggs.bucket(
- 'year_preservation',
- 'composite',
+ "year_preservation",
+ "composite",
size=1500,
sources=[
- {"year": {
- "histogram": {
- "field": "release_year",
- "interval": 1,
- },
- }},
- {"preservation": {
- "terms": {
- "field": "preservation",
- },
- }},
+ {
+ "year": {
+ "histogram": {
+ "field": "release_year",
+ "interval": 1,
+ },
+ }
+ },
+ {
+ "preservation": {
+ "terms": {
+ "field": "preservation",
+ },
+ }
+ },
],
)
search = search[:0]
- search = search.params(request_cache='true')
+ search = search.params(request_cache="true")
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
buckets = resp.aggregations.year_preservation.buckets
- year_nums = set([int(h['key']['year']) for h in buckets])
+ year_nums = set([int(h["key"]["year"]) for h in buckets])
year_dicts = dict()
if year_nums:
- for num in range(min(year_nums), max(year_nums)+1):
+ for num in range(min(year_nums), max(year_nums) + 1):
year_dicts[num] = dict(year=num, bright=0, dark=0, shadows_only=0, none=0)
for row in buckets:
- year_dicts[int(row['key']['year'])][row['key']['preservation']] = int(row['doc_count'])
- if app.config['FATCAT_MERGE_SHADOW_PRESERVATION']:
+ year_dicts[int(row["key"]["year"])][row["key"]["preservation"]] = int(
+ row["doc_count"]
+ )
+ if app.config["FATCAT_MERGE_SHADOW_PRESERVATION"]:
for k in year_dicts.keys():
- year_dicts[k]['none'] += year_dicts[k]['shadows_only']
- year_dicts[k]['shadows_only'] = 0
- return sorted(year_dicts.values(), key=lambda x: x['year'])
+ year_dicts[k]["none"] += year_dicts[k]["shadows_only"]
+ year_dicts[k]["shadows_only"] = 0
+ return sorted(year_dicts.values(), key=lambda x: x["year"])
def get_elastic_preservation_by_date(query) -> List[dict]:
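
The year pivot above, like its date/volume/type siblings below, zero-fills every year between the minimum and maximum so downstream charts get contiguous series. The same logic in isolation (made-up buckets again):

    buckets = [
        {"key": {"year": 2018.0, "preservation": "bright"}, "doc_count": 12},
        {"key": {"year": 2020.0, "preservation": "dark"}, "doc_count": 3},
    ]

    year_nums = set(int(h["key"]["year"]) for h in buckets)
    year_dicts = {
        num: dict(year=num, bright=0, dark=0, shadows_only=0, none=0)
        for num in range(min(year_nums), max(year_nums) + 1)
    }
    for row in buckets:
        year_dicts[int(row["key"]["year"])][row["key"]["preservation"]] = int(row["doc_count"])

    # 2019 is zero-filled even though it had no buckets
    assert year_dicts[2019] == dict(year=2019, bright=0, dark=0, shadows_only=0, none=0)
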
@@ -656,7 +693,7 @@ def get_elastic_preservation_by_date(query) -> List[dict]:
{date (str), bright (int), dark (int), shadows_only (int), none (int)}
"""
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
if query.q not in [None, "*"]:
search = search.query(
"query_string",
@@ -678,32 +715,37 @@ def get_elastic_preservation_by_date(query) -> List[dict]:
start_date = date_today - datetime.timedelta(days=60)
end_date = date_today + datetime.timedelta(days=1)
search = search.filter(
- "range", release_date=dict(
+ "range",
+ release_date=dict(
gte=str(start_date),
lte=str(end_date),
- )
+ ),
)
search.aggs.bucket(
- 'date_preservation',
- 'composite',
+ "date_preservation",
+ "composite",
size=1500,
sources=[
- {"date": {
- "histogram": {
- "field": "release_date",
- "interval": 1,
- },
- }},
- {"preservation": {
- "terms": {
- "field": "preservation",
- },
- }},
+ {
+ "date": {
+ "histogram": {
+ "field": "release_date",
+ "interval": 1,
+ },
+ }
+ },
+ {
+ "preservation": {
+ "terms": {
+ "field": "preservation",
+ },
+ }
+ },
],
)
search = search[:0]
- search = search.params(request_cache='true')
+ search = search.params(request_cache="true")
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
@@ -711,15 +753,18 @@ def get_elastic_preservation_by_date(query) -> List[dict]:
date_dicts = dict()
this_date = start_date
while this_date <= end_date:
- date_dicts[str(this_date)] = dict(date=str(this_date), bright=0, dark=0, shadows_only=0, none=0)
+ date_dicts[str(this_date)] = dict(
+ date=str(this_date), bright=0, dark=0, shadows_only=0, none=0
+ )
this_date = this_date + datetime.timedelta(days=1)
for row in buckets:
- date_dicts[row['key']['date'][0:10]][row['key']['preservation']] = int(row['doc_count'])
- if app.config['FATCAT_MERGE_SHADOW_PRESERVATION']:
+ date_dicts[row["key"]["date"][0:10]][row["key"]["preservation"]] = int(row["doc_count"])
+ if app.config["FATCAT_MERGE_SHADOW_PRESERVATION"]:
for k in date_dicts.keys():
- date_dicts[k]['none'] += date_dicts[k]['shadows_only']
- date_dicts[k]['shadows_only'] = 0
- return sorted(date_dicts.values(), key=lambda x: x['date'])
+ date_dicts[k]["none"] += date_dicts[k]["shadows_only"]
+ date_dicts[k]["shadows_only"] = 0
+ return sorted(date_dicts.values(), key=lambda x: x["date"])
+
def get_elastic_container_preservation_by_volume(container_id: str) -> List[dict]:
"""
@@ -733,52 +778,64 @@ def get_elastic_container_preservation_by_volume(container_id: str) -> List[dict
{year (int), bright (int), dark (int), shadows_only (int), none (int)}
"""
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
search = search.query(
- 'bool',
+ "bool",
filter=[
- Q("bool", must=[
- Q("match", container_id=container_id),
- Q("exists", field="volume"),
- ]),
+ Q(
+ "bool",
+ must=[
+ Q("match", container_id=container_id),
+ Q("exists", field="volume"),
+ ],
+ ),
],
)
search.aggs.bucket(
- 'volume_preservation',
- 'composite',
+ "volume_preservation",
+ "composite",
size=1500,
sources=[
- {"volume": {
- "terms": {
- "field": "volume",
- },
- }},
- {"preservation": {
- "terms": {
- "field": "preservation",
- },
- }},
+ {
+ "volume": {
+ "terms": {
+ "field": "volume",
+ },
+ }
+ },
+ {
+ "preservation": {
+ "terms": {
+ "field": "preservation",
+ },
+ }
+ },
],
)
search = search[:0]
- search = search.params(request_cache='true')
+ search = search.params(request_cache="true")
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
buckets = resp.aggregations.volume_preservation.buckets
- volume_nums = set([int(h['key']['volume']) for h in buckets if h['key']['volume'].isdigit()])
+ volume_nums = set(
+ [int(h["key"]["volume"]) for h in buckets if h["key"]["volume"].isdigit()]
+ )
volume_dicts = dict()
if volume_nums:
- for num in range(min(volume_nums), max(volume_nums)+1):
+ for num in range(min(volume_nums), max(volume_nums) + 1):
volume_dicts[num] = dict(volume=num, bright=0, dark=0, shadows_only=0, none=0)
for row in buckets:
- if row['key']['volume'].isdigit():
- volume_dicts[int(row['key']['volume'])][row['key']['preservation']] = int(row['doc_count'])
- if app.config['FATCAT_MERGE_SHADOW_PRESERVATION']:
+ if row["key"]["volume"].isdigit():
+ volume_dicts[int(row["key"]["volume"])][row["key"]["preservation"]] = int(
+ row["doc_count"]
+ )
+ if app.config["FATCAT_MERGE_SHADOW_PRESERVATION"]:
for k in volume_dicts.keys():
- volume_dicts[k]['none'] += volume_dicts[k]['shadows_only']
- volume_dicts[k]['shadows_only'] = 0
- return sorted(volume_dicts.values(), key=lambda x: x['volume'])
+ volume_dicts[k]["none"] += volume_dicts[k]["shadows_only"]
+ volume_dicts[k]["shadows_only"] = 0
+ return sorted(volume_dicts.values(), key=lambda x: x["volume"])
+
def get_elastic_preservation_by_type(query: ReleaseQuery) -> List[dict]:
"""
@@ -789,7 +846,7 @@ def get_elastic_preservation_by_type(query: ReleaseQuery) -> List[dict]:
{year (int), bright (int), dark (int), shadows_only (int), none (int)}
"""
- search = Search(using=app.es_client, index=app.config['ELASTICSEARCH_RELEASE_INDEX'])
+ search = Search(using=app.es_client, index=app.config["ELASTICSEARCH_RELEASE_INDEX"])
if query.q not in [None, "*"]:
search = search.query(
"query_string",
@@ -804,11 +861,14 @@ def get_elastic_preservation_by_type(query: ReleaseQuery) -> List[dict]:
)
if query.container_id:
search = search.query(
- 'bool',
+ "bool",
filter=[
- Q("bool", must=[
- Q("match", container_id=query.container_id),
- ]),
+ Q(
+ "bool",
+ must=[
+ Q("match", container_id=query.container_id),
+ ],
+ ),
],
)
if query.recent:
@@ -817,39 +877,45 @@ def get_elastic_preservation_by_type(query: ReleaseQuery) -> List[dict]:
end_date = str(date_today + datetime.timedelta(days=1))
search = search.filter("range", release_date=dict(gte=start_date, lte=end_date))
search.aggs.bucket(
- 'type_preservation',
- 'composite',
+ "type_preservation",
+ "composite",
size=1500,
sources=[
- {"release_type": {
- "terms": {
- "field": "release_type",
- },
- }},
- {"preservation": {
- "terms": {
- "field": "preservation",
- },
- }},
+ {
+ "release_type": {
+ "terms": {
+ "field": "release_type",
+ },
+ }
+ },
+ {
+ "preservation": {
+ "terms": {
+ "field": "preservation",
+ },
+ }
+ },
],
)
search = search[:0]
- search = search.params(request_cache='true')
+ search = search.params(request_cache="true")
search = search.params(track_total_hits=True)
resp = wrap_es_execution(search)
buckets = resp.aggregations.type_preservation.buckets
- type_set = set([h['key']['release_type'] for h in buckets])
+ type_set = set([h["key"]["release_type"] for h in buckets])
type_dicts = dict()
for k in type_set:
type_dicts[k] = dict(release_type=k, bright=0, dark=0, shadows_only=0, none=0, total=0)
for row in buckets:
- type_dicts[row['key']['release_type']][row['key']['preservation']] = int(row['doc_count'])
+ type_dicts[row["key"]["release_type"]][row["key"]["preservation"]] = int(
+ row["doc_count"]
+ )
for k in type_set:
- for p in ('bright', 'dark', 'shadows_only', 'none'):
- type_dicts[k]['total'] += type_dicts[k][p]
- if app.config['FATCAT_MERGE_SHADOW_PRESERVATION']:
+ for p in ("bright", "dark", "shadows_only", "none"):
+ type_dicts[k]["total"] += type_dicts[k][p]
+ if app.config["FATCAT_MERGE_SHADOW_PRESERVATION"]:
for k in type_set:
- type_dicts[k]['none'] += type_dicts[k]['shadows_only']
- type_dicts[k]['shadows_only'] = 0
- return sorted(type_dicts.values(), key=lambda x: x['total'], reverse=True)
+ type_dicts[k]["none"] += type_dicts[k]["shadows_only"]
+ type_dicts[k]["shadows_only"] = 0
+ return sorted(type_dicts.values(), key=lambda x: x["total"], reverse=True)
diff --git a/python/fatcat_web/web_config.py b/python/fatcat_web/web_config.py
index c15fefa4..229c2761 100644
--- a/python/fatcat_web/web_config.py
+++ b/python/fatcat_web/web_config.py
@@ -1,4 +1,3 @@
-
"""
Default configuration for fatcat web interface (Flask application).
@@ -16,26 +15,43 @@ import raven
basedir = os.path.abspath(os.path.dirname(__file__))
+
class Config(object):
- GIT_REVISION = subprocess.check_output(["git", "describe", "--tags", "--long", "--always"]).strip().decode('utf-8')
+ GIT_REVISION = (
+ subprocess.check_output(["git", "describe", "--tags", "--long", "--always"])
+ .strip()
+ .decode("utf-8")
+ )
# This is, effectively, the QA/PROD flag
FATCAT_DOMAIN = os.environ.get("FATCAT_DOMAIN", default="dev.fatcat.wiki")
FATCAT_API_AUTH_TOKEN = os.environ.get("FATCAT_API_AUTH_TOKEN", default=None)
- FATCAT_API_HOST = os.environ.get("FATCAT_API_HOST", default=f"https://api.{FATCAT_DOMAIN}/v0")
+ FATCAT_API_HOST = os.environ.get(
+ "FATCAT_API_HOST", default=f"https://api.{FATCAT_DOMAIN}/v0"
+ )
public_host_default = f"https://api.{FATCAT_DOMAIN}/v0"
if FATCAT_DOMAIN == "dev.fatcat.wiki":
public_host_default = FATCAT_API_HOST
- FATCAT_PUBLIC_API_HOST = os.environ.get("FATCAT_PUBLIC_API_HOST", default=public_host_default)
+ FATCAT_PUBLIC_API_HOST = os.environ.get(
+ "FATCAT_PUBLIC_API_HOST", default=public_host_default
+ )
# can set this to https://search.fatcat.wiki for some experimentation
- ELASTICSEARCH_BACKEND = os.environ.get("ELASTICSEARCH_BACKEND", default="http://localhost:9200")
- ELASTICSEARCH_RELEASE_INDEX = os.environ.get("ELASTICSEARCH_RELEASE_INDEX", default="fatcat_release")
- ELASTICSEARCH_CONTAINER_INDEX = os.environ.get("ELASTICSEARCH_CONTAINER_INDEX", default="fatcat_container")
+ ELASTICSEARCH_BACKEND = os.environ.get(
+ "ELASTICSEARCH_BACKEND", default="http://localhost:9200"
+ )
+ ELASTICSEARCH_RELEASE_INDEX = os.environ.get(
+ "ELASTICSEARCH_RELEASE_INDEX", default="fatcat_release"
+ )
+ ELASTICSEARCH_CONTAINER_INDEX = os.environ.get(
+ "ELASTICSEARCH_CONTAINER_INDEX", default="fatcat_container"
+ )
# for save-paper-now. set to None if not configured, so we don't display forms/links
KAFKA_PIXY_ENDPOINT = os.environ.get("KAFKA_PIXY_ENDPOINT", default=None) or None
- KAFKA_SAVEPAPERNOW_TOPIC = os.environ.get("KAFKA_SAVEPAPERNOW_TOPIC", default="sandcrawler-dev.ingest-file-requests-priority")
+ KAFKA_SAVEPAPERNOW_TOPIC = os.environ.get(
+ "KAFKA_SAVEPAPERNOW_TOPIC", default="sandcrawler-dev.ingest-file-requests-priority"
+ )
# for flask things, like session cookies
FLASK_SECRET_KEY = os.environ.get("FLASK_SECRET_KEY", default=None)
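
All of these settings follow one pattern: read an environment variable, fall back to a default. A small illustration (hypothetical variable name, not part of this config) of a pitfall worth keeping in mind when setting flag-style values, since os.environ.get returns a string whenever the variable is set:

    import os

    os.environ["EXAMPLE_FLAG"] = "false"  # hypothetical variable, for illustration only

    value = os.environ.get("EXAMPLE_FLAG", default=False)
    assert value == "false"
    assert bool(value) is True  # any non-empty string is truthy, even "false"

    # a common defensive parse, if strict boolean behavior is wanted:
    flag = os.environ.get("EXAMPLE_FLAG", "").lower() in ("1", "true", "yes")
    assert flag is False
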
@@ -59,11 +75,17 @@ class Config(object):
# analytics; used in production
ENABLE_GOATCOUNTER = bool(os.environ.get("ENABLE_GOATCOUNTER", default=False))
- GOATCOUNTER_ENDPOINT = os.environ.get("GOATCOUNTER_ENDPOINT", default="https://goatcounter.fatcat.wiki/count")
- GOATCOUNTER_SCRIPT_URL = os.environ.get("GOATCOUNTER_SCRIPT_URL", default="https://goatcounter.fatcat.wiki/count.js")
+ GOATCOUNTER_ENDPOINT = os.environ.get(
+ "GOATCOUNTER_ENDPOINT", default="https://goatcounter.fatcat.wiki/count"
+ )
+ GOATCOUNTER_SCRIPT_URL = os.environ.get(
+ "GOATCOUNTER_SCRIPT_URL", default="https://goatcounter.fatcat.wiki/count.js"
+ )
# controls granularity of "shadow_only" preservation category
- FATCAT_MERGE_SHADOW_PRESERVATION = os.environ.get("FATCAT_MERGE_SHADOW_PRESERVATION", default=False)
+ FATCAT_MERGE_SHADOW_PRESERVATION = os.environ.get(
+ "FATCAT_MERGE_SHADOW_PRESERVATION", default=False
+ )
# CSRF on by default, but only for WTF forms (not, eg, search, lookups, GET
# forms)
@@ -75,27 +97,27 @@ class Config(object):
if FATCAT_DOMAIN == "dev.fatcat.wiki":
# "Even more verbose" debug options
- #SQLALCHEMY_ECHO = True
- #DEBUG = True
+ # SQLALCHEMY_ECHO = True
+ # DEBUG = True
pass
else:
# protect cookies (which include API tokens)
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SECURE = True
- SESSION_COOKIE_SAMESITE = 'Lax'
- PERMANENT_SESSION_LIFETIME = 2678400 # 31 days, in seconds
+ SESSION_COOKIE_SAMESITE = "Lax"
+ PERMANENT_SESSION_LIFETIME = 2678400 # 31 days, in seconds
try:
- GIT_RELEASE = raven.fetch_git_sha('..')
+ GIT_RELEASE = raven.fetch_git_sha("..")
except Exception as e:
print("WARNING: couldn't set sentry git release automatically: " + str(e))
GIT_RELEASE = None
SENTRY_CONFIG = {
#'include_paths': ['fatcat_web', 'fatcat_openapi_client', 'fatcat_tools'],
- 'enable-threads': True, # for uWSGI
- 'release': GIT_RELEASE,
- 'tags': {
- 'fatcat_domain': FATCAT_DOMAIN,
+ "enable-threads": True, # for uWSGI
+ "release": GIT_RELEASE,
+ "tags": {
+ "fatcat_domain": FATCAT_DOMAIN,
},
}
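
Both git-derived values in this class (GIT_REVISION via subprocess, GIT_RELEASE via raven.fetch_git_sha) are computed at import time and can fail outside a git checkout. A hedged sketch of the same idea as a small helper with a graceful fallback (the helper name is hypothetical; the class above inlines the calls):

    import subprocess


    def describe_git_revision(default: str = "unknown") -> str:
        """Best-effort `git describe`, falling back when git or the repo is unavailable."""
        try:
            return (
                subprocess.check_output(
                    ["git", "describe", "--tags", "--long", "--always"],
                    stderr=subprocess.DEVNULL,
                )
                .strip()
                .decode("utf-8")
            )
        except (OSError, subprocess.CalledProcessError):
            return default


    print(describe_git_revision())
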