aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorBryan Newbold <bnewbold@robocracy.org>2018-10-02 18:27:07 -0700
committerBryan Newbold <bnewbold@robocracy.org>2018-10-12 15:33:47 -0400
commitc254c25822496ce44bd2d203588df9d104c3cb13 (patch)
tree6d44b6f3e19187112d41c7f5a60412b00f12e971
parent5be576fe3df5ad7a8c114314f9d46a7d428882a6 (diff)
downloadfatcat-c254c25822496ce44bd2d203588df9d104c3cb13.tar.gz
fatcat-c254c25822496ce44bd2d203588df9d104c3cb13.zip
filter bad and duplicate URLs from webface
Will need to do this properly in importers/backend.
-rw-r--r--python/fatcat/routes.py7
1 file changed, 7 insertions, 0 deletions
diff --git a/python/fatcat/routes.py b/python/fatcat/routes.py
index 801397e8..caf70ec3 100644
--- a/python/fatcat/routes.py
+++ b/python/fatcat/routes.py
@@ -214,6 +214,13 @@ def release_view(ident):
abort(ae.status)
authors = [c for c in entity.contribs if c.role in ('author', None)]
authors = sorted(authors, key=lambda c: c.index)
+ for fe in files:
+ # crudely filter out exact duplicates
+ kept = []
+ for u in fe.urls:
+ if not u in kept:
+ kept.append(u)
+ fe.urls = [u for u in kept if not '/web/None/' in u.url]
return render_template('release_view.html', release=entity,
authors=authors, files=files, container=container)