aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorBryan Newbold <bnewbold@robocracy.org>2018-10-02 18:27:07 -0700
committerBryan Newbold <bnewbold@robocracy.org>2018-10-02 18:27:09 -0700
commit0988735a4f0f37a75964e803a443793bf51dc2b2 (patch)
treef997b7fcd29ca695db7beda68b5d696c613eaf6a
parent0411aa0949140721a2d48d12fdee07165ee4ae35 (diff)
downloadfatcat-0988735a4f0f37a75964e803a443793bf51dc2b2.tar.gz
fatcat-0988735a4f0f37a75964e803a443793bf51dc2b2.zip
filter bad and duplicate URLs from webface
Will need to do this properly in importers/backend.
-rw-r--r--python/fatcat/routes.py7
1 file changed, 7 insertions, 0 deletions
diff --git a/python/fatcat/routes.py b/python/fatcat/routes.py
index b1458fb1..ad5faea6 100644
--- a/python/fatcat/routes.py
+++ b/python/fatcat/routes.py
@@ -214,6 +214,13 @@ def release_view(ident):
abort(ae.status)
authors = [c for c in entity.contribs if c.role in ('author', None)]
authors = sorted(authors, key=lambda c: c.index)
+ for fe in files:
+ # crudely filter out exact duplicates
+ kept = []
+ for u in fe.urls:
+ if not u in kept:
+ kept.append(u)
+ fe.urls = [u for u in kept if not '/web/None/' in u.url]
return render_template('release_view.html', release=entity,
authors=authors, files=files, container=container)