From 0988735a4f0f37a75964e803a443793bf51dc2b2 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Tue, 2 Oct 2018 18:27:07 -0700
Subject: filter bad and duplicate URLs from webface

Will need to do this properly in importers/backend.
---
 python/fatcat/routes.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/python/fatcat/routes.py b/python/fatcat/routes.py
index b1458fb1..ad5faea6 100644
--- a/python/fatcat/routes.py
+++ b/python/fatcat/routes.py
@@ -214,6 +214,13 @@ def release_view(ident):
         abort(ae.status)
     authors = [c for c in entity.contribs if c.role in ('author', None)]
     authors = sorted(authors, key=lambda c: c.index)
+    for fe in files:
+        # crudely filter out exact duplicates
+        kept = []
+        for u in fe.urls:
+            if not u in kept:
+                kept.append(u)
+        fe.urls = [u for u in kept if not '/web/None/' in u.url]
     return render_template('release_view.html', release=entity, authors=authors,
         files=files, container=container)
 
--
cgit v1.2.3
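
The commit message notes that this filtering belongs in the importers/backend rather
than the web view. A minimal sketch of what that backend-side cleanup might look
like, assuming file entities expose a .urls list of objects with .url and .rel
attributes as in the view code above (the helper name and placement are
illustrative, not part of fatcat's actual importer API):

    def clean_file_urls(urls):
        """Drop known-bad URLs and exact (url, rel) duplicates.

        Assumes each element has .url and .rel attributes, matching the
        objects iterated in release_view() above. Illustrative sketch only.
        """
        seen = set()
        kept = []
        for u in urls:
            # skip broken wayback URLs where the capture timestamp is None
            if '/web/None/' in u.url:
                continue
            key = (u.url, u.rel)
            if key not in seen:
                seen.add(key)
                kept.append(u)
        return kept

Deduplicating on the (url, rel) tuple avoids relying on object equality of the
API model instances, and running this once at import time would keep the web
view free of per-request filtering like the loop above.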