From 53e277895a71ac243d0f99155ae0a1658eecd84f Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Mon, 3 Dec 2018 21:26:06 -0800
Subject: remove textIcuSearch elastic filter

This was breaking, eg, DOI search with non-trivial characters. Because we
set the tokenizer on individual fields, shouldn't be necessary on the
whole query.
---
 python/fatcat/search.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/fatcat/search.py b/python/fatcat/search.py
index b6826110..ac136cdd 100644
--- a/python/fatcat/search.py
+++ b/python/fatcat/search.py
@@ -18,7 +18,6 @@ def do_search(q, limit=50, fulltext_only=True):
         "query": {
             "query_string": {
                 "query": q,
-                "analyzer": "textIcuSearch",
                 "default_operator": "AND",
                 "analyze_wildcard": True,
                 "lenient": True,
-- 
cgit v1.2.3
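
For context, a minimal sketch (not the project's actual code) of what the search
request looks like after this change: the top-level "analyzer" key is gone from
the query_string clause, so terms such as DOIs are no longer re-analyzed at
query time and the per-field analyzers defined in the index mapping apply
instead. The endpoint URL, index name, and helper name below are assumptions
for illustration only.

import requests

def do_search_sketch(q, limit=50):
    # Query body mirroring the patched query_string clause, without "analyzer"
    body = {
        "query": {
            "query_string": {
                "query": q,
                "default_operator": "AND",
                "analyze_wildcard": True,
                "lenient": True,
            },
        },
        "size": limit,
    }
    # "fatcat" index and localhost endpoint are placeholders, not from the patch
    resp = requests.get("http://localhost:9200/fatcat/_search", json=body)
    resp.raise_for_status()
    return resp.json()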