From 48c633d03073bc9f1c86e457c31dc760984139a7 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Mon, 3 Dec 2018 21:26:06 -0800
Subject: remove textIcuSearch elastic filter

This was breaking, e.g., DOI search with non-trivial characters. Because we
set the tokenizer on individual fields, it shouldn't be necessary on the
whole query.
---
 python/fatcat_web/search.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/fatcat_web/search.py b/python/fatcat_web/search.py
index 4d45dcf1..471e522a 100644
--- a/python/fatcat_web/search.py
+++ b/python/fatcat_web/search.py
@@ -22,7 +22,6 @@ def do_search(q, limit=50, fulltext_only=True):
         "query": {
             "query_string": {
                 "query": q,
-                "analyzer": "textIcuSearch",
                 "default_operator": "AND",
                 "analyze_wildcard": True,
                 "lenient": True,
-- 
cgit v1.2.3
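
Editor's note: below is a small sketch (not part of the patch) of what the
query body in do_search() looks like after this change, assuming the dict
structure implied by the diff hunk. The helper name build_query is
hypothetical, and any keys outside the visible hunk are omitted.

    # Hypothetical helper showing the query body with the query-level
    # "analyzer" removed; Elasticsearch then falls back to the analyzers
    # configured per field in the index mapping, which is the behavior the
    # commit message relies on.
    def build_query(q):
        return {
            "query": {
                "query_string": {
                    "query": q,
                    # "analyzer": "textIcuSearch" removed by this commit
                    "default_operator": "AND",
                    "analyze_wildcard": True,
                    "lenient": True,
                },
            },
        }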