path: root/fatcat_scholar
author     Bryan Newbold <bnewbold@archive.org>  2020-08-06 10:37:25 -0700
committer  Bryan Newbold <bnewbold@archive.org>  2020-08-06 10:37:27 -0700
commit     7a5beede97cafede0d5168220f864acc3120e6bf (patch)
tree       fc839f9d6969bae8a81a8838c879c4029ddca6de /fatcat_scholar
parent     850235280576a6ff3786d7eb27121fa02caf94ec (diff)
search tweaks to be forwards-compatible with ES 7.x
When we fully commit to ES 7.x we should upgrade the client library correspondingly, and then we can remove these work-arounds. But for now we have one instance of ES 6.x and one of ES 7.x.
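For illustration only (not part of this commit): the work-around amounts to accepting both shapes of hits.total, since ES 6.x reports a plain integer while ES 7.x reports an object with "value" and "relation" fields. A minimal sketch of a hypothetical helper (the name total_hits_count is invented here); the patch below applies the same isinstance() check inline at the two places that read a total:

    def total_hits_count(total) -> int:
        """Normalize a hits.total value across Elasticsearch versions.

        ES 6.x responses carry a plain integer; ES 7.x responses carry an
        object/dict like {"value": 1234, "relation": "eq"}.
        """
        if isinstance(total, int):
            return total
        return int(total["value"])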
Diffstat (limited to 'fatcat_scholar')
-rw-r--r--  fatcat_scholar/search.py  12
1 file changed, 10 insertions, 2 deletions
diff --git a/fatcat_scholar/search.py b/fatcat_scholar/search.py
index b991583..a7505b6 100644
--- a/fatcat_scholar/search.py
+++ b/fatcat_scholar/search.py
@@ -231,6 +231,7 @@ def do_fulltext_search(
         # Avoid deep paging problem.
         offset = deep_page_limit
 
+    search = search.params(track_total_hits=True)
     search = search[offset : (offset + limit)]
 
     try:
@@ -260,7 +261,10 @@ def do_fulltext_search(
r["_collapsed"] = []
r["_collapsed_count"] = 0
if "inner_hits" in dir(h.meta):
- r["_collapsed_count"] = h.meta.inner_hits.more_pages.hits.total - 1
+ if isinstance(h.meta.inner_hits.more_pages.hits.total, int):
+ r["_collapsed_count"] = h.meta.inner_hits.more_pages.hits.total - 1
+ else:
+ r["_collapsed_count"] = h.meta.inner_hits.more_pages.hits.total['value'] - 1
for k in h.meta.inner_hits.more_pages:
if k["key"] != r["key"]:
r["_collapsed"].append(k)
@@ -277,7 +281,11 @@ def do_fulltext_search(
if type(h["collapse_key"]) == list:
h["collapse_key"] = h["collapse_key"][0]
- count_found: int = int(resp.hits.total)
+ count_found: int = 0
+ if isinstance(resp.hits.total, int):
+ count_found = int(resp.hits.total)
+ else:
+ count_found = int(resp.hits.total['value'])
count_returned = len(results)
# if we grouped to less than a page of hits, update returned count
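A usage sketch, separate from the diff above and with invented names throughout: assuming the elasticsearch_dsl client library that fatcat_scholar/search.py builds on, the two changes in this patch (requesting exact totals and reading them in a version-agnostic way) combine roughly like this:

    from elasticsearch_dsl import Search

    def fetch_page(client, index, query, offset, limit):
        # hypothetical example, loosely mirroring do_fulltext_search()
        search = Search(using=client, index=index).query("query_string", query=query)
        # request an exact hit count from ES 7.x, which otherwise caps the
        # reported hits.total at 10,000 by default (mirrors the patch above)
        search = search.params(track_total_hits=True)
        search = search[offset : (offset + limit)]
        resp = search.execute()
        total = resp.hits.total
        # ES 6.x: plain int; ES 7.x: object with a "value" field
        count_found = total if isinstance(total, int) else int(total["value"])
        return resp, count_found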