summaryrefslogtreecommitdiffstats
path: root/python/fatcat_web/search.py
blob: a301fcb5a034789ab07ed68cc9dd18998490c03d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120

import requests
from flask import abort, flash
from fatcat_web import app

"""
Helpers for doing elasticsearch queries (used in the web interface; not part of
the formal API)

TODO: ELASTICSEARCH_*_INDEX should probably be factored out and just hard-coded
"""

def do_release_search(q, limit=50, fulltext_only=True):
    """Run a release search against the elasticsearch release index.

    Args:
        q: query string, in elasticsearch query_string syntax
        limit: maximum number of hits to return (capped at 100)
        fulltext_only: if True, restrict to releases available on the web
            (appends "in_web:true" to the query)

    Returns:
        dict with keys "query" (the effective query), "count_returned",
        "count_found", and "results" (list of ES document dicts).

    Aborts the flask request (HTTP error) if elasticsearch returns a
    non-200 status; a 400 additionally flashes a parse-error message.
    """
    # Sanity check: never request more than 100 hits
    limit = min(limit, 100)

    if fulltext_only:
        q += " in_web:true"

    search_request = {
        "query": {
            "query_string": {
                "query": q,
                "default_operator": "AND",
                "analyze_wildcard": True,
                "lenient": True,
                "fields": ["title^5", "contrib_names^2", "container_title"],
            },
        },
        "size": int(limit),
    }

    resp = requests.get("%s/%s/_search" %
            (app.config['ELASTICSEARCH_BACKEND'], app.config['ELASTICSEARCH_RELEASE_INDEX']),
        json=search_request)

    if resp.status_code == 400:
        print("elasticsearch 400: " + str(resp.content))
        flash("Search query failed to parse; you might need to use quotes.<p><code>{}</code>".format(resp.content))
        abort(resp.status_code)
    elif resp.status_code != 200:
        print("elasticsearch non-200 status code: " + str(resp.status_code))
        print(resp.content)
        abort(resp.status_code)

    content = resp.json()
    results = [h['_source'] for h in content['hits']['hits']]
    for h in results:
        # Ensure 'contrib_names' is always a list: the field may be absent
        # (previously a KeyError) or indexed as a bare string.
        contrib_names = h.get('contrib_names')
        if contrib_names is None:
            contrib_names = []
        elif not isinstance(contrib_names, list):
            contrib_names = [contrib_names]
        # Handle surrogate strings that elasticsearch returns sometimes,
        # probably due to mangled data processing in some pipeline.
        # "Crimes against Unicode"; production workaround
        for key in h:
            if isinstance(h[key], str):
                h[key] = h[key].encode('utf8', 'ignore').decode('utf8')
        h['contrib_names'] = [name.encode('utf8', 'ignore').decode('utf8')
                              for name in contrib_names]

    found = content['hits']['total']
    return {"query": {"q": q},
            "count_returned": len(results),
            "count_found": found,
            "results": results}

def do_container_search(q, limit=50):
    """Query the container (journal) search index in elasticsearch.

    Searches over container name and publisher fields. Returns a dict with
    the query, counts, and matching documents. Aborts the flask request on
    elasticsearch errors (flashing a message for query parse failures).
    """
    # Sanity check: never request more than 100 hits
    limit = min(limit, 100)

    query_body = {
        "query": {
            "query_string": {
                "query": q,
                "default_operator": "AND",
                "analyze_wildcard": True,
                "lenient": True,
                "fields": ["name^5", "publisher"],
            },
        },
        "size": int(limit),
    }

    url = "{}/{}/_search".format(
        app.config['ELASTICSEARCH_BACKEND'],
        app.config['ELASTICSEARCH_CONTAINER_INDEX'])
    resp = requests.get(url, json=query_body)

    if resp.status_code == 400:
        print("elasticsearch 400: " + str(resp.content))
        flash("Search query failed to parse; you might need to use quotes.<p><code>{}</code>".format(resp.content))
        abort(resp.status_code)
    elif resp.status_code != 200:
        print("elasticsearch non-200 status code: " + str(resp.status_code))
        print(resp.content)
        abort(resp.status_code)

    content = resp.json()
    docs = [hit['_source'] for hit in content['hits']['hits']]
    for doc in docs:
        # Handle surrogate strings that elasticsearch returns sometimes,
        # probably due to mangled data processing in some pipeline.
        # "Crimes against Unicode"; production workaround
        for key, value in doc.items():
            if isinstance(value, str):
                doc[key] = value.encode('utf8', 'ignore').decode('utf8')

    return {"query": {"q": q},
            "count_returned": len(docs),
            "count_found": content['hits']['total'],
            "results": docs}