path: root/src/lib.rs

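//! Request-filtering library for a small Elasticsearch proxy: incoming HTTP
//! requests are checked against a `ProxyConfig` and, when allowed, rewritten
//! into requests for the configured upstream server.
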
use serde::Deserialize;
use hyper::{Request, Body, Method, Uri};
use serde_json::json;

pub mod parse;

use parse::UrlQueryParams;

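/// Proxy runtime configuration. Deserialized with serde; the concrete config
/// format (e.g. TOML or JSON on disk) is left to the caller.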
#[derive(Default, Deserialize, Debug, Clone)]
pub struct ProxyConfig {
    pub bind_addr: Option<String>,      // e.g. 127.0.0.1:9292
    pub upstream_addr: Option<String>,  // e.g. 127.0.0.1:9200
    pub allow_all_indices: Option<bool>,
    pub index: Vec<IndexConfig>,
}

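/// A single upstream index which the proxy is allowed to expose.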
#[derive(Deserialize, Debug, Clone)]
pub struct IndexConfig {
    pub name: String,
}

impl ProxyConfig {

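    /// Returns true if requests against `name` may be proxied: either all
    /// indices are allowed, or the index is listed explicitly in the config.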
    pub fn allow_index(&self, name: &str) -> bool {
        if self.allow_all_indices == Some(true) {
            return true
        }
        for index in &self.index {
            if index.name == name {
                return true
            }
        }
        false
    }
}

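/// Errors raised while validating or rewriting an incoming request.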
#[derive(Debug)]
pub enum ProxyError {
    Malformed(String),
    ParseError(String),
    NotAllowed(String),
    NotSupported(String),
    NotFound(String),
}

impl ProxyError {

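    /// Renders this error as a JSON body loosely modeled on Elasticsearch's
    /// error response format.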
    pub fn to_json(&self) -> serde_json::Value {
        json!({
            "error": {
                "reason": format!("{:?}", self),
                "type": "unknown",
            },
            "status": 500,
        })
    }
}

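/// Validates an incoming request against the proxy configuration and, if it
/// is allowed, rewrites it into a request for the upstream server.
///
/// Only a limited set of endpoints is supported: the root path, `_search`,
/// `_count`, `_doc`, `_source`, and `_search/scroll`; anything else is
/// rejected with `ProxyError::NotSupported`.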
pub async fn filter_request(req: Request<Body>, config: &ProxyConfig) -> Result<Request<Body>, ProxyError> {
    let (parts, body) = req.into_parts();

    // split the path into segments; only paths with up to three segments are supported
    let mut req_path = parts.uri.path();
    if req_path.starts_with("/") {
        req_path = &req_path[1..];
    }
    let path_chunks: Vec<&str> = req_path.split("/").collect();
    if path_chunks.len() > 3 {
        return Err(ProxyError::NotSupported("only request paths with up to three segments allowed".to_string()))
    }

    let params: UrlQueryParams = serde_urlencoded::from_str(parts.uri.query().unwrap_or(""))
        .map_err(|e| ProxyError::ParseError(e.to_string()))?;

    // route the request based on HTTP method and path segments
    let body = match (&parts.method, path_chunks.as_slice()) {
        (&Method::GET, [""]) | (&Method::HEAD, [""]) => {
            Body::empty()
        },
        (&Method::POST, ["_search", "scroll"]) | (&Method::DELETE, ["_search", "scroll"]) => {
            let whole_body = hyper::body::to_bytes(body)
                .await
                .map_err(|e| ProxyError::Malformed(e.to_string()))?;
            filter_scroll_request(&params, &whole_body, config)?
        },
        (&Method::GET, [index, "_search"]) | (&Method::POST, [index, "_search"]) => {
            let whole_body = hyper::body::to_bytes(body)
                .await
                .map_err(|e| ProxyError::Malformed(e.to_string()))?;
            filter_search_request(index, &params, &whole_body, config)?
        },
        (&Method::GET, [index, "_count"]) | (&Method::POST, [index, "_count"]) => {
            let whole_body = hyper::body::to_bytes(body)
                .await
                .map_err(|e| ProxyError::Malformed(e.to_string()))?;
            filter_search_request(index, &params, &whole_body, config)?
        },
        (&Method::GET, [index, "_doc", key]) | (&Method::GET, [index, "_source", key]) => {
            filter_read_request(index, path_chunks[1], key, &params, config)?
        },
        _ => Err(ProxyError::NotSupported("unknown endpoint".to_string()))?,
    };

    let upstream_query = serde_urlencoded::to_string(params).expect("re-encoding URL parameters");
    let upstream_query_and_params = if !upstream_query.is_empty() {
        format!("{}?{}", req_path, upstream_query)
    } else {
        req_path.to_string()
    };
    let upstream_uri = Uri::builder()
        .scheme("http")
        .authority(config.upstream_addr.as_deref().unwrap_or("localhost:9200"))
        .path_and_query(upstream_query_and_params.as_str())
        .build()
        .expect("constructing upstream request URI");

    let upstream_req = Request::builder()
        .uri(upstream_uri)
        .method(&parts.method)
        .body(body)
        .expect("constructing upstream request");

    Ok(upstream_req)
}
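
/// Validates the body of a `_search/scroll` request (or scroll deletion) by
/// parsing it into the typed `parse::ScrollBody` representation and
/// re-serializing it.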
pub fn filter_scroll_request(_params: &UrlQueryParams, body: &[u8], _config: &ProxyConfig) -> Result<Body, ProxyError> {
    // XXX
    // TODO: check that scroll_id is not "_all"
    if !body.is_empty() {
        let parsed: parse::ScrollBody = serde_json::from_slice(body)
            .map_err(|e| ProxyError::ParseError(e.to_string()))?;
        Ok(Body::from(serde_json::to_string(&parsed).unwrap()))
    } else {
        Ok(Body::empty())
    }
}

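/// Validates a single-document read (`_doc` or `_source`): the target index
/// must be allowed by the configuration.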
pub fn filter_read_request(index: &str, _endpoint: &str, _key: &str, _params: &UrlQueryParams, config: &ProxyConfig) -> Result<Body, ProxyError> {
    if !config.allow_index(index) {
        return Err(ProxyError::NotAllowed(format!("index doesn't exist or isn't proxied: {}", index)));
    }
    // XXX: no body needed?
    Ok(Body::empty())
}

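/// Validates a `_search` or `_count` request: the target index must be
/// allowed, and any body is round-tripped through the typed
/// `parse::SearchBody` representation.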
pub fn filter_search_request(index: &str, _params: &UrlQueryParams, body: &[u8], config: &ProxyConfig) -> Result<Body, ProxyError> {
    if !config.allow_index(index) {
        return Err(ProxyError::NotAllowed(format!("index doesn't exist or isn't proxied: {}", index)));
    }
    // XXX: more checks
    if !body.is_empty() {
        let parsed: parse::SearchBody = serde_json::from_slice(body)
            .map_err(|e| ProxyError::ParseError(e.to_string()))?;
        Ok(Body::from(serde_json::to_string(&parsed).unwrap()))
    } else {
        Ok(Body::empty())
    }
}
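
// A minimal sketch of unit tests for the index allow-list logic; the test
// names and example index names ("public", "private") are illustrative and
// not part of the original crate.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn allow_index_respects_explicit_list() {
        let config = ProxyConfig {
            index: vec![IndexConfig { name: "public".to_string() }],
            ..Default::default()
        };
        assert!(config.allow_index("public"));
        assert!(!config.allow_index("private"));
    }

    #[test]
    fn allow_index_respects_allow_all_flag() {
        let config = ProxyConfig {
            allow_all_indices: Some(true),
            ..Default::default()
        };
        assert!(config.allow_index("anything"));
    }
}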