author | Bryan Newbold <bnewbold@archive.org> | 2020-08-26 18:21:52 -0700 |
---|---|---|
committer | Bryan Newbold <bnewbold@archive.org> | 2020-08-26 18:21:52 -0700 |
commit | 13220ca46bbc9fd0001c1c942c3b7238e0f596ee (patch) | |
tree | f96acce59fa7d5b93ca5a8bcbd78132f29f2de26 /src/lib.rs | |
parent | c848647acf7aa967da4f2b9ec89843f208fb4e24 (diff) | |
download | es-public-proxy-13220ca46bbc9fd0001c1c942c3b7238e0f596ee.tar.gz es-public-proxy-13220ca46bbc9fd0001c1c942c3b7238e0f596ee.zip |
refactor errors; fix header names; fmt
Diffstat (limited to 'src/lib.rs')
-rw-r--r-- | src/lib.rs | 72 |
1 files changed, 48 insertions, 24 deletions
@@ -1,4 +1,4 @@
-use hyper::{Body, Method, Request, Uri};
+use hyper::{Body, Method, Request, StatusCode, Uri};
 use serde::Deserialize;
 use serde_json::json;
 
@@ -36,21 +36,42 @@ impl ProxyConfig {
 
 #[derive(Debug)]
 pub enum ProxyError {
-    Malformed(String),
+    HttpError(String),
     ParseError(String),
-    NotAllowed(String),
+    UnknownIndex(String),
     NotSupported(String),
-    NotFound(String),
 }
 
 impl ProxyError {
-    pub fn to_json(&self) -> serde_json::Value {
+    pub fn http_status_code(&self) -> StatusCode {
+        match self {
+            ProxyError::HttpError(_) => StatusCode::BAD_REQUEST,
+            ProxyError::ParseError(_) => StatusCode::BAD_REQUEST,
+            ProxyError::UnknownIndex(_) => StatusCode::NOT_FOUND,
+            ProxyError::NotSupported(_) => StatusCode::FORBIDDEN,
+        }
+    }
+
+    pub fn to_json_value(&self) -> serde_json::Value {
+        let (type_slug, reason) = match self {
+            ProxyError::HttpError(s) => ("http-error", s.clone()),
+            ProxyError::ParseError(s) => ("parse-error", s.clone()),
+            ProxyError::UnknownIndex(index) => (
+                "unknown-index",
+                format!(
+                    "index does not exists, or public access not allowed: {}",
+                    index
+                ),
+            ),
+            ProxyError::NotSupported(s) => ("not-supported", s.clone()),
+        };
+
         json!({
             "error": {
-                "reason": format!("{:?}", self),
-                "type": "unknown",
+                "reason": reason,
+                "type": type_slug,
             },
-            "status": 500,
+            "status": self.http_status_code().as_u16(),
         })
     }
 }
@@ -79,11 +100,13 @@ pub async fn filter_request(
     // this is sort of like a router
     let body = match (&parts.method, path_chunks.as_slice()) {
         (&Method::GET, [""]) | (&Method::HEAD, [""]) | (&Method::OPTIONS, [""]) => Body::empty(),
-        (&Method::HEAD, ["_search", "scroll"]) | (&Method::OPTIONS, ["_search", "scroll"]) => Body::empty(),
+        (&Method::HEAD, ["_search", "scroll"]) | (&Method::OPTIONS, ["_search", "scroll"]) => {
+            Body::empty()
+        }
         (&Method::POST, ["_search", "scroll"]) | (&Method::DELETE, ["_search", "scroll"]) => {
             let whole_body = hyper::body::to_bytes(body)
                 .await
-                .map_err(|e| ProxyError::Malformed(e.to_string()))?;
+                .map_err(|e| ProxyError::HttpError(e.to_string()))?;
             filter_scroll_request(&params, &whole_body, config)?
         }
         (&Method::HEAD, [index, "_search"]) | (&Method::OPTIONS, [index, "_search"]) => {
@@ -92,7 +115,7 @@ pub async fn filter_request(
         (&Method::GET, [index, "_search"]) | (&Method::POST, [index, "_search"]) => {
             let whole_body = hyper::body::to_bytes(body)
                 .await
-                .map_err(|e| ProxyError::Malformed(e.to_string()))?;
+                .map_err(|e| ProxyError::HttpError(e.to_string()))?;
             filter_search_request(index, &params, &whole_body, config)?
         }
         (&Method::HEAD, [index, "_count"]) | (&Method::OPTIONS, [index, "_count"]) => {
@@ -101,19 +124,26 @@ pub async fn filter_request(
         (&Method::GET, [index, "_count"]) | (&Method::POST, [index, "_count"]) => {
             let whole_body = hyper::body::to_bytes(body)
                 .await
-                .map_err(|e| ProxyError::Malformed(e.to_string()))?;
+                .map_err(|e| ProxyError::HttpError(e.to_string()))?;
             filter_search_request(index, &params, &whole_body, config)?
         }
-        (&Method::GET, [index, "_doc", _key]) | (&Method::GET, [index, "_source", _key]) | (&Method::HEAD, [index, "_doc", _key]) | (&Method::OPTIONS, [index, "_source", _key]) => {
+        (&Method::GET, [index, "_doc", _key])
+        | (&Method::GET, [index, "_source", _key])
+        | (&Method::HEAD, [index, "_doc", _key])
+        | (&Method::OPTIONS, [index, "_source", _key]) => {
             filter_read_request(index, path_chunks[1], &params, config)?
         }
-        (&Method::GET, [index, ""]) | (&Method::HEAD, [index, ""]) | (&Method::OPTIONS, [index, ""]) => {
+        (&Method::GET, [index, ""])
+        | (&Method::HEAD, [index, ""])
+        | (&Method::OPTIONS, [index, ""]) => {
            filter_read_request(index, path_chunks[1], &params, config)?
         }
-        (&Method::GET, [index, "_mapping"]) | (&Method::HEAD, [index, "_mapping"]) | (&Method::OPTIONS, [index, "_mapping"]) => {
+        (&Method::GET, [index, "_mapping"])
+        | (&Method::HEAD, [index, "_mapping"])
+        | (&Method::OPTIONS, [index, "_mapping"]) => {
            filter_read_request(index, path_chunks[1], &params, config)?
         }
-        _ => Err(ProxyError::NotSupported("unknown endpoint".to_string()))?,
+        _ => Err(ProxyError::NotSupported("unknown elasticsearch API endpoint".to_string()))?,
     };
 
     let upstream_query = serde_urlencoded::to_string(params).expect("re-encoding URL parameters");
@@ -186,10 +216,7 @@ pub fn filter_read_request(
     config: &ProxyConfig,
 ) -> Result<Body, ProxyError> {
     if !config.allow_index(index) {
-        return Err(ProxyError::NotAllowed(format!(
-            "index doesn't exist or isn't proxied: {}",
-            index
-        )));
+        return Err(ProxyError::UnknownIndex(index.to_string()));
     }
     Ok(Body::empty())
 }
@@ -201,10 +228,7 @@ pub fn filter_search_request(
     config: &ProxyConfig,
 ) -> Result<Body, ProxyError> {
     if !config.allow_index(index) {
-        return Err(ProxyError::NotAllowed(format!(
-            "index doesn't exist or isn't proxied: {}",
-            index
-        )));
+        return Err(ProxyError::UnknownIndex(index.to_string()));
     }
     // XXX: more checks
     if body.len() > 0 {
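Usage note (not part of the patch): the refactored `ProxyError` now carries both an HTTP status and an Elasticsearch-style JSON body, so the server loop can turn any filtering error into a response without inspecting variants. The sketch below shows that pattern; the enum and its two methods mirror the hunks above, while the `error_response` helper and its `Content-Type` header are illustrative assumptions, not code from this commit.

```rust
// Sketch only: how the ProxyError API introduced in this commit might be
// consumed when building an HTTP reply. Assumes the hyper Body/Response types
// already used by the crate; `error_response` itself is hypothetical.
use hyper::{Body, Response, StatusCode};
use serde_json::json;

#[derive(Debug)]
pub enum ProxyError {
    HttpError(String),
    ParseError(String),
    UnknownIndex(String),
    NotSupported(String),
}

impl ProxyError {
    /// Map each error variant to an HTTP status (as in the patch).
    pub fn http_status_code(&self) -> StatusCode {
        match self {
            ProxyError::HttpError(_) => StatusCode::BAD_REQUEST,
            ProxyError::ParseError(_) => StatusCode::BAD_REQUEST,
            ProxyError::UnknownIndex(_) => StatusCode::NOT_FOUND,
            ProxyError::NotSupported(_) => StatusCode::FORBIDDEN,
        }
    }

    /// Build an Elasticsearch-style error body (as in the patch).
    pub fn to_json_value(&self) -> serde_json::Value {
        let (type_slug, reason) = match self {
            ProxyError::HttpError(s) => ("http-error", s.clone()),
            ProxyError::ParseError(s) => ("parse-error", s.clone()),
            ProxyError::UnknownIndex(index) => (
                "unknown-index",
                format!("index does not exists, or public access not allowed: {}", index),
            ),
            ProxyError::NotSupported(s) => ("not-supported", s.clone()),
        };
        json!({
            "error": { "reason": reason, "type": type_slug },
            "status": self.http_status_code().as_u16(),
        })
    }
}

/// Hypothetical helper: render a ProxyError as a JSON HTTP response.
pub fn error_response(err: &ProxyError) -> Response<Body> {
    Response::builder()
        .status(err.http_status_code())
        .header("Content-Type", "application/json")
        .body(Body::from(err.to_json_value().to_string()))
        .expect("static response parts are valid")
}

fn main() {
    // Example: a request against an unlisted index becomes a 404 with a structured body.
    let resp = error_response(&ProxyError::UnknownIndex("secret-index".to_string()));
    assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
```

Keeping the status mapping on the error type means a new variant forces both a status code and a `type` slug to be chosen in one place, and the body keeps the `{ "error": ..., "status": ... }` shape that Elasticsearch clients already expect.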