author | Bryan Newbold <bnewbold@archive.org> | 2021-03-29 19:56:08 -0700
---|---|---
committer | Bryan Newbold <bnewbold@archive.org> | 2021-03-29 19:56:08 -0700
commit | 1553ed113f0e12d8e36b67d92a746131695a6612 (patch) |
tree | 16109e64ea580c4bf51ac29c0fa0c578960b104d /src |
parent | a7a56ece82aa920bd7cdeadb91db6fb123aeb58d (diff) |
download | es-public-proxy-1553ed113f0e12d8e36b67d92a746131695a6612.tar.gz, es-public-proxy-1553ed113f0e12d8e36b67d92a746131695a6612.zip |
clippy/fmt clean (v0.2.6)
Diffstat (limited to 'src')
-rw-r--r-- | src/lib.rs | 18 |
-rw-r--r-- | src/main.rs | 3 |
2 files changed, 10 insertions, 11 deletions
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -87,10 +87,10 @@ pub async fn filter_request(
     // split path into at most 3 chunks
     let mut req_path = parts.uri.path();
-    if req_path.starts_with("/") {
+    if req_path.starts_with('/') {
         req_path = &req_path[1..];
     }
-    let path_chunks: Vec<&str> = req_path.split("/").collect();
+    let path_chunks: Vec<&str> = req_path.split('/').collect();
     if path_chunks.len() > 3 {
         return Err(ProxyError::NotSupported(
             "only request paths with up to three segments allowed".to_string(),
@@ -143,16 +143,16 @@ pub async fn filter_request(
         | (&Method::OPTIONS, [index, ""]) => {
             filter_read_request(index, path_chunks[1], &params, config)?
         }
-        (&Method::GET, [index])
-        | (&Method::HEAD, [index])
-        | (&Method::OPTIONS, [index]) => {
+        (&Method::GET, [index]) | (&Method::HEAD, [index]) | (&Method::OPTIONS, [index]) => {
             // only allow operations on index name (no trailing slash) if not "unsafe_all_indices"
             // (aka, only if indexes are explicitly enumerated)
             // otherwise all top-level API endpoints would be allowed
             if config.unsafe_all_indices != Some(true) {
                 filter_read_request(index, "", &params, config)?
             } else {
-                Err(ProxyError::NotSupported("unknown elasticsearch API endpoint".to_string()))?
+                return Err(ProxyError::NotSupported(
+                    "unknown elasticsearch API endpoint".to_string(),
+                ));
             }
         }
         (&Method::GET, [index, "_mapping"])
@@ -166,7 +166,7 @@ pub async fn filter_request(
     };
     let upstream_query = serde_urlencoded::to_string(params).expect("re-encoding URL parameters");
-    let upstream_query_and_params = if upstream_query.len() > 0 {
+    let upstream_query_and_params = if !upstream_query.is_empty() {
         format!("{}?{}", req_path, upstream_query)
     } else {
         req_path.to_string()
@@ -198,7 +198,7 @@ pub fn filter_scroll_request(
     body: &[u8],
     _config: &ProxyConfig,
 ) -> Result<Body, ProxyError> {
-    if body.len() > 0 {
+    if !body.is_empty() {
         let parsed: parse::ScrollBody =
             serde_json::from_slice(body).map_err(|e| ProxyError::ParseError(e.to_string()))?;
         // check that scroll_id is not "_all" or too short
@@ -250,7 +250,7 @@ pub fn filter_search_request(
         return Err(ProxyError::UnknownIndex(index.to_string()));
     }
     // XXX: more checks
-    if body.len() > 0 {
+    if !body.is_empty() {
         let parsed: parse::SearchBody =
             serde_json::from_slice(body).map_err(|e| ProxyError::ParseError(e.to_string()))?;
         Ok(Body::from(serde_json::to_string(&parsed).unwrap()))
diff --git a/src/main.rs b/src/main.rs
index 761c020..2075535 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,7 +2,6 @@ use hyper::service::{make_service_fn, service_fn};
 use hyper::{header::HeaderValue, Body, Client, Request, Response, Server};
 use std::env;
 use std::net::SocketAddr;
-use toml;
 
 #[macro_use]
 extern crate log;
@@ -124,7 +123,7 @@ fn load_config() -> ProxyConfig {
     }
 
     // then try environment variables
-    if let None = config_path {
+    if config_path.is_none() {
         config_path = std::env::var("ES_PUBLIC_PROXY_CONFIG_PATH").ok();
     }
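For context on the lints this cleanup addresses, the sketch below is a minimal standalone example (not part of the commit; the `split_path` helper, `main`, and the example values are made up for illustration) showing the clippy-preferred idioms applied here: single-`char` patterns for `starts_with`/`split`, `is_empty()` instead of `len() > 0`, and `Option::is_none()` instead of `if let None = ...`.

```rust
// Illustrative only: mirrors the clippy idioms applied in this commit.
// `split_path` and the values below are hypothetical, not from es-public-proxy.
fn split_path(raw: &str) -> Vec<&str> {
    let mut path = raw;
    // clippy::single_char_pattern: pass a char, not a one-character &str
    if path.starts_with('/') {
        path = &path[1..];
    }
    path.split('/').collect()
}

fn main() {
    let chunks = split_path("/some_index/_search");
    // clippy::len_zero: prefer is_empty() over comparing len() to zero
    if !chunks.is_empty() {
        println!("first segment: {}", chunks[0]);
    }

    // clippy::redundant_pattern_matching: prefer is_none() over `if let None = ...`
    let config_path: Option<String> = std::env::var("EXAMPLE_CONFIG_PATH").ok();
    if config_path.is_none() {
        println!("no config path set");
    }
}
```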