author    Bryan Newbold <bnewbold@archive.org>    2020-08-25 14:10:44 -0700
committer Bryan Newbold <bnewbold@archive.org>    2020-08-25 14:10:44 -0700
commit    d8d10a292a0d6bd0134fcfe785718bb8f48dd085 (patch)
tree      9df0ff2dc7127d88c91666021569896847ede21a /tests/parse_es_requests.rs
parent    c49b85ffa31016be21f03e484a263c36932fefe0 (diff)
tests: proper URIs; filter request bodies
Diffstat (limited to 'tests/parse_es_requests.rs')
-rw-r--r--  tests/parse_es_requests.rs  |  50
1 file changed, 35 insertions(+), 15 deletions(-)
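
The parse tests read captured Elasticsearch requests from tests/files/*.txt (and tests/files/scroll/*.txt for scroll requests), and the updated assertions expect an index-qualified path, /some-index/_search, rather than a bare /_search; this is the "proper URIs" part of the commit subject. The fixture layout itself is not shown in this diff; a hypothetical GET_search.txt, assuming a bare request line followed by a JSON body, might look like:

GET /some-index/_search
{"query": {"match_all": {}}}
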
diff --git a/tests/parse_es_requests.rs b/tests/parse_es_requests.rs
index c34dfad..bd38ce6 100644
--- a/tests/parse_es_requests.rs
+++ b/tests/parse_es_requests.rs
@@ -2,27 +2,28 @@
 use std::fs;
 use std::ffi::OsStr;
 use es_public_proxy::parse::{ScrollBody, SearchBody};
+use es_public_proxy::{ProxyConfig, filter_request};
 
 mod common;
 
 #[test]
 fn basic_load() {
-    let request = common::load_request_by_name("GET_search");
-    assert_eq!(request.method, "GET");
-    assert_eq!(request.path_and_query, "/_search");
+    let parts = common::load_parts_by_name("GET_search");
+    assert_eq!(parts.method, "GET");
+    assert_eq!(parts.path_and_query, "/some-index/_search");
 }
 
 #[test]
 fn basic_parse() {
-    let request = common::load_request_by_name("GET_search");
-    assert_eq!(request.method, "GET");
-    assert_eq!(request.path_and_query, "/_search");
+    let parts = common::load_parts_by_name("GET_search");
+    assert_eq!(parts.method, "GET");
+    assert_eq!(parts.path_and_query, "/some-index/_search");
 
-    let _parsed: SearchBody = serde_json::from_str(&request.body.unwrap()).unwrap();
+    let _parsed: SearchBody = serde_json::from_str(&parts.body.unwrap()).unwrap();
 }
 
 #[test]
-fn parse_search_requests() {
+fn parse_search_bodies() {
 
     let file_paths = fs::read_dir("tests/files").unwrap();
 
@@ -31,8 +32,8 @@ fn parse_search_requests() {
         if path.extension() != Some(OsStr::new("txt")) {
             continue
         }
-        let request = common::load_request(&path);
-        if let Some(body) = request.body {
+        let parts = common::load_parts(&path);
+        if let Some(body) = parts.body {
             println!("parsing: {}", path.display());
             println!("BODY: {}", body);
             let _parsed: SearchBody = serde_json::from_str(&body).unwrap();
@@ -41,7 +42,7 @@
 }
 
 #[test]
-fn parse_scroll_requests() {
+fn parse_scroll_bodies() {
 
     let file_paths = fs::read_dir("tests/files/scroll").unwrap();
 
@@ -50,11 +51,30 @@
         if path.extension() != Some(OsStr::new("txt")) {
             continue
         }
-        let request = common::load_request(&path);
-        if let Some(body) = request.body {
-            println!("parsing: {}", path.display());
-            println!("BODY: {}", body);
+        let parts = common::load_parts(&path);
+        if let Some(body) = parts.body {
+            println!(" parsing: {}", path.display());
+            //println!("BODY: {}", body);
             let _parsed: ScrollBody = serde_json::from_str(&body).unwrap();
         }
     }
 }
+
+#[test]
+fn filter_search_requests() {
+
+    let file_paths = fs::read_dir("tests/files").unwrap();
+    let mut config = ProxyConfig::default();
+    config.allow_all_indices = Some(true);
+    let mut rt = tokio::runtime::Runtime::new().unwrap();
+
+    for path in file_paths {
+        let path = path.unwrap().path();
+        if path.extension() != Some(OsStr::new("txt")) {
+            continue
+        }
+        println!(" filtering: {}", path.display());
+        let req = common::load_request(&path);
+        rt.block_on(filter_request(req, &config)).unwrap();
+    }
+}
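
The helpers used above (load_parts, load_parts_by_name, load_request) come from the common module, which is not part of this diff. A minimal sketch of what they might look like, assuming the fixture shape shown earlier and assuming filter_request() accepts a hyper Request (Rust):

// Hypothetical sketch of tests/common/mod.rs; the real module is not shown
// in this diff, and the types and fixture format here are assumptions.

use std::fs;
use std::path::Path;

// The pieces of a captured request that the parse tests inspect.
pub struct RequestParts {
    pub method: String,
    pub path_and_query: String,
    pub body: Option<String>,
}

// Load a fixture such as tests/files/GET_search.txt by its base name.
pub fn load_parts_by_name(name: &str) -> RequestParts {
    load_parts(Path::new(&format!("tests/files/{}.txt", name)))
}

// Split a capture into its request line and optional JSON body, assuming
// "<METHOD> <PATH>" on the first line and the body on the following lines.
pub fn load_parts(path: &Path) -> RequestParts {
    let raw = fs::read_to_string(path).unwrap();
    let mut lines = raw.lines();
    let mut request_line = lines.next().unwrap().split_whitespace();
    let method = request_line.next().unwrap().to_string();
    let path_and_query = request_line.next().unwrap().to_string();
    let rest = lines.collect::<Vec<_>>().join("\n");
    let body = if rest.trim().is_empty() { None } else { Some(rest) };
    RequestParts { method, path_and_query, body }
}

// Build a hyper Request so filter_request() can be exercised end-to-end;
// that filter_request() takes a hyper::Body request is an assumption here.
pub fn load_request(path: &Path) -> hyper::Request<hyper::Body> {
    let parts = load_parts(path);
    let body = parts
        .body
        .map(hyper::Body::from)
        .unwrap_or_else(hyper::Body::empty);
    hyper::Request::builder()
        .method(parts.method.as_str())
        .uri(parts.path_and_query.as_str())
        .body(body)
        .unwrap()
}

Keeping the body as an Option<String> lets the parse tests skip fixtures that carry no request body, matching the if let Some(body) pattern above, while load_request hands filter_request() a fully built request so the filtering path is exercised end to end.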