├── .gitignore ├── Cargo.toml ├── LICENSE ├── src │   ├── expr.pest │   └── lib.rs ├── readme.md └── Cargo.lock /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | .idea 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "elastic_query" 3 | version = "0.4.4" 4 | authors = ["Xargin "] 5 | edition = "2018" 6 | license = "MIT" 7 | description = "convert bool expression to elasticsearch DSL" 8 | repository = "https://github.com/cch123/elastic-rs" 9 | documentation = "https://docs.rs/elastic_query/0.4.4/elastic_query" 10 | readme = "readme.md" 11 | 12 | [dependencies] 13 | pest = "2.0" 14 | pest_derive = "2.0" 15 | serde_json = "1.0.37" 16 | 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright © 2017 Xargin 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | 6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /src/expr.pest: -------------------------------------------------------------------------------- 1 | bool_expr = { SOI ~ expr ~ EOI } 2 | 3 | expr = { 4 | (paren_bool | comp_expr) ~ ( (and_op|or_op)~ (paren_bool| comp_expr))* 5 | } 6 | 7 | and_op = { "and" } 8 | or_op = { "or" } 9 | 10 | paren_bool = { "(" ~ expr ~ ")" } 11 | 12 | comp_expr = { field ~ op ~ value } 13 | 14 | field = @{ (ASCII_ALPHA ~ ASCII_ALPHANUMERIC*) } 15 | op = { eq | neq | op_in | op_not_in | gte | gt | lte | lt | like | not_like } 16 | eq = { "=" } 17 | neq = { "!=" | "<>"} 18 | op_in = { "in" } 19 | op_not_in= { "not" ~ "in"} 20 | gt = { ">" } 21 | gte = { ">=" } 22 | lt = { "<" } 23 | lte = { "<=" } 24 | like = { "like" } 25 | not_like = { "not" ~ "like" } 26 | 27 | value = { 28 | string_literal 29 | | num_literal 30 | | "(" ~ string_literal ~("," ~ string_literal)* ~ ")" 31 | | "(" ~ num_literal ~("," ~ num_literal)* ~ ")" 32 | } 33 | 34 | num_literal = @{ 35 | "-"? 36 | ~ ("0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT*) 37 | ~ ("." ~ ASCII_DIGIT*)? 38 | ~ (^"e" ~ ("+" | "-")? ~ ASCII_DIGIT+)? 
39 | } 40 | 41 | string_literal = ${ "\"" ~ string ~ "\"" } 42 | string = @{ char* } 43 | char = { 44 | !("\"" | "\\") ~ ANY 45 | | "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t") 46 | | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4}) 47 | } 48 | 49 | WHITESPACE = _{ " " | "\n" | "\r" } 50 | 51 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | Convert boolean expressions to Elasticsearch DSL. 4 | 5 | ``` 6 | +----------------------------------------------------+ 7 | |{ | 8 | | "query": { | 9 | | "bool": { | 10 | | "must": [{ | 11 | | "match": { | 12 | | "a": { | 13 | | "query": "1", | 14 | | "type": "phrase" | 15 | | } | 16 | | } | 17 | | }, { | 18 | | "bool": { | 19 | | "must": [{ | 20 | | "match": { | 21 | | "b": { | 22 | +-----------------------------+ | "query": "2", | 23 | |a = 1 and (b = 2 and (c = 3))|---------->| "type": "phrase" | 24 | +-----------------------------+ | } | 25 | | } | 26 | | }, { | 27 | | "match": { | 28 | | "c": { | 29 | | "query": "3", | 30 | | "type": "phrase" | 31 | | } | 32 | | } | 33 | | }] | 34 | | } | 35 | | }] | 36 | | } | 37 | | } | 38 | |} | 39 | +----------------------------------------------------+ 40 | ``` 41 | 42 | Example: 43 | 44 | Add the following to your `Cargo.toml`: 45 | 46 | ```toml 47 | [dependencies] 48 | elastic_query = "0.4.4" 49 | ``` 50 | 51 | Then use it as follows: 52 | 53 | ```rust 54 | extern crate elastic_query; 55 | 56 | fn main() { 57 | let result = elastic_query::convert("a = 1 and b in (1,2,3)".to_string(), 0, 100, vec![], vec![]).unwrap(); 58 | println!("{}", result); 59 | } 60 | 61 | ``` 62 | 63 | Grammar: 64 | 65 | ```peg 66 | bool_expr = { SOI ~ expr ~ EOI } 67 | 68 | expr = { 69 | (paren_bool | comp_expr) ~ ( (and_op|or_op)~ (paren_bool| comp_expr))* 70 | } 71 | 72 | and_op = { "and" } 73 | or_op = { "or" } 74 | 75 | paren_bool = { "(" ~ expr ~ ")" } 76 | 77 | comp_expr = { field ~ op ~ value } 78 | 79 | field = @{ (ASCII_ALPHA ~ ASCII_ALPHANUMERIC*) } 80 | op = { eq | neq | op_in | op_not_in | gte | gt | lte | lt | like | not_like } 81 | eq = { "=" } 82 | neq = { "!=" | "<>"} 83 | op_in = { "in" } 84 | op_not_in= { "not" ~ "in"} 85 | gt = { ">" } 86 | gte = { ">=" } 87 | lt = { "<" } 88 | lte = { "<=" } 89 | like = { "like" } 90 | not_like = { "not" ~ "like" } 91 | 92 | value = { 93 | string_literal 94 | | num_literal 95 | | "(" ~ string_literal ~("," ~ string_literal)* ~ ")" 96 | | "(" ~ num_literal ~("," ~ num_literal)* ~ ")" 97 | } 98 | 99 | num_literal = @{ 100 | "-"? 101 | ~ ("0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT*) 102 | ~ ("." ~ ASCII_DIGIT*)? 103 | ~ (^"e" ~ ("+" | "-")? ~ ASCII_DIGIT+)? 104 | } 105 | 106 | string_literal = ${ "\"" ~ string ~ "\"" } 107 | string = @{ char* } 108 | char = { 109 | !("\"" | "\\") ~ ANY 110 | | "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t") 111 | | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4}) 112 | } 113 | 114 | WHITESPACE = _{ " " | "\n" | "\r" } 115 | ``` 116 | 117 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "arrayref" 7 | version = "0.3.5" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "0d382e583f07208808f6b1249e60848879ba3543f57c32277bf52d69c2f0f0ee" 10 | 11 | [[package]] 12 | name = "block-buffer" 13 | version = "0.3.3" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | checksum = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab" 16 | dependencies = [ 17 | "arrayref", 18 | "byte-tools", 19 | ] 20 | 21 | [[package]] 22 | name = "byte-tools" 23 | version = "0.2.0" 24 | source = "registry+https://github.com/rust-lang/crates.io-index" 25 | checksum = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" 26 | 27 | [[package]] 28 | name = "digest" 29 | version = "0.7.6" 30 | source = "registry+https://github.com/rust-lang/crates.io-index" 31 | checksum = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90" 32 | dependencies = [ 33 | "generic-array", 34 | ] 35 | 36 | [[package]] 37 | name = "elastic_query" 38 | version = "0.4.4" 39 | dependencies = [ 40 | "pest", 41 | "pest_derive", 42 | "serde_json", 43 | ] 44 | 45 | [[package]] 46 | name = "fake-simd" 47 | version = "0.1.2" 48 | source = "registry+https://github.com/rust-lang/crates.io-index" 49 | checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" 50 | 51 | [[package]] 52 | name = "generic-array" 53 | version = "0.9.1" 54 | source = "registry+https://github.com/rust-lang/crates.io-index" 55 | checksum = "6d00328cedcac5e81c683e5620ca6a30756fc23027ebf9bff405c0e8da1fbb7e" 56 | dependencies = [ 57 | "typenum", 58 | ] 59 | 60 | [[package]] 61 | name = "itoa" 62 | version = "0.4.3" 63 | source = "registry+https://github.com/rust-lang/crates.io-index" 64 | checksum = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" 65 | 66 | [[package]] 67 | name = "maplit" 68 | version = "1.0.1" 69 | source = "registry+https://github.com/rust-lang/crates.io-index" 70 | checksum = "08cbb6b4fef96b6d77bfc40ec491b1690c779e77b05cd9f07f787ed376fd4c43" 71 | 72 | [[package]] 73 | name = "pest" 74 | version = "2.1.0" 75 | source = "registry+https://github.com/rust-lang/crates.io-index" 76 | checksum = "54f0c72a98d8ab3c99560bfd16df8059cc10e1f9a8e83e6e3b97718dd766e9c3" 77 | dependencies = [ 78 | "ucd-trie", 79 | ] 80 | 81 | [[package]] 82 | name = "pest_derive" 83 | version = "2.1.0" 84 | source = "registry+https://github.com/rust-lang/crates.io-index" 85 | checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" 86 | dependencies = [ 87 | "pest", 88 | "pest_generator", 89 | ] 90 | 91 | [[package]] 92 | name = "pest_generator" 93 | version = "2.1.0" 94 | source = "registry+https://github.com/rust-lang/crates.io-index" 95 | checksum = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646" 96 | dependencies = [ 97 | "pest", 98 | "pest_meta", 99 | "proc-macro2", 100 | "quote", 101 | "syn", 102 | ] 103 | 104 | [[package]] 105 | name = "pest_meta" 106 | version = "2.1.0" 107 | source = "registry+https://github.com/rust-lang/crates.io-index" 108 | checksum = "f5a3492a4ed208ffc247adcdcc7ba2a95be3104f58877d0d02f0df39bf3efb5e" 109 | dependencies = [ 110 | "maplit", 111 | "pest", 112 | "sha-1", 113 | ] 114 | 115 | [[package]] 116 | name = "proc-macro2" 117 | version = "0.4.26" 118 | source = "registry+https://github.com/rust-lang/crates.io-index" 119 | checksum = "38fddd23d98b2144d197c0eca5705632d4fe2667d14a6be5df8934f8d74f1978" 120 | dependencies = [ 121 | 
"unicode-xid", 122 | ] 123 | 124 | [[package]] 125 | name = "quote" 126 | version = "0.6.11" 127 | source = "registry+https://github.com/rust-lang/crates.io-index" 128 | checksum = "cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1" 129 | dependencies = [ 130 | "proc-macro2", 131 | ] 132 | 133 | [[package]] 134 | name = "ryu" 135 | version = "0.2.7" 136 | source = "registry+https://github.com/rust-lang/crates.io-index" 137 | checksum = "eb9e9b8cde282a9fe6a42dd4681319bfb63f121b8a8ee9439c6f4107e58a46f7" 138 | 139 | [[package]] 140 | name = "serde" 141 | version = "1.0.85" 142 | source = "registry+https://github.com/rust-lang/crates.io-index" 143 | checksum = "534b8b91a95e0f71bca3ed5824752d558da048d4248c91af873b63bd60519752" 144 | 145 | [[package]] 146 | name = "serde_json" 147 | version = "1.0.37" 148 | source = "registry+https://github.com/rust-lang/crates.io-index" 149 | checksum = "4b90a9fbe1211e57d3e1c15670f1cb00802988fb23a1a4aad7a2b63544f1920e" 150 | dependencies = [ 151 | "itoa", 152 | "ryu", 153 | "serde", 154 | ] 155 | 156 | [[package]] 157 | name = "sha-1" 158 | version = "0.7.0" 159 | source = "registry+https://github.com/rust-lang/crates.io-index" 160 | checksum = "51b9d1f3b5de8a167ab06834a7c883bd197f2191e1dda1a22d9ccfeedbf9aded" 161 | dependencies = [ 162 | "block-buffer", 163 | "byte-tools", 164 | "digest", 165 | "fake-simd", 166 | ] 167 | 168 | [[package]] 169 | name = "syn" 170 | version = "0.15.26" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 172 | checksum = "f92e629aa1d9c827b2bb8297046c1ccffc57c99b947a680d3ccff1f136a3bee9" 173 | dependencies = [ 174 | "proc-macro2", 175 | "quote", 176 | "unicode-xid", 177 | ] 178 | 179 | [[package]] 180 | name = "typenum" 181 | version = "1.10.0" 182 | source = "registry+https://github.com/rust-lang/crates.io-index" 183 | checksum = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" 184 | 185 | [[package]] 186 | name = "ucd-trie" 187 | version = "0.1.1" 188 | source = "registry+https://github.com/rust-lang/crates.io-index" 189 | checksum = "71a9c5b1fe77426cf144cc30e49e955270f5086e31a6441dfa8b32efc09b9d77" 190 | 191 | [[package]] 192 | name = "unicode-xid" 193 | version = "0.1.0" 194 | source = "registry+https://github.com/rust-lang/crates.io-index" 195 | checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" 196 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![recursion_limit = "1024"] 2 | 3 | #[macro_use] 4 | extern crate pest_derive; 5 | 6 | use pest::prec_climber::{Assoc, Operator, PrecClimber}; 7 | use serde_json::json; 8 | 9 | //use pest::error::Error; 10 | use pest::Parser; 11 | 12 | #[derive(Parser)] 13 | #[grammar = "expr.pest"] 14 | struct ExprParser; 15 | 16 | /// error occurred when parsing user input 17 | #[derive(Debug)] 18 | pub struct ParseError { 19 | pub location: pest::error::InputLocation, 20 | pub expected: String, 21 | } 22 | 23 | /// convert user input to Elasticsearch DSL 24 | /// example : 25 | /// ``` 26 | /// extern crate elastic_query; 27 | /// use elastic_query::convert; 28 | /// convert("a = 1 and b = 2 and c = 3".to_string(), 0, 1000, vec![], vec![]); 29 | /// ``` 30 | /// will generate result : 31 | /// ```json 32 | /// { 33 | /// "query": { 34 | /// "bool": { 35 | /// "must": [{ 36 | /// "bool": { 37 | /// "must": [{ 38 | /// "match": { 39 | /// "a": { 40 | /// "query": "1", 41 | /// "type": 
"phrase" 42 | /// } 43 | /// } 44 | /// }, { 45 | /// "match": { 46 | /// "b": { 47 | /// "query": "2", 48 | /// "type": "phrase" 49 | /// } 50 | /// } 51 | /// }] 52 | /// } 53 | /// }, { 54 | /// "match": { 55 | /// "c": { 56 | /// "query": "3", 57 | /// "type": "phrase" 58 | /// } 59 | /// } 60 | /// }] 61 | /// } 62 | /// } 63 | ///} 64 | /// ``` 65 | pub fn convert( 66 | query: String, 67 | from: i32, 68 | size: i32, 69 | sort: Vec<&str>, 70 | aggs: Vec<&str>, 71 | ) -> Result<serde_json::Value, ParseError> { 72 | let parse_result = ExprParser::parse(Rule::expr, query.as_str()); 73 | match parse_result { 74 | Ok(mut expr_ast) => { 75 | let ast = generate_ast(expr_ast.next().unwrap()); 76 | let dsl = walk_tree(ast, true); 77 | 78 | let mut result = json!({ 79 | "query": dsl, 80 | "from" : from, 81 | "size" : size, 82 | }); 83 | 84 | if sort.len() > 0 { 85 | result["sort"] = build_sort(sort); 86 | } 87 | 88 | if aggs.len() > 0 { 89 | result["aggregations"] = build_aggs(aggs); 90 | } 91 | 92 | return Ok(result); 93 | } 94 | Err(err) => { 95 | // TODO: more friendly error 96 | Err(ParseError { 97 | location: err.location, 98 | expected: "".to_string(), 99 | }) 100 | } 101 | } 102 | } 103 | 104 | fn build_aggs(aggs: Vec<&str>) -> serde_json::Value { 105 | let mut result: serde_json::Value = json!({}); 106 | aggs.iter().enumerate().rev().for_each(|(idx, &field)| { 107 | let previous_result = result.clone(); 108 | 109 | let size = if idx == 0 { 200 } else { 0 }; 110 | 111 | result = json!({ 112 | field : { 113 | "terms" : { 114 | "field" : field, 115 | "size" : size, 116 | } 117 | } 118 | }); 119 | 120 | if previous_result.as_object().unwrap().len() > 0 { 121 | result[field]["aggregations"] = previous_result; 122 | } 123 | }); 124 | 125 | result 126 | } 127 | 128 | fn build_sort(sort: Vec<&str>) -> serde_json::Value { 129 | sort.iter() 130 | .map(|&s| { 131 | let mut elem: Vec<&str> = s.split_whitespace().collect(); 132 | if elem.len() < 2 { 133 | elem.push("asc"); 134 | } 135 | json!({elem[0] : elem[1]}) 136 | }) 137 | .collect() 138 | } 139 | 140 | use pest::iterators::Pair; 141 | 142 | #[derive(Debug)] 143 | enum Expression { 144 | CompExpr(String, Rule, String), 145 | AndExpr(Box<Expression>, Box<Expression>), 146 | OrExpr(Box<Expression>, Box<Expression>), 147 | } 148 | 149 | fn generate_ast(pair: Pair<Rule>) -> Expression { 150 | let climber = PrecClimber::new(vec![ 151 | Operator::new(Rule::and_op, Assoc::Left) | Operator::new(Rule::or_op, Assoc::Left), 152 | ]); 153 | 154 | consume(pair, &climber) 155 | } 156 | 157 | fn consume(pair: Pair<Rule>, climber: &PrecClimber<Rule>) -> Expression { 158 | let atom = |pair| consume(pair, climber); 159 | let infix = |lhs, op: Pair<Rule>, rhs| match op.as_rule() { 160 | Rule::and_op => Expression::AndExpr(Box::new(lhs), Box::new(rhs)), 161 | Rule::or_op => Expression::OrExpr(Box::new(lhs), Box::new(rhs)), 162 | _ => unreachable!(), 163 | }; 164 | 165 | match pair.as_rule() { 166 | Rule::expr => { 167 | let pairs = pair.into_inner(); 168 | climber.climb(pairs, atom, infix) 169 | } 170 | Rule::paren_bool => pair.into_inner().next().map(atom).unwrap(), 171 | Rule::comp_expr => { 172 | let mut iter = pair.into_inner(); 173 | let (lhs, op, rhs) = ( 174 | iter.next().unwrap().as_str().to_string(), 175 | iter.next().unwrap().into_inner().next().unwrap().as_rule(), 176 | iter.next().unwrap().as_str().to_string(), 177 | ); 178 | return Expression::CompExpr(lhs, op, rhs); 179 | } 180 | _ => unreachable!(), 181 | } 182 | } 183 | 184 | fn walk_tree(expr: Expression, is_root: bool) -> serde_json::Value { 185 | match expr { 186 | Expression::AndExpr(lexpr, 
rexpr) => { 187 | let (left_val, right_val) = (walk_tree(*lexpr, false), walk_tree(*rexpr, false)); 188 | return serde_json::json!({ 189 | "bool" : { 190 | "must" : [left_val, right_val] 191 | } 192 | }); 193 | } 194 | Expression::OrExpr(lexpr, rexpr) => { 195 | let (left_val, right_val) = (walk_tree(*lexpr, false), walk_tree(*rexpr, false)); 196 | return serde_json::json!({ 197 | "bool" : { 198 | "should" : [left_val, right_val] 199 | } 200 | }); 201 | } 202 | Expression::CompExpr(lhs, operator, rhs) => { 203 | #[rustfmt::skip] 204 | let result = match operator { 205 | Rule::eq | Rule::like => json!({"match" : {lhs : {"query" : rhs, "type" : "phrase"}}}), 206 | Rule::gte => json!({"range" : {lhs : {"from" : rhs}}}), 207 | Rule::lte => json!({"range" : {lhs : {"to" : rhs}}}), 208 | Rule::gt => json!({"range" : {lhs : {"gt" : rhs}}}), 209 | Rule::lt => json!({"range" : {lhs : {"lt" : rhs}}}), 210 | Rule::neq => json!({"bool" : {"must_not" : [{"match" : {lhs : {"query" : rhs, "type" : "phrase"}}}]}}), 211 | Rule::op_in => { 212 | let rhs = rhs.replace("\'", "\""); 213 | let r_vec: Vec<&str> = rhs 214 | .trim_start_matches("(") 215 | .trim_end_matches(")") 216 | .split(",") 217 | .map(|v| v.trim()) 218 | .collect(); 219 | json!({"terms" : {lhs : r_vec}}) 220 | } 221 | Rule::op_not_in => { 222 | let rhs = rhs.replace("\'", "\""); 223 | let r_vec: Vec<&str> = rhs 224 | .trim_start_matches("(") 225 | .trim_end_matches(")") 226 | .split(",") 227 | .map(|v| v.trim()) 228 | .collect(); 229 | json!({"bool" : {"must_not" : {"terms" : { lhs : r_vec}}}}) 230 | } 231 | 232 | _ => unreachable!(), 233 | }; 234 | 235 | if is_root { 236 | return json!({"bool" : {"must" :[result]}}); 237 | } 238 | return result; 239 | } 240 | } 241 | } 242 | 243 | 244 | #[cfg(test)] 245 | mod tests { 246 | use serde_json::json; 247 | 248 | #[allow(dead_code)] 249 | struct TestCase<'a> { 250 | input: (&'a str, i32, i32, Vec<&'a str>, Vec<&'a str>), // query, from, size, sort, agg 251 | output: serde_json::Value, 252 | comment: &'a str, 253 | } 254 | 255 | #[test] 256 | fn test_convert() { 257 | let test_cases: Vec<TestCase> = vec![ 258 | TestCase { 259 | input: ("a=1", 1000, 1000, vec![], vec![]), 260 | output: json!({"query" : {"bool" : {"must" : [{"match" :{"a" : {"query" : "1", "type" : "phrase"}}}]}}, "from" : 1000, "size" : 1000}), 261 | comment: "equal expression test", 262 | }, 263 | TestCase { 264 | input: ("a=1", 1000, 1000, vec!["a asc", "b desc"], vec![]), 265 | output: json!({"from":1000,"query":{"bool":{"must":[{"match":{"a":{"query":"1","type":"phrase"}}}]}},"size":1000,"sort":[{"a":"asc"},{"b":"desc"}]}), 266 | comment: "sort test", 267 | }, 268 | TestCase { 269 | input: ("a=1", 0, 1000, vec!["a asc", "b"], vec![]), 270 | output: json!({"from":0,"query":{"bool":{"must":[{"match":{"a":{"query":"1","type":"phrase"}}}]}},"size":1000,"sort":[{"a":"asc"},{"b":"asc"}]}), 271 | comment: "sort test", 272 | }, 273 | TestCase { 274 | input: ("a in (1,2,3)", 1000, 1000, vec![], vec![]), 275 | output: json!({"from":1000,"query":{"bool":{"must":[{"terms":{"a":["1","2","3"]}}]}},"size":1000}), 276 | comment: "in expression test", 277 | }, 278 | TestCase { 279 | input: ("a in ( 1, 2, 3)", 1000, 1000, vec![], vec![]), 280 | output: json!({"from":1000,"query":{"bool":{"must":[{"terms":{"a":["1","2","3"]}}]}},"size":1000}), 281 | comment: "auto trim space test", 282 | }, 283 | TestCase { 284 | input: ("a in ( 1, 2, 3)", 1000, 1000, vec![], vec!["a", "b"]), 285 | output: 
json!({"aggregations":{"a":{"aggregations":{"b":{"terms":{"field":"b","size":0}}},"terms":{"field":"a","size":200}}},"from":1000,"query":{"bool":{"must":[{"terms":{"a":["1","2","3"]}}]}},"size":1000}), 286 | comment: "aggregation test", 287 | }, 288 | TestCase { 289 | input: ("a = 1 and (b = 2 and (c = 3))", 0, 1000, vec![], vec![]), 290 | output: json!({"from":0,"query":{"bool":{"must":[{"match":{"a":{"query":"1","type":"phrase"}}},{"bool":{"must":[{"match":{"b":{"query":"2","type":"phrase"}}},{"match":{"c":{"query":"3","type":"phrase"}}}]}}]}},"size":1000}), 291 | comment: "paren expr test", 292 | }, 293 | TestCase { 294 | input: ("a = 1 and b = 2 and c = 3", 0, 1000, vec![], vec![]), 295 | output: json!({"from":0,"query":{"bool":{"must":[{"bool":{"must":[{"match":{"a":{"query":"1","type":"phrase"}}},{"match":{"b":{"query":"2","type":"phrase"}}}]}},{"match":{"c":{"query":"3","type":"phrase"}}}]}},"size":1000}), 296 | comment: "left association test", 297 | }, 298 | TestCase { 299 | input: ("a = 1 and b = 2 and c = 3 and d = 4", 0, 1000, vec![], vec![]), 300 | output: json!({"from":0,"query":{"bool":{"must":[{"bool":{"must":[{"bool":{"must":[{"match":{"a":{"query":"1","type":"phrase"}}},{"match":{"b":{"query":"2","type":"phrase"}}}]}},{"match":{"c":{"query":"3","type":"phrase"}}}]}},{"match":{"d":{"query":"4","type":"phrase"}}}]}},"size":1000}), 301 | comment: "left association test", 302 | }, 303 | ]; 304 | test_cases.iter().for_each(|case| { 305 | let output = super::convert( 306 | case.input.0.to_string(), 307 | case.input.1, 308 | case.input.2, 309 | case.input.3.clone(), 310 | case.input.4.clone(), 311 | ) 312 | .unwrap(); 313 | println!("{}", output); 314 | assert_eq!(output, case.output) 315 | }); 316 | } 317 | } 318 | --------------------------------------------------------------------------------