summaryrefslogtreecommitdiffstats
path: root/vendor/jsonpath_lib/src
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/jsonpath_lib/src')
-rw-r--r--vendor/jsonpath_lib/src/ffi/mod.rs59
-rw-r--r--vendor/jsonpath_lib/src/lib.rs540
-rw-r--r--vendor/jsonpath_lib/src/parser/mod.rs1503
-rw-r--r--vendor/jsonpath_lib/src/parser/path_reader.rs53
-rw-r--r--vendor/jsonpath_lib/src/parser/tokenizer.rs370
-rw-r--r--vendor/jsonpath_lib/src/select/cmp.rs335
-rw-r--r--vendor/jsonpath_lib/src/select/expr_term.rs227
-rw-r--r--vendor/jsonpath_lib/src/select/mod.rs1001
-rw-r--r--vendor/jsonpath_lib/src/select/value_walker.rs99
9 files changed, 4187 insertions, 0 deletions
diff --git a/vendor/jsonpath_lib/src/ffi/mod.rs b/vendor/jsonpath_lib/src/ffi/mod.rs
new file mode 100644
index 000000000..2ad259b5c
--- /dev/null
+++ b/vendor/jsonpath_lib/src/ffi/mod.rs
@@ -0,0 +1,59 @@
+use std::ffi::{CStr, CString};
+use std::os::raw::{c_char, c_void};
+
+use {parser, select, select_as_str};
+
+const INVALID_PATH: &str = "invalid path";
+const INVALID_JSON: &str = "invalud json";
+
+fn to_str(v: *const c_char, err_msg: &str) -> &str {
+ unsafe { CStr::from_ptr(v) }.to_str().expect(err_msg)
+}
+
+fn to_char_ptr(v: &str) -> *const c_char {
+ let s = CString::new(v).unwrap_or_else(|_| panic!("invalid string: {}", v));
+ let ptr = s.as_ptr();
+ std::mem::forget(s);
+ ptr
+}
+
+#[no_mangle]
+pub extern "C" fn ffi_select(json_str: *const c_char, path: *const c_char) -> *const c_char {
+ let json_str = to_str(json_str, INVALID_JSON);
+ let path = to_str(path, INVALID_PATH);
+ match select_as_str(json_str, path) {
+ Ok(v) => to_char_ptr(v.as_str()),
+ Err(e) => {
+ panic!("{:?}", e);
+ }
+ }
+}
+
+#[no_mangle]
+#[allow(clippy::forget_copy)]
+pub extern "C" fn ffi_path_compile(path: *const c_char) -> *mut c_void {
+ let path = to_str(path, INVALID_PATH);
+ let ref_node = Box::into_raw(Box::new(parser::Parser::compile(path).unwrap()));
+ let ptr = ref_node as *mut c_void;
+ std::mem::forget(ref_node);
+ ptr
+}
+
+#[no_mangle]
+pub extern "C" fn ffi_select_with_compiled_path(
+ path_ptr: *mut c_void,
+ json_ptr: *const c_char,
+) -> *const c_char {
+ let node = unsafe { Box::from_raw(path_ptr as *mut parser::Node) };
+ let json_str = to_str(json_ptr, INVALID_JSON);
+ let json = serde_json::from_str(json_str)
+ .unwrap_or_else(|_| panic!("invalid json string: {}", json_str));
+
+ let mut selector = select::Selector::default();
+ let found = selector.compiled_path(&node).value(&json).select().unwrap();
+ std::mem::forget(node);
+
+ let result = serde_json::to_string(&found)
+ .unwrap_or_else(|_| panic!("json serialize error: {:?}", found));
+ to_char_ptr(result.as_str())
+}
diff --git a/vendor/jsonpath_lib/src/lib.rs b/vendor/jsonpath_lib/src/lib.rs
new file mode 100644
index 000000000..55ac51943
--- /dev/null
+++ b/vendor/jsonpath_lib/src/lib.rs
@@ -0,0 +1,540 @@
+//! JsonPath implementation written in Rust.
+//!
+//! # Example
+//! ```
+//! extern crate jsonpath_lib as jsonpath;
+//! #[macro_use] extern crate serde_json;
+//! let json_obj = json!({
+//! "store": {
+//! "book": [
+//! {
+//! "category": "reference",
+//! "author": "Nigel Rees",
+//! "title": "Sayings of the Century",
+//! "price": 8.95
+//! },
+//! {
+//! "category": "fiction",
+//! "author": "Evelyn Waugh",
+//! "title": "Sword of Honour",
+//! "price": 12.99
+//! },
+//! {
+//! "category": "fiction",
+//! "author": "Herman Melville",
+//! "title": "Moby Dick",
+//! "isbn": "0-553-21311-3",
+//! "price": 8.99
+//! },
+//! {
+//! "category": "fiction",
+//! "author": "J. R. R. Tolkien",
+//! "title": "The Lord of the Rings",
+//! "isbn": "0-395-19395-8",
+//! "price": 22.99
+//! }
+//! ],
+//! "bicycle": {
+//! "color": "red",
+//! "price": 19.95
+//! }
+//! },
+//! "expensive": 10
+//! });
+//!
+//! let mut selector = jsonpath::selector(&json_obj);
+//!
+//! assert_eq!(selector("$.store.book[*].author").unwrap(),
+//! vec![
+//! "Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"
+//! ]);
+//!
+//! assert_eq!(selector("$..author").unwrap(),
+//! vec![
+//! "Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"
+//! ]);
+//!
+//! assert_eq!(selector("$.store.*").unwrap(),
+//! vec![
+//! &json!([
+//! { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95 },
+//! { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99 },
+//! { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99 },
+//! { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99 }
+//! ]),
+//! &json!({ "color": "red", "price": 19.95 })
+//! ]);
+//!
+//! assert_eq!(selector("$.store..price").unwrap(),
+//! vec![
+//! 8.95, 12.99, 8.99, 22.99, 19.95
+//! ]);
+//!
+//! assert_eq!(selector("$..book[2]").unwrap(),
+//! vec![
+//! &json!({
+//! "category" : "fiction",
+//! "author" : "Herman Melville",
+//! "title" : "Moby Dick",
+//! "isbn" : "0-553-21311-3",
+//! "price" : 8.99
+//! })
+//! ]);
+//!
+//! assert_eq!(selector("$..book[-2]").unwrap(),
+//! vec![
+//! &json!({
+//! "category" : "fiction",
+//! "author" : "Herman Melville",
+//! "title" : "Moby Dick",
+//! "isbn" : "0-553-21311-3",
+//! "price" : 8.99
+//! })
+//! ]);
+//!
+//! assert_eq!(selector("$..book[0,1]").unwrap(),
+//! vec![
+//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
+//! &json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
+//! ]);
+//!
+//! assert_eq!(selector("$..book[:2]").unwrap(),
+//! vec![
+//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
+//! &json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
+//! ]);
+//!
+//! assert_eq!(selector("$..book[:2]").unwrap(),
+//! vec![
+//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
+//! &json!({"category" : "fiction","author" : "Evelyn Waugh","title" : "Sword of Honour","price" : 12.99})
+//! ]);
+//!
+//! assert_eq!(selector("$..book[?(@.isbn)]").unwrap(),
+//! vec![
+//! &json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99}),
+//! &json!({"category" : "fiction","author" : "J. R. R. Tolkien","title" : "The Lord of the Rings","isbn" : "0-395-19395-8","price" : 22.99})
+//! ]);
+//!
+//! assert_eq!(selector("$.store.book[?(@.price < 10)]").unwrap(),
+//! vec![
+//! &json!({"category" : "reference","author" : "Nigel Rees","title" : "Sayings of the Century","price" : 8.95}),
+//! &json!({"category" : "fiction","author" : "Herman Melville","title" : "Moby Dick","isbn" : "0-553-21311-3","price" : 8.99})
+//! ]);
+//! ```
+extern crate array_tool;
+extern crate core;
+extern crate env_logger;
+#[macro_use]
+extern crate log;
+extern crate serde;
+extern crate serde_json;
+
+use serde_json::Value;
+
+pub use parser::Parser; // TODO private
+pub use select::JsonPathError;
+pub use select::{Selector, SelectorMut};
+use parser::Node;
+
+#[doc(hidden)]
+mod ffi;
+#[doc(hidden)]
+mod parser;
+#[doc(hidden)]
+mod select;
+
+/// It is a higher-order function. It compiles a jsonpath and then returns a closure that takes a JSON value as its argument. If you need to reuse a jsonpath, it is good for performance.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// let mut first_friend = jsonpath::compile("$..friends[0]");
+///
+/// let json_obj = json!({
+///     "school": {
+///         "friends": [
+///             {"name": "친구1", "age": 20},
+///             {"name": "친구2", "age": 20}
+///         ]
+///     },
+///     "friends": [
+///         {"name": "친구3", "age": 30},
+///         {"name": "친구4"}
+///     ]});
+///
+/// let json = first_friend(&json_obj).unwrap();
+///
+/// assert_eq!(json, vec![
+/// &json!({"name": "친구3", "age": 30}),
+/// &json!({"name": "친구1", "age": 20})
+/// ]);
+/// ```
+#[deprecated(
+ since = "0.2.5",
+ note = "Please use the Compiled::compile function instead"
+)]
+pub fn compile(path: &str) -> impl FnMut(&Value) -> Result<Vec<&Value>, JsonPathError> {
+ let node = parser::Parser::compile(path);
+ move |json| match &node {
+ Ok(node) => {
+ let mut selector = Selector::default();
+ selector.compiled_path(node).value(json).select()
+ }
+ Err(e) => Err(JsonPathError::Path(e.to_string())),
+ }
+}
+
+/// It is a higher-order function. It returns a closure that takes a jsonpath string as its argument. You can use different jsonpaths for one JSON object.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// let json_obj = json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]});
+///
+/// let mut selector = jsonpath::selector(&json_obj);
+///
+/// let json = selector("$..friends[0]").unwrap();
+///
+/// assert_eq!(json, vec![
+/// &json!({"name": "친구3", "age": 30}),
+/// &json!({"name": "친구1", "age": 20})
+/// ]);
+///
+/// let json = selector("$..friends[1]").unwrap();
+///
+/// assert_eq!(json, vec![
+/// &json!({"name": "친구4"}),
+/// &json!({"name": "친구2", "age": 20})
+/// ]);
+/// ```
+#[allow(clippy::needless_lifetimes)]
+pub fn selector<'a>(json: &'a Value) -> impl FnMut(&str) -> Result<Vec<&'a Value>, JsonPathError> {
+ let mut selector = Selector::default();
+ let _ = selector.value(json);
+ move |path: &str| selector.str_path(path)?.reset_value().select()
+}
+
+/// It is the same as the `selector` function, but it deserializes the result as the given type `T`.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// extern crate serde;
+/// #[macro_use] extern crate serde_json;
+///
+/// use serde::{Deserialize, Serialize};
+///
+/// let json_obj = json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]});
+///
+/// #[derive(Deserialize, PartialEq, Debug)]
+/// struct Friend {
+/// name: String,
+/// age: Option<u8>,
+/// }
+///
+/// let mut selector = jsonpath::selector_as::<Friend>(&json_obj);
+///
+/// let json = selector("$..friends[0]").unwrap();
+///
+/// let ret = vec!(
+/// Friend { name: "친구3".to_string(), age: Some(30) },
+/// Friend { name: "친구1".to_string(), age: Some(20) }
+/// );
+/// assert_eq!(json, ret);
+///
+/// let json = selector("$..friends[1]").unwrap();
+///
+/// let ret = vec!(
+/// Friend { name: "친구4".to_string(), age: None },
+/// Friend { name: "친구2".to_string(), age: Some(20) }
+/// );
+///
+/// assert_eq!(json, ret);
+/// ```
+pub fn selector_as<T: serde::de::DeserializeOwned>(
+ json: &Value,
+) -> impl FnMut(&str) -> Result<Vec<T>, JsonPathError> + '_ {
+ let mut selector = Selector::default();
+ let _ = selector.value(json);
+ move |path: &str| selector.str_path(path)?.reset_value().select_as()
+}
+
+/// It is a simple select function, but it compiles the jsonpath argument every time.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// let json_obj = json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]});
+///
+/// let json = jsonpath::select(&json_obj, "$..friends[0]").unwrap();
+///
+/// assert_eq!(json, vec![
+/// &json!({"name": "친구3", "age": 30}),
+/// &json!({"name": "친구1", "age": 20})
+/// ]);
+/// ```
+pub fn select<'a>(json: &'a Value, path: &str) -> Result<Vec<&'a Value>, JsonPathError> {
+ Selector::default().str_path(path)?.value(json).select()
+}
+
+/// It is the same as the `select` function, but it returns the result as a string.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// let ret = jsonpath::select_as_str(r#"
+/// {
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]
+/// }
+/// "#, "$..friends[0]").unwrap();
+///
+/// assert_eq!(ret, r#"[{"name":"친구3","age":30},{"name":"친구1","age":20}]"#);
+/// ```
+pub fn select_as_str(json_str: &str, path: &str) -> Result<String, JsonPathError> {
+ let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
+ let ret = Selector::default().str_path(path)?.value(&json).select()?;
+ serde_json::to_string(&ret).map_err(|e| JsonPathError::Serde(e.to_string()))
+}
+
+/// It is the same as the `select` function, but it deserializes the result as the given type `T`.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// extern crate serde;
+/// #[macro_use] extern crate serde_json;
+///
+/// use serde::{Deserialize, Serialize};
+///
+/// #[derive(Deserialize, PartialEq, Debug)]
+/// struct Person {
+/// name: String,
+/// age: u8,
+/// phones: Vec<String>,
+/// }
+///
+/// let ret: Vec<Person> = jsonpath::select_as(r#"
+/// {
+/// "person":
+/// {
+/// "name": "Doe John",
+/// "age": 44,
+/// "phones": [
+/// "+44 1234567",
+/// "+44 2345678"
+/// ]
+/// }
+/// }
+/// "#, "$.person").unwrap();
+///
+/// let person = Person {
+/// name: "Doe John".to_string(),
+/// age: 44,
+/// phones: vec!["+44 1234567".to_string(), "+44 2345678".to_string()],
+/// };
+///
+/// assert_eq!(ret[0], person);
+/// ```
+pub fn select_as<T: serde::de::DeserializeOwned>(
+ json_str: &str,
+ path: &str,
+) -> Result<Vec<T>, JsonPathError> {
+ let json = serde_json::from_str(json_str).map_err(|e| JsonPathError::Serde(e.to_string()))?;
+ Selector::default().str_path(path)?.value(&json).select_as()
+}
+
+/// Delete(= replace with null) the JSON property using the jsonpath.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// let json_obj = json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]});
+///
+/// let ret = jsonpath::delete(json_obj, "$..[?(20 == @.age)]").unwrap();
+///
+/// assert_eq!(ret, json!({
+/// "school": {
+/// "friends": [
+/// null,
+/// null
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]}));
+/// ```
+pub fn delete(value: Value, path: &str) -> Result<Value, JsonPathError> {
+ let mut selector = SelectorMut::default();
+ let value = selector.str_path(path)?.value(value).delete()?;
+ Ok(value.take().unwrap_or(Value::Null))
+}
+
+/// Select JSON properties using a jsonpath, transform the result, and then replace it. Via a closure that implements `FnMut` you can transform the selected results.
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// use serde_json::Value;
+///
+/// let json_obj = json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]});
+///
+/// let ret = jsonpath::replace_with(json_obj, "$..[?(@.age == 20)].age", &mut |v| {
+/// let age = if let Value::Number(n) = v {
+/// n.as_u64().unwrap() * 2
+/// } else {
+/// 0
+/// };
+///
+/// Some(json!(age))
+/// }).unwrap();
+///
+/// assert_eq!(ret, json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 40},
+/// {"name": "친구2", "age": 40}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]}));
+/// ```
+pub fn replace_with<F>(value: Value, path: &str, fun: &mut F) -> Result<Value, JsonPathError>
+where
+ F: FnMut(Value) -> Option<Value>,
+{
+ let mut selector = SelectorMut::default();
+ let value = selector.str_path(path)?.value(value).replace_with(fun)?;
+ Ok(value.take().unwrap_or(Value::Null))
+}
+
+/// A pre-compiled expression.
+///
+/// Calling the select function of this struct will re-use the existing, compiled expression.
+///
+/// ## Example
+///
+/// ```rust
+/// extern crate jsonpath_lib as jsonpath;
+/// #[macro_use] extern crate serde_json;
+///
+/// let mut first_friend = jsonpath::Compiled::compile("$..friends[0]").unwrap();
+///
+/// let json_obj = json!({
+/// "school": {
+/// "friends": [
+/// {"name": "친구1", "age": 20},
+/// {"name": "친구2", "age": 20}
+/// ]
+/// },
+/// "friends": [
+/// {"name": "친구3", "age": 30},
+/// {"name": "친구4"}
+/// ]});
+///
+/// // call a first time
+///
+/// let json = first_friend.select(&json_obj).unwrap();
+///
+/// assert_eq!(json, vec![
+/// &json!({"name": "친구3", "age": 30}),
+/// &json!({"name": "친구1", "age": 20})
+/// ]);
+///
+/// // call a second time
+///
+/// let json = first_friend.select(&json_obj).unwrap();
+///
+/// assert_eq!(json, vec![
+/// &json!({"name": "친구3", "age": 30}),
+/// &json!({"name": "친구1", "age": 20})
+/// ]);
+/// ```
+#[derive(Clone, Debug)]
+pub struct Compiled {
+ node: Node,
+}
+
+impl Compiled {
+ /// Compile a path expression and return a compiled instance.
+ ///
+ /// If parsing the path fails, it will return an error.
+ pub fn compile(path: &str) -> Result<Compiled, String> {
+ let node = parser::Parser::compile(path)?;
+ Ok(Compiled{
+ node
+ })
+ }
+
+ /// Execute the select operation on the pre-compiled path.
+ pub fn select<'a>(&self, value: &'a Value) -> Result<Vec<&'a Value>, JsonPathError> {
+ let mut selector = Selector::default();
+ selector.compiled_path(&self.node).value(value).select()
+ }
+}
diff --git a/vendor/jsonpath_lib/src/parser/mod.rs b/vendor/jsonpath_lib/src/parser/mod.rs
new file mode 100644
index 000000000..91cd8960b
--- /dev/null
+++ b/vendor/jsonpath_lib/src/parser/mod.rs
@@ -0,0 +1,1503 @@
+mod path_reader;
+mod tokenizer;
+
+use std::str::FromStr;
+
+use self::tokenizer::*;
+
+const DUMMY: usize = 0;
+
+type ParseResult<T> = Result<T, String>;
+
+mod utils {
+ use std::str::FromStr;
+
+ pub fn string_to_num<F, S: FromStr>(string: &str, msg_handler: F) -> Result<S, String>
+ where
+ F: Fn() -> String,
+ {
+ match string.parse() {
+ Ok(n) => Ok(n),
+ _ => Err(msg_handler()),
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum ParseToken {
+ // '$'
+ Absolute,
+ // '@'
+ Relative,
+ // '.'
+ In,
+ // '..'
+ Leaves,
+ // '*'
+ All,
+
+ Key(String),
+ Keys(Vec<String>),
+ // []
+ Array,
+    // meta token
+ ArrayEof,
+ // ?( filter )
+ Filter(FilterToken),
+ // 1 : 2
+ Range(Option<isize>, Option<isize>, Option<usize>),
+ // 1, 2, 3
+ Union(Vec<isize>),
+
+ Number(f64),
+
+ Bool(bool),
+
+ Eof,
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum FilterToken {
+ Equal,
+ NotEqual,
+ Little,
+ LittleOrEqual,
+ Greater,
+ GreaterOrEqual,
+ And,
+ Or,
+}
+
+#[derive(Debug, Clone)]
+pub struct Node {
+ left: Option<Box<Node>>,
+ right: Option<Box<Node>>,
+ token: ParseToken,
+}
+
+pub struct Parser;
+
+impl Parser {
+ pub fn compile(input: &str) -> ParseResult<Node> {
+ let mut tokenizer = TokenReader::new(input);
+ Ok(Self::json_path(&mut tokenizer)?)
+ }
+
+ fn json_path(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#json_path");
+ match tokenizer.next_token() {
+ Ok(Token::Absolute(_)) => {
+ let node = Self::node(ParseToken::Absolute);
+ Self::paths(node, tokenizer)
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn paths(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#paths");
+ match tokenizer.peek_token() {
+ Ok(Token::Dot(_)) => {
+ Self::eat_token(tokenizer);
+ Self::paths_dot(prev, tokenizer)
+ }
+ Ok(Token::OpenArray(_)) => {
+ Self::eat_token(tokenizer);
+ Self::eat_whitespace(tokenizer);
+ let node = Self::array(prev, tokenizer)?;
+ Self::paths(node, tokenizer)
+ }
+ _ => Ok(prev),
+ }
+ }
+
+ fn paths_dot(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#paths_dot");
+ let node = Self::path(prev, tokenizer)?;
+ Self::paths(node, tokenizer)
+ }
+
+ fn path(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#path");
+ match tokenizer.peek_token() {
+ Ok(Token::Dot(_)) => Self::path_leaves(prev, tokenizer),
+ Ok(Token::Asterisk(_)) => Self::path_in_all(prev, tokenizer),
+ Ok(Token::Key(_, _)) => Self::path_in_key(prev, tokenizer),
+ Ok(Token::OpenArray(_)) => {
+ Self::eat_token(tokenizer);
+ Self::array(prev, tokenizer)
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn path_leaves(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#path_leaves");
+ Self::eat_token(tokenizer);
+ match tokenizer.peek_token() {
+ Ok(Token::Asterisk(_)) => Self::path_leaves_all(prev, tokenizer),
+ Ok(Token::OpenArray(_)) => {
+ let mut leaves_node = Self::node(ParseToken::Leaves);
+ leaves_node.left = Some(Box::new(prev));
+ Ok(Self::paths(leaves_node, tokenizer)?)
+ }
+ _ => Self::path_leaves_key(prev, tokenizer),
+ }
+ }
+
+ fn path_leaves_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#path_leaves_key");
+ Ok(Node {
+ token: ParseToken::Leaves,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::key(tokenizer)?)),
+ })
+ }
+
+ fn path_leaves_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#path_leaves_all");
+ Self::eat_token(tokenizer);
+ Ok(Node {
+ token: ParseToken::Leaves,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::node(ParseToken::All))),
+ })
+ }
+
+ fn path_in_all(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#path_in_all");
+ Self::eat_token(tokenizer);
+ Ok(Node {
+ token: ParseToken::In,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::node(ParseToken::All))),
+ })
+ }
+
+ fn path_in_key(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#path_in_key");
+ Ok(Node {
+ token: ParseToken::In,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::key(tokenizer)?)),
+ })
+ }
+
+ fn key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#key");
+ match tokenizer.next_token() {
+ Ok(Token::Key(_, v)) => Ok(Self::node(ParseToken::Key(v))),
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn boolean(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#boolean");
+
+ fn validation_bool_value(v: &str) -> bool {
+ let b = v.as_bytes();
+ !b.is_empty() && (b[0] == b't' || b[0] == b'T' || b[0] == b'f' || b[0] == b'F')
+ }
+
+ match tokenizer.next_token() {
+ Ok(Token::Key(_, ref v)) if validation_bool_value(v) => {
+ Ok(Self::node(ParseToken::Bool(v.eq_ignore_ascii_case("true"))))
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn array_keys(tokenizer: &mut TokenReader, first_key: String) -> ParseResult<Node> {
+ let mut keys = vec![first_key];
+
+ while let Ok(Token::Comma(_)) = tokenizer.peek_token() {
+ Self::eat_token(tokenizer);
+ Self::eat_whitespace(tokenizer);
+
+ match tokenizer.next_token() {
+ Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
+ keys.push(val);
+ }
+ _ => return Err(tokenizer.err_msg()),
+ }
+
+ Self::eat_whitespace(tokenizer);
+ }
+
+ Ok(Self::node(ParseToken::Keys(keys)))
+ }
+
+ fn array_quote_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#array_quote_value");
+ match tokenizer.next_token() {
+ Ok(Token::SingleQuoted(_, val)) | Ok(Token::DoubleQuoted(_, val)) => {
+ if let Ok(Token::Comma(_)) = tokenizer.peek_token() {
+ Self::array_keys(tokenizer, val)
+ } else {
+ Ok(Self::node(ParseToken::Key(val)))
+ }
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn array_start(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#array_start");
+ match tokenizer.peek_token() {
+ Ok(Token::Question(_)) => {
+ Self::eat_token(tokenizer);
+ Ok(Node {
+ token: ParseToken::Array,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::filter(tokenizer)?)),
+ })
+ }
+ Ok(Token::Asterisk(_)) => {
+ Self::eat_token(tokenizer);
+ Ok(Node {
+ token: ParseToken::Array,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::node(ParseToken::All))),
+ })
+ }
+ _ => Ok(Node {
+ token: ParseToken::Array,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::array_value(tokenizer)?)),
+ }),
+ }
+ }
+
+ fn array(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#array");
+ let ret = Self::array_start(prev, tokenizer)?;
+ Self::eat_whitespace(tokenizer);
+ Self::close_token(ret, Token::CloseArray(DUMMY), tokenizer)
+ }
+
+ fn array_value_key(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#array_value_key");
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, ref val)) => {
+ let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
+ Self::eat_whitespace(tokenizer);
+
+ match tokenizer.peek_token() {
+ Ok(Token::Comma(_)) => Self::union(digit, tokenizer),
+ Ok(Token::Split(_)) => Self::range_from(digit, tokenizer),
+ _ => Ok(Self::node(ParseToken::Number(digit as f64))),
+ }
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn array_value(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#array_value");
+ match tokenizer.peek_token() {
+ Ok(Token::Key(_, _)) => Self::array_value_key(tokenizer),
+ Ok(Token::Split(_)) => {
+ Self::eat_token(tokenizer);
+ Self::range_to(tokenizer)
+ }
+ Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
+ Self::array_quote_value(tokenizer)
+ }
+ Err(TokenError::Eof) => Ok(Self::node(ParseToken::Eof)),
+ _ => {
+ Self::eat_token(tokenizer);
+ Err(tokenizer.err_msg())
+ }
+ }
+ }
+
+ fn union(num: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#union");
+ let mut values = vec![num];
+ while matches!(tokenizer.peek_token(), Ok(Token::Comma(_))) {
+ Self::eat_token(tokenizer);
+ Self::eat_whitespace(tokenizer);
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, ref val)) => {
+ let digit = utils::string_to_num(val, || tokenizer.err_msg_with_pos(pos))?;
+ values.push(digit);
+ }
+ _ => {
+ return Err(tokenizer.err_msg());
+ }
+ }
+ }
+ Ok(Self::node(ParseToken::Union(values)))
+ }
+
+ fn range_value<S: FromStr>(tokenizer: &mut TokenReader) -> Result<Option<S>, String> {
+ Self::eat_whitespace(tokenizer);
+
+ match tokenizer.peek_token() {
+ Ok(Token::Split(_)) => {
+ Self::eat_token(tokenizer);
+ Self::eat_whitespace(tokenizer);
+ }
+ _ => {
+ return Ok(None);
+ }
+ }
+
+ match tokenizer.peek_token() {
+ Ok(Token::Key(_, _)) => {}
+ _ => {
+ return Ok(None);
+ }
+ }
+
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, str_step)) => {
+ match utils::string_to_num(&str_step, || tokenizer.err_msg_with_pos(pos)) {
+ Ok(step) => Ok(Some(step)),
+ Err(e) => Err(e),
+ }
+ }
+ _ => {
+ unreachable!();
+ }
+ }
+ }
+
+ fn range_from(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#range_from");
+ Self::eat_token(tokenizer);
+ Self::eat_whitespace(tokenizer);
+
+ match tokenizer.peek_token() {
+ Ok(Token::Key(_, _)) => Self::range(from, tokenizer),
+ Ok(Token::Split(_)) => match Self::range_value(tokenizer)? {
+ Some(step) => Ok(Self::node(ParseToken::Range(Some(from), None, Some(step)))),
+ _ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
+ },
+ _ => Ok(Self::node(ParseToken::Range(Some(from), None, None))),
+ }
+ }
+
+ fn range_to(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#range_to");
+
+ if let Some(step) = Self::range_value(tokenizer)? {
+ return Ok(Self::node(ParseToken::Range(None, None, Some(step))));
+ }
+
+ if let Ok(Token::CloseArray(_)) = tokenizer.peek_token() {
+ return Ok(Self::node(ParseToken::Range(None, None, None)));
+ }
+
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, ref to_str)) => {
+ let to = utils::string_to_num(to_str, || tokenizer.err_msg_with_pos(pos))?;
+ let step = Self::range_value(tokenizer)?;
+ Ok(Self::node(ParseToken::Range(None, Some(to), step)))
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn range(from: isize, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#range");
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, ref str_to)) => {
+ let to = utils::string_to_num(str_to, || tokenizer.err_msg_with_pos(pos))?;
+ let step = Self::range_value(tokenizer)?;
+ Ok(Self::node(ParseToken::Range(Some(from), Some(to), step)))
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn filter(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#filter");
+ match tokenizer.next_token() {
+ Ok(Token::OpenParenthesis(_)) => {
+ let ret = Self::exprs(tokenizer)?;
+ Self::eat_whitespace(tokenizer);
+ Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn exprs(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ Self::eat_whitespace(tokenizer);
+ debug!("#exprs");
+ let node = match tokenizer.peek_token() {
+ Ok(Token::OpenParenthesis(_)) => {
+ Self::eat_token(tokenizer);
+ trace!("\t-exprs - open_parenthesis");
+ let ret = Self::exprs(tokenizer)?;
+ Self::eat_whitespace(tokenizer);
+ Self::close_token(ret, Token::CloseParenthesis(DUMMY), tokenizer)?
+ }
+ _ => {
+ trace!("\t-exprs - else");
+ Self::expr(tokenizer)?
+ }
+ };
+ Self::eat_whitespace(tokenizer);
+ Self::condition_expr(node, tokenizer)
+ }
+
+ fn condition_expr(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#condition_expr");
+ match tokenizer.peek_token() {
+ Ok(Token::And(_)) => {
+ Self::eat_token(tokenizer);
+ Ok(Node {
+ token: ParseToken::Filter(FilterToken::And),
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::exprs(tokenizer)?)),
+ })
+ }
+ Ok(Token::Or(_)) => {
+ Self::eat_token(tokenizer);
+ Ok(Node {
+ token: ParseToken::Filter(FilterToken::Or),
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::exprs(tokenizer)?)),
+ })
+ }
+ _ => Ok(prev),
+ }
+ }
+
+ fn expr(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#expr");
+
+ let has_prop_candidate = matches!(tokenizer.peek_token(), Ok(Token::At(_)));
+
+ let node = Self::term(tokenizer)?;
+ Self::eat_whitespace(tokenizer);
+
+ if matches!(tokenizer.peek_token(),
+ Ok(Token::Equal(_))
+ | Ok(Token::NotEqual(_))
+ | Ok(Token::Little(_))
+ | Ok(Token::LittleOrEqual(_))
+ | Ok(Token::Greater(_))
+ | Ok(Token::GreaterOrEqual(_)))
+ {
+ Self::op(node, tokenizer)
+ } else if has_prop_candidate {
+ Ok(node)
+ } else {
+ Err(tokenizer.err_msg())
+ }
+ }
+
+ fn term_num(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#term_num");
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, val)) => match tokenizer.peek_token() {
+ Ok(Token::Dot(_)) => Self::term_num_float(val.as_str(), tokenizer),
+ _ => {
+ let number = utils::string_to_num(&val, || tokenizer.err_msg_with_pos(pos))?;
+ Ok(Self::node(ParseToken::Number(number)))
+ }
+ },
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ fn term_num_float(num: &str, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#term_num_float");
+ Self::eat_token(tokenizer);
+ match tokenizer.next_token() {
+ Ok(Token::Key(pos, frac)) => {
+ let mut f = String::new();
+ f.push_str(&num);
+ f.push('.');
+ f.push_str(frac.as_str());
+ let number = utils::string_to_num(&f, || tokenizer.err_msg_with_pos(pos))?;
+ Ok(Self::node(ParseToken::Number(number)))
+ }
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+
+ /// Parses a single filter term: `@` (relative path), `$` (absolute
+ /// path), a quoted string, a number, or a boolean literal.
+ fn term(tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#term");
+
+ match tokenizer.peek_token() {
+ Ok(Token::At(_)) => {
+ Self::eat_token(tokenizer);
+ let node = Self::node(ParseToken::Relative);
+
+ match tokenizer.peek_token() {
+ // Bare `@` followed by whitespace is a complete term.
+ Ok(Token::Whitespace(_, _)) => {
+ Self::eat_whitespace(tokenizer);
+ Ok(node)
+ }
+ // Otherwise continue parsing the path rooted at `@`.
+ _ => Self::paths(node, tokenizer),
+ }
+ }
+ Ok(Token::Absolute(_)) => {
+ Self::json_path(tokenizer)
+ }
+ Ok(Token::DoubleQuoted(_, _)) | Ok(Token::SingleQuoted(_, _)) => {
+ Self::array_quote_value(tokenizer)
+ }
+ Ok(Token::Key(_, key)) => {
+ // Tokenizer keys always contain at least one character
+ // (the char that started them), so byte 0 exists.
+ match key.as_bytes()[0] {
+ b'-' | b'0'..=b'9' => Self::term_num(tokenizer),
+ _ => Self::boolean(tokenizer),
+ }
+ }
+ _ => {
+ Err(tokenizer.err_msg())
+ }
+ }
+ }
+
+ /// Parses a comparison operator plus its right-hand term, producing a
+ /// filter node with `prev` as the left operand.
+ fn op(prev: Node, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#op");
+ let token = match tokenizer.next_token() {
+ Ok(Token::Equal(_)) => ParseToken::Filter(FilterToken::Equal),
+ Ok(Token::NotEqual(_)) => ParseToken::Filter(FilterToken::NotEqual),
+ Ok(Token::Little(_)) => ParseToken::Filter(FilterToken::Little),
+ Ok(Token::LittleOrEqual(_)) => ParseToken::Filter(FilterToken::LittleOrEqual),
+ Ok(Token::Greater(_)) => ParseToken::Filter(FilterToken::Greater),
+ Ok(Token::GreaterOrEqual(_)) => ParseToken::Filter(FilterToken::GreaterOrEqual),
+ _ => {
+ return Err(tokenizer.err_msg());
+ }
+ };
+
+ Self::eat_whitespace(tokenizer);
+
+ // Build the binary node: left = expression so far, right = next term.
+ Ok(Node {
+ token,
+ left: Some(Box::new(prev)),
+ right: Some(Box::new(Self::term(tokenizer)?)),
+ })
+ }
+
+ /// Discards any run of consecutive whitespace tokens.
+ fn eat_whitespace(tokenizer: &mut TokenReader) {
+ while let Ok(Token::Whitespace(_, _)) = tokenizer.peek_token() {
+ let _ = tokenizer.next_token();
+ }
+ }
+
+ /// Unconditionally discards the next token (result intentionally ignored).
+ fn eat_token(tokenizer: &mut TokenReader) {
+ let _ = tokenizer.next_token();
+ }
+
+ /// Creates a leaf AST node (no children) for `token`.
+ fn node(token: ParseToken) -> Node {
+ Node {
+ left: None,
+ right: None,
+ token,
+ }
+ }
+
+ /// Consumes the next token and requires it to be the same variant as
+ /// `token` (e.g. a closing `]`), then returns `ret` unchanged.
+ fn close_token(ret: Node, token: Token, tokenizer: &mut TokenReader) -> ParseResult<Node> {
+ debug!("#close_token");
+ match tokenizer.next_token() {
+ Ok(ref t) if t.is_match_token_type(token) => Ok(ret),
+ _ => Err(tokenizer.err_msg()),
+ }
+ }
+}
+
+ /// Visitor over the parsed AST. The default `visit` flattens the tree
+ /// into a token stream: leaf tokens are emitted directly, `In`/`Leaves`
+ /// are emitted infix (left, token, right), comparison filters and
+ /// `And`/`Or` are emitted postfix (operands first, operator last), and
+ /// `Array` nodes are terminated with an explicit `ArrayEof`.
+ pub trait NodeVisitor {
+ fn visit(&mut self, node: &Node) {
+ match &node.token {
+ // Leaf tokens — no children to descend into.
+ ParseToken::Absolute
+ | ParseToken::Relative
+ | ParseToken::All
+ | ParseToken::Key(_)
+ | ParseToken::Keys(_)
+ | ParseToken::Range(_, _, _)
+ | ParseToken::Union(_)
+ | ParseToken::Number(_)
+ | ParseToken::Bool(_) => {
+ self.visit_token(&node.token);
+ }
+ // Infix emission: left, token, right.
+ ParseToken::In | ParseToken::Leaves => {
+ if let Some(n) = &node.left {
+ self.visit(&*n);
+ }
+
+ self.visit_token(&node.token);
+
+ if let Some(n) = &node.right {
+ self.visit(&*n);
+ }
+ }
+ // Array subscript: infix like `In`, plus a closing ArrayEof.
+ ParseToken::Array => {
+ if let Some(n) = &node.left {
+ self.visit(&*n);
+ }
+
+ self.visit_token(&node.token);
+
+ if let Some(n) = &node.right {
+ self.visit(&*n);
+ }
+
+ self.visit_token(&ParseToken::ArrayEof);
+ }
+ // Logical connectives: postfix (both operands, then operator).
+ ParseToken::Filter(FilterToken::And) | ParseToken::Filter(FilterToken::Or) => {
+ if let Some(n) = &node.left {
+ self.visit(&*n);
+ }
+
+ if let Some(n) = &node.right {
+ self.visit(&*n);
+ }
+
+ self.visit_token(&node.token);
+ }
+ // Comparison filters: postfix, with `end_term` after each operand.
+ ParseToken::Filter(_) => {
+ if let Some(n) = &node.left {
+ self.visit(&*n);
+ }
+
+ self.end_term();
+
+ if let Some(n) = &node.right {
+ self.visit(&*n);
+ }
+
+ self.end_term();
+
+ self.visit_token(&node.token);
+ }
+ _ => {}
+ }
+ }
+
+ /// Called once for every emitted token.
+ fn visit_token(&mut self, token: &ParseToken);
+ /// Hook invoked after each comparison operand; default is a no-op.
+ fn end_term(&mut self) {}
+ }
+
+ // Parser unit tests: each test compiles a JSONPath string and compares
+ // the visitor's flattened token stream with the expected sequence.
+ #[cfg(test)]
+ mod parser_tests {
+ use parser::{FilterToken, NodeVisitor, ParseToken, Parser};
+
+ // Test visitor that records every emitted token into `stack`.
+ struct NodeVisitorTestImpl<'a> {
+ input: &'a str,
+ stack: Vec<ParseToken>,
+ }
+
+ impl<'a> NodeVisitorTestImpl<'a> {
+ fn new(input: &'a str) -> Self {
+ NodeVisitorTestImpl {
+ input,
+ stack: Vec::new(),
+ }
+ }
+
+ // Compiles `input` and returns the flattened token stream.
+ fn start(&mut self) -> Result<Vec<ParseToken>, String> {
+ let node = Parser::compile(self.input)?;
+ self.visit(&node);
+ Ok(self.stack.split_off(0))
+ }
+ }
+
+ impl<'a> NodeVisitor for NodeVisitorTestImpl<'a> {
+ fn visit_token(&mut self, token: &ParseToken) {
+ self.stack.push(token.clone());
+ }
+ }
+
+ fn setup() {
+ let _ = env_logger::try_init();
+ }
+
+ fn run(input: &str) -> Result<Vec<ParseToken>, String> {
+ let mut interpreter = NodeVisitorTestImpl::new(input);
+ interpreter.start()
+ }
+
+ #[test]
+ fn parse_error() {
+ setup();
+
+ fn invalid(path: &str) {
+ assert!(run(path).is_err());
+ }
+
+ invalid("$[]");
+ invalid("$[a]");
+ invalid("$[?($.a)]");
+ invalid("$[?(@.a > @.b]");
+ invalid("$[?(@.a < @.b&&(@.c < @.d)]");
+ invalid("@.");
+ invalid("$..[?(a <= @.a)]"); // invalid term value
+ invalid("$['a', b]");
+ invalid("$[0, >=]");
+ invalid("$[a:]");
+ invalid("$[:a]");
+ invalid("$[::a]");
+ invalid("$[:>]");
+ invalid("$[1:>]");
+ invalid("$[1,,]");
+ invalid("$[?]");
+ invalid("$[?(1 = 1)]");
+ invalid("$[?(1 = >)]");
+ }
+
+ #[test]
+ fn parse_path() {
+ setup();
+
+ assert_eq!(
+ run("$.aa"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("aa".to_owned())
+ ])
+ );
+
+ assert_eq!(
+ run("$.00.a"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("00".to_owned()),
+ ParseToken::In,
+ ParseToken::Key("a".to_owned())
+ ])
+ );
+
+ assert_eq!(
+ run("$.00.韓창.seok"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("00".to_owned()),
+ ParseToken::In,
+ ParseToken::Key("韓창".to_owned()),
+ ParseToken::In,
+ ParseToken::Key("seok".to_owned())
+ ])
+ );
+
+ assert_eq!(
+ run("$.*"),
+ Ok(vec![ParseToken::Absolute, ParseToken::In, ParseToken::All])
+ );
+
+ assert_eq!(
+ run("$..*"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Leaves,
+ ParseToken::All
+ ])
+ );
+
+ assert_eq!(
+ run("$..[0]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Leaves,
+ ParseToken::Array,
+ ParseToken::Number(0.0),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.$a"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("$a".to_owned())
+ ])
+ );
+
+ assert_eq!(
+ run("$.['$a']"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Key("$a".to_owned()),
+ ParseToken::ArrayEof,
+ ])
+ );
+
+ if run("$.").is_ok() {
+ panic!();
+ }
+
+ if run("$..").is_ok() {
+ panic!();
+ }
+
+ if run("$. a").is_ok() {
+ panic!();
+ }
+ }
+
+ #[test]
+ fn parse_array_syntax() {
+ setup();
+
+ assert_eq!(
+ run("$.book[?(@.isbn)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("book".to_string()),
+ ParseToken::Array,
+ ParseToken::Relative,
+ ParseToken::In,
+ ParseToken::Key("isbn".to_string()),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ //
+ // An `Array` also counts as an `In` context, so when nested only a single one is kept.
+ //
+ assert_eq!(
+ run("$.[*]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::All,
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[*]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::All,
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[*].가"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::All,
+ ParseToken::ArrayEof,
+ ParseToken::In,
+ ParseToken::Key("가".to_owned())
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[0][1]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Number(0_f64),
+ ParseToken::ArrayEof,
+ ParseToken::Array,
+ ParseToken::Number(1_f64),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[1,2]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Union(vec![1, 2]),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[10:]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Range(Some(10), None, None),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[:11]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Range(None, Some(11), None),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[-12:13]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Range(Some(-12), Some(13), None),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$[0:3:2]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Range(Some(0), Some(3), Some(2)),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$[:3:2]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Range(None, Some(3), Some(2)),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$[:]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Range(None, None, None),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$[::]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Range(None, None, None),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$[::2]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Range(None, None, Some(2)),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$["a", 'b']"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Keys(vec!["a".to_string(), "b".to_string()]),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[?(1>2)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Number(1_f64),
+ ParseToken::Number(2_f64),
+ ParseToken::Filter(FilterToken::Greater),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[?($.b>3)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Array,
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("b".to_owned()),
+ ParseToken::Number(3_f64),
+ ParseToken::Filter(FilterToken::Greater),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$[?($.c>@.d && 1==2)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("c".to_owned()),
+ ParseToken::Relative,
+ ParseToken::In,
+ ParseToken::Key("d".to_owned()),
+ ParseToken::Filter(FilterToken::Greater),
+ ParseToken::Number(1_f64),
+ ParseToken::Number(2_f64),
+ ParseToken::Filter(FilterToken::Equal),
+ ParseToken::Filter(FilterToken::And),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$[?($.c>@.d&&(1==2||3>=4))]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("c".to_owned()),
+ ParseToken::Relative,
+ ParseToken::In,
+ ParseToken::Key("d".to_owned()),
+ ParseToken::Filter(FilterToken::Greater),
+ ParseToken::Number(1_f64),
+ ParseToken::Number(2_f64),
+ ParseToken::Filter(FilterToken::Equal),
+ ParseToken::Number(3_f64),
+ ParseToken::Number(4_f64),
+ ParseToken::Filter(FilterToken::GreaterOrEqual),
+ ParseToken::Filter(FilterToken::Or),
+ ParseToken::Filter(FilterToken::And),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$[?(@.a<@.b)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Relative,
+ ParseToken::In,
+ ParseToken::Key("a".to_owned()),
+ ParseToken::Relative,
+ ParseToken::In,
+ ParseToken::Key("b".to_owned()),
+ ParseToken::Filter(FilterToken::Little),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$[*][*][*]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::All,
+ ParseToken::ArrayEof,
+ ParseToken::Array,
+ ParseToken::All,
+ ParseToken::ArrayEof,
+ ParseToken::Array,
+ ParseToken::All,
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$['a']['bb']"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Key("a".to_string()),
+ ParseToken::ArrayEof,
+ ParseToken::Array,
+ ParseToken::Key("bb".to_string()),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$.a[?(@.e==true)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::In,
+ ParseToken::Key("a".to_string()),
+ ParseToken::Array,
+ ParseToken::Relative,
+ ParseToken::In,
+ ParseToken::Key("e".to_string()),
+ ParseToken::Bool(true),
+ ParseToken::Filter(FilterToken::Equal),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$[?(@ > 1)]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Relative,
+ ParseToken::Number(1_f64),
+ ParseToken::Filter(FilterToken::Greater),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run("$[:]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Range(None, None, None),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$['single\'quote']"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Key("single'quote".to_string()),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ assert_eq!(
+ run(r#"$["single\"quote"]"#),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Key(r#"single"quote"#.to_string()),
+ ParseToken::ArrayEof
+ ])
+ );
+ }
+
+ #[test]
+ fn parse_array_float() {
+ setup();
+
+ assert_eq!(
+ run("$[?(1.1<2.1)]"),
+ Ok(vec![
+ ParseToken::Absolute,
+ ParseToken::Array,
+ ParseToken::Number(1.1),
+ ParseToken::Number(2.1),
+ ParseToken::Filter(FilterToken::Little),
+ ParseToken::ArrayEof
+ ])
+ );
+
+ if run("$[1.1]").is_ok() {
+ panic!();
+ }
+
+ if run("$[?(1.1<.2)]").is_ok() {
+ panic!();
+ }
+
+ if run("$[?(1.1<2.)]").is_ok() {
+ panic!();
+ }
+
+ if run("$[?(1.1<2.a)]").is_ok() {
+ panic!();
+ }
+ }
+ }
+
+ // Tokenizer unit tests: lex whole inputs and compare the token stream
+ // (plus the terminating error, normally `Eof`) with expectations.
+ #[cfg(test)]
+ mod tokenizer_tests {
+ use parser::tokenizer::{Token, TokenError, TokenReader, Tokenizer};
+
+ fn setup() {
+ let _ = env_logger::try_init();
+ }
+
+ // Lexes `input` to exhaustion, returning all tokens and the final error.
+ fn collect_token(input: &str) -> (Vec<Token>, Option<TokenError>) {
+ let mut tokenizer = Tokenizer::new(input);
+ let mut vec = vec![];
+ loop {
+ match tokenizer.next_token() {
+ Ok(t) => vec.push(t),
+ Err(e) => return (vec, Some(e)),
+ }
+ }
+ }
+
+ fn run(input: &str, expected: (Vec<Token>, Option<TokenError>)) {
+ let (vec, err) = collect_token(input);
+ assert_eq!((vec, err), expected, "\"{}\"", input);
+ }
+
+ #[test]
+ fn peek() {
+ let mut tokenizer = TokenReader::new("$.a");
+ match tokenizer.next_token() {
+ Ok(t) => assert_eq!(Token::Absolute(0), t),
+ _ => panic!(),
+ }
+
+ // Peeking twice must return the same token without consuming it.
+ match tokenizer.peek_token() {
+ Ok(t) => assert_eq!(&Token::Dot(1), t),
+ _ => panic!(),
+ }
+
+ match tokenizer.peek_token() {
+ Ok(t) => assert_eq!(&Token::Dot(1), t),
+ _ => panic!(),
+ }
+
+ match tokenizer.next_token() {
+ Ok(t) => assert_eq!(Token::Dot(1), t),
+ _ => panic!(),
+ }
+ }
+
+ #[test]
+ fn token() {
+ setup();
+
+ run(
+ "$.01.a",
+ (
+ vec![
+ Token::Absolute(0),
+ Token::Dot(1),
+ Token::Key(2, "01".to_string()),
+ Token::Dot(4),
+ Token::Key(5, "a".to_string()),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "$. []",
+ (
+ vec![
+ Token::Absolute(0),
+ Token::Dot(1),
+ Token::Whitespace(2, 2),
+ Token::OpenArray(5),
+ Token::CloseArray(6),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "$..",
+ (
+ vec![Token::Absolute(0), Token::Dot(1), Token::Dot(2)],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "$..ab",
+ (
+ vec![
+ Token::Absolute(0),
+ Token::Dot(1),
+ Token::Dot(2),
+ Token::Key(3, "ab".to_string()),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "$..가 [",
+ (
+ vec![
+ Token::Absolute(0),
+ Token::Dot(1),
+ Token::Dot(2),
+ Token::Key(3, "가".to_string()),
+ Token::Whitespace(6, 0),
+ Token::OpenArray(7),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "[-1, 2 ]",
+ (
+ vec![
+ Token::OpenArray(0),
+ Token::Key(1, "-1".to_string()),
+ Token::Comma(3),
+ Token::Whitespace(4, 0),
+ Token::Key(5, "2".to_string()),
+ Token::Whitespace(6, 0),
+ Token::CloseArray(7),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "[ 1 2 , 3 \"abc\" : -10 ]",
+ (
+ vec![
+ Token::OpenArray(0),
+ Token::Whitespace(1, 0),
+ Token::Key(2, "1".to_string()),
+ Token::Whitespace(3, 0),
+ Token::Key(4, "2".to_string()),
+ Token::Whitespace(5, 0),
+ Token::Comma(6),
+ Token::Whitespace(7, 0),
+ Token::Key(8, "3".to_string()),
+ Token::Whitespace(9, 0),
+ Token::DoubleQuoted(10, "abc".to_string()),
+ Token::Whitespace(15, 0),
+ Token::Split(16),
+ Token::Whitespace(17, 0),
+ Token::Key(18, "-10".to_string()),
+ Token::Whitespace(21, 0),
+ Token::CloseArray(22),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "?(@.a가 <41.01)",
+ (
+ vec![
+ Token::Question(0),
+ Token::OpenParenthesis(1),
+ Token::At(2),
+ Token::Dot(3),
+ Token::Key(4, "a가".to_string()),
+ Token::Whitespace(8, 0),
+ Token::Little(9),
+ Token::Key(10, "41".to_string()),
+ Token::Dot(12),
+ Token::Key(13, "01".to_string()),
+ Token::CloseParenthesis(15),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "?(@.a <4a.01)",
+ (
+ vec![
+ Token::Question(0),
+ Token::OpenParenthesis(1),
+ Token::At(2),
+ Token::Dot(3),
+ Token::Key(4, "a".to_string()),
+ Token::Whitespace(5, 0),
+ Token::Little(6),
+ Token::Key(7, "4a".to_string()),
+ Token::Dot(9),
+ Token::Key(10, "01".to_string()),
+ Token::CloseParenthesis(12),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "?($.c>@.d)",
+ (
+ vec![
+ Token::Question(0),
+ Token::OpenParenthesis(1),
+ Token::Absolute(2),
+ Token::Dot(3),
+ Token::Key(4, "c".to_string()),
+ Token::Greater(5),
+ Token::At(6),
+ Token::Dot(7),
+ Token::Key(8, "d".to_string()),
+ Token::CloseParenthesis(9),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ "$[:]",
+ (
+ vec![
+ Token::Absolute(0),
+ Token::OpenArray(1),
+ Token::Split(2),
+ Token::CloseArray(3),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ r#"$['single\'quote']"#,
+ (
+ vec![
+ Token::Absolute(0),
+ Token::OpenArray(1),
+ Token::SingleQuoted(2, "single\'quote".to_string()),
+ Token::CloseArray(17),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ r#"$['single\'1','single\'2']"#,
+ (
+ vec![
+ Token::Absolute(0),
+ Token::OpenArray(1),
+ Token::SingleQuoted(2, "single\'1".to_string()),
+ Token::Comma(13),
+ Token::SingleQuoted(14, "single\'2".to_string()),
+ Token::CloseArray(25),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+
+ run(
+ r#"$["double\"quote"]"#,
+ (
+ vec![
+ Token::Absolute(0),
+ Token::OpenArray(1),
+ Token::DoubleQuoted(2, "double\"quote".to_string()),
+ Token::CloseArray(17),
+ ],
+ Some(TokenError::Eof),
+ ),
+ );
+ }
+ }
diff --git a/vendor/jsonpath_lib/src/parser/path_reader.rs b/vendor/jsonpath_lib/src/parser/path_reader.rs
new file mode 100644
index 000000000..8147f554d
--- /dev/null
+++ b/vendor/jsonpath_lib/src/parser/path_reader.rs
@@ -0,0 +1,53 @@
+use std::result::Result;
+
+ /// Errors produced by `PathReader`; only end-of-input is possible.
+ #[derive(Debug, PartialEq)]
+ pub enum ReaderError {
+ Eof,
+ }
+
+ /// Cursor over the raw path string. `input` is the not-yet-consumed
+ /// suffix; `pos` is the byte offset already consumed from the original.
+ pub struct PathReader<'a> {
+ input: &'a str,
+ pos: usize,
+ }
+
+ impl<'a> PathReader<'a> {
+ pub fn new(input: &'a str) -> Self {
+ PathReader { input, pos: 0 }
+ }
+
+ /// Returns the next char without consuming it. NOTE(review): the
+ /// returned offset is the position *after* the char (`pos + len_utf8`),
+ /// unlike `next_char` which returns the char's own position; callers
+ /// here ignore the offset, so the asymmetry is harmless.
+ pub fn peek_char(&self) -> Result<(usize, char), ReaderError> {
+ let ch = self.input.chars().next().ok_or(ReaderError::Eof)?;
+ Ok((self.pos + ch.len_utf8(), ch))
+ }
+
+ /// Consumes the longest prefix whose chars satisfy `fun`, returning
+ /// the new byte position and the consumed text. Never errors — an
+ /// empty match yields an empty string.
+ pub fn take_while<F>(&mut self, fun: F) -> Result<(usize, String), ReaderError>
+ where
+ F: Fn(&char) -> bool,
+ {
+ let mut char_len: usize = 0;
+ let mut ret = String::new();
+ for c in self.input.chars().by_ref() {
+ if !fun(&c) {
+ break;
+ }
+ char_len += c.len_utf8();
+ ret.push(c);
+ }
+
+ // Advance the cursor past everything collected.
+ self.pos += char_len;
+ self.input = &self.input[char_len..];
+ Ok((self.pos, ret))
+ }
+
+ /// Consumes one char, returning its byte position (before the advance).
+ pub fn next_char(&mut self) -> Result<(usize, char), ReaderError> {
+ let (_, ch) = self.peek_char()?;
+ self.input = &self.input[ch.len_utf8()..];
+ let ret = Ok((self.pos, ch));
+ self.pos += ch.len_utf8();
+ ret
+ }
+
+ /// Byte offset consumed so far.
+ pub fn current_pos(&self) -> usize {
+ self.pos
+ }
+ }
diff --git a/vendor/jsonpath_lib/src/parser/tokenizer.rs b/vendor/jsonpath_lib/src/parser/tokenizer.rs
new file mode 100644
index 000000000..3e079b9d8
--- /dev/null
+++ b/vendor/jsonpath_lib/src/parser/tokenizer.rs
@@ -0,0 +1,370 @@
+use std::result::Result;
+
+use super::path_reader::{PathReader, ReaderError};
+
+ // Single-character lexemes recognized by the tokenizer.
+ const CH_DOLLA: char = '$';
+ const CH_DOT: char = '.';
+ const CH_ASTERISK: char = '*';
+ const CH_LARRAY: char = '[';
+ const CH_RARRAY: char = ']';
+ const CH_LPAREN: char = '(';
+ const CH_RPAREN: char = ')';
+ const CH_AT: char = '@';
+ const CH_QUESTION: char = '?';
+ const CH_COMMA: char = ',';
+ const CH_SEMICOLON: char = ':';
+ const CH_EQUAL: char = '=';
+ const CH_AMPERSAND: char = '&';
+ const CH_PIPE: char = '|';
+ const CH_LITTLE: char = '<';
+ const CH_GREATER: char = '>';
+ const CH_EXCLAMATION: char = '!';
+ const CH_SINGLE_QUOTE: char = '\'';
+ const CH_DOUBLE_QUOTE: char = '"';
+
+ /// Tokenization error: clean end of input, or a failure at a byte offset.
+ #[derive(Debug, Clone, PartialEq)]
+ pub enum TokenError {
+ Eof,
+ Position(usize),
+ }
+
+ // Lifts the reader's only error (EOF) into the tokenizer's error type.
+ fn to_token_error(read_err: ReaderError) -> TokenError {
+ match read_err {
+ ReaderError::Eof => TokenError::Eof,
+ }
+ }
+
+ /// Lexical token. Every variant carries the byte offset where the token
+ /// starts; `Key`/`DoubleQuoted`/`SingleQuoted` also carry the text, and
+ /// `Whitespace` carries (offset, length of the rest of the run).
+ #[derive(Debug, PartialEq)]
+ pub enum Token {
+ Absolute(usize),
+ Dot(usize),
+ At(usize),
+ OpenArray(usize),
+ CloseArray(usize),
+ Asterisk(usize),
+ Question(usize),
+ Comma(usize),
+ Split(usize),
+ OpenParenthesis(usize),
+ CloseParenthesis(usize),
+ Key(usize, String),
+ DoubleQuoted(usize, String),
+ SingleQuoted(usize, String),
+ Equal(usize),
+ GreaterOrEqual(usize),
+ Greater(usize),
+ Little(usize),
+ LittleOrEqual(usize),
+ NotEqual(usize),
+ And(usize),
+ Or(usize),
+ Whitespace(usize, usize),
+ }
+
+ impl Token {
+ /// Returns `true` when `other` is the same enum variant as `self`,
+ /// ignoring every positional/payload field. Implemented with
+ /// `mem::discriminant` instead of a 23-arm match so new variants
+ /// cannot silently be forgotten here.
+ pub fn is_match_token_type(&self, other: Token) -> bool {
+ std::mem::discriminant(self) == std::mem::discriminant(&other)
+ }
+ }
+
+ /// Streaming lexer over a JSONPath string.
+ pub struct Tokenizer<'a> {
+ input: PathReader<'a>,
+ }
+
+ impl<'a> Tokenizer<'a> {
+ pub fn new(input: &'a str) -> Self {
+ trace!("input: {}", input);
+ Tokenizer {
+ input: PathReader::new(input),
+ }
+ }
+
+ /// Lexes the text following `$`. A lone `$` becomes `Absolute`; `$`
+ /// glued to further key characters (e.g. `$a`) becomes a plain `Key`.
+ fn dolla(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
+ // Continue while the char is neither structural nor whitespace.
+ let fun = |c: &char| match c {
+ &CH_DOT
+ | &CH_ASTERISK
+ | &CH_LARRAY
+ | &CH_RARRAY
+ | &CH_LPAREN
+ | &CH_RPAREN
+ | &CH_AT
+ | &CH_QUESTION
+ | &CH_COMMA
+ | &CH_SEMICOLON
+ | &CH_LITTLE
+ | &CH_GREATER
+ | &CH_EQUAL
+ | &CH_AMPERSAND
+ | &CH_PIPE
+ | &CH_EXCLAMATION
+ => false,
+ _ => !c.is_whitespace(),
+ };
+ let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
+ vec.insert(0, ch);
+
+ if vec.len() == 1 {
+ Ok(Token::Absolute(pos))
+ } else {
+ Ok(Token::Key(pos, vec))
+ }
+ }
+
+ /// Reads a quoted string body, consuming the closing quote.
+ /// NOTE(review): the trailing-backslash check is not looped, so only
+ /// a single escaped quote per literal is supported — a second escape
+ /// in the remainder ends the scan early (matches upstream behavior).
+ fn quote(&mut self, ch: char) -> Result<String, TokenError> {
+ let (_, mut val) = self
+ .input
+ .take_while(|c| *c != ch)
+ .map_err(to_token_error)?;
+
+ if let Some('\\') = val.chars().last() {
+ // Escaped quote: drop the backslash, keep the quote, and
+ // rescan up to the real closing quote.
+ self.input.next_char().map_err(to_token_error)?;
+ let _ = val.pop();
+ let (_, val_remain) = self
+ .input
+ .take_while(|c| *c != ch)
+ .map_err(to_token_error)?;
+ self.input.next_char().map_err(to_token_error)?;
+ val.push(ch);
+ val.push_str(val_remain.as_str());
+ } else {
+ // Consume the closing quote itself.
+ self.input.next_char().map_err(to_token_error)?;
+ }
+
+ Ok(val)
+ }
+
+ fn single_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
+ let val = self.quote(ch)?;
+ Ok(Token::SingleQuoted(pos, val))
+ }
+
+ fn double_quote(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
+ let val = self.quote(ch)?;
+ Ok(Token::DoubleQuoted(pos, val))
+ }
+
+ /// `=` must be followed by a second `=`; a lone `=` is an error.
+ fn equal(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
+ match ch {
+ CH_EQUAL => {
+ self.input.next_char().map_err(to_token_error)?;
+ Ok(Token::Equal(pos))
+ }
+ _ => Err(TokenError::Position(pos)),
+ }
+ }
+
+ /// `!` must be followed by `=`; a lone `!` is an error.
+ fn not_equal(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
+ match ch {
+ CH_EQUAL => {
+ self.input.next_char().map_err(to_token_error)?;
+ Ok(Token::NotEqual(pos))
+ }
+ _ => Err(TokenError::Position(pos)),
+ }
+ }
+
+ /// `<` alone is `Little`; `<=` is `LittleOrEqual`.
+ fn little(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
+ match ch {
+ CH_EQUAL => {
+ self.input.next_char().map_err(to_token_error)?;
+ Ok(Token::LittleOrEqual(pos))
+ }
+ _ => Ok(Token::Little(pos)),
+ }
+ }
+
+ /// `>` alone is `Greater`; `>=` is `GreaterOrEqual`.
+ fn greater(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
+ match ch {
+ CH_EQUAL => {
+ self.input.next_char().map_err(to_token_error)?;
+ Ok(Token::GreaterOrEqual(pos))
+ }
+ _ => Ok(Token::Greater(pos)),
+ }
+ }
+
+ /// `&` must be followed by a second `&`. The ignored `map_err` is
+ /// harmless here: `peek_char` just succeeded, so `next_char` cannot fail.
+ fn and(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
+ match ch {
+ CH_AMPERSAND => {
+ let _ = self.input.next_char().map_err(to_token_error);
+ Ok(Token::And(pos))
+ }
+ _ => Err(TokenError::Position(pos)),
+ }
+ }
+
+ /// `|` must be followed by a second `|`.
+ fn or(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, ch) = self.input.peek_char().map_err(to_token_error)?;
+ match ch {
+ CH_PIPE => {
+ self.input.next_char().map_err(to_token_error)?;
+ Ok(Token::Or(pos))
+ }
+ _ => Err(TokenError::Position(pos)),
+ }
+ }
+
+ /// Collapses a whitespace run. The second field is the byte length of
+ /// the *remaining* run; the first whitespace char was already consumed
+ /// by `next_token`.
+ fn whitespace(&mut self, pos: usize, _: char) -> Result<Token, TokenError> {
+ let (_, vec) = self
+ .input
+ .take_while(|c| c.is_whitespace())
+ .map_err(to_token_error)?;
+ Ok(Token::Whitespace(pos, vec.len()))
+ }
+
+ /// Lexes a bare key: everything up to the next structural character
+ /// or whitespace, including the char already consumed.
+ fn other(&mut self, pos: usize, ch: char) -> Result<Token, TokenError> {
+ let fun = |c: &char| match c {
+ &CH_DOLLA
+ | &CH_DOT
+ | &CH_ASTERISK
+ | &CH_LARRAY
+ | &CH_RARRAY
+ | &CH_LPAREN
+ | &CH_RPAREN
+ | &CH_AT
+ | &CH_QUESTION
+ | &CH_COMMA
+ | &CH_SEMICOLON
+ | &CH_LITTLE
+ | &CH_GREATER
+ | &CH_EQUAL
+ | &CH_AMPERSAND
+ | &CH_PIPE
+ | &CH_EXCLAMATION
+ => false,
+ _ => !c.is_whitespace(),
+ };
+ let (_, mut vec) = self.input.take_while(fun).map_err(to_token_error)?;
+ vec.insert(0, ch);
+ Ok(Token::Key(pos, vec))
+ }
+
+ /// Produces the next token, dispatching on the first character.
+ /// Returns `Err(TokenError::Eof)` at end of input.
+ pub fn next_token(&mut self) -> Result<Token, TokenError> {
+ let (pos, ch) = self.input.next_char().map_err(to_token_error)?;
+ match ch {
+ CH_DOLLA => self.dolla(pos, ch),
+ CH_DOT => Ok(Token::Dot(pos)),
+ CH_ASTERISK => Ok(Token::Asterisk(pos)),
+ CH_LARRAY => Ok(Token::OpenArray(pos)),
+ CH_RARRAY => Ok(Token::CloseArray(pos)),
+ CH_LPAREN => Ok(Token::OpenParenthesis(pos)),
+ CH_RPAREN => Ok(Token::CloseParenthesis(pos)),
+ CH_AT => Ok(Token::At(pos)),
+ CH_QUESTION => Ok(Token::Question(pos)),
+ CH_COMMA => Ok(Token::Comma(pos)),
+ CH_SEMICOLON => Ok(Token::Split(pos)),
+ CH_SINGLE_QUOTE => self.single_quote(pos, ch),
+ CH_DOUBLE_QUOTE => self.double_quote(pos, ch),
+ CH_EQUAL => self.equal(pos, ch),
+ CH_GREATER => self.greater(pos, ch),
+ CH_LITTLE => self.little(pos, ch),
+ CH_AMPERSAND => self.and(pos, ch),
+ CH_PIPE => self.or(pos, ch),
+ CH_EXCLAMATION => self.not_equal(pos, ch),
+ _ if ch.is_whitespace() => self.whitespace(pos, ch),
+ _ => self.other(pos, ch),
+ }
+ }
+
+ /// Byte offset consumed so far in the underlying reader.
+ fn current_pos(&self) -> usize {
+ self.input.current_pos()
+ }
+ }
+
+ /// Fully-tokenized view of a path with one-token lookahead.
+ /// `tokens` holds (end-position, token) pairs in *reverse* order so the
+ /// next token is at the back; `err`/`err_pos` record how lexing ended.
+ pub struct TokenReader<'a> {
+ origin_input: &'a str,
+ err: TokenError,
+ err_pos: usize,
+ tokens: Vec<(usize, Token)>,
+ curr_pos: Option<usize>,
+ }
+
+ impl<'a> TokenReader<'a> {
+ /// Eagerly lexes the whole input. Tokens are inserted at the front so
+ /// that `pop()` later yields them in source order; the terminating
+ /// error (normally `Eof`) is stored for `peek`/`next` after exhaustion.
+ pub fn new(input: &'a str) -> Self {
+ let mut tokenizer = Tokenizer::new(input);
+ let mut tokens = vec![];
+ loop {
+ match tokenizer.next_token() {
+ Ok(t) => {
+ tokens.insert(0, (tokenizer.current_pos(), t));
+ }
+ Err(e) => {
+ return TokenReader {
+ origin_input: input,
+ err: e,
+ err_pos: tokenizer.current_pos(),
+ tokens,
+ curr_pos: None,
+ };
+ }
+ }
+ }
+ }
+
+ /// Returns the next token without consuming it; after exhaustion,
+ /// returns a clone of the stored terminating error.
+ pub fn peek_token(&self) -> Result<&Token, TokenError> {
+ match self.tokens.last() {
+ Some((_, t)) => {
+ trace!("%{:?}", t);
+ Ok(t)
+ }
+ _ => {
+ trace!("%{:?}", self.err);
+ Err(self.err.clone())
+ }
+ }
+ }
+
+ /// Consumes and returns the next token, remembering its end position
+ /// for error reporting.
+ pub fn next_token(&mut self) -> Result<Token, TokenError> {
+ match self.tokens.pop() {
+ Some((pos, t)) => {
+ self.curr_pos = Some(pos);
+ trace!("@{:?}", t);
+ Ok(t)
+ }
+ _ => {
+ trace!("@{:?}", self.err);
+ Err(self.err.clone())
+ }
+ }
+ }
+
+ /// Formats an error as the original input followed by a line of `pos`
+ /// caret characters (a crude underline up to the failure point).
+ pub fn err_msg_with_pos(&self, pos: usize) -> String {
+ format!("{}\n{}", self.origin_input, "^".repeat(pos))
+ }
+
+ /// Error message at the last-consumed token's position, falling back
+ /// to the position where lexing stopped.
+ pub fn err_msg(&self) -> String {
+ match self.curr_pos {
+ Some(pos) => self.err_msg_with_pos(pos),
+ _ => self.err_msg_with_pos(self.err_pos),
+ }
+ }
+ }
diff --git a/vendor/jsonpath_lib/src/select/cmp.rs b/vendor/jsonpath_lib/src/select/cmp.rs
new file mode 100644
index 000000000..209e67298
--- /dev/null
+++ b/vendor/jsonpath_lib/src/select/cmp.rs
@@ -0,0 +1,335 @@
+use array_tool::vec::{Intersect, Union};
+use serde_json::Value;
+
+/// Comparison strategy used by filter expressions; one implementor per
+/// operator (`==`, `!=`, `<`, `&&`, ...).
+pub(super) trait Cmp {
+    fn cmp_bool(&self, v1: bool, v2: bool) -> bool;
+
+    fn cmp_f64(&self, v1: f64, v2: f64) -> bool;
+
+    fn cmp_string(&self, v1: &str, v2: &str) -> bool;
+
+    // Set-style comparison over JSON values; returns the surviving values.
+    fn cmp_json<'a>(&self, v1: &[&'a Value], v2: &[&'a Value]) -> Vec<&'a Value>;
+
+    // Result used when operand types are incomparable.
+    fn default(&self) -> bool {
+        false
+    }
+}
+
+/// `==` comparator.
+pub(super) struct CmpEq;
+
+impl Cmp for CmpEq {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a == b
+    }
+
+    fn cmp_f64(&self, a: f64, b: f64) -> bool {
+        (a - b).abs() == 0_f64
+    }
+
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        a == b
+    }
+
+    /// Values present on both sides.
+    fn cmp_json<'a>(&self, a: &[&'a Value], b: &[&'a Value]) -> Vec<&'a Value> {
+        a.to_vec().intersect(b.to_vec())
+    }
+}
+
+/// `!=` comparator.
+pub(super) struct CmpNe;
+
+impl Cmp for CmpNe {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a != b
+    }
+
+    fn cmp_f64(&self, a: f64, b: f64) -> bool {
+        (a - b).abs() != 0_f64
+    }
+
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        a != b
+    }
+
+    /// Pairwise-inequality intersection of the two sides.
+    fn cmp_json<'a>(&self, a: &[&'a Value], b: &[&'a Value]) -> Vec<&'a Value> {
+        a.to_vec().intersect_if(b.to_vec(), |x, y| x != y)
+    }
+}
+
+/// `>` comparator.
+pub(super) struct CmpGt;
+
+impl Cmp for CmpGt {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a && !b
+    }
+
+    fn cmp_f64(&self, a: f64, b: f64) -> bool {
+        a > b
+    }
+
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        a > b
+    }
+
+    /// Ordering over raw JSON sets is not defined.
+    fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
+        vec![]
+    }
+}
+
+/// `>=` comparator.
+pub(super) struct CmpGe;
+
+impl Cmp for CmpGe {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a >= b
+    }
+
+    fn cmp_f64(&self, a: f64, b: f64) -> bool {
+        a >= b
+    }
+
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        a >= b
+    }
+
+    /// Ordering over raw JSON sets is not defined.
+    fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
+        vec![]
+    }
+}
+
+/// `<` comparator.
+pub(super) struct CmpLt;
+
+impl Cmp for CmpLt {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        !a && b
+    }
+
+    fn cmp_f64(&self, a: f64, b: f64) -> bool {
+        a < b
+    }
+
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        a < b
+    }
+
+    /// Ordering over raw JSON sets is not defined.
+    fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
+        vec![]
+    }
+}
+
+/// `<=` comparator.
+pub(super) struct CmpLe;
+
+impl Cmp for CmpLe {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a <= b
+    }
+
+    fn cmp_f64(&self, a: f64, b: f64) -> bool {
+        a <= b
+    }
+
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        a <= b
+    }
+
+    /// Ordering over raw JSON sets is not defined.
+    fn cmp_json<'a>(&self, _: &[&'a Value], _: &[&'a Value]) -> Vec<&'a Value> {
+        vec![]
+    }
+}
+
+/// `&&` comparator.
+pub(super) struct CmpAnd;
+
+impl Cmp for CmpAnd {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a && b
+    }
+
+    /// Two numbers are both truthy operands.
+    fn cmp_f64(&self, _a: f64, _b: f64) -> bool {
+        true
+    }
+
+    /// Strings are truthy when non-empty.
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        !a.is_empty() && !b.is_empty()
+    }
+
+    fn cmp_json<'a>(&self, a: &[&'a Value], b: &[&'a Value]) -> Vec<&'a Value> {
+        a.to_vec().intersect(b.to_vec())
+    }
+}
+
+/// `||` comparator.
+pub(super) struct CmpOr;
+
+impl Cmp for CmpOr {
+    fn cmp_bool(&self, a: bool, b: bool) -> bool {
+        a || b
+    }
+
+    /// Two numbers are both truthy operands.
+    fn cmp_f64(&self, _a: f64, _b: f64) -> bool {
+        true
+    }
+
+    /// Strings are truthy when non-empty.
+    fn cmp_string(&self, a: &str, b: &str) -> bool {
+        !a.is_empty() || !b.is_empty()
+    }
+
+    fn cmp_json<'a>(&self, a: &[&'a Value], b: &[&'a Value]) -> Vec<&'a Value> {
+        a.to_vec().union(b.to_vec())
+    }
+}
+
+
+#[cfg(test)]
+mod cmp_inner_tests {
+    //! Pins the exact semantics of every comparator, including the
+    //! bool interpretation of ordering operators and the set-style
+    //! `cmp_json` results.
+    use serde_json::Value;
+
+    use select::cmp::*;
+
+    #[test]
+    fn cmp_eq() {
+        let cmp_fn = CmpEq;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), false);
+        assert_eq!(cmp_fn.cmp_bool(true, true), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
+        assert_eq!(cmp_fn.cmp_string("1", "1"), true);
+        assert_eq!(cmp_fn.cmp_string("1", "2"), false);
+    }
+
+    #[test]
+    fn cmp_ne() {
+        let cmp_fn = CmpNe;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), true);
+        assert_eq!(cmp_fn.cmp_bool(true, true), false);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), false);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
+        assert_eq!(cmp_fn.cmp_string("1", "1"), false);
+        assert_eq!(cmp_fn.cmp_string("1", "2"), true);
+    }
+
+    // `>` on bools behaves like `true > false`.
+    #[test]
+    fn cmp_gt() {
+        let cmp_fn = CmpGt;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), true);
+        assert_eq!(cmp_fn.cmp_bool(true, true), false);
+        assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
+        assert_eq!(cmp_fn.cmp_string("a", "a"), false);
+        assert_eq!(cmp_fn.cmp_string("b", "a"), true);
+        assert_eq!(cmp_fn.cmp_string("1", "2"), false);
+    }
+
+    #[test]
+    fn cmp_ge() {
+        let cmp_fn = CmpGe;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), true);
+        assert_eq!(cmp_fn.cmp_bool(true, true), true);
+        assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), false);
+        assert_eq!(cmp_fn.cmp_string("1", "1"), true);
+        assert_eq!(cmp_fn.cmp_string("ab", "a"), true);
+        assert_eq!(cmp_fn.cmp_string("1", "2"), false);
+    }
+
+    // Strings compare lexicographically ("ab" < "b").
+    #[test]
+    fn cmp_lt() {
+        let cmp_fn = CmpLt;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), false);
+        assert_eq!(cmp_fn.cmp_bool(false, true), true);
+        assert_eq!(cmp_fn.cmp_bool(true, true), false);
+        assert_eq!(cmp_fn.cmp_bool(false, false), false);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), false);
+        assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), false);
+        assert_eq!(cmp_fn.cmp_string("a", "a"), false);
+        assert_eq!(cmp_fn.cmp_string("ab", "b"), true);
+        assert_eq!(cmp_fn.cmp_string("1", "2"), true);
+    }
+
+    #[test]
+    fn cmp_le() {
+        let cmp_fn = CmpLe;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), false);
+        assert_eq!(cmp_fn.cmp_bool(false, true), true);
+        assert_eq!(cmp_fn.cmp_bool(true, true), true);
+        assert_eq!(cmp_fn.cmp_bool(false, false), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.2), true);
+        assert_eq!(cmp_fn.cmp_f64(0.1, 0.1), true);
+        assert_eq!(cmp_fn.cmp_f64(0.2, 0.1), false);
+        assert_eq!(cmp_fn.cmp_string("a", "a"), true);
+        assert_eq!(cmp_fn.cmp_string("ab", "b"), true);
+        assert_eq!(cmp_fn.cmp_string("abd", "abc"), false);
+        assert_eq!(cmp_fn.cmp_string("1", "2"), true);
+    }
+
+    // `&&` / `||` treat any number as truthy and strings by non-emptiness.
+    #[test]
+    fn cmp_and() {
+        let cmp_fn = CmpAnd;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), false);
+        assert_eq!(cmp_fn.cmp_bool(false, true), false);
+        assert_eq!(cmp_fn.cmp_bool(true, true), true);
+        assert_eq!(cmp_fn.cmp_bool(false, false), false);
+        assert_eq!(cmp_fn.cmp_f64(0.0, 0.0), true);
+        assert_eq!(cmp_fn.cmp_string("a", "a"), true);
+    }
+
+    #[test]
+    fn cmp_or() {
+        let cmp_fn = CmpOr;
+        assert_eq!(cmp_fn.default(), false);
+        assert_eq!(cmp_fn.cmp_bool(true, false), true);
+        assert_eq!(cmp_fn.cmp_bool(false, true), true);
+        assert_eq!(cmp_fn.cmp_bool(true, true), true);
+        assert_eq!(cmp_fn.cmp_bool(false, false), false);
+        assert_eq!(cmp_fn.cmp_f64(0.0, 0.0), true);
+        assert_eq!(cmp_fn.cmp_string("a", "a"), true);
+    }
+
+    // `cmp_json`: eq/and = intersection, or = union, ordering ops = empty.
+    #[test]
+    fn cmp_json() {
+        let v1 = Value::Bool(true);
+        let v2 = Value::String("1".to_string());
+        let left = [&v1, &v2];
+        let right = [&v1, &v2];
+        let empty: Vec<&Value> = Vec::new();
+
+        assert_eq!(CmpEq.cmp_json(&left, &right), left.to_vec());
+        assert_eq!(CmpNe.cmp_json(&left, &right), left.to_vec());
+        assert_eq!(CmpGt.cmp_json(&left, &right), empty);
+        assert_eq!(CmpGe.cmp_json(&left, &right), empty);
+        assert_eq!(CmpLt.cmp_json(&left, &right), empty);
+        assert_eq!(CmpLe.cmp_json(&left, &right), empty);
+        assert_eq!(CmpAnd.cmp_json(&left, &right), left.to_vec());
+        assert_eq!(CmpOr.cmp_json(&left, &right), left.to_vec());
+
+        assert_eq!(
+            CmpEq.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
+            vec![&Value::Bool(true)]
+        );
+        assert_eq!(
+            CmpEq.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(false)]),
+            empty
+        );
+        assert_eq!(
+            CmpNe.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
+            empty
+        );
+        assert_eq!(
+            CmpNe.cmp_json(&[&Value::Bool(false)], &[&Value::Bool(true)]),
+            vec![&Value::Bool(false)]
+        );
+        assert_eq!(
+            CmpAnd.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(true)]),
+            vec![&Value::Bool(true)]
+        );
+        assert_eq!(
+            CmpOr.cmp_json(&[&Value::Bool(true)], &[&Value::Bool(false)]),
+            vec![&Value::Bool(true), &Value::Bool(false)]
+        );
+    }
+}
diff --git a/vendor/jsonpath_lib/src/select/expr_term.rs b/vendor/jsonpath_lib/src/select/expr_term.rs
new file mode 100644
index 000000000..ddbf64e53
--- /dev/null
+++ b/vendor/jsonpath_lib/src/select/expr_term.rs
@@ -0,0 +1,227 @@
+use serde_json::{Number, Value};
+use select::cmp::*;
+use select::{FilterKey, to_f64};
+
+/// One operand of a filter expression.
+#[derive(Debug, PartialEq)]
+pub(super) enum ExprTerm<'a> {
+    // Literal operands from the path text.
+    String(String),
+    Number(Number),
+    Bool(bool),
+    // Values selected from the document: (relative context values,
+    // the key that was filtered on, the matched values).
+    Json(Option<Vec<&'a Value>>, Option<FilterKey>, Vec<&'a Value>),
+}
+
+impl<'a> ExprTerm<'a> {
+    /// Evaluates `self <op> other` where `cmp_fn` is the operator.
+    ///
+    /// When `self` is a scalar but `other` is `Json`, the operands are
+    /// swapped and `reverse_cmp_fn` (the mirrored operator, e.g. `<` for
+    /// `>`) is applied instead, so the Json-side logic below only ever has
+    /// a scalar on the right.
+    fn cmp<C1: Cmp, C2: Cmp>(
+        &self,
+        other: &Self,
+        cmp_fn: &C1,
+        reverse_cmp_fn: &C2,
+    ) -> ExprTerm<'a> {
+        match &self {
+            ExprTerm::String(s1) => match &other {
+                ExprTerm::String(s2) => ExprTerm::Bool(cmp_fn.cmp_string(s1, s2)),
+                ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
+                _ => ExprTerm::Bool(cmp_fn.default()),
+            },
+            ExprTerm::Number(n1) => match &other {
+                ExprTerm::Number(n2) => ExprTerm::Bool(cmp_fn.cmp_f64(to_f64(n1), to_f64(n2))),
+                ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
+                _ => ExprTerm::Bool(cmp_fn.default()),
+            },
+            ExprTerm::Bool(b1) => match &other {
+                ExprTerm::Bool(b2) => ExprTerm::Bool(cmp_fn.cmp_bool(*b1, *b2)),
+                ExprTerm::Json(_, _, _) => other.cmp(&self, reverse_cmp_fn, cmp_fn),
+                _ => ExprTerm::Bool(cmp_fn.default()),
+            },
+            ExprTerm::Json(rel, fk1, vec1) => {
+                // Json vs scalar: keep each value of vec1 for which the
+                // comparison holds, looking through objects via the filter
+                // key `fk1` when one was recorded.
+                let ret: Vec<&Value> = match &other {
+                    ExprTerm::String(s2) => vec1
+                        .iter()
+                        .filter(|v1| match v1 {
+                            Value::String(s1) => cmp_fn.cmp_string(s1, s2),
+                            Value::Object(map1) => {
+                                if let Some(FilterKey::String(k)) = fk1 {
+                                    if let Some(Value::String(s1)) = map1.get(k) {
+                                        return cmp_fn.cmp_string(s1, s2);
+                                    }
+                                }
+                                cmp_fn.default()
+                            }
+                            _ => cmp_fn.default(),
+                        })
+                        .cloned()
+                        .collect(),
+                    ExprTerm::Number(n2) => vec1
+                        .iter()
+                        .filter(|v1| match v1 {
+                            Value::Number(n1) => cmp_fn.cmp_f64(to_f64(n1), to_f64(n2)),
+                            Value::Object(map1) => {
+                                if let Some(FilterKey::String(k)) = fk1 {
+                                    if let Some(Value::Number(n1)) = map1.get(k) {
+                                        return cmp_fn.cmp_f64(to_f64(n1), to_f64(n2));
+                                    }
+                                }
+                                cmp_fn.default()
+                            }
+                            _ => cmp_fn.default(),
+                        })
+                        .cloned()
+                        .collect(),
+                    ExprTerm::Bool(b2) => vec1
+                        .iter()
+                        .filter(|v1| match v1 {
+                            Value::Bool(b1) => cmp_fn.cmp_bool(*b1, *b2),
+                            Value::Object(map1) => {
+                                if let Some(FilterKey::String(k)) = fk1 {
+                                    if let Some(Value::Bool(b1)) = map1.get(k) {
+                                        return cmp_fn.cmp_bool(*b1, *b2);
+                                    }
+                                }
+                                cmp_fn.default()
+                            }
+                            _ => cmp_fn.default(),
+                        })
+                        .cloned()
+                        .collect(),
+                    ExprTerm::Json(parent, _, vec2) => {
+                        // Json vs Json: prefer comparing relative contexts
+                        // when available (`rel` shadows vec1, `parent`
+                        // shadows vec2); otherwise the raw matched values.
+                        if let Some(vec1) = rel {
+                            cmp_fn.cmp_json(vec1, vec2)
+                        } else if let Some(vec2) = parent {
+                            cmp_fn.cmp_json(vec1, vec2)
+                        } else {
+                            cmp_fn.cmp_json(vec1, vec2)
+                        }
+                    }
+                };
+
+                if ret.is_empty() {
+                    ExprTerm::Bool(cmp_fn.default())
+                } else if let Some(rel) = rel {
+                    if let ExprTerm::Json(_, _, _) = &other {
+                        ExprTerm::Json(Some(rel.to_vec()), None, ret)
+                    } else {
+                        // Narrow the relative context to those objects whose
+                        // some field equals one of the surviving values.
+                        let mut tmp = Vec::new();
+                        for rel_value in rel {
+                            if let Value::Object(map) = rel_value {
+                                for map_value in map.values() {
+                                    for result_value in &ret {
+                                        if map_value.eq(*result_value) {
+                                            tmp.push(*rel_value);
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        ExprTerm::Json(Some(tmp), None, ret)
+                    }
+                } else {
+                    ExprTerm::Json(None, None, ret)
+                }
+            }
+        }
+    }
+
+    // The operator entry points below all funnel into `cmp`, passing the
+    // operator together with its mirror (used when operands get swapped),
+    // and store the result into `ret`.
+
+    pub fn eq(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("eq - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpEq, &CmpEq);
+        debug!("eq = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn ne(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("ne - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpNe, &CmpNe);
+        debug!("ne = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn gt(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("gt - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpGt, &CmpLt);
+        debug!("gt = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn ge(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("ge - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpGe, &CmpLe);
+        debug!("ge = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn lt(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("lt - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpLt, &CmpGt);
+        debug!("lt = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn le(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("le - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpLe, &CmpGe);
+        debug!("le = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn and(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("and - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpAnd, &CmpAnd);
+        debug!("and = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+
+    pub fn or(&self, other: &Self, ret: &mut Option<ExprTerm<'a>>) {
+        debug!("or - {:?} : {:?}", &self, &other);
+        let _ = ret.take();
+        let tmp = self.cmp(other, &CmpOr, &CmpOr);
+        debug!("or = {:?}", tmp);
+        *ret = Some(tmp);
+    }
+}
+
+/// Conversion from a selection result into a filter operand.
+///
+/// A single scalar value collapses into the matching literal variant so it
+/// can be compared directly; anything else stays a `Json` term.
+///
+/// Implemented as `From` (rather than `Into`, as clippy's `from_over_into`
+/// recommends); existing `.into()` call sites keep working through the
+/// standard blanket `impl<T, U: From<T>> Into<U> for T`.
+impl<'a> From<&Vec<&'a Value>> for ExprTerm<'a> {
+    fn from(vec: &Vec<&'a Value>) -> ExprTerm<'a> {
+        if vec.len() == 1 {
+            match &vec[0] {
+                Value::Number(v) => return ExprTerm::Number(v.clone()),
+                Value::String(v) => return ExprTerm::String(v.clone()),
+                Value::Bool(v) => return ExprTerm::Bool(*v),
+                _ => {}
+            }
+        }
+
+        ExprTerm::Json(None, None, vec.to_vec())
+    }
+}
+
+
+#[cfg(test)]
+mod expr_term_inner_tests {
+    //! Verifies that single-element selections collapse into scalar terms.
+    use serde_json::{Number, Value};
+    use select::expr_term::ExprTerm;
+
+    #[test]
+    fn value_vec_into() {
+        // Bool value -> ExprTerm::Bool.
+        let v = Value::Bool(true);
+        let vec = &vec![&v];
+        let term: ExprTerm = vec.into();
+        assert_eq!(term, ExprTerm::Bool(true));
+
+        // String value -> ExprTerm::String.
+        let v = Value::String("a".to_string());
+        let vec = &vec![&v];
+        let term: ExprTerm = vec.into();
+        assert_eq!(term, ExprTerm::String("a".to_string()));
+
+        // Number value -> ExprTerm::Number.
+        let v = serde_json::from_str("1.0").unwrap();
+        let vec = &vec![&v];
+        let term: ExprTerm = vec.into();
+        assert_eq!(term, ExprTerm::Number(Number::from_f64(1.0).unwrap()));
+    }
+}
diff --git a/vendor/jsonpath_lib/src/select/mod.rs b/vendor/jsonpath_lib/src/select/mod.rs
new file mode 100644
index 000000000..a3d0ec43f
--- /dev/null
+++ b/vendor/jsonpath_lib/src/select/mod.rs
@@ -0,0 +1,1001 @@
+use std::collections::HashSet;
+use std::fmt;
+
+use serde_json::{Number, Value};
+use serde_json::map::Entry;
+
+use parser::*;
+
+use self::expr_term::*;
+use self::value_walker::ValueWalker;
+
+mod cmp;
+mod expr_term;
+mod value_walker;
+
+/// Widens any serde_json `Number` (i64, f64 or u64 representation) to f64.
+/// The `is_*` gates are checked in this exact order; u64 is the fallback
+/// for positive integers that do not fit in i64.
+fn to_f64(n: &Number) -> f64 {
+    if n.is_i64() {
+        n.as_i64().unwrap() as f64
+    } else if n.is_f64() {
+        n.as_f64().unwrap()
+    } else {
+        n.as_u64().unwrap() as f64
+    }
+}
+
+/// Clamps a possibly-negative index into `0..=len`: negative values count
+/// back from the end of the collection, positive values are capped at `len`.
+fn abs_index(n: isize, len: usize) -> usize {
+    let len = len as isize;
+    let clamped = if n < 0 { (n + len).max(0) } else { n.min(len) };
+    clamped as usize
+}
+
+/// Which object key a filter step matched on: a concrete key or all keys.
+#[derive(Debug, PartialEq)]
+enum FilterKey {
+    String(String),
+    All,
+}
+
+/// Errors produced while compiling a path or selecting from a value.
+pub enum JsonPathError {
+    // No path was set before selecting.
+    EmptyPath,
+    // No JSON value was set before selecting.
+    EmptyValue,
+    // Path compilation failed; the message includes a position marker.
+    Path(String),
+    // (De)serialization of a result failed.
+    Serde(String),
+}
+
+/// `Debug` intentionally mirrors `Display` so `{:?}` shows the same
+/// human-readable message.
+impl fmt::Debug for JsonPathError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(self, f)
+    }
+}
+
+impl fmt::Display for JsonPathError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ JsonPathError::EmptyPath => f.write_str("path not set"),
+ JsonPathError::EmptyValue => f.write_str("json value not set"),
+ JsonPathError::Path(msg) => f.write_str(&format!("path error: \n{}\n", msg)),
+ JsonPathError::Serde(msg) => f.write_str(&format!("serde error: \n{}\n", msg)),
+ }
+ }
+}
+
+/// Stack of filter-expression operands. A `None` entry is a sentinel that
+/// marks the start of a new filter scope.
+#[derive(Debug, Default)]
+struct FilterTerms<'a>(Vec<Option<ExprTerm<'a>>>);
+
+impl<'a> FilterTerms<'a> {
+    /// Opens a new filter scope by pushing the `None` sentinel.
+    fn new_filter_context(&mut self) {
+        self.0.push(None);
+        debug!("new_filter_context: {:?}", self.0);
+    }
+
+    /// True when no filter term or scope is active.
+    fn is_term_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    fn push_term(&mut self, term: Option<ExprTerm<'a>>) {
+        self.0.push(term);
+    }
+
+    #[allow(clippy::option_option)]
+    fn pop_term(&mut self) -> Option<Option<ExprTerm<'a>>> {
+        self.0.pop()
+    }
+
+    /// Applies `fun` to the values held by a `Json` term and pushes the
+    /// result back as a new `Json` term.
+    ///
+    /// `fun` receives (input values, matched-output vec, indices of
+    /// non-matching inputs) and returns the key it filtered on.
+    fn filter_json_term<F: Fn(&Vec<&'a Value>, &mut Vec<&'a Value>, &mut HashSet<usize>) -> FilterKey>(
+        &mut self,
+        e: ExprTerm<'a>,
+        fun: F,
+    ) {
+        debug!("filter_json_term: {:?}", e);
+
+        if let ExprTerm::Json(rel, fk, vec) = e {
+            let mut tmp = Vec::new();
+            let mut not_matched = HashSet::new();
+            let filter_key = if let Some(FilterKey::String(key)) = fk {
+                // A previous step matched a concrete key: filter on the
+                // value stored under that key, falling back to the value
+                // itself for non-objects.
+                let key_contained = &vec.iter().map(|v| match v {
+                    Value::Object(map) if map.contains_key(&key) => map.get(&key).unwrap(),
+                    _ => v,
+                }).collect();
+                fun(key_contained, &mut tmp, &mut not_matched)
+            } else {
+                fun(&vec, &mut tmp, &mut not_matched)
+            };
+
+            if rel.is_some() {
+                self.0.push(Some(ExprTerm::Json(rel, Some(filter_key), tmp)));
+            } else {
+                // No relative context yet: the inputs that matched (i.e.
+                // whose index is not in `not_matched`) become the context.
+                let filtered: Vec<&Value> = vec.iter().enumerate()
+                    .filter(
+                        |(idx, _)| !not_matched.contains(idx)
+                    )
+                    .map(|(_, v)| *v)
+                    .collect();
+
+                self.0.push(Some(ExprTerm::Json(Some(filtered), Some(filter_key), tmp)));
+            }
+        } else {
+            unreachable!("unexpected: ExprTerm: {:?}", e);
+        }
+    }
+
+    /// Builds a fresh `Json` term from `current` by applying `fun`.
+    fn push_json_term<F: Fn(&Vec<&'a Value>, &mut Vec<&'a Value>, &mut HashSet<usize>) -> FilterKey>(
+        &mut self,
+        current: &Option<Vec<&'a Value>>,
+        fun: F,
+    ) {
+        debug!("push_json_term: {:?}", &current);
+
+        if let Some(current) = &current {
+            let mut tmp = Vec::new();
+            let mut not_matched = HashSet::new();
+            let filter_key = fun(current, &mut tmp, &mut not_matched);
+            self.0.push(Some(ExprTerm::Json(None, Some(filter_key), tmp)));
+        }
+    }
+
+    /// Filters the top-of-stack term if present, otherwise starts a new
+    /// term from `current`.
+    fn filter<F: Fn(&Vec<&'a Value>, &mut Vec<&'a Value>, &mut HashSet<usize>) -> FilterKey>(
+        &mut self,
+        current: &Option<Vec<&'a Value>>,
+        fun: F,
+    ) {
+        if let Some(peek) = self.0.pop() {
+            if let Some(e) = peek {
+                self.filter_json_term(e, fun);
+            } else {
+                self.push_json_term(current, fun);
+            }
+        }
+    }
+
+    /// `..key` inside a filter: collect every descendant value under `key`.
+    fn filter_all_with_str(&mut self, current: &Option<Vec<&'a Value>>, key: &str) {
+        self.filter(current, |vec, tmp, _| {
+            ValueWalker::all_with_str(&vec, tmp, key, true);
+            FilterKey::All
+        });
+
+        debug!("filter_all_with_str : {}, {:?}", key, self.0);
+    }
+
+    /// `.key` inside a filter: keep objects that contain `key`, descending
+    /// into arrays; deduplicates by value address.
+    fn filter_next_with_str(&mut self, current: &Option<Vec<&'a Value>>, key: &str) {
+        self.filter(current, |vec, tmp, not_matched| {
+            let mut visited = HashSet::new();
+            for (idx, v) in vec.iter().enumerate() {
+                match v {
+                    Value::Object(map) => {
+                        if map.contains_key(key) {
+                            // Dedup by pointer so the same value is not
+                            // collected twice.
+                            let ptr = *v as *const Value;
+                            if !visited.contains(&ptr) {
+                                visited.insert(ptr);
+                                tmp.push(v)
+                            }
+                        } else {
+                            not_matched.insert(idx);
+                        }
+                    }
+                    Value::Array(vec) => {
+                        not_matched.insert(idx);
+                        for v in vec {
+                            ValueWalker::walk_dedup(v, tmp, key, &mut visited);
+                        }
+                    }
+                    _ => {
+                        not_matched.insert(idx);
+                    }
+                }
+            }
+
+            FilterKey::String(key.to_owned())
+        });
+
+        debug!("filter_next_with_str : {}, {:?}", key, self.0);
+    }
+
+    /// `[n]`: pick index `n` (negative counts from the end) from each array
+    /// in `current`, including arrays stored one level down in objects.
+    fn collect_next_with_num(&mut self, current: &Option<Vec<&'a Value>>, index: f64) -> Option<Vec<&'a Value>> {
+        fn _collect<'a>(tmp: &mut Vec<&'a Value>, vec: &'a [Value], index: f64) {
+            let index = abs_index(index as isize, vec.len());
+            if let Some(v) = vec.get(index) {
+                tmp.push(v);
+            }
+        }
+
+        if let Some(current) = current {
+            let mut tmp = Vec::new();
+            for c in current {
+                match c {
+                    Value::Object(map) => {
+                        for k in map.keys() {
+                            if let Some(Value::Array(vec)) = map.get(k) {
+                                _collect(&mut tmp, vec, index);
+                            }
+                        }
+                    }
+                    Value::Array(vec) => {
+                        _collect(&mut tmp, vec, index);
+                    }
+                    _ => {}
+                }
+            }
+
+            if tmp.is_empty() {
+                // Nothing matched: drop the pending term as well.
+                self.0.pop();
+                return Some(vec![]);
+            } else {
+                return Some(tmp);
+            }
+        }
+
+        debug!(
+            "collect_next_with_num : {:?}, {:?}",
+            &index, &current
+        );
+
+        None
+    }
+
+    /// `.*`: every direct child of every value in `current`.
+    fn collect_next_all(&mut self, current: &Option<Vec<&'a Value>>) -> Option<Vec<&'a Value>> {
+        if let Some(current) = current {
+            let mut tmp = Vec::new();
+            for c in current {
+                match c {
+                    Value::Object(map) => {
+                        for (_, v) in map {
+                            tmp.push(v)
+                        }
+                    }
+                    Value::Array(vec) => {
+                        for v in vec {
+                            tmp.push(v);
+                        }
+                    }
+                    _ => {}
+                }
+            }
+            return Some(tmp);
+        }
+
+        debug!("collect_next_all : {:?}", &current);
+
+        None
+    }
+
+    /// `.key` / `['a','b']`: the values stored under `keys` in each object.
+    fn collect_next_with_str(&mut self, current: &Option<Vec<&'a Value>>, keys: &[String]) -> Option<Vec<&'a Value>> {
+        if let Some(current) = current {
+            let mut tmp = Vec::new();
+            for c in current {
+                if let Value::Object(map) = c {
+                    for key in keys {
+                        if let Some(v) = map.get(key) {
+                            tmp.push(v)
+                        }
+                    }
+                }
+            }
+
+            if tmp.is_empty() {
+                // Nothing matched: drop the pending term as well.
+                self.0.pop();
+                return Some(vec![]);
+            } else {
+                return Some(tmp);
+            }
+        }
+
+        debug!(
+            "collect_next_with_str : {:?}, {:?}",
+            keys, &current
+        );
+
+        None
+    }
+
+    /// `..`: every descendant of every value in `current`.
+    fn collect_all(&mut self, current: &Option<Vec<&'a Value>>) -> Option<Vec<&'a Value>> {
+        if let Some(current) = current {
+            let mut tmp = Vec::new();
+            ValueWalker::all(&current, &mut tmp);
+            return Some(tmp);
+        }
+        debug!("collect_all: {:?}", &current);
+
+        None
+    }
+
+    /// `..key`: every descendant value stored under `key`.
+    fn collect_all_with_str(&mut self, current: &Option<Vec<&'a Value>>, key: &str) -> Option<Vec<&'a Value>> {
+        if let Some(current) = current {
+            let mut tmp = Vec::new();
+            ValueWalker::all_with_str(&current, &mut tmp, key, false);
+            return Some(tmp);
+        }
+
+        debug!("collect_all_with_str: {}, {:?}", key, &current);
+
+        None
+    }
+
+    /// `..[n]`: every descendant array element at index `n`.
+    fn collect_all_with_num(&mut self, current: &Option<Vec<&'a Value>>, index: f64) -> Option<Vec<&'a Value>> {
+        if let Some(current) = current {
+            let mut tmp = Vec::new();
+            ValueWalker::all_with_num(&current, &mut tmp, index);
+            return Some(tmp);
+        }
+
+        debug!("collect_all_with_num: {}, {:?}", index, &current);
+
+        None
+    }
+}
+
+/// Walks a compiled JSONPath AST over a JSON value and accumulates matches.
+#[derive(Debug, Default)]
+pub struct Selector<'a, 'b> {
+    // Owned compiled path (set via `str_path`); mutually exclusive with
+    // `node_ref` in practice.
+    node: Option<Node>,
+    // Borrowed pre-compiled path (set via `compiled_path`).
+    node_ref: Option<&'b Node>,
+    // Root JSON value being selected from.
+    value: Option<&'a Value>,
+    // Parse-token stack used while visiting the AST.
+    tokens: Vec<ParseToken>,
+    // Values matched so far by the steps processed up to this point.
+    current: Option<Vec<&'a Value>>,
+    // Nested selectors spawned for absolute paths inside filters.
+    selectors: Vec<Selector<'a, 'b>>,
+    // Filter-expression operand stack.
+    selector_filter: FilterTerms<'a>,
+}
+
+impl<'a, 'b> Selector<'a, 'b> {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Compiles `path` into an owned AST, clearing any borrowed one.
+    pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
+        debug!("path : {}", path);
+        self.node_ref.take();
+        self.node = Some(Parser::compile(path).map_err(JsonPathError::Path)?);
+        Ok(self)
+    }
+
+    /// Returns the active path AST; an owned node takes precedence over a
+    /// borrowed one.
+    pub fn node_ref(&self) -> Option<&Node> {
+        if let Some(node) = &self.node {
+            return Some(node);
+        }
+
+        if let Some(node) = &self.node_ref {
+            return Some(*node);
+        }
+
+        None
+    }
+
+    /// Uses a pre-compiled path borrowed from the caller, clearing any
+    /// owned one.
+    pub fn compiled_path(&mut self, node: &'b Node) -> &mut Self {
+        self.node.take();
+        self.node_ref = Some(node);
+        self
+    }
+
+    /// Clears accumulated matches so the selector can be run again.
+    pub fn reset_value(&mut self) -> &mut Self {
+        self.current = None;
+        self
+    }
+
+    /// Sets the root JSON value to select from.
+    pub fn value(&mut self, v: &'a Value) -> &mut Self {
+        self.value = Some(v);
+        self
+    }
+
+    /// Runs the visitor over the stored path.
+    ///
+    /// NOTE(review): a borrowed path (`node_ref`) is consumed by the call,
+    /// while an owned `node` is put back afterwards and can be reused.
+    fn _select(&mut self) -> Result<(), JsonPathError> {
+        if self.node_ref.is_some() {
+            let node_ref = self.node_ref.take().unwrap();
+            self.visit(node_ref);
+            return Ok(());
+        }
+
+        if self.node.is_none() {
+            return Err(JsonPathError::EmptyPath);
+        }
+
+        // Temporarily move the owned node out so `visit` can borrow both
+        // it and `self` mutably, then restore it.
+        let node = self.node.take().unwrap();
+        self.visit(&node);
+        self.node = Some(node);
+
+        Ok(())
+    }
+
+    /// Selects and deserializes every match into `T`.
+    pub fn select_as<T: serde::de::DeserializeOwned>(&mut self) -> Result<Vec<T>, JsonPathError> {
+        self._select()?;
+
+        match &self.current {
+            Some(vec) => {
+                let mut ret = Vec::new();
+                for v in vec {
+                    match T::deserialize(*v) {
+                        Ok(v) => ret.push(v),
+                        Err(e) => return Err(JsonPathError::Serde(e.to_string())),
+                    }
+                }
+                Ok(ret)
+            }
+            _ => Err(JsonPathError::EmptyValue),
+        }
+    }
+
+    /// Selects and serializes the matches to a JSON array string.
+    pub fn select_as_str(&mut self) -> Result<String, JsonPathError> {
+        self._select()?;
+
+        match &self.current {
+            Some(r) => {
+                Ok(serde_json::to_string(r).map_err(|e| JsonPathError::Serde(e.to_string()))?)
+            }
+            _ => Err(JsonPathError::EmptyValue),
+        }
+    }
+
+    /// Selects and returns references to the matched values.
+    pub fn select(&mut self) -> Result<Vec<&'a Value>, JsonPathError> {
+        self._select()?;
+
+        match &self.current {
+            Some(r) => Ok(r.to_vec()),
+            _ => Err(JsonPathError::EmptyValue),
+        }
+    }
+
+    /// Routes tokens to a nested selector when an absolute path appears
+    /// inside a filter. When the nested path finishes (next `$`/`@`/filter
+    /// token), its result is converted into a term and pushed onto the
+    /// appropriate filter stack. Returns true if the token was consumed
+    /// by a nested selector.
+    fn compute_absolute_path_filter(&mut self, token: &ParseToken) -> bool {
+        if !self.selectors.is_empty() {
+            match token {
+                ParseToken::Absolute | ParseToken::Relative | ParseToken::Filter(_) => {
+                    let selector = self.selectors.pop().unwrap();
+
+                    if let Some(current) = &selector.current {
+                        let term = current.into();
+
+                        if let Some(s) = self.selectors.last_mut() {
+                            s.selector_filter.push_term(Some(term));
+                        } else {
+                            self.selector_filter.push_term(Some(term));
+                        }
+                    } else {
+                        unreachable!()
+                    }
+                }
+                _ => {}
+            }
+        }
+
+        if let Some(selector) = self.selectors.last_mut() {
+            selector.visit_token(token);
+            true
+        } else {
+            false
+        }
+    }
+}
+
+// Token handlers invoked by `visit_token`; they mutate `current`, the
+// parse-token stack and the filter-term stack.
+impl<'a, 'b> Selector<'a, 'b> {
+    /// `$`: start from the root. Inside a filter (i.e. when `current` is
+    /// already set) a nested selector is spawned instead.
+    fn visit_absolute(&mut self) {
+        if self.current.is_some() {
+            let mut selector = Selector::default();
+
+            if let Some(value) = self.value {
+                selector.value = Some(value);
+                selector.current = Some(vec![value]);
+                self.selectors.push(selector);
+            }
+            return;
+        }
+
+        if let Some(v) = &self.value {
+            self.current = Some(vec![v]);
+        }
+    }
+
+    /// `@`: current value inside a filter; opens a new filter context.
+    /// A preceding `..[` (Leaves + Array) first expands to all descendants.
+    fn visit_relative(&mut self) {
+        if let Some(ParseToken::Array) = self.tokens.last() {
+            let array_token = self.tokens.pop();
+            if let Some(ParseToken::Leaves) = self.tokens.last() {
+                self.tokens.pop();
+                self.current = self.selector_filter.collect_all(&self.current);
+            }
+            self.tokens.push(array_token.unwrap());
+        }
+        self.selector_filter.new_filter_context();
+    }
+
+    /// `]`: resolve whatever term the bracket expression produced into
+    /// a new `current` set.
+    fn visit_array_eof(&mut self) {
+        if self.is_last_before_token_match(ParseToken::Array) {
+            if let Some(Some(e)) = self.selector_filter.pop_term() {
+                // `['key']` — a plain quoted key, not a filter.
+                if let ExprTerm::String(key) = e {
+                    self.selector_filter.filter_next_with_str(&self.current, &key);
+                    self.tokens.pop();
+                    return;
+                }
+
+                self.selector_filter.push_term(Some(e));
+            }
+        }
+
+        if self.is_last_before_token_match(ParseToken::Leaves) {
+            // `..[n]` / `..['key']` — recursive descent variants.
+            self.tokens.pop();
+            self.tokens.pop();
+            if let Some(Some(e)) = self.selector_filter.pop_term() {
+                let selector_filter_consumed = match &e {
+                    ExprTerm::Number(n) => {
+                        self.current = self.selector_filter.collect_all_with_num(&self.current, to_f64(n));
+                        self.selector_filter.pop_term();
+                        true
+                    }
+                    ExprTerm::String(key) => {
+                        self.current = self.selector_filter.collect_all_with_str(&self.current, key);
+                        self.selector_filter.pop_term();
+                        true
+                    }
+                    _ => {
+                        self.selector_filter.push_term(Some(e));
+                        false
+                    }
+                };
+
+                if selector_filter_consumed {
+                    return;
+                }
+            }
+        }
+
+        // Generic case: index, key or filter result becomes `current`.
+        if let Some(Some(e)) = self.selector_filter.pop_term() {
+            match e {
+                ExprTerm::Number(n) => {
+                    self.current = self.selector_filter.collect_next_with_num(&self.current, to_f64(&n));
+                }
+                ExprTerm::String(key) => {
+                    self.current = self.selector_filter.collect_next_with_str(&self.current, &[key]);
+                }
+                ExprTerm::Json(rel, _, v) => {
+                    if v.is_empty() {
+                        self.current = Some(vec![]);
+                    } else if let Some(vec) = rel {
+                        // Prefer the relative context (the objects that
+                        // contained the matches) over the raw matches.
+                        self.current = Some(vec);
+                    } else {
+                        self.current = Some(v);
+                    }
+                }
+                ExprTerm::Bool(false) => {
+                    self.current = Some(vec![]);
+                }
+                _ => {}
+            }
+        }
+
+        self.tokens.pop();
+    }
+
+    /// True when the token *before* the top of the stack equals `token`.
+    fn is_last_before_token_match(&mut self, token: ParseToken) -> bool {
+        if self.tokens.len() > 1 {
+            return token == self.tokens[self.tokens.len() - 2];
+        }
+
+        false
+    }
+
+    /// `*`: children of every current value; `..*` expands to all
+    /// descendants instead.
+    fn visit_all(&mut self) {
+        if let Some(ParseToken::Array) = self.tokens.last() {
+            self.tokens.pop();
+        }
+
+        match self.tokens.last() {
+            Some(ParseToken::Leaves) => {
+                self.tokens.pop();
+                self.current = self.selector_filter.collect_all(&self.current);
+            }
+            Some(ParseToken::In) => {
+                self.tokens.pop();
+                self.current = self.selector_filter.collect_next_all(&self.current);
+            }
+            _ => {
+                self.current = self.selector_filter.collect_next_all(&self.current);
+            }
+        }
+    }
+
+    /// A bare key: inside `[..]` it becomes a string term; after `.` or
+    /// `..` it selects (or filters on) that key.
+    fn visit_key(&mut self, key: &str) {
+        if let Some(ParseToken::Array) = self.tokens.last() {
+            self.selector_filter.push_term(Some(ExprTerm::String(key.to_string())));
+            return;
+        }
+
+        if let Some(t) = self.tokens.pop() {
+            if self.selector_filter.is_term_empty() {
+                match t {
+                    ParseToken::Leaves => {
+                        self.current = self.selector_filter.collect_all_with_str(&self.current, key)
+                    }
+                    ParseToken::In => {
+                        self.current = self.selector_filter.collect_next_with_str(&self.current, &[key.to_string()])
+                    }
+                    _ => {}
+                }
+            } else {
+                match t {
+                    ParseToken::Leaves => {
+                        self.selector_filter.filter_all_with_str(&self.current, key);
+                    }
+                    ParseToken::In => {
+                        self.selector_filter.filter_next_with_str(&self.current, key);
+                    }
+                    _ => {}
+                }
+            }
+        }
+    }
+
+    /// `['a','b']`: multi-key union; not supported inside filters.
+    fn visit_keys(&mut self, keys: &[String]) {
+        if !self.selector_filter.is_term_empty() {
+            unimplemented!("keys in filter");
+        }
+
+        if let Some(ParseToken::Array) = self.tokens.pop() {
+            self.current = self.selector_filter.collect_next_with_str(&self.current, keys);
+        } else {
+            unreachable!();
+        }
+    }
+
+    /// A binary filter operator: pops the two operands (falling back to
+    /// `current` for an implicit operand), applies the operator and pushes
+    /// the result.
+    fn visit_filter(&mut self, ft: &FilterToken) {
+        let right = match self.selector_filter.pop_term() {
+            Some(Some(right)) => right,
+            Some(None) => ExprTerm::Json(
+                None,
+                None,
+                match &self.current {
+                    Some(current) => current.to_vec(),
+                    _ => unreachable!(),
+                },
+            ),
+            _ => panic!("empty term right"),
+        };
+
+        let left = match self.selector_filter.pop_term() {
+            Some(Some(left)) => left,
+            Some(None) => ExprTerm::Json(
+                None,
+                None,
+                match &self.current {
+                    Some(current) => current.to_vec(),
+                    _ => unreachable!(),
+                },
+            ),
+            _ => panic!("empty term left"),
+        };
+
+        let mut ret = None;
+        match ft {
+            FilterToken::Equal => left.eq(&right, &mut ret),
+            FilterToken::NotEqual => left.ne(&right, &mut ret),
+            FilterToken::Greater => left.gt(&right, &mut ret),
+            FilterToken::GreaterOrEqual => left.ge(&right, &mut ret),
+            FilterToken::Little => left.lt(&right, &mut ret),
+            FilterToken::LittleOrEqual => left.le(&right, &mut ret),
+            FilterToken::And => left.and(&right, &mut ret),
+            FilterToken::Or => left.or(&right, &mut ret),
+        };
+
+        if let Some(e) = ret {
+            self.selector_filter.push_term(Some(e));
+        }
+    }
+
+    /// `[from:to:step]` slice; bounds are clamped via `abs_index`, step
+    /// defaults to 1; not supported inside filters.
+    fn visit_range(&mut self, from: &Option<isize>, to: &Option<isize>, step: &Option<usize>) {
+        if !self.selector_filter.is_term_empty() {
+            unimplemented!("range syntax in filter");
+        }
+
+        if let Some(ParseToken::Array) = self.tokens.pop() {
+            let mut tmp = Vec::new();
+            if let Some(current) = &self.current {
+                for v in current {
+                    if let Value::Array(vec) = v {
+                        let from = if let Some(from) = from {
+                            abs_index(*from, vec.len())
+                        } else {
+                            0
+                        };
+
+                        let to = if let Some(to) = to {
+                            abs_index(*to, vec.len())
+                        } else {
+                            vec.len()
+                        };
+
+                        for i in (from..to).step_by(match step {
+                            Some(step) => *step,
+                            _ => 1,
+                        }) {
+                            if let Some(v) = vec.get(i) {
+                                tmp.push(v);
+                            }
+                        }
+                    }
+                }
+            }
+            self.current = Some(tmp);
+        } else {
+            unreachable!();
+        }
+    }
+
+    /// `[i,j,...]` index union; not supported inside filters.
+    fn visit_union(&mut self, indices: &[isize]) {
+        if !self.selector_filter.is_term_empty() {
+            unimplemented!("union syntax in filter");
+        }
+
+        if let Some(ParseToken::Array) = self.tokens.pop() {
+            let mut tmp = Vec::new();
+            if let Some(current) = &self.current {
+                for v in current {
+                    if let Value::Array(vec) = v {
+                        for i in indices {
+                            if let Some(v) = vec.get(abs_index(*i, vec.len())) {
+                                tmp.push(v);
+                            }
+                        }
+                    }
+                }
+            }
+
+            self.current = Some(tmp);
+        } else {
+            unreachable!();
+        }
+    }
+}
+
+impl<'a, 'b> NodeVisitor for Selector<'a, 'b> {
+    /// Dispatches each parse token to its handler. Tokens belonging to a
+    /// nested (absolute-in-filter) selector are forwarded and consumed by
+    /// `compute_absolute_path_filter`.
+    fn visit_token(&mut self, token: &ParseToken) {
+        debug!("token: {:?}, stack: {:?}", token, self.tokens);
+
+        if self.compute_absolute_path_filter(token) {
+            return;
+        }
+
+        match token {
+            ParseToken::Absolute => self.visit_absolute(),
+            ParseToken::Relative => self.visit_relative(),
+            // Structural tokens are deferred on the stack until the token
+            // that completes them arrives.
+            ParseToken::In | ParseToken::Leaves | ParseToken::Array => {
+                self.tokens.push(token.clone());
+            }
+            ParseToken::ArrayEof => self.visit_array_eof(),
+            ParseToken::All => self.visit_all(),
+            ParseToken::Bool(b) => {
+                self.selector_filter.push_term(Some(ExprTerm::Bool(*b)));
+            }
+            ParseToken::Key(key) => self.visit_key(key),
+            ParseToken::Keys(keys) => self.visit_keys(keys),
+            ParseToken::Number(v) => {
+                self.selector_filter.push_term(Some(ExprTerm::Number(Number::from_f64(*v).unwrap())));
+            }
+            ParseToken::Filter(ref ft) => self.visit_filter(ft),
+            ParseToken::Range(from, to, step) => self.visit_range(from, to, step),
+            ParseToken::Union(indices) => self.visit_union(indices),
+            ParseToken::Eof => {
+                debug!("visit_token eof");
+            }
+        }
+    }
+}
+
/// A selector that mutates the JSON document it owns: nodes matched by the
/// compiled path can be deleted, removed or replaced in place
/// (see `delete`, `remove`, `replace_with`).
#[derive(Default)]
pub struct SelectorMut {
    path: Option<Node>,   // compiled JSONPath to apply
    value: Option<Value>, // the JSON document being edited
}
+
+fn replace_value<F: FnMut(Value) -> Option<Value>>(
+ mut tokens: Vec<String>,
+ value: &mut Value,
+ fun: &mut F,
+) {
+ let mut target = value;
+
+ let last_index = tokens.len().saturating_sub(1);
+ for (i, token) in tokens.drain(..).enumerate() {
+ let target_once = target;
+ let is_last = i == last_index;
+ let target_opt = match *target_once {
+ Value::Object(ref mut map) => {
+ if is_last {
+ if let Entry::Occupied(mut e) = map.entry(token) {
+ let v = e.insert(Value::Null);
+ if let Some(res) = fun(v) {
+ e.insert(res);
+ } else {
+ e.remove();
+ }
+ }
+ return;
+ }
+ map.get_mut(&token)
+ }
+ Value::Array(ref mut vec) => {
+ if let Ok(x) = token.parse::<usize>() {
+ if is_last {
+ let v = std::mem::replace(&mut vec[x], Value::Null);
+ if let Some(res) = fun(v) {
+ vec[x] = res;
+ } else {
+ vec.remove(x);
+ }
+ return;
+ }
+ vec.get_mut(x)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ };
+
+ if let Some(t) = target_opt {
+ target = t;
+ } else {
+ break;
+ }
+ }
+}
+
impl SelectorMut {
    /// Creates an empty `SelectorMut`; set a path and a value before use.
    pub fn new() -> Self {
        Self::default()
    }

    /// Compiles `path` and stores it for the subsequent mutating operations.
    pub fn str_path(&mut self, path: &str) -> Result<&mut Self, JsonPathError> {
        self.path = Some(Parser::compile(path).map_err(JsonPathError::Path)?);
        Ok(self)
    }

    /// Stores the JSON document that will be mutated.
    pub fn value(&mut self, value: Value) -> &mut Self {
        self.value = Some(value);
        self
    }

    /// Takes the (possibly mutated) document out of the selector.
    pub fn take(&mut self) -> Option<Value> {
        self.value.take()
    }

    /// Converts selected node references back into token paths (object keys
    /// and stringified array indices) by walking the owned document and
    /// matching nodes by pointer equality.
    fn compute_paths(&self, mut result: Vec<&Value>) -> Vec<Vec<String>> {
        // Depth-first walk: removes each located target from `target` and
        // records the token path where it was first seen. Returns true once
        // every target has been found, short-circuiting the traversal.
        fn _walk(
            origin: &Value,
            target: &mut Vec<&Value>,
            tokens: &mut Vec<String>,
            visited: &mut HashSet<*const Value>,
            visited_order: &mut Vec<Vec<String>>,
        ) -> bool {
            trace!("{:?}, {:?}", target, tokens);

            if target.is_empty() {
                return true;
            }

            target.retain(|t| {
                // Pointer equality: `t` borrows from the same document, so
                // identity (not structural equality) identifies the node.
                if std::ptr::eq(origin, *t) {
                    // Record each distinct node only once, in discovery order.
                    if visited.insert(*t) {
                        visited_order.push(tokens.to_vec());
                    }
                    false
                } else {
                    true
                }
            });

            match origin {
                Value::Array(vec) => {
                    for (i, v) in vec.iter().enumerate() {
                        tokens.push(i.to_string());
                        if _walk(v, target, tokens, visited, visited_order) {
                            return true;
                        }
                        tokens.pop();
                    }
                }
                Value::Object(map) => {
                    for (k, v) in map {
                        tokens.push(k.clone());
                        if _walk(v, target, tokens, visited, visited_order) {
                            return true;
                        }
                        tokens.pop();
                    }
                }
                _ => {}
            }

            false
        }

        let mut visited = HashSet::new();
        let mut visited_order = Vec::new();

        if let Some(origin) = &self.value {
            let mut tokens = Vec::new();
            _walk(
                origin,
                &mut result,
                &mut tokens,
                &mut visited,
                &mut visited_order,
            );
        }

        visited_order
    }

    /// Replaces every matched node with `null`.
    pub fn delete(&mut self) -> Result<&mut Self, JsonPathError> {
        self.replace_with(&mut |_| Some(Value::Null))
    }

    /// Removes every matched node from its parent container.
    pub fn remove(&mut self) -> Result<&mut Self, JsonPathError> {
        self.replace_with(&mut |_| None)
    }

    /// Runs the compiled path against the stored document, returning the
    /// matched node references. Errors if no path has been set.
    fn select(&self) -> Result<Vec<&Value>, JsonPathError> {
        if let Some(node) = &self.path {
            let mut selector = Selector::default();
            selector.compiled_path(&node);

            if let Some(value) = &self.value {
                selector.value(value);
            }

            Ok(selector.select()?)
        } else {
            Err(JsonPathError::EmptyPath)
        }
    }

    /// Applies `fun` to every matched node: `Some(v)` replaces the node,
    /// `None` removes it. Paths are computed up front (borrow of `self.value`
    /// ends) so the document can then be mutated in place.
    pub fn replace_with<F: FnMut(Value) -> Option<Value>>(
        &mut self,
        fun: &mut F,
    ) -> Result<&mut Self, JsonPathError> {
        let paths = {
            let result = self.select()?;
            self.compute_paths(result)
        };

        if let Some(ref mut value) = &mut self.value {
            for tokens in paths {
                replace_value(tokens, value, fun);
            }
        }

        Ok(self)
    }
}
+
+
#[cfg(test)]
mod select_inner_tests {
    use serde_json::Value;

    // These tests pin `to_f64` for each backing representation of a
    // serde_json `Number` (i64, f64, u64). Exact `== 0_f64` comparison is
    // intentional: the conversion must be lossless for these inputs.
    // Idiom fix: `assert!(expr)` instead of `assert_eq!(expr, true)`
    // (clippy `bool_assert_comparison`).

    #[test]
    fn to_f64_i64() {
        let number = 0_i64;
        let v: Value = serde_json::from_str(&format!("{}", number)).unwrap();
        if let Value::Number(n) = v {
            assert!((super::to_f64(&n) - number as f64).abs() == 0_f64);
        } else {
            panic!();
        }
    }

    #[test]
    fn to_f64_f64() {
        let number = 0.1_f64;
        let v: Value = serde_json::from_str(&format!("{}", number)).unwrap();
        if let Value::Number(n) = v {
            assert!((super::to_f64(&n) - number).abs() == 0_f64);
        } else {
            panic!();
        }
    }

    #[test]
    fn to_f64_u64() {
        let number = u64::max_value();
        let v: Value = serde_json::from_str(&format!("{}", number)).unwrap();
        if let Value::Number(n) = v {
            assert!((super::to_f64(&n) - number as f64).abs() == 0_f64);
        } else {
            panic!();
        }
    }
}
diff --git a/vendor/jsonpath_lib/src/select/value_walker.rs b/vendor/jsonpath_lib/src/select/value_walker.rs
new file mode 100644
index 000000000..e7b4de0e0
--- /dev/null
+++ b/vendor/jsonpath_lib/src/select/value_walker.rs
@@ -0,0 +1,99 @@
+use serde_json::Value;
+use std::collections::HashSet;
+
+pub(super) struct ValueWalker;
+
+impl<'a> ValueWalker {
+ pub fn all_with_num(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, index: f64) {
+ Self::walk(vec, tmp, &|v| if v.is_array() {
+ if let Some(item) = v.get(index as usize) {
+ Some(vec![item])
+ } else {
+ None
+ }
+ } else {
+ None
+ });
+ }
+
+ pub fn all_with_str(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, key: &str, is_filter: bool) {
+ if is_filter {
+ Self::walk(vec, tmp, &|v| match v {
+ Value::Object(map) if map.contains_key(key) => Some(vec![v]),
+ _ => None,
+ });
+ } else {
+ Self::walk(vec, tmp, &|v| match v {
+ Value::Object(map) => match map.get(key) {
+ Some(v) => Some(vec![v]),
+ _ => None,
+ },
+ _ => None,
+ });
+ }
+ }
+
+ pub fn all(vec: &[&'a Value], tmp: &mut Vec<&'a Value>) {
+ Self::walk(vec, tmp, &|v| match v {
+ Value::Array(vec) => Some(vec.iter().collect()),
+ Value::Object(map) => {
+ let mut tmp = Vec::new();
+ for (_, v) in map {
+ tmp.push(v);
+ }
+ Some(tmp)
+ }
+ _ => None,
+ });
+ }
+
+ fn walk<F>(vec: &[&'a Value], tmp: &mut Vec<&'a Value>, fun: &F) where F: Fn(&Value) -> Option<Vec<&Value>> {
+ for v in vec {
+ Self::_walk(v, tmp, fun);
+ }
+ }
+
+ fn _walk<F>(v: &'a Value, tmp: &mut Vec<&'a Value>, fun: &F) where F: Fn(&Value) -> Option<Vec<&Value>> {
+ if let Some(mut ret) = fun(v) {
+ tmp.append(&mut ret);
+ }
+
+ match v {
+ Value::Array(vec) => {
+ for v in vec {
+ Self::_walk(v, tmp, fun);
+ }
+ }
+ Value::Object(map) => {
+ for (_, v) in map {
+ Self::_walk(&v, tmp, fun);
+ }
+ }
+ _ => {}
+ }
+ }
+
+ pub fn walk_dedup(v: &'a Value,
+ tmp: &mut Vec<&'a Value>,
+ key: &str,
+ visited: &mut HashSet<*const Value>, ) {
+ match v {
+ Value::Object(map) => {
+ if map.contains_key(key) {
+ let ptr = v as *const Value;
+ if !visited.contains(&ptr) {
+ visited.insert(ptr);
+ tmp.push(v)
+ }
+ }
+ }
+ Value::Array(vec) => {
+ for v in vec {
+ Self::walk_dedup(v, tmp, key, visited);
+ }
+ }
+ _ => {}
+ }
+ }
+}
+