summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMatthias Beyer <mail@beyermatthias.de>2020-01-10 13:06:37 +0100
committerMatthias Beyer <mail@beyermatthias.de>2020-01-10 13:06:37 +0100
commit5f6654ce1b93c60a30a8caa93b6daf07877915f7 (patch)
tree0a61bbcd673c832abde6261aeaaf80ee62991468
Initial import of serde-select code
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
-rw-r--r--.gitignore3
-rw-r--r--Cargo.toml33
-rw-r--r--src/error.rs69
-rw-r--r--src/lib.rs11
-rw-r--r--src/object.rs143
-rw-r--r--src/query.rs11
-rw-r--r--src/read.rs101
-rw-r--r--src/tokenizer.rs552
8 files changed, 923 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6936990
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+/target
+**/*.rs.bk
+Cargo.lock
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..83d21cd
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "serde-select"
+version = "0.1.0"
+authors = ["Matthias Beyer <mail@beyermatthias.de>"]
+edition = "2018"
+description = "Query serde de/serializable objects"
+readme = "README.md"
+keywords = ["serde", "extension"]
+license = "MPL-2.0"
+repository = "https://github.com/matthiasbeyer/serde-select"
+
+[dependencies]
+failure = "0.1"
+failure_derive = "0.1"
+serde = "1"
+regex = "1.0"
+log = "0.4"
+lazy_static = "1.0"
+
+[dependencies.serde_json]
+version = "1"
+optional = true
+
+[dependencies.toml]
+version = "0.5"
+optional = true
+
+[features]
+default = ["backend_toml", "backend_serde_json"]
+
+backend_toml = ["toml"]
+backend_serde_json = ["serde_json"]
+
diff --git a/src/error.rs b/src/error.rs
new file mode 100644
index 0000000..eb9d161
--- /dev/null
+++ b/src/error.rs
@@ -0,0 +1,69 @@
+/// Error types
+
+pub type Result<T> = ::std::result::Result<T, Error>;
+
+#[derive(Debug, Fail)]
+pub enum Error {
+ #[cfg(feature = "typed")]
+ #[fail(display = "{}", _0)]
+ Serialize(#[cause] ::serde::ser::Error),
+
+ #[cfg(feature = "typed")]
+ #[fail(display = "{}", _0)]
+ Deserialize(#[cause] ::serde::de::Error),
+
+ // Errors for tokenizer
+ #[fail(display = "Parsing the query '{}' failed", _0)]
+ QueryParsingError(String),
+
+ #[fail(display = "The query on the document is empty")]
+ EmptyQueryError,
+
+ #[fail(display = "The passed query has an empty identifier")]
+ EmptyIdentifier,
+
+ #[fail(display = "The passed query tries to access an array but does not specify the index")]
+ ArrayAccessWithoutIndex,
+
+ #[fail(
+ display = "The passed query tries to access an array but does not specify a valid index"
+ )]
+ ArrayAccessWithInvalidIndex,
+
+ // Errors for Resolver
+ #[fail(display = "The identifier '{}' is not present in the document", _0)]
+ IdentifierNotFoundInDocument(String),
+
+ #[fail(display = "Got an index query '[{}]' but have table", _0)]
+ NoIndexInTable(usize),
+
+ #[fail(display = "Got an identifier query '{}' but have array", _0)]
+ NoIdentifierInArray(String),
+
+ #[fail(display = "Got an identifier query '{}' but have value", _0)]
+ QueryingValueAsTable(String),
+
+ #[fail(display = "Got an index query '{}' but have value", _0)]
+ QueryingValueAsArray(usize),
+
+ #[fail(display = "Cannot delete table '{:?}' which is not empty", _0)]
+ CannotDeleteNonEmptyTable(Option<String>),
+
+ #[fail(display = "Cannot delete array '{:?}' which is not empty", _0)]
+ CannotDeleteNonEmptyArray(Option<String>),
+
+ #[fail(display = "Cannot access {} because expected {}", _0, _1)]
+ CannotAccessBecauseTypeMismatch(&'static str, &'static str),
+
+ #[fail(display = "Cannot delete in array at {}, array has length {}", _0, _1)]
+ ArrayIndexOutOfBounds(usize, usize),
+
+ #[fail(display = "Cannot access array at {}, array has length {}", _0, _1)]
+ IndexOutOfBounds(usize, usize),
+
+ #[fail(display = "Type Error. Requested {}, but got {}", _0, _1)]
+ TypeError(&'static str, &'static str),
+
+ #[fail(display = "Value at '{}' not there", _0)]
+ NotAvailable(String),
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..8b34f33
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,11 @@
+#[macro_use] extern crate log;
+#[macro_use] extern crate failure;
+#[macro_use] extern crate failure_derive;
+#[macro_use] extern crate regex;
+#[macro_use] extern crate lazy_static;
+
+pub mod error;
+pub mod object;
+pub mod tokenizer;
+pub mod query;
+pub mod read;
diff --git a/src/object.rs b/src/object.rs
new file mode 100644
index 0000000..e0eae87
--- /dev/null
+++ b/src/object.rs
@@ -0,0 +1,143 @@
+use crate::error::*;
+
+pub enum ObjectType {
+ Atom,
+ Map,
+ Array,
+}
+
+pub trait Object {
+ fn get_type(&self) -> ObjectType;
+ fn has_key(&self, key: &str) -> bool;
+ fn has_index(&self, idx: usize) -> bool;
+
+ fn at_index<'a>(&'a self, idx: usize) -> Result<Option<&'a Self>>;
+ fn at_index_mut<'a>(&'a mut self, idx: usize) -> Result<Option<&'a mut Self>>;
+ fn at_key<'a>(&'a self, key: &str) -> Result<Option<&'a Self>>;
+ fn at_key_mut<'a>(&'a mut self, key: &str) -> Result<Option<&'a mut Self>>;
+}
+
+#[cfg(feature = "backend_toml")]
+impl Object for toml::Value {
+ fn get_type(&self) -> ObjectType {
+ match self {
+ toml::Value::Boolean(_)
+ | toml::Value::Datetime(_)
+ | toml::Value::Float(_)
+ | toml::Value::Integer(_)
+ | toml::Value::String(_) => ObjectType::Atom,
+
+ toml::Value::Array(_) => ObjectType::Array,
+ toml::Value::Table(_) => ObjectType::Map,
+ }
+ }
+
+ fn has_key(&self, key: &str) -> bool {
+ match self {
+ toml::Value::Table(t) => t.contains_key(key),
+ _ => false
+ }
+ }
+
+ fn has_index(&self, idx: usize) -> bool {
+ match self {
+ toml::Value::Array(a) => a.len() > idx,
+ _ => false
+ }
+ }
+
+ fn at_index<'a>(&'a self, idx: usize) -> Result<Option<&'a Self>> {
+ match self {
+ toml::Value::Array(a) => Ok(a.get(idx)),
+ toml::Value::Table(_) => Err(crate::error::Error::NoIndexInTable(idx)),
+ _ => Err(crate::error::Error::QueryingValueAsArray(idx)),
+ }
+ }
+
+ fn at_index_mut<'a>(&'a mut self, idx: usize) -> Result<Option<&'a mut Self>> {
+ match self {
+ toml::Value::Array(a) => Ok(a.get_mut(idx)),
+ toml::Value::Table(_) => Err(crate::error::Error::NoIndexInTable(idx)),
+ _ => Err(crate::error::Error::QueryingValueAsArray(idx)),
+ }
+ }
+
+ fn at_key<'a>(&'a self, key: &str) -> Result<Option<&'a Self>> {
+ match self {
+ toml::Value::Table(t) => Ok(t.get(key)),
+ toml::Value::Array(_) => Err(crate::error::Error::NoIdentifierInArray(key.to_string())),
+ _ => Err(crate::error::Error::QueryingValueAsTable(key.to_string())),
+ }
+ }
+
+ fn at_key_mut<'a>(&'a mut self, key: &str) -> Result<Option<&'a mut Self>> {
+ match self {
+ toml::Value::Table(t) => Ok(t.get_mut(key)),
+ toml::Value::Array(_) => Err(crate::error::Error::NoIdentifierInArray(key.to_string())),
+ _ => Err(crate::error::Error::QueryingValueAsTable(key.to_string())),
+ }
+ }
+}
+
+
+#[cfg(feature = "backend_serde_json")]
+impl Object for serde_json::Value {
+ fn get_type(&self) -> ObjectType {
+ match self {
+ serde_json::Value::Null
+ | serde_json::Value::Bool(_)
+ | serde_json::Value::Number(_)
+ | serde_json::Value::String(_) => ObjectType::Atom,
+
+ serde_json::Value::Array(_) => ObjectType::Array,
+ serde_json::Value::Object(_) => ObjectType::Map,
+ }
+ }
+
+ fn has_key(&self, key: &str) -> bool {
+ match self {
+ serde_json::Value::Object(t) => t.contains_key(key),
+ _ => false
+ }
+ }
+
+ fn has_index(&self, idx: usize) -> bool {
+ match self {
+ serde_json::Value::Array(a) => a.len() > idx,
+ _ => false
+ }
+ }
+
+ fn at_index<'a>(&'a self, idx: usize) -> Result<Option<&'a Self>> {
+ match self {
+ serde_json::Value::Array(a) => Ok(a.get(idx)),
+ serde_json::Value::Object(_) => Err(crate::error::Error::NoIndexInTable(idx)),
+ _ => Err(crate::error::Error::QueryingValueAsArray(idx)),
+ }
+ }
+
+ fn at_index_mut<'a>(&'a mut self, idx: usize) -> Result<Option<&'a mut Self>> {
+ match self {
+ serde_json::Value::Array(a) => Ok(a.get_mut(idx)),
+ serde_json::Value::Object(_) => Err(crate::error::Error::NoIndexInTable(idx)),
+ _ => Err(crate::error::Error::QueryingValueAsArray(idx)),
+ }
+ }
+
+ fn at_key<'a>(&'a self, key: &str) -> Result<Option<&'a Self>> {
+ match self {
+ serde_json::Value::Object(t) => Ok(t.get(key)),
+ serde_json::Value::Array(_) => Err(crate::error::Error::NoIdentifierInArray(key.to_string())),
+ _ => Err(crate::error::Error::QueryingValueAsTable(key.to_string())),
+ }
+ }
+
+ fn at_key_mut<'a>(&'a mut self, key: &str) -> Result<Option<&'a mut Self>> {
+ match self {
+ serde_json::Value::Object(t) => Ok(t.get_mut(key)),
+ serde_json::Value::Array(_) => Err(crate::error::Error::NoIdentifierInArray(key.to_string())),
+ _ => Err(crate::error::Error::QueryingValueAsTable(key.to_string())),
+ }
+ }
+}
+
diff --git a/src/query.rs b/src/query.rs
new file mode 100644
index 0000000..d321501
--- /dev/null
+++ b/src/query.rs
@@ -0,0 +1,11 @@
+use crate::tokenizer::Token;
+
+pub struct Query {
+ token: Token,
+}
+
+impl Query {
+ pub(crate) fn token(&self) -> &Token {
+ &self.token
+ }
+}
diff --git a/src/read.rs b/src/read.rs
new file mode 100644
index 0000000..0d18447
--- /dev/null
+++ b/src/read.rs
@@ -0,0 +1,101 @@
+use serde::{Deserialize, Serialize};
+
+use crate::error::*;
+use crate::tokenizer::tokenize_with_seperator;
+use crate::object::*;
+use crate::tokenizer::Token;
+use crate::query::Query;
+
+pub trait Read<'doc> : Object {
+ fn read(&'doc self, query: &Query) -> Result<Option<&'doc Self>>;
+ fn read_mut(&'doc mut self, query: &Query) -> Result<Option<&'doc mut Self>>;
+}
+
+#[cfg(feature = "backend_toml")]
+impl<'doc> Read<'doc> for toml::Value {
+ fn read(&'doc self, query: &Query) -> Result<Option<&'doc Self>> {
+ query_with_token(self, query.token())
+ }
+
+ fn read_mut(&'doc mut self, query: &Query) -> Result<Option<&'doc mut Self>> {
+ query_with_token_mut(self, query.token())
+ }
+}
+
+#[cfg(feature = "backend_serde_json")]
+impl<'doc> Read<'doc> for serde_json::Value {
+ fn read(&'doc self, query: &Query) -> Result<Option<&'doc Self>> {
+ query_with_token(self, query.token())
+ }
+
+ fn read_mut(&'doc mut self, query: &Query) -> Result<Option<&'doc mut Self>> {
+ query_with_token_mut(self, query.token())
+ }
+}
+
+fn query_with_token<'doc, O>(obj: &'doc O, token: &Token) -> Result<Option<&'doc O>>
+ where O: Object
+{
+ match token {
+ Token::Identifier{ ident, .. } => {
+ let object = obj.at_key(ident)?;
+ if let Some(object) = object {
+ if let Some(next_token) = token.next() {
+ query_with_token(object, next_token)
+ } else {
+ Ok(Some(object))
+ }
+ } else {
+ Ok(None)
+ }
+ }
+
+ Token::Index { idx, .. } => {
+ let object = obj.at_index(*idx)?;
+ if let Some(object) = object {
+ if let Some(next_token) = token.next() {
+ query_with_token(object, next_token)
+ } else {
+ Ok(Some(object))
+ }
+ } else {
+ Ok(None)
+ }
+ }
+ }
+}
+
+fn query_with_token_mut<'doc, O>(obj: &'doc mut O, token: &Token) -> Result<Option<&'doc mut O>>
+ where O: Object
+{
+ match token {
+ Token::Identifier{ ident, .. } => {
+ let object = obj.at_key_mut(ident)?;
+ if let Some(object) = object {
+ if let Some(next_token) = token.next() {
+ query_with_token_mut(object, next_token)
+ } else {
+ Ok(Some(object))
+ }
+ } else {
+ Ok(None)
+ }
+ }
+
+ Token::Index { idx, .. } => {
+ let object = obj.at_index_mut(*idx)?;
+ if let Some(object) = object {
+ if let Some(next_token) = token.next() {
+ query_with_token_mut(object, next_token)
+ } else {
+ Ok(Some(object))
+ }
+ } else {
+ Ok(None)
+ }
+ }
+ }
+}
+
+
+
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
new file mode 100644
index 0000000..e8c3ff7
--- /dev/null
+++ b/src/tokenizer.rs
@@ -0,0 +1,552 @@
+/// The tokenizer for the query interpreter
+use crate::error::{Error, Result};
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum Token {
+ Identifier {
+ ident: String,
+ next: Option<Box<Token>>,
+ },
+
+ Index {
+ idx: usize,
+ next: Option<Box<Token>>,
+ },
+}
+
+impl Token {
+ pub fn next(&self) -> Option<&Token> {
+ trace!("Matching token (self): {:?}", self);
+ match self {
+ Token::Identifier { ref next, .. } => next.as_ref().map(|t| &**t),
+ Token::Index { ref next, .. } => next.as_ref().map(|t| &**t),
+ }
+ }
+
+ /// Convenience function for `token.next().is_some()`
+ pub fn has_next(&self) -> bool {
+ trace!("self.has_next(): {:?}", self.next().is_some());
+ self.next().is_some()
+ }
+
+ pub fn set_next(&mut self, token: Token) {
+ trace!("self.set_next({:?})", token);
+ match self {
+ Token::Identifier { ref mut next, .. } => *next = Some(Box::new(token)),
+ Token::Index { ref mut next, .. } => *next = Some(Box::new(token)),
+ }
+ }
+
+ /// Pop the last token from the chain of tokens
+ ///
+ /// Returns None if the current Token has no next token
+ pub fn pop_last(&mut self) -> Option<Box<Token>> {
+ trace!("self.pop_last()");
+ if !self.has_next() {
+ trace!("self.pop_last(): No next");
+ None
+ } else {
+ trace!("self.pop_last(): Having next");
+ match self {
+ Token::Identifier { ref mut next, .. } => {
+ trace!("self.pop_last(): self is Identifier");
+ if next.is_some() {
+ trace!("self.pop_last(): next is Some(_)");
+ let mut n = next.take().unwrap();
+ if n.has_next() {
+ trace!("self.pop_last(): next also has a next");
+
+ trace!("self.pop_last(): Recursing now");
+ let result = n.pop_last();
+
+ *next = Some(n);
+
+ trace!("self.pop_last(): Returning Result");
+ result
+ } else {
+ trace!("self.pop_last(): next itself has no next, returning Some");
+ Some(n)
+ }
+ } else {
+ trace!("self.pop_last(): next is none, returning None");
+ None
+ }
+ }
+
+ Token::Index { ref mut next, .. } => {
+ trace!("self.pop_last(): self is Index");
+ if next.is_some() {
+ trace!("self.pop_last(): next is Some(_)");
+
+ let mut n = next.take().unwrap();
+ if n.has_next() {
+ trace!("self.pop_last(): next also has a next");
+
+ trace!("self.pop_last(): Recursing now");
+ let result = n.pop_last();
+
+ *next = Some(n);
+
+ trace!("self.pop_last(): Returning Result");
+ result
+ } else {
+ trace!("self.pop_last(): next itself has no next, returning Some");
+ Some(n)
+ }
+ } else {
+ trace!("self.pop_last(): next is none, returning None");
+ None
+ }
+ }
+ }
+ }
+ }
+
+ #[cfg(test)]
+ pub fn identifier(&self) -> &String {
+ trace!("self.identifier()");
+ match self {
+ Token::Identifier { ref ident, .. } => &ident,
+ _ => unreachable!(),
+ }
+ }
+
+ #[cfg(test)]
+ pub fn idx(&self) -> usize {
+ trace!("self.idx()");
+ match self {
+ Token::Index { idx: i, .. } => *i,
+ _ => unreachable!(),
+ }
+ }
+}
+
+pub fn tokenize_with_seperator(query: &str, seperator: char) -> Result<Token> {
+ use std::str::Split;
+ trace!(
+ "tokenize_with_seperator(query: {:?}, seperator: {:?})",
+ query,
+ seperator
+ );
+
+ /// Creates a Token object from a string
+ ///
+ /// # Panics
+ ///
+ /// * If the internal regex does not compile (should never happen)
+ /// * If the token is non-valid (that is, a array index with a non-i64)
+ /// * If the regex does not find anything
+ /// * If the integer in the brackets (`[]`) cannot be parsed to a valid i64
+ ///
+ /// # Incorrect behaviour
+ ///
+ /// * If the regex finds multiple captures
+ ///
+ /// # Returns
+ ///
+ /// The `Token` object with the correct identifier/index for this token and no next token.
+ ///
+ fn mk_token_object(s: &str) -> Result<Token> {
+ use regex::Regex;
+ use std::str::FromStr;
+
+ trace!("mk_token_object(s: {:?})", s);
+
+ lazy_static! {
+ static ref RE: Regex = Regex::new(r"^\[\d+\]$").unwrap();
+ }
+
+ if !has_array_brackets(s) {
+ trace!("returning Ok(Identifier(ident: {:?}, next: None))", s);
+ return Ok(Token::Identifier {
+ ident: String::from(s),
+ next: None,
+ });
+ }
+
+ match RE.captures(s) {
+ None => Err(Error::ArrayAccessWithoutIndex),
+ Some(captures) => {
+ trace!("Captured: {:?}", captures);
+ match captures.get(0) {
+ None => Ok(Token::Identifier {
+ ident: String::from(s),
+ next: None,
+ }),
+ Some(mtch) => {
+ trace!("First capture: {:?}", mtch);
+
+ let mtch = without_array_brackets(mtch.as_str());
+ trace!(".. without array brackets: {:?}", mtch);
+
+ let i: usize = FromStr::from_str(&mtch).unwrap(); // save because regex
+
+ trace!("returning Ok(Index(idx: {}, next: None)", i);
+ Ok(Token::Index { idx: i, next: None })
+ }
+ }
+ }
+ }
+ }
+
+ /// Check whether a str begins with '[' and ends with ']'
+ fn has_array_brackets(s: &str) -> bool {
+ trace!("has_array_brackets({:?})", s);
+ s.as_bytes()[0] == b'[' && s.as_bytes()[s.len() - 1] == b']'
+ }
+
+ /// Remove '[' and ']' from a str
+ fn without_array_brackets(s: &str) -> String {
+ trace!("without_array_brackets({:?})", s);
+ s.replace("[", "").replace("]", "")
+ }
+
+ fn build_token_tree(split: &mut Split<'_, char>, last: &mut Token) -> Result<()> {
+ trace!("build_token_tree(split: {:?}, last: {:?})", split, last);
+ match split.next() {
+ None => { /* No more tokens */ }
+ Some(token) => {
+ trace!("build_token_tree(...): next from split: {:?}", token);
+
+ if token.is_empty() {
+ trace!("build_token_tree(...): Empty identifier... returning Error");
+ return Err(Error::EmptyIdentifier);
+ }
+
+ let mut token = mk_token_object(token)?;
+ build_token_tree(split, &mut token)?;
+ last.set_next(token);
+ }
+ }
+
+ trace!("build_token_tree(...): returning Ok(())");
+ Ok(())
+ }
+
+ if query.is_empty() {
+ trace!("Query is empty. Returning error");
+ return Err(Error::EmptyQueryError);
+ }
+
+ let mut tokens = query.split(seperator);
+ trace!("Tokens splitted: {:?}", tokens);
+
+ match tokens.next() {
+ None => Err(Error::EmptyQueryError),
+ Some(token) => {
+ trace!("next Token: {:?}", token);
+
+ if token.is_empty() {
+ trace!("Empty token. Returning Error");
+ return Err(Error::EmptyIdentifier);
+ }
+
+ let mut tok = mk_token_object(token)?;
+ build_token_tree(&mut tokens, &mut tok)?;
+
+ trace!("Returning Ok({:?})", tok);
+ Ok(tok)
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::error::Error;
+
+ use std::ops::Deref;
+
+ #[test]
+ fn test_tokenize_empty_query_to_error() {
+ let tokens = tokenize_with_seperator(&String::from(""), '.');
+ assert!(tokens.is_err());
+ let tokens = tokens.unwrap_err();
+
+ assert!(is_match!(tokens, Error::EmptyQueryError { .. }));
+ }
+
+ #[test]
+ fn test_tokenize_seperator_only() {
+ let tokens = tokenize_with_seperator(&String::from("."), '.');
+ assert!(tokens.is_err());
+ let tokens = tokens.unwrap_err();
+
+ assert!(is_match!(tokens, Error::EmptyIdentifier { .. }));
+ }
+
+ #[test]
+ fn test_tokenize_array_brackets_only() {
+ let tokens = tokenize_with_seperator(&String::from("[]"), '.');
+ assert!(tokens.is_err());
+ let tokens = tokens.unwrap_err();
+
+ assert!(is_match!(tokens, Error::ArrayAccessWithoutIndex { .. }));
+ }
+
+ #[test]
+ fn test_tokenize_identifiers_with_array_brackets_only() {
+ let tokens = tokenize_with_seperator(&String::from("a.b.c.[]"), '.');
+ assert!(tokens.is_err());
+ let tokens = tokens.unwrap_err();
+
+ assert!(is_match!(tokens, Error::ArrayAccessWithoutIndex { .. }));
+ }
+
+ #[test]
+ fn test_tokenize_identifiers_in_array_brackets() {
+ let tokens = tokenize_with_seperator(&String::from("[a]"), '.');
+ assert!(tokens.is_err());
+ let tokens = tokens.unwrap_err();
+
+ assert!(is_match!(tokens, Error::ArrayAccessWithoutIndex { .. }));
+ }
+
+ #[test]
+ fn test_tokenize_single_token_query() {
+ let tokens = tokenize_with_seperator(&String::from("example"), '.');
+ assert!(tokens.is_ok());
+ let tokens = tokens.unwrap();
+
+ assert!(match tokens {
+ Token::Identifier {
+ ref ident,
+ next: None,
+ } => {
+ assert_eq!("example", ident);
+ true
+ }
+ _ => false,
+ });
+ }
+
+ #[test]
+ fn test_tokenize_double_token_query() {
+ let tokens = tokenize_with_seperator(&String::from("a.b"), '.');
+ assert!(tokens.is_ok());
+ let tokens = tokens.unwrap();
+
+ assert!(match tokens {
+ Token::Identifier {
+ next: Some(ref next),
+ ..
+ } => {
+ assert_eq!("b", next.deref().identifier());
+ match next.deref() {
+ Token::Identifier { next: None, .. } => true,
+ _ => false,
+ }
+ }
+ _ => false,
+ });
+ assert_eq!("a", tokens.identifier());
+ }
+
+ #[test]
+ fn test_tokenize_ident_then_array_query() {
+ let tokens = tokenize_with_seperator(&String::from("a.[0]"), '.');
+ assert!(tokens.is_ok());
+ let tokens = tokens.unwrap();
+
+ assert_eq!("a", tokens.identifier());
+ assert!(match tokens {
+ Token::Identifier {
+ next: Some(ref next),
+ ..
+ } => match next.deref() {
+ Token::Index { idx: 0, next: None } => true,
+ _ => false,
+ },
+ _ => false,
+ });
+ }
+
+ #[test]
+ fn test_tokenize_many_idents_then_array_query() {
+ let tokens = tokenize_with_seperator(&String::from("a.b.c.[1000]"), '.');
+ assert!(tokens.is_ok());
+ let tokens = tokens.unwrap();
+
+ assert_eq!("a", tokens.identifier());
+
+ let expected = Token::Identifier {
+ ident: String::from("a"),
+ next: Some(Box::new(Token::Identifier {
+ ident: String::from("b"),
+ next: Some(Box::new(Token::Identifier {
+ ident: String::from("c"),
+ next: Some(Box::new(Token::Index {
+ idx: 1000,
+ next: None,
+ })),
+ })),
+ })),
+ };
+
+ assert_eq!(expected, tokens);
+ }
+
+ #[test]
+ fn test_tokenize_empty_token_after_good_token() {
+ let tokens = tokenize_with_seperator(&String::from("a..b"), '.');
+ assert!(tokens.is_err());
+ let tokens = tokens.unwrap_err();
+
+ assert!(is_match!(tokens, Error::EmptyIdentifier { .. }));
+ }
+
+ quickcheck! {
+ fn test_array_index(i: usize) -> bool {
+ match tokenize_with_seperator(&format!("[{}]", i), '.') {
+ Ok(Token::Index { next: None, .. }) => true,
+ _ => false,
+ }
+ }
+ }
+
+ #[test]
+ fn test_pop_last_token_from_single_identifier_token_is_none() {
+ let mut token = Token::Identifier {
+ ident: String::from("something"),
+ next: None,
+ };
+
+ let last = token.pop_last();
+ assert!(last.is_none());
+ }
+
+ #[test]
+ fn test_pop_last_token_from_single_index_token_is_none() {
+ let mut token = Token::Index { idx: 0, next: None };
+
+ let last = token.pop_last();
+ assert!(last.is_none());
+ }
+
+ #[test]
+ fn test_pop_last_token_from_single_identifier_token_is_one() {
+ let mut token = Token::Identifier {
+ ident: String::from("some"),
+ next: Some(Box::new(Token::Identifier {
+ ident: String::from("thing"),
+ next: None,
+ })),
+ };
+
+ let last = token.pop_last();
+
+ assert!(last.is_some());
+ let last = last.unwrap();
+
+ assert!(is_match!(*last, Token::Identifier { .. }));
+ match *last {
+ Token::Identifier { ident, .. } => {
+ assert_eq!("thing", ident);
+ }
+ _ => panic!("What just happened?"),
+ }
+ }
+
+ #[test]
+ fn test_pop_last_token_from_single_index_token_is_one() {
+ let mut token = Token::Index {
+ idx: 0,
+ next: Some(Box::new(Token::Index { idx: 1, next: None })),
+ };
+
+ let last = token.pop_last();
+
+ assert!(last.is_some());
+ let last = last.unwrap();
+
+ assert!(is_match!(*last, Token::Index { idx: 1, .. }));
+ }
+
+ #[test]
+ fn test_pop_last_token_from_identifier_chain() {
+ let tokens = tokenize_with_seperator(&String::from("a.b.c.d.e.f"), '.');
+ assert!(tokens.is_ok());
+ let mut tokens = tokens.unwrap();
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!("f", last.unwrap().identifier());
+ }
+
+ #[test]
+ fn test_pop_last_token_from_mixed_chain() {
+ let tokens = tokenize_with_seperator(&String::from("a.[100].c.[3].e.f"), '.');
+ assert!(tokens.is_ok());
+ let mut tokens = tokens.unwrap();
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!("f", last.unwrap().identifier());
+ }
+
+ #[test]
+ fn test_pop_last_token_from_identifier_chain_is_array() {
+ let tokens = tokenize_with_seperator(&String::from("a.b.c.d.e.f.[1000]"), '.');
+ assert!(tokens.is_ok());
+ let mut tokens = tokens.unwrap();
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!(1000, last.unwrap().idx());
+ }
+
+ #[test]
+ fn test_pop_last_token_from_mixed_chain_is_array() {
+ let tokens = tokenize_with_seperator(&String::from("a.[100].c.[3].e.f.[1000]"), '.');
+ assert!(tokens.is_ok());
+ let mut tokens = tokens.unwrap();
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!(1000, last.unwrap().idx());
+ }
+
+ #[test]
+ fn test_pop_last_token_from_one_token() {
+ let tokens = tokenize_with_seperator(&String::from("a"), '.');
+ assert!(tokens.is_ok());
+ let mut tokens = tokens.unwrap();
+
+ let last = tokens.pop_last();
+ assert!(last.is_none());
+ }
+
+ #[test]
+ fn test_pop_last_chain() {
+ let tokens = tokenize_with_seperator(&String::from("a.[100].c.[3].e.f.[1000]"), '.');
+ assert!(tokens.is_ok());
+ let mut tokens = tokens.unwrap();
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!(1000, last.unwrap().idx());
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!("f", last.unwrap().identifier());
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!("e", last.unwrap().identifier());
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!(3, last.unwrap().idx());
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!("c", last.unwrap().identifier());
+
+ let last = tokens.pop_last();
+ assert!(last.is_some());
+ assert_eq!(100, last.unwrap().idx());
+
+ let last = tokens.pop_last();
+ assert!(last.is_none());
+ }
+}