summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMario Krehl <mario-krehl@gmx.de>2017-12-16 11:33:54 +0100
committerMario Krehl <mario-krehl@gmx.de>2017-12-16 11:33:54 +0100
commit32cfc8d684b8cda57f122fc13d7b89f9efa2050c (patch)
treed8f92be902fbab21b8b2d672c4c809a2a709866f
parentaff5f24382de6fa6f00be31383d7fd0cefd193a7 (diff)
Iterate the HashMap via an additional function; reduce query function
-rw-r--r--src/main.rs133
1 files changed, 70 insertions, 63 deletions
diff --git a/src/main.rs b/src/main.rs
index 6f00175..4e4492c 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -72,19 +72,77 @@ fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>
debug!("handling query: {:?}", data.0);
- let targets = data.0.targets;
- debug!("targets: {:?}", targets);
+ Ok(
+ Json(
+ QueryResponse{
+ 0 : hash_map_iter(
+ hash_map_targets(&config, data.0.targets)?,
+ data.0.range.from.timestamp(),
+ data.0.range.to.timestamp()
+ )?
+ }
+ )
+ )
+}
- // create hashmap to iterate over
- let mut target_hash = hash_map_targets(&config, targets)?;
+/// If there are several targets, it is possible they would need different data
+/// from the same file;
+/// this HashMap is created for the sole purpose of being able to read and
+/// apply a regex on a potentially huge file only once.
+/// HashMap
+/// |------- Alias : &String
+/// \
+/// Tuple
+/// |------- &LogItem
+/// |------- Vector of Tuple
+/// |--- capturegroup name : String
+/// |--- target/metric
+fn hash_map_targets<'a>(c : &'a Config, targets : Vec<Target>)
+ -> Result<HashMap<&'a String, (&'a LogItem, Vec<(String, String)>)>> {
- let date_from = data.0.range.from.timestamp();
- let date_to = data.0.range.to.timestamp();
+ debug!("targets: {:?}", targets);
+ let mut _res : HashMap<&String, (&LogItem, Vec<(String, String)>)> = HashMap::new();
+ for li in c.items() {
+ for t in targets.clone() {
+ if li.aliases().contains(&t.target) {
+ if _res.contains_key(&li.alias()) {
+ if let Some(&mut (_litem, ref mut cnames)) = _res.get_mut(&li.alias()) {
+ cnames.push((
+ t.target
+ .split('.')
+ .nth(1)
+ .ok_or(Error::from("no capture name found"))?
+ .into(),
+ t.target.clone())
+ );
+ }
+ }
+ else {
+ _res.insert(
+ li.alias(),
+ (&li, vec![(
+ t.target
+ .split('.')
+ .nth(1)
+ .ok_or(Error::from("no capture name found"))?
+ .into(),
+ t.target.clone())
+ ]
+ )
+ );
+ }
+ }
+ }
+ }
+ Ok(_res)
+}
- let mut response : Vec<TargetData> = Vec::new();
+/// Iterate the hashmap created with the above function
+fn hash_map_iter(h : HashMap<&String, (&LogItem, Vec<(String, String)>)>, d_from : i64, d_to : i64)
+ -> Result<Vec<TargetData>> {
- // iterate the HashMap
- for (_alias, &(logitem, ref cns)) in target_hash.iter() {
+ let mut _res = Vec::new();
+ for (_alias, &(logitem, ref cns)) in h.iter() {
// prepare an empty Vector of Series
let mut series_vec = Vec::new();
@@ -110,7 +168,7 @@ fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>
.chain_err(|| "Failed to parse the filestamp")?;
// ignore every entry not in the timerange
- if (timestamp as i64) > date_from && (timestamp as i64) < date_to {
+ if (timestamp as i64) > d_from && (timestamp as i64) < d_to {
// Multiple Vectors need to be accessed with the same
// index, so no iterator here.
@@ -143,64 +201,13 @@ fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>
// fill the prepared vector with all Series's
for series in series_vec.iter() {
- response.push(TargetData::Series((*series).clone()));
- }
- }
-
- Ok( Json( QueryResponse{ 0 : response } ) )
-}
-
-/// If there are several targets, it is possible they would different data
-/// from the same file;
-/// this HashMap is created for the sole purpose of being able to read and
-/// apply a regex on a potentially huge file only once.
-/// HashMap
-/// |------- Alias : &String
-/// \
-/// Tuple
-/// |------- &LogItem
-/// |------- Vector of Tuple
-/// |--- capturegroup name : String
-/// |--- target/metric
-fn hash_map_targets<'a>(c : &'a Config, targets : Vec<Target>)
- -> Result<HashMap<&'a String, (&'a LogItem, Vec<(String, String)>)>> {
-
- let mut _res : HashMap<&String, (&LogItem, Vec<(String, String)>)> = HashMap::new();
- for li in c.items() {
- for t in targets.clone() {
- if li.aliases().contains(&t.target) {
- if _res.contains_key(&li.alias()) {
- if let Some(&mut (_litem, ref mut cnames)) = _res.get_mut(&li.alias()) {
- cnames.push((
- t.target
- .split('.')
- .nth(1)
- .ok_or(Error::from("no capture name found"))?
- .into(),
- t.target.clone())
- );
- }
- }
- else {
- _res.insert(
- li.alias(),
- (&li, vec![(
- t.target
- .split('.')
- .nth(1)
- .ok_or(Error::from("no capture name found"))?
- .into(),
- t.target.clone())
- ]
- )
- );
- }
- }
+ _res.push(TargetData::Series((*series).clone()));
}
}
Ok(_res)
}
+
fn main() {
let matches = App::new("aklog-server")