author    mario-kr <mario-krehl@gmx.de>  2017-12-10 18:21:41 +0100
committer GitHub <noreply@github.com>    2017-12-10 18:21:41 +0100
commit    c8d3b86591f3a5734535003e66be46ea27b00490 (patch)
tree      f262fe32eef439258d0a4b7912f7ee105150b210
parent    0b7f6cd62bd821b96cc9c7bf53e08da7db7b7835 (diff)
parent    eb67a0911631d8017a075bb853306d31c418e8a4 (diff)
Merge pull request #1 from mario-kr/impl-query
Impl query
-rw-r--r--  src/api.rs  |   2
-rw-r--r--  src/main.rs | 109
2 files changed, 84 insertions, 27 deletions
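
For orientation: this merge implements the /query handler that Grafana's JSON
datasource calls. Grafana sends a time range plus a list of targets, and the
handler below splits each target name on '.' into an item alias and a regex
capture name. A hedged sketch of such a request body, built with serde_json;
the field names mirror how the handler reads data.0 below, while the concrete
target names and dates are invented:

    // Illustrative request body for the /query endpoint -- not part of the patch.
    // The handler reads data.0.range.{from,to} and data.0.targets[i].target.
    fn sample_query_request() -> serde_json::Value {
        serde_json::json!({
            "range": {
                "from": "2017-12-10T00:00:00Z",
                "to":   "2017-12-10T18:00:00Z"
            },
            "targets": [
                { "target": "load.shortterm" },
                { "target": "load.longterm" }
            ]
        })
    }
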
diff --git a/src/api.rs b/src/api.rs
index 01feed5..d124174 100644
--- a/src/api.rs
+++ b/src/api.rs
@@ -38,7 +38,7 @@ pub enum TargetData {
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct Series {
pub target: String,
- pub datapoints: Vec<[u64; 2]>,
+ pub datapoints: Vec<[f64; 2]>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
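
The only api.rs change widens datapoints from u64 to f64 pairs: each datapoint
is a [value, timestamp-in-milliseconds] array, and both components may be
fractional. A minimal standalone sketch, assuming serde with the derive feature
and serde_json (the sample target name and values are invented), of how such a
series serializes:

    use serde::Serialize;

    #[derive(Serialize)]
    struct Series {
        target: String,
        datapoints: Vec<[f64; 2]>, // [value, timestamp in milliseconds]
    }

    fn main() {
        let s = Series {
            target: "load.shortterm".into(),
            datapoints: vec![[0.35, 1512926501000.0]],
        };
        // prints {"target":"load.shortterm","datapoints":[[0.35,1512926501000.0]]}
        println!("{}", serde_json::to_string(&s).unwrap());
    }
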
diff --git a/src/main.rs b/src/main.rs
index c784d7c..6bd119c 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -16,15 +16,15 @@ extern crate rocket_contrib;
extern crate serde;
#[macro_use]
extern crate serde_derive;
-#[macro_use]
extern crate serde_json;
extern crate simplelog;
-use std::process::exit;
+use std::collections::HashMap;
use std::fs::File;
+use std::io::{BufReader, BufRead};
+use std::process::exit;
use clap::{App, Arg};
-use chrono::prelude::*;
use rocket::State;
use rocket_contrib::Json;
use simplelog::{SimpleLogger, LogLevelFilter, Config as LogConfig};
@@ -56,46 +56,103 @@ fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>
debug!("handling query: {:?}", data.0);
let targets = data.0.targets;
debug!("targets: {:?}", targets);
- let response : Vec<TargetData> = Vec::new();
- let target_collection : Vec<(LogItem, Vec<String>)> = Vec::new();
- let mut target_hash : std::collections::HashMap<&String, (&LogItem, &Vec<String>)> = std::collections::HashMap::new();
+ let mut target_hash : HashMap<&String, (&LogItem, Vec<(String, String)>)> = HashMap::new();
for li in config.items() {
for t in targets.clone() {
if li.aliases().contains(&t.target) {
- if let Some(&(_, ref mut cnames)) = target_hash.get(&li.alias()) {
- cnames.push(t.target.split('.').nth(1).ok_or(Error::from("no capture name found"))?.into());
+ if target_hash.contains_key(&li.alias()) {
+ if let Some(&mut (_litem, ref mut cnames)) = target_hash.get_mut(&li.alias()) {
+ cnames.push((
+ t.target
+ .split('.')
+ .nth(1)
+ .ok_or(Error::from("no capture name found"))?
+ .into(),
+ t.target.clone())
+ );
+ }
}
else {
target_hash.insert(
li.alias(),
- (&li, &vec![t.target.split('.').nth(1).ok_or(Error::from("no capture name found"))?.into()])
+ (&li, vec![(
+ t.target
+ .split('.')
+ .nth(1)
+ .ok_or(Error::from("no capture name found"))?
+ .into(),
+ t.target.clone())
+ ]
+ )
);
}
}
}
}
+ let date_from = data.0.range.from.timestamp();
+ let date_to = data.0.range.to.timestamp();
+
+ let mut response : Vec<TargetData> = Vec::new();
+ for (_alias, &(logitem, ref cns)) in target_hash.iter() {
+ let mut series_vec = Vec::new();
+ for &(_, ref t) in cns.iter() {
+ series_vec.push(Series{ target : (*t).clone(), datapoints : Vec::new() });
+ }
+ let mut line_iter = BufReader::new(
+ File::open(logitem.file())
+ .chain_err(|| format!("antikoerper log file could not be opened: {}", logitem.file()))?
+ ).lines();
+ while let Some(Ok(line)) = line_iter.next() {
+ if let Some(capture_groups) = logitem.regex().captures_iter(&line).next() {
+ let timestamp = capture_groups["ts"]
+ .parse::<f64>()
+ .chain_err(|| "Failed to parse the filestamp")?;
+ if (timestamp as i64) > date_from && (timestamp as i64) < date_to {
+ for i in 0..cns.len() {
+ let captured = capture_groups[
+ cns.get(i)
+ .ok_or(Error::from("out of bounds: capture_groups"))?
+ .0.as_str()
+ ].parse::<f64>()
+ .chain_err(|| "failed to parse the capture group")?;
+ series_vec
+ .get_mut(i)
+ .ok_or(Error::from("out of bounds: series_vec"))?
+ .datapoints
+ .push([
+ captured,
+ timestamp * 1000.0
+ ]);
+ }
+ }
+ }
+ }
+ for series in series_vec.iter() {
+ response.push(TargetData::Series((*series).clone()));
+ }
+ }
- Err(Error::from("not implemented"))
+ Ok( Json( QueryResponse{ 0 : response } ) )
}
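
The core of the new handler is the loop above: open the antikoerper log file,
run the item's regex over each line, take the named capture "ts" as a unix
timestamp, keep only lines inside the requested range, and push
[captured value, timestamp * 1000] into the series for each requested capture
group. A simplified restatement as a self-contained sketch, assuming the regex
crate; the regex, file path and capture name are invented stand-ins for what
the LogItem config provides:

    use std::fs::File;
    use std::io::{BufRead, BufReader};
    use regex::Regex;

    fn collect_datapoints(from: i64, to: i64) -> std::io::Result<Vec<[f64; 2]>> {
        // Hypothetical antikoerper line format: "<unix-ts> <value>"
        let re = Regex::new(r"^(?P<ts>\d+(?:\.\d+)?)\s+(?P<shortterm>\d+(?:\.\d+)?)$").unwrap();
        let mut points = Vec::new();
        for line in BufReader::new(File::open("/var/log/antikoerper/load.log")?).lines() {
            let line = line?;
            if let Some(caps) = re.captures(&line) {
                let ts: f64 = caps["ts"].parse().unwrap_or(0.0);
                if (ts as i64) > from && (ts as i64) < to {
                    let value: f64 = caps["shortterm"].parse().unwrap_or(0.0);
                    // Grafana expects [value, timestamp in milliseconds]
                    points.push([value, ts * 1000.0]);
                }
            }
        }
        Ok(points)
    }
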
fn main() {
let matches = App::new("aklog-server")
- .version("0.1.0")
- .author("Mario Krehl <mario-krehl@gmx.de>")
- .about("Presents antikoerper-logfiles to grafana")
- .arg(Arg::with_name("config")
- .short("c")
- .long("config")
- .value_name("FILE")
- .help("configuration file to use")
- .takes_value(true)
- .required(true))
- .arg(Arg::with_name("verbosity")
- .short("v")
- .long("verbose")
- .help("sets the level of verbosity")
- .multiple(true))
- .get_matches();
+ .version("0.1.0")
+ .author("Mario Krehl <mario-krehl@gmx.de>")
+ .about("Presents antikoerper-logfiles to grafana")
+ .arg(Arg::with_name("config")
+ .short("c")
+ .long("config")
+ .value_name("FILE")
+ .help("configuration file to use")
+ .takes_value(true)
+ .required(true))
+ .arg(Arg::with_name("verbosity")
+ .short("v")
+ .long("verbose")
+ .help("sets the level of verbosity")
+ .multiple(true))
+ .get_matches();
match matches.occurrences_of("verbosity") {
0 => SimpleLogger::init(LogLevelFilter::Warn, LogConfig::default()).unwrap(),