summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authormario-kr <mario-krehl@gmx.de>2017-12-10 19:10:18 +0100
committerGitHub <noreply@github.com>2017-12-10 19:10:18 +0100
commited7e9d018f20c8f1793142b625d843c401b54c2d (patch)
tree0fbbedaed47aae239c962dd689c0447a451970f0
parentc8d3b86591f3a5734535003e66be46ea27b00490 (diff)
parent952bfe1567882ee84a9162383f47550714a26477 (diff)
Merge pull request #2 from mario-kr/docu
Docu
-rw-r--r--example_config.toml6
-rw-r--r--src/config.rs24
-rw-r--r--src/error.rs3
-rw-r--r--src/main.rs60
4 files changed, 93 insertions, 0 deletions
diff --git a/example_config.toml b/example_config.toml
index 20646b7..7a443b3 100644
--- a/example_config.toml
+++ b/example_config.toml
@@ -1,3 +1,9 @@
+# all three attributes are required for every item
+# use unique aliases; only use letters/numbers or - and _
+# do not use a dot in an alias
+# only named captures will be used
+# named captures need to match a numeric value (int or a float like 10.654321)
+# \s, \d, \b and so on need to be escaped for toml
[[item]]
file = "/var/log/antikoerper/temp.zone1"
diff --git a/src/config.rs b/src/config.rs
index c004493..ef0e330 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -28,15 +28,21 @@ pub struct ConfigDeser {
}
impl ConfigDeser {
+
+ /// Tries to open, read and parse a toml-file
pub fn load(filename : String) -> Result<ConfigDeser> {
+
debug!("configuration file name: '{}'", filename);
+
let mut file = File::open(filename.clone())
.chain_err(|| "configuration file could not be opened")?;
debug!("configuration file successfully opened");
+
let mut content = String::new();
file.read_to_string(&mut content)
.chain_err(|| "configuration file could not be read")?;
debug!("configuration file successfully read");
+
match toml::from_str(content.as_str()) {
Ok(config) => {
info!("successfully parsed configuration file");
@@ -55,6 +61,9 @@ impl ConfigDeser {
// struct to access data later on //
//------------------------------------//
+/// The deserialized Item would nearly always require some operation on its
+/// contents to use it, so we do those operations beforehand and only access
+/// the useful data from main().
pub struct LogItem {
file : String,
regex : Regex,
@@ -65,10 +74,13 @@ pub struct LogItem {
impl LogItem {
+ /// Transforms a LogItemDeser into a more immediately usable LogItem
fn from_log_item_deser(lid : LogItemDeser) -> Result<LogItem> {
+
debug!("trying to parse regex `{}`", lid.regex);
let l_regex = Regex::new(lid.regex.as_str())
.chain_err(|| format!("regex not parseable: '{}'", lid.regex))?;
+
// first capture is the whole match and nameless
// second capture is always the timestamp
let cnames : Vec<String> = l_regex
@@ -78,6 +90,10 @@ impl LogItem {
.map(|n| String::from(n))
.collect();
debug!("capture names: {:?}", cnames);
+
+ // The metric seen by grafana will be `alias.capturegroup_name`
+ // One Regex may contain multiple named capture groups, so a vector
+ // with all names is prepared here.
let mut als : Vec<String> = Vec::new();
for name in cnames.clone() {
let mut temp = String::from(lid.alias.as_str());
@@ -119,6 +135,7 @@ impl LogItem {
}
}
+/// Contains more immediately usable data
pub struct Config {
items : Vec<LogItem>,
all_aliases : Vec<String>,
@@ -126,18 +143,25 @@ pub struct Config {
impl Config {
+ /// Lets serde do the deserialization, and transforms the given data
+ /// for later access
pub fn load(filename : String) -> Result<Self> {
+
let conf_deser = ConfigDeser::load(filename)?;
+
let mut l_items : Vec<LogItem> = Vec::new();
for lid in conf_deser.get_items() {
l_items.push(LogItem::from_log_item_deser((*lid).clone())?);
}
+
+ // combines all aliases into one Vec for the /search endpoint
let mut all_als : Vec<String> = Vec::new();
for li in &l_items {
for als in li.aliases() {
all_als.push((*als).clone());
}
}
+
Ok(Config { items: l_items, all_aliases : all_als })
}
diff --git a/src/error.rs b/src/error.rs
index 6258c10..e9a25a7 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,4 +1,7 @@
+// Module for errors using error_chain
+// Might be expanded in the future
+
error_chain! {
errors {
ConfigParseError(filename: String) {
diff --git a/src/main.rs b/src/main.rs
index 6bd119c..6a84be1 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -32,17 +32,26 @@ use simplelog::{SimpleLogger, LogLevelFilter, Config as LogConfig};
mod api;
mod config;
mod error;
+
use api::*;
use config::{Config, LogItem};
use error::*;
#[get("/")]
fn index() -> &'static str {
+
+ //! grafana only needs a "200 Ok" on /
+
"Hello there!"
}
#[post("/search", format = "application/json", data = "<data>")]
fn search(data : Json<Search>, config: State<Config>) -> Json<SearchResponse> {
+
+ //! /search is used to query what metrics are offered.
+ //! In this case, those are the `alias.capturegroup_name` configured by
+    //! the user of this program.
+
debug!("handling search request: {:?}", data.0);
Json(
SearchResponse(
@@ -53,9 +62,31 @@ fn search(data : Json<Search>, config: State<Config>) -> Json<SearchResponse> {
#[post("/query", format = "application/json", data = "<data>")]
fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>> {
+
+ //! /query needs to return actual data (if available).
+ //! the required metrics are sent by grafana in the `targets` field, as
+ //! well as is the wanted timerange.
+ //! The only sort of response written here is a `Series`, basically an
+ //! Array/Vector of two float-values, the second being a timestamp.
+ //! Returning a table is not implemented.
+
debug!("handling query: {:?}", data.0);
+
let targets = data.0.targets;
debug!("targets: {:?}", targets);
+
+    // If there are several targets, it is possible they would need different data
+ // from the same file;
+ // this HashMap is created for the sole purpose of being able to read and
+ // apply a regex on a potentially huge file only once.
+ // HashMap
+ // |------- Alias : &String
+ // \
+ // Tuple
+ // |------- &LogItem
+ // |------- Vector of Tuple
+ // |--- capturegroup name : String
+ // |--- target/metric
let mut target_hash : HashMap<&String, (&LogItem, Vec<(String, String)>)> = HashMap::new();
for li in config.items() {
for t in targets.clone() {
@@ -89,44 +120,71 @@ fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>
}
}
}
+
let date_from = data.0.range.from.timestamp();
let date_to = data.0.range.to.timestamp();
let mut response : Vec<TargetData> = Vec::new();
+
+ // iterate the HashMap
for (_alias, &(logitem, ref cns)) in target_hash.iter() {
+
+ // prepare an empty Vector of Series
let mut series_vec = Vec::new();
for &(_, ref t) in cns.iter() {
series_vec.push(Series{ target : (*t).clone(), datapoints : Vec::new() });
}
+
+ // open the current file for reading
let mut line_iter = BufReader::new(
File::open(logitem.file())
.chain_err(|| format!("antikoerper log file could not be opened: {}", logitem.file()))?
).lines();
+
+ // read the file line by line...
while let Some(Ok(line)) = line_iter.next() {
+
+ // ...and apply the configured regex to it.
if let Some(capture_groups) = logitem.regex().captures_iter(&line).next() {
+
+ // save the timestamp for later
let timestamp = capture_groups["ts"]
.parse::<f64>()
.chain_err(|| "Failed to parse the filestamp")?;
+
+ // ignore every entry not in the timerange
if (timestamp as i64) > date_from && (timestamp as i64) < date_to {
+
+ // Multiple Vectors need to be accessed with the same
+ // index, so no iterator here.
for i in 0..cns.len() {
+
+ // get the current metric and parse its content as a
+ // float
let captured = capture_groups[
cns.get(i)
.ok_or(Error::from("out of bounds: capture_groups"))?
.0.as_str()
].parse::<f64>()
.chain_err(|| "failed to parse the capture group")?;
+
+ // put the current metric and timestamp into the right
+ // Series
series_vec
.get_mut(i)
.ok_or(Error::from("out of bounds: series_vec"))?
.datapoints
.push([
captured,
+ // grafana requires ms
timestamp * 1000.0
]);
}
}
}
}
+
+        // fill the prepared vector with all Series
for series in series_vec.iter() {
response.push(TargetData::Series((*series).clone()));
}
@@ -136,6 +194,7 @@ fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>
}
fn main() {
+
let matches = App::new("aklog-server")
.version("0.1.0")
.author("Mario Krehl <mario-krehl@gmx.de>")
@@ -154,6 +213,7 @@ fn main() {
.multiple(true))
.get_matches();
+ // Set level of verbosity and initialize the logger
match matches.occurrences_of("verbosity") {
0 => SimpleLogger::init(LogLevelFilter::Warn, LogConfig::default()).unwrap(),
1 => SimpleLogger::init(LogLevelFilter::Info, LogConfig::default()).unwrap(),