path: root/src/main.rs
blob: 56cf35190f21efde01fe5b9dfc61549aa425dac5
#![recursion_limit = "1024"]

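// NOTE: the plugin attributes below require a nightly compiler
// (Rocket's pre-0.4 code generation is implemented as a compiler plugin).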
#![feature(plugin)]
#![plugin(rocket_codegen)]

extern crate clap;
extern crate chrono;
extern crate dimensioned;
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate log;
extern crate regex;
extern crate rocket;
extern crate rocket_contrib;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate simplelog;

use std::collections::HashMap;
use std::fs::File;
use std::io::{BufReader, BufRead};
use std::process::exit;

use clap::{App, Arg};
use rocket::State;
use rocket_contrib::Json;
use simplelog::{SimpleLogger, LogLevelFilter, Config as LogConfig};

mod api;
mod config;
mod error;

use api::*;
use config::{Config, LogItem};
use error::*;

#[get("/")]
fn index() -> &'static str {

    //! Grafana only needs a "200 OK" response on /

    "Hello there!"
}

#[post("/search", format = "application/json", data = "<data>")]
fn search(data : Json<Search>, config: State<Config>) -> Json<SearchResponse> {

    //! /search is used to query which metrics are offered.
    //! In this case, those are the `alias.capturegroup_name` combinations
    //! configured by the user of this program.
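    //!
    //! A minimal sketch of the response (illustrative metric names only):
    //! a plain JSON array such as `["load.avg1", "load.avg5"]`, i.e. one
    //! entry per configured `alias.capturegroup_name`.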

    debug!("handling search request: {:?}", data.0);
    Json(
        SearchResponse(
            (*config.all_aliases()).clone()
        )
    )
}

#[post("/query", format = "application/json", data = "<data>")]
fn query(data: Json<Query>, config: State<Config>) -> Result<Json<QueryResponse>> {

    //! /query needs to return actual data (if available).
    //! The requested metrics are sent by Grafana in the `targets` field,
    //! together with the desired time range.
    //! The only kind of response produced here is a `Series`, essentially a
    //! Vector of two-element float arrays, the second element being a
    //! timestamp.
    //! Returning a table is not implemented.
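    //!
    //! A single `Series` serializes roughly to (illustrative values, assuming
    //! the default serde field names of the `Series` struct):
    //! `{"target": "load.avg1", "datapoints": [[0.42, 1500000000000.0]]}`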

    debug!("handling query: {:?}", data.0);

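    // Build the per-file map of requested targets first, then walk every log
    // file exactly once and collect the datapoints inside the requested range.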
    Ok(
        Json(
            QueryResponse{
                0 : hash_map_iter(
                        hash_map_targets(&config, data.0.targets)?,
                        data.0.range.from.timestamp(),
                        data.0.range.to.timestamp()
                    )?
            }
        )
    )
}

/// If there are several targets, it is possible that they need different data
/// from the same file;
/// this HashMap is created for the sole purpose of reading a potentially huge
/// file and applying a regex to it only once.
/// HashMap
/// |------- Alias : &String
/// \
///  Tuple
///  |------- &LogItem
///  |------- Vector of Tuple
///           |--- capturegroup name : String
///           |--- target/metric
fn hash_map_targets<'a>(c : &'a Config, targets : Vec<Target>)
    -> Result<HashMap<&'a String, (&'a LogItem, Vec<(String, String)>)>> {

    debug!("targets: {:?}", targets);
    let mut _res : HashMap<&String, (&LogItem, Vec<(String, String)>)> = HashMap::new();
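    // For every configured log item, collect all requested targets that refer
    // to one of its aliases, so that each file only has to be read once later.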
    for li in c.items() {
        for t in targets.clone() {
            if li.aliases().contains(&t.target) {
                if _res.contains_key(&li.alias()) {
                    if let Some(&mut (_litem, ref mut cnames)) = _res.get_mut(&li.alias()) {
                        cnames.push((
                                cname_from_target(&t.target)?,
                                t.target.clone())
                        );
                    }
                }
                else {
                    _res.insert(
                        li.alias(),
                        (
                            &li,
                            vec![(cname_from_target(&t.target)?, t.target.clone())]
                        )
                    );
                }
            }
        }
    }
    Ok(_res)
}

/// Splits the target and returns the capture-group name part
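/// (e.g. a hypothetical target `load.avg1` would yield `avg1`)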
fn cname_from_target(t : &str) -> Result<String> {
    Ok(
        t.split('.')
        .nth(1)
        .ok_or(Error::from("no capture name found"))?
        .into()
    )
}

/// Iterates the HashMap created by `hash_map_targets` and collects, for every
/// requested metric, the datapoints that fall into the given time range.
fn hash_map_iter(h : HashMap<&String, (&LogItem, Vec<(String, String)>)>, d_from : i64, d_to : i64)
    -> Result<Vec<TargetData>> {

    let mut _res = Vec::new();
    for (_alias, &(logitem, ref cns)) in h.iter() {

        // prepare an empty Vector of Series
        let mut series_vec = Vec::new();
        for &(_, ref t) in cns.iter() {
            series_vec.push(Series{ target : (*t).clone(), datapoints : Vec::new() });
        }

        // open the current file for reading
        let mut line_iter = BufReader::new(
            File::open(logitem.file())
            .chain_err(|| format!("antikoerper log file could not be opened: {}", logitem.file()))?
            ).lines();

        // read the file line by line...
        while let Some(Ok(line)) = line_iter.next() {

            // ...and apply the configured regex to it.
            if let Some(capture_groups) = logitem.regex().captures_iter(&line).next() {

                // save the timestamp for later
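                // (every configured regex is expected to contain a named
                // capture group "ts"; indexing would panic if it is missing)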
                let timestamp = capture_groups["ts"]
                    .parse::<f64>()
                    .chain_err(|| "Failed to parse the filestamp")?;

                // ignore every entry not in the timerange
                if (timestamp as i64) > d_from && (timestamp as i64) < d_to {

                    // Multiple Vectors need to be accessed with the same
                    // index, so no iterator here.
                    for i in 0..cns.len() {

                        // get the current metric and parse its content as a
                        // float
                        let captured = capture_groups[
                            cns.get(i)
                                .ok_or(Error::from("out of bounds: capture_groups"))?
                                .0.as_str()
                        ].parse::<f64>()
                        .chain_err(|| "failed to parse the capture group")?;

                        // put the current metric and timestamp into the right
                        // Series
                        series_vec
                            .get_mut(i)
                            .ok_or(Error::from("out of bounds: series_vec"))?
                            .datapoints
                            .push([
                                  captured,
                                  // grafana requires ms
                                  timestamp * 1000.0
                            ]);
                    }
                }
            }
        }

        // fill the prepared vector with all Series's
        for series in series_vec.iter() {
            _res.push(TargetData::Series((*series).clone()));
        }
    }
    Ok(_res)
}


fn main() {

    let matches = App::new("aklog-server")
        .version("0.1.0")
        .author("Mario Krehl <mario-krehl@gmx.de>")
        .about("Presents antikoerper-logfiles to grafana")
        .arg(Arg::with_name("config")
             .short("c")
             .long("config")
             .value_name("FILE")
             .help("configuration file to use")
             .takes_value(true)
             .required(true))
        .arg(Arg::with_name("verbosity")
             .short("v")
             .long("verbose")
             .help("sets the level of verbosity")
             .multiple(true))
        .get_matches();

    // Set level of verbosity and initialize the logger
    match matches.occurrences_of("verbosity") {
        0 => SimpleLogger::init(LogLevelFilter::Warn, LogConfig::default()).unwrap(),
        1 => SimpleLogger::init(LogLevelFilter::Info, LogConfig::default()).unwrap(),
        2 => SimpleLogger::init(LogLevelFilter::Debug, LogConfig::default()).unwrap(),
        _ => SimpleLogger::init(LogLevelFilter::Trace, LogConfig::default()).unwrap(),
    };
    debug!("Initialized logger");

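    // Load the configuration file; bail out with a non-zero exit code if it
    // cannot be read or parsed.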
    let config_file = matches.value_of("config").unwrap();
    let config = match Config::load(String::from(config_file)) {
        Ok(c) => c,
        Err(e) => {
            error!("{}", e);
            exit(1);
        },
    };

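    // Hand the parsed configuration to Rocket as managed state and mount the
    // three handlers at the root path.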
    rocket::ignite()
        .manage(config)
        .mount("/", routes![index, search, query])
        .launch();
}