// src/stackexchange/search.rs
use futures::stream::StreamExt;
use rayon::prelude::*;
use reqwest::header;
use reqwest::Client;
use std::sync::Arc;

use crate::config::{Config, SearchEngine};
use crate::error::{Error, Result};
use crate::tui::markdown;
use crate::tui::markdown::Markdown;

use super::api::{Answer, Api, Question};
use super::local_storage::SiteMap;
use super::scraper::{DuckDuckGo, Google, ScrapedData, Scraper};

/// Limit on concurrent requests (gets passed to `buffer_unordered`)
const CONCURRENT_REQUESTS_LIMIT: usize = 8;

/// Mock user agent to get real DuckDuckGo results
// TODO copy other user agents and use random one each time
const USER_AGENT: &str =
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:11.0) Gecko/20100101 Firefox/11.0";

/// Provides methods to run a search query and get back the matching
/// StackExchange questions and answers.
// TODO this really needs a better name...
#[derive(Debug, Clone)]
pub struct Search {
    pub api: Api,
    pub config: Config,
    pub query: String,
    pub site_map: Arc<SiteMap>,
}

#[derive(Debug, Clone)]
pub struct LuckyAnswer {
    /// Preprocessed markdown content
    pub answer: Answer<String>,
    /// Parent question
    pub question: Question<String>,
}

impl Search {
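    /// Build a `Search` over the given query.
    ///
    /// A minimal usage sketch (ignore-fenced; assumes a `Config` and `SiteMap`
    /// constructed elsewhere):
    ///
    /// ```ignore
    /// let search = Search::new(config, Arc::new(site_map), "how to exit vim".into());
    /// let questions = search.search_md().await?;
    /// ```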
    pub fn new(config: Config, site_map: Arc<SiteMap>, query: String) -> Self {
        let api = Api::new(config.api_key.clone());
        Search {
            api,
            config,
            query,
            site_map,
        }
    }

    /// Search the query and return the top answer.
    ///
    /// For the StackExchange engine, use only the first configured site,
    /// since, paradoxically, the sites with the worst results tend to finish
    /// executing first: there's less data to retrieve.
    ///
    /// Needs `&mut self` because it temporarily modifies `self.config`.
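    ///
    /// A usage sketch (ignore-fenced; constructing the `Config` and `SiteMap`
    /// is outside this file):
    ///
    /// ```ignore
    /// let mut search = Search::new(config, site_map, query);
    /// let LuckyAnswer { answer, .. } = search.search_lucky().await?;
    /// println!("{}", answer.body);
    /// ```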
    pub async fn search_lucky(&mut self) -> Result<LuckyAnswer> {
        let original_config = self.config.clone();
        // Temp set lucky config
        self.config.limit = 1;
        if let SearchEngine::StackExchange = self.config.search_engine {
            self.config.sites.truncate(1);
        }
        // Run search with temp config
        let result = self.search().await;
        // Reset config
        self.config = original_config;

        let question = result?.into_iter().next().ok_or(Error::NoResults)?;

        let answer = question.answers.first().cloned().ok_or_else(|| {
            Error::StackExchange(String::from("Received question with no answers"))
        })?;

        Ok(LuckyAnswer { answer, question })
    }

    /// Search and parse to Markdown for TUI
    pub async fn search_md(&self) -> Result<Vec<Question<Markdown>>> {
        Ok(parse_markdown(self.search().await?))
    }

    /// Search using the configured search engine
    pub async fn search(&self) -> Result<Vec<Question<String>>> {
        match self.config.search_engine {
            SearchEngine::DuckDuckGo => self.search_by_scraper(DuckDuckGo).await,
            SearchEngine::Google => self.search_by_scraper(Google).await,
            SearchEngine::StackExchange => self.parallel_search_advanced().await,
        }
        .and_then(|qs| {
            if qs.is_empty() {
                Err(Error::NoResults)
            } else {
                Ok(qs)
            }
        })
    }

    /// Search the query with the given scraper (DuckDuckGo or Google), then
    /// fetch the resulting questions from the SE API.
    async fn search_by_scraper(&self, scraper: impl Scraper) -> Result<Vec<Question<String>>> {
        let url = scraper.get_url(&self.query, self.site_map.values());
        let html = Client::new()
            .get(url)
            .header(header::USER_AGENT, USER_AGENT)
            .send()
            .await?
            .text()
            .await?;
        let data = scraper.parse(&html, self.site_map.as_ref(), self.config.limit)?;
        self.parallel_questions(data).await
    }

    /// Parallel requests against the SE question endpoint across all sites in data.
    // TODO I'm sure there is a way to DRY the following two functions
    async fn parallel_questions(&self, data: ScrapedData) -> Result<Vec<Question<String>>> {
        let ScrapedData {
            question_ids,
            ordering,
        } = data;
        futures::stream::iter(question_ids)
            .map(|(site, ids)| {
                let api = self.api.clone();
                tokio::spawn(async move { api.questions(&site, ids).await })
            })
            .buffer_unordered(CONCURRENT_REQUESTS_LIMIT)
            .collect::<Vec<_>>()
            .await
            .into_iter()
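            // Each spawned task yields Result<Result<_>, JoinError>: fold the
            // join error into our Error, then flatten the nested Result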
            .map(|r| r.map_err(Error::from).and_then(|x| x))
            .collect::<Result<Vec<Vec<_>>>>()
            .map(|v| {
                let mut qs: Vec<Question<String>> = v.into_iter().flatten().collect();
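                // `ordering` was built from the same scrape that produced
                // these ids, so the lookup should always succeed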
                qs.sort_unstable_by_key(|q| ordering.get(&q.id.to_string()).unwrap());
                qs
            })
    }

    /// Parallel requests against the SE search/advanced endpoint across all configured sites
    async fn parallel_search_advanced(&self) -> Result<Vec<Question<String>>> {
        futures::stream::iter(self.config.sites.clone())
            .map(|site| {
                let api = self.api.clone();
                let limit = self.config.limit;
                let query = self.query.clone();
                tokio::spawn(async move { api.search_advanced(&query, &site, limit).await })
            })
            .buffer_unordered(CONCURRENT_REQUESTS_LIMIT)
            .collect::<Vec<_>>()
            .await
            .into_iter()
            .map(|r| r.map_err(Error::from).and_then(|x| x))
            .collect::<Result<Vec<Vec<_>>>>()
            .map(|v| {
                let mut qs: Vec<Question<String>> = v.into_iter().flatten().collect();
                if self.config.sites.len() > 1 {
                    qs.sort_unstable_by_key(|q| -q.score);
                }
                qs
            })
    }
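
    // Re the DRY TODO above `parallel_questions`: one sketch (an assumption,
    // not wired in) is to factor out the shared spawn/buffer/flatten pipeline
    // and let each caller supply the per-item future and any final ordering:
    //
    // async fn parallel_requests<T, F, Fut>(
    //     inputs: Vec<T>,
    //     mk: F,
    // ) -> Result<Vec<Question<String>>>
    // where
    //     T: Send + 'static,
    //     F: FnMut(T) -> Fut,
    //     Fut: std::future::Future<Output = Result<Vec<Question<String>>>>
    //         + Send
    //         + 'static,
    // {
    //     futures::stream::iter(inputs)
    //         .map(mk)
    //         .map(tokio::spawn)
    //         .buffer_unordered(CONCURRENT_REQUESTS_LIMIT)
    //         .collect::<Vec<_>>()
    //         .await
    //         .into_iter()
    //         .map(|r| r.map_err(Error::from).and_then(|x| x))
    //         .collect::<Result<Vec<Vec<_>>>>()
    //         .map(|v| v.into_iter().flatten().collect())
    // }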
}

/// Parse all markdown fields.
/// This only happens for content going into the cursive TUI (not the lucky prompt).
fn parse_markdown(qs: Vec<Question<String>>) -> Vec<Question<Markdown>> {
    qs.into_par_iter()
        .map(|q| {
            let body = markdown::parse(q.body);
            let answers = q
                .answers
                .into_par_iter()
                .map(|a| {
                    let body = markdown::parse(a.body);
                    Answer {
                        body,
                        id: a.id,
                        score: a.score,
                        is_accepted: a.is_accepted,
                    }
                })
                .collect::<Vec<_>>();
            Question {
                body,
                answers,
                id: q.id,
                score: q.score,
                title: q.title,
                site: q.site,
            }
        })
        .collect::<Vec<_>>()
}

// TODO find a query that returns no results so that I can test it and
// differentiate it from a blocked request
#[cfg(test)]
mod tests {

    #[test]
    fn test_duckduckgo_response() {
        // TODO make sure results are either 1) answers 2) failed connection 3) blocked
    }
}