author     Sam Tay <sam.chong.tay@gmail.com>    2020-06-17 23:30:33 -0700
committer  Sam Tay <sam.chong.tay@gmail.com>    2020-06-17 23:30:33 -0700
commit     ec92f930344d364e3be359a41aebea78f8205fa7 (patch)
tree       b041fa1404d383d44be46c9e1f4e8e94ef75e254 /src
parent     d422c8424ae76fc85e0fdf55257e0cee7fa38271 (diff)
Use async http requests via tokio
Diffstat (limited to 'src')
-rw-r--r--    src/main.rs            131
-rw-r--r--    src/stackexchange.rs    79
2 files changed, 101 insertions, 109 deletions
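
The change below swaps reqwest's blocking client for its async client running on tokio. The pattern is the same in both files: build a request on reqwest::Client, .await the send, and .await .json() to deserialize the body. A minimal, self-contained sketch of that pattern follows; the URL, query parameters, and the trimmed-down Site / ResponseWrapper types are illustrative stand-ins rather than code from this commit, and it assumes reqwest is built with its json feature.

    use reqwest::Client;
    use serde::Deserialize;

    // Trimmed-down stand-ins for the crate's own types, for illustration only.
    #[derive(Deserialize)]
    struct Site {
        api_site_parameter: String,
        site_url: String,
    }

    #[derive(Deserialize)]
    struct ResponseWrapper {
        items: Vec<Site>,
    }

    // #[tokio::main] starts the tokio runtime so `main` itself can be async,
    // mirroring the change to src/main.rs below.
    #[tokio::main]
    async fn main() -> Result<(), reqwest::Error> {
        // The async Client replaces reqwest::blocking::Client; each request
        // is awaited instead of blocking the thread.
        let wrapper = Client::new()
            .get("https://api.stackexchange.com/2.2/sites")
            .query(&[("pagesize", "10"), ("page", "1")])
            .send()
            .await?
            // .json() deserializes the response body directly, replacing the
            // manual GzDecoder + serde_json::from_reader step removed below.
            .json::<ResponseWrapper>()
            .await?;
        for site in wrapper.items {
            println!("{}\t{}", site.api_site_parameter, site.site_url);
        }
        Ok(())
    }
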
diff --git a/src/main.rs b/src/main.rs
index 06cacd8..27c7109 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -14,76 +14,14 @@ use stackexchange::{LocalStorage, StackExchange};
use term::mk_print_error;
use termimad::{CompoundStyle, MadSkin};
-fn main() {
+#[tokio::main]
+async fn main() -> Result<(), Error> {
let mut skin = MadSkin::default();
// TODO style configuration
skin.inline_code = CompoundStyle::with_fg(Color::Cyan);
skin.code_block.set_fgbg(Color::Cyan, termimad::gray(20));
let mut print_error = mk_print_error(&skin);
- (|| {
- let opts = cli::get_opts()?;
- let config = opts.config;
- let site = &config.site;
- let lucky = config.lucky;
- let mut ls = LocalStorage::new()?;
-
- if let Some(key) = opts.set_api_key {
- config::set_api_key(key)?;
- }
-
- if opts.update_sites {
- ls.update_sites()?;
- }
-
- if opts.list_sites {
- let sites = ls.sites()?;
- let mut md = String::new();
- md.push_str("|:-:|:-:|\n");
- md.push_str("|Site Code|Site URL|\n");
- md.push_str("|-:|:-|\n");
- for s in sites.iter() {
- md.push_str(&format!("|{}|{}\n", s.api_site_parameter, s.site_url));
- }
- md.push_str("|-\n");
- termimad::print_text(&md);
- return Ok(());
- }
-
- if !ls.validate_site(site)? {
- print_error!(skin, "$0 is not a valid StackExchange site.\n\n", site)?;
- // TODO should only use inline for single lines; use termimad::text stuff
- print_notice!(
- skin,
- "If you think this is incorrect, try running\n\
- ```\n\
- so --update-sites\n\
- ```\n\
- to update the cached site listing. You can also run `so --list-sites` \
- to list all available sites.",
- )?;
- return Ok(());
- }
-
- if let Some(q) = opts.query {
- let se = StackExchange::new(config);
- // TODO get the rest of the results in the background
- if lucky {
- // TODO this needs preprocessing; all the more reason to do it at SE level
- let md = se.search_lucky(&q)?;
- skin.print_text(&md);
- skin.print_text(
- "\nPress **[SPACE]** to see more results, or any other key to exit",
- );
- if !utils::wait_for_char(' ')? {
- return Ok(());
- }
- }
- let qs = se.search(&q)?;
- tui::run(qs)?;
- }
- Ok(())
- })()
- .or_else(|e: Error| {
+ run(&mut skin).await.or_else(|e: Error| {
print_error(&e.to_string())?;
match e {
Error::EmptySites => {
@@ -92,5 +30,66 @@ fn main() {
_ => Ok(()),
}
})
- .unwrap();
+}
+
+async fn run(skin: &mut MadSkin) -> Result<(), Error> {
+ let opts = cli::get_opts()?;
+ let config = opts.config;
+ let site = &config.site;
+ let lucky = config.lucky;
+ let mut ls = LocalStorage::new()?;
+
+ if let Some(key) = opts.set_api_key {
+ config::set_api_key(key)?;
+ }
+
+ if opts.update_sites {
+ ls.update_sites().await?;
+ }
+
+ if opts.list_sites {
+ let sites = ls.sites().await?;
+ let mut md = String::new();
+ md.push_str("|:-:|:-:|\n");
+ md.push_str("|Site Code|Site URL|\n");
+ md.push_str("|-:|:-|\n");
+ for s in sites.iter() {
+ md.push_str(&format!("|{}|{}\n", s.api_site_parameter, s.site_url));
+ }
+ md.push_str("|-\n");
+ termimad::print_text(&md);
+ return Ok(());
+ }
+
+ if !ls.validate_site(site).await? {
+ print_error!(skin, "$0 is not a valid StackExchange site.\n\n", site)?;
+ // TODO should only use inline for single lines; use termimad::text stuff
+ print_notice!(
+ skin,
+ "If you think this is incorrect, try running\n\
+ ```\n\
+ so --update-sites\n\
+ ```\n\
+ to update the cached site listing. You can also run `so --list-sites` \
+ to list all available sites.",
+ )?;
+ return Ok(());
+ }
+
+ if let Some(q) = opts.query {
+ let se = StackExchange::new(config);
+ // TODO get the rest of the results in the background
+ if lucky {
+ // TODO this needs preprocessing; all the more reason to do it at SE level
+ let md = se.search_lucky(&q).await?;
+ skin.print_text(&md);
+ skin.print_text("\nPress **[SPACE]** to see more results, or any other key to exit");
+ if !utils::wait_for_char(' ')? {
+ return Ok(());
+ }
+ }
+ let qs = se.search(&q).await?;
+ tui::run(qs)?;
+ }
+ Ok(())
}
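
The old main wrapped its body in an immediately-invoked closure purely so that ? could be used inside a function returning (). With the move to tokio, main itself returns Result and the body becomes a separate async fn run, as the hunks above show. A compact sketch of the new shape, with a placeholder Error type standing in for the crate's own:

    // Placeholder for the crate's own error type.
    #[derive(Debug)]
    struct Error;

    #[tokio::main]
    async fn main() -> Result<(), Error> {
        // main returns Result, so error handling can stay at the top level
        // (the real code maps errors through print_error before returning).
        run().await
    }

    async fn run() -> Result<(), Error> {
        // ... CLI body, now free to .await the StackExchange calls ...
        Ok(())
    }
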
diff --git a/src/stackexchange.rs b/src/stackexchange.rs
index e9bc6d9..2b4da67 100644
--- a/src/stackexchange.rs
+++ b/src/stackexchange.rs
@@ -1,5 +1,4 @@
-use flate2::read::GzDecoder;
-use reqwest::blocking::Client;
+use reqwest::Client;
use reqwest::Url;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
@@ -83,9 +82,10 @@ impl StackExchange {
// TODO also return a future with the rest of the questions
/// Search query at stack exchange and get the top answer body
- pub fn search_lucky(&self, q: &str) -> Result<String> {
+ pub async fn search_lucky(&self, q: &str) -> Result<String> {
let ans = self
- .search_advanced(q, 1)?
+ .search_advanced(q, 1)
+ .await?
.into_iter()
.next()
.ok_or(Error::NoResults)?
@@ -99,15 +99,15 @@ impl StackExchange {
}
/// Search query at stack exchange and get a list of relevant questions
- pub fn search(&self, q: &str) -> Result<Vec<Question>> {
- self.search_advanced(q, self.config.limit)
+ pub async fn search(&self, q: &str) -> Result<Vec<Question>> {
+ self.search_advanced(q, self.config.limit).await
}
/// Search against the search/advanced endpoint with a given query.
/// Only fetches questions that have at least one answer.
/// TODO async
/// TODO parallel requests over multiple sites
- fn search_advanced(&self, q: &str, limit: u16) -> Result<Vec<Question>> {
- let resp_body = self
+ async fn search_advanced(&self, q: &str, limit: u16) -> Result<Vec<Question>> {
+ Ok(self
.client
.get(stackexchange_url("search/advanced"))
.header("Accepts", "application/json")
@@ -120,24 +120,18 @@ impl StackExchange {
("order", "desc"),
("sort", "relevance"),
])
- .send()?;
-
- let gz = GzDecoder::new(resp_body);
- let wrapper: ResponseWrapper<Question> = serde_json::from_reader(gz).map_err(|e| {
- Error::StackExchange(format!(
- "Error decoding questions from the StackExchange API: {}",
- e
- ))
- })?;
- let qs = wrapper
+ .send()
+ .await?
+ .json::<ResponseWrapper<Question>>()
+ .await?
.items
.into_iter()
.map(|mut q| {
+ // TODO parallelize this (and preprocess <kbd> stuff too)
q.answers.sort_unstable_by_key(|a| -a.score);
q
})
- .collect();
- Ok(qs)
+ .collect())
}
fn get_default_opts(&self) -> HashMap<&str, &str> {
@@ -163,10 +157,10 @@ impl LocalStorage {
}
// TODO inform user if we are downloading
- pub fn sites(&mut self) -> Result<&Vec<Site>> {
+ pub async fn sites(&mut self) -> Result<&Vec<Site>> {
// Stop once Option ~ Some or Result ~ Err
if self.sites.is_none() && !self.fetch_local_sites()? {
- self.fetch_remote_sites()?;
+ self.fetch_remote_sites().await?;
}
match &self.sites {
Some(sites) if sites.is_empty() => Err(Error::EmptySites),
@@ -175,13 +169,14 @@ impl LocalStorage {
}
}
- pub fn update_sites(&mut self) -> Result<()> {
- self.fetch_remote_sites()
+ pub async fn update_sites(&mut self) -> Result<()> {
+ self.fetch_remote_sites().await
}
- pub fn validate_site(&mut self, site_code: &str) -> Result<bool> {
+ pub async fn validate_site(&mut self, site_code: &str) -> Result<bool> {
Ok(self
- .sites()?
+ .sites()
+ .await?
.iter()
.any(|site| site.api_site_parameter == *site_code))
}
@@ -198,23 +193,21 @@ impl LocalStorage {
}
// TODO decide whether or not I should give LocalStorage an api key..
- fn fetch_remote_sites(&mut self) -> Result<()> {
- let resp_body = Client::new()
- .get(stackexchange_url("sites"))
- .header("Accepts", "application/json")
- .query(&[
- ("pagesize", SE_SITES_PAGESIZE.to_string()),
- ("page", "1".to_string()),
- ])
- .send()?;
- let gz = GzDecoder::new(resp_body);
- let wrapper: ResponseWrapper<Site> = serde_json::from_reader(gz).map_err(|e| {
- Error::StackExchange(format!(
- "Error decoding sites from the StackExchange API: {}",
- e
- ))
- })?;
- self.sites = Some(wrapper.items);
+ async fn fetch_remote_sites(&mut self) -> Result<()> {
+ self.sites = Some(
+ Client::new()
+ .get(stackexchange_url("sites"))
+ .header("Accepts", "application/json")
+ .query(&[
+ ("pagesize", SE_SITES_PAGESIZE.to_string()),
+ ("page", "1".to_string()),
+ ])
+ .send()
+ .await?
+ .json::<ResponseWrapper<Site>>()
+ .await?
+ .items,
+ );
self.store_local_sites()
}
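
Dropping flate2's GzDecoder works because reqwest can decompress gzipped responses transparently when built with its gzip feature, and .json() handles deserialization, so the manual decode step is no longer needed. A hedged sketch of the Cargo.toml side of this change; the version numbers and feature lists are assumptions based on what reqwest and tokio shipped around mid-2020, not taken from this commit:

    [dependencies]
    # Async client; "json" enables Response::json, "gzip" enables transparent
    # decompression of gzipped bodies (the StackExchange API compresses responses).
    reqwest = { version = "0.10", features = ["json", "gzip"] }
    # Runtime plus the #[tokio::main] attribute macro.
    tokio = { version = "0.2", features = ["macros", "rt-threaded"] }
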