author     Thang Pham <phamducthang1234@gmail.com>  2021-06-26 22:12:12 +0900
committer  GitHub <noreply@github.com>  2021-06-26 22:12:12 +0900
commit     2cb923905063f58d346448cf5d87d9f9202e4402 (patch)
tree       4d302478ce1266e120b338ac5035faec3b7ef6ed
parent     b596a8cdb92f73e5b288be14ab4f84df5693f862 (diff)
[no-issue] refactor modules (#41)
-rw-r--r--  hackernews_tui/src/client/lazy.rs    79
-rw-r--r--  hackernews_tui/src/client/mod.rs    265
-rw-r--r--  hackernews_tui/src/client/parser.rs    209
-rw-r--r--  hackernews_tui/src/client/query.rs    95
-rw-r--r--  hackernews_tui/src/config.rs    254
-rw-r--r--  hackernews_tui/src/config/keybindings.rs (renamed from hackernews_tui/src/keybindings.rs)    35
-rw-r--r--  hackernews_tui/src/config/mod.rs    132
-rw-r--r--  hackernews_tui/src/config/theme.rs    103
-rw-r--r--  hackernews_tui/src/hn_client.rs    656
-rw-r--r--  hackernews_tui/src/main.rs    105
-rw-r--r--  hackernews_tui/src/prelude.rs    2
-rw-r--r--  hackernews_tui/src/utils.rs    3
-rw-r--r--  hackernews_tui/src/view/article_view.rs    6
-rw-r--r--  hackernews_tui/src/view/async_view.rs    9
-rw-r--r--  hackernews_tui/src/view/comment_view.rs    34
-rw-r--r--  hackernews_tui/src/view/error_view.rs    1
-rw-r--r--  hackernews_tui/src/view/search_view.rs    30
-rw-r--r--  hackernews_tui/src/view/story_view.rs    26
18 files changed, 987 insertions, 1057 deletions
diff --git a/hackernews_tui/src/client/lazy.rs b/hackernews_tui/src/client/lazy.rs
new file mode 100644
index 0000000..3b43af9
--- /dev/null
+++ b/hackernews_tui/src/client/lazy.rs
@@ -0,0 +1,79 @@
+use super::parser::*;
+use crate::prelude::*;
+use rayon::prelude::*;
+use std::{sync::Arc, sync::RwLock};
+
+/// `LazyLoadingComments` lazily loads comments on demand. It stores
+/// a list of top-level comment ids and a comment buffer. When more comments
+/// are needed, the buffer is drained and additional comments are requested
+/// in the background to refill it.
+pub struct LazyLoadingComments {
+ client: client::HNClient,
+ ids: Vec<u32>,
+ comments: Arc<RwLock<Vec<Comment>>>,
+}
+
+impl LazyLoadingComments {
+ pub fn new(client: client::HNClient, ids: Vec<u32>) -> Self {
+ LazyLoadingComments {
+ client,
+ ids,
+ comments: Arc::new(RwLock::new(vec![])),
+ }
+ }
+
+ /// return all comments currently in the buffer, clearing the buffer in the process
+ pub fn load_all(&self) -> Vec<Comment> {
+ self.comments.write().unwrap().drain(..).collect::<Vec<_>>()
+ }
+
+ fn retrieve_comments_from_ids(
+ client: client::HNClient,
+ ids: Vec<u32>,
+ comments: &Arc<RwLock<Vec<Comment>>>,
+ ) {
+ type ResultT = Vec<Result<Comment>>;
+
+ let results: ResultT = ids
+ .into_par_iter()
+ .map(|id| {
+ let response = client.get_item_from_id::<CommentResponse>(id)?;
+ Ok(response.into())
+ })
+ .collect();
+
+ let (oks, errs): (ResultT, ResultT) =
+ results.into_iter().partition(|result| result.is_ok());
+
+ errs.into_iter().for_each(|err| {
+ warn!("failed to get comment: {:#?}", err);
+ });
+
+ let mut comments = comments.write().unwrap();
+ oks.into_iter().for_each(|ok| {
+ comments.push(ok.unwrap());
+ });
+ }
+
+ /// drain the first `size` comment ids from the queue,
+ /// then request the comments with the corresponding ids.
+ /// the `block` parameter determines whether the retrieval blocks or runs in the background
+ pub fn drain(&mut self, size: usize, block: bool) {
+ if self.ids.is_empty() {
+ return;
+ }
+
+ let ids: Vec<_> = self
+ .ids
+ .drain(0..std::cmp::min(self.ids.len(), size))
+ .collect();
+
+ let client = self.client.clone();
+ if !block {
+ let comments = Arc::clone(&self.comments);
+ std::thread::spawn(move || Self::retrieve_comments_from_ids(client, ids, &comments));
+ } else {
+ Self::retrieve_comments_from_ids(client, ids, &self.comments);
+ }
+ }
+}
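
A minimal usage sketch of `LazyLoadingComments` (not part of this commit; `client` is assumed to be an initialized `HNClient` and `ids` the story's top-level comment ids):

    let mut comments = LazyLoadingComments::new(client, ids);
    // fetch the first few comments synchronously so there is something to render ...
    comments.drain(5, true);
    // ... and prefetch a larger batch in the background
    comments.drain(10, false);
    // later, move everything loaded so far out of the buffer
    let loaded: Vec<Comment> = comments.load_all();
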
diff --git a/hackernews_tui/src/client/mod.rs b/hackernews_tui/src/client/mod.rs
new file mode 100644
index 0000000..09367f6
--- /dev/null
+++ b/hackernews_tui/src/client/mod.rs
@@ -0,0 +1,265 @@
+// modules
+mod lazy;
+mod parser;
+mod query;
+
+// re-export
+pub use lazy::LazyLoadingComments;
+pub use parser::{Comment, Story};
+pub use query::StoryNumericFilters;
+
+use crate::prelude::*;
+use parser::*;
+
+const HN_ALGOLIA_PREFIX: &str = "https://hn.algolia.com/api/v1";
+const HN_OFFICIAL_PREFIX: &str = "https://hacker-news.firebaseio.com/v0";
+const HN_SEARCH_QUERY_STRING: &str =
+ "tags=story&restrictSearchableAttributes=title,url&typoTolerance=false";
+pub const HN_HOST_URL: &str = "https://news.ycombinator.com";
+
+static CLIENT: once_cell::sync::OnceCell<HNClient> = once_cell::sync::OnceCell::new();
+
+/// HNClient is an HTTP client for communicating with the Hacker News APIs.
+#[derive(Clone)]
+pub struct HNClient {
+ client: ureq::Agent,
+}
+
+impl HNClient {
+ /// Create a new Hacker News Client
+ pub fn new() -> Result<HNClient> {
+ let timeout = get_config().client.client_timeout;
+ Ok(HNClient {
+ client: ureq::AgentBuilder::new()
+ .timeout(std::time::Duration::from_secs(timeout))
+ .build(),
+ })
+ }
+
+ /// Get the data of an HN item by its id, then parse the data
+ /// into the corresponding struct representing that item
+ pub fn get_item_from_id<T>(&self, id: u32) -> Result<T>
+ where
+ T: serde::de::DeserializeOwned,
+ {
+ let request_url = format!("{}/items/{}", HN_ALGOLIA_PREFIX, id);
+ let time = std::time::SystemTime::now();
+ let item = self.client.get(&request_url).call()?.into_json::<T>()?;
+ if let Ok(elapsed) = time.elapsed() {
+ info!("get item id={} took {}ms", id, elapsed.as_millis());
+ }
+ Ok(item)
+ }
+
+ /// Get a lazily loaded list of comments from the story with the given id.
+ pub fn get_comments_from_story(
+ &self,
+ story_id: u32,
+ focus_top_comment_id: u32,
+ ) -> Result<lazy::LazyLoadingComments> {
+ let request_url = format!("{}/item/{}.json", HN_OFFICIAL_PREFIX, story_id);
+ let mut ids = self
+ .client
+ .get(&request_url)
+ .call()?
+ .into_json::<HNStoryResponse>()?
+ .kids;
+ if let Some(pos) = ids.iter().position(|id| *id == focus_top_comment_id) {
+ // move the focused comment id to the beginning of the list.
+ ids.remove(pos);
+ ids.insert(0, focus_top_comment_id);
+ };
+
+ let mut comments = lazy::LazyLoadingComments::new(self.clone(), ids);
+
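+ // load the first batch of comments synchronously so the view has data to render,
+ // then prefetch the next batch in the background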
+ let cfg = &(get_config().client.lazy_loading_comments);
+ comments.drain(cfg.num_comments_init, true);
+ comments.drain(cfg.num_comments_after, false);
+ Ok(comments)
+ }
+
+ /// Get a story based on its id
+ pub fn get_story_from_story_id(&self, id: u32) -> Result<Story> {
+ let request_url = format!("{}/search?tags=story,story_{}", HN_ALGOLIA_PREFIX, id);
+ let time = std::time::SystemTime::now();
+ let response = self
+ .client
+ .get(&request_url)
+ .call()?
+ .into_json::<StoriesResponse>()?;
+ if let Ok(elapsed) = time.elapsed() {
+ info!("get story (id={}) took {}ms", id, elapsed.as_millis());
+ }
+
+ let stories: Vec<Story> = response.into();
+ Ok(stories.first().unwrap().clone())
+ }
+
+ /// Get a list of stories matching certain conditions
+ pub fn get_matched_stories(
+ &self,
+ query: &str,
+ by_date: bool,
+ page: usize,
+ ) -> Result<Vec<Story>> {
+ let search_story_limit = get_config().client.story_limit.search;
+ let request_url = format!(
+ "{}/{}?{}&hitsPerPage={}&page={}",
+ HN_ALGOLIA_PREFIX,
+ if by_date { "search_by_date" } else { "search" },
+ HN_SEARCH_QUERY_STRING,
+ search_story_limit,
+ page
+ );
+ let time = std::time::SystemTime::now();
+ let response = self
+ .client
+ .get(&request_url)
+ .query("query", query)
+ .call()?
+ .into_json::<StoriesResponse>()?;
+ if let Ok(elapsed) = time.elapsed() {
+ info!(
+ "get matched stories with query {} (by_date={}) took {}ms",
+ query,
+ by_date,
+ elapsed.as_millis()
+ );
+ }
+
+ Ok(response.into())
+ }
+
+ // reorder the front_page stories to follow the same order
+ // as on the official Hacker News site.
+ // This is needed because stories returned by the Algolia API
+ // are sorted by `points`.
+ fn reorder_front_page_stories(&self, stories: Vec<Story>, ids: &[u32]) -> Vec<Story> {
+ let mut stories = stories;
+ stories.sort_by(|story_x, story_y| {
+ let story_x_pos = ids
+ .iter()
+ .enumerate()
+ .find(|&(_, story_id)| *story_id == story_x.id)
+ .unwrap()
+ .0;
+ let story_y_pos = ids
+ .iter()
+ .enumerate()
+ .find(|&(_, story_id)| *story_id == story_y.id)
+ .unwrap()
+ .0;
+
+ story_x_pos.cmp(&story_y_pos)
+ });
+ stories
+ }
+
+ // retrieve a list of front_page story ids using the official HN API, then
+ // compose an HN Algolia API request to retrieve the corresponding stories.
+ fn get_front_page_stories(
+ &self,
+ story_limit: usize,
+ page: usize,
+ numeric_filters: query::StoryNumericFilters,
+ ) -> Result<Vec<Story>> {
+ let request_url = format!("{}/topstories.json", HN_OFFICIAL_PREFIX);
+ let time = std::time::SystemTime::now();
+ let stories = self
+ .client
+ .get(&request_url)
+ .call()?
+ .into_json::<Vec<u32>>()?;
+ if let Ok(elapsed) = time.elapsed() {
+ info!(
+ "get front_page story ids using {} took {}ms",
+ request_url,
+ elapsed.as_millis()
+ );
+ }
+
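+ // `stories` holds the full ordered list of front_page story ids; pick the slice for this page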
+ let start_id = story_limit * page;
+ if start_id >= stories.len() {
+ return Ok(vec![]);
+ }
+
+ let end_id = std::cmp::min(start_id + story_limit, stories.len());
+ let ids = &stories[start_id..end_id];
+
+ let request_url = format!(
+ "{}/search?tags=story,({})&hitsPerPage={}{}",
+ HN_ALGOLIA_PREFIX,
+ ids.iter().fold("".to_owned(), |tags, story_id| format!(
+ "{}story_{},",
+ tags, story_id
+ )),
+ story_limit,
+ numeric_filters.query(),
+ );
+
+ let response = self
+ .client
+ .get(&request_url)
+ .call()?
+ .into_json::<StoriesResponse>()?;
+ if let Ok(elapsed) = time.elapsed() {
+ info!(
+ "get stories (tag=front_page, by_date=false, page={}) took {}ms",
+ page,
+ elapsed.as_millis()
+ );
+ }
+
+ Ok(self.reorder_front_page_stories(response.into(), ids))
+ }
+
+ /// Get a list of stories filtering on a specific tag
+ pub fn get_stories_by_tag(
+ &self,
+ tag: &str,
+ by_date: bool,
+ page: usize,
+ numeric_filters: query::StoryNumericFilters,
+ ) -> Result<Vec<Story>> {
+ let story_limit = get_config().client.story_limit.get_story_limit_by_tag(tag);
+
+ if tag == "front_page" {
+ return self.get_front_page_stories(story_limit, page, numeric_filters);
+ }
+ let request_url = format!(
+ "{}/{}?tags={}&hitsPerPage={}&page={}{}",
+ HN_ALGOLIA_PREFIX,
+ if by_date { "search_by_date" } else { "search" },
+ tag,
+ story_limit,
+ page,
+ numeric_filters.query(),
+ );
+
+ let time = std::time::SystemTime::now();
+ let response = self
+ .client
+ .get(&request_url)
+ .call()?
+ .into_json::<StoriesResponse>()?;
+ if let Ok(elapsed) = time.elapsed() {
+ info!(
+ "get stories (tag={}, by_date={}, page={}) took {}ms",
+ tag,
+ by_date,
+ page,
+ elapsed.as_millis()
+ );
+ }
+
+ Ok(response.into())
+ }
+}
+
+pub fn init_client() -> &'static HNClient {
+ let client = HNClient::new().unwrap();
+ CLIENT.set(client).unwrap_or_else(|_| {
+ panic!("failed to set up the application's HackerNews Client");
+ });
+ &CLIENT.get().unwrap()
+}
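
A rough illustration of the re-exported client API (a sketch, assuming the application's configuration has already been initialized, since `HNClient::new` reads it through `get_config()`):

    // set up the global client once at startup
    let client = client::init_client();
    // first page of front_page stories, no numeric filters
    let stories = client.get_stories_by_tag(
        "front_page",
        false,
        0,
        client::StoryNumericFilters::default(),
    )?;
    // full-text search sorted by relevance
    let matches = client.get_matched_stories("rust", false, 0)?;
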
diff --git a/hackernews_tui/src/client/parser.rs b/hackernews_tui/src/client/parser.rs
new file mode 100644
index 0000000..25144ff
--- /dev/null
+++ b/hackernews_tui/src/client/parser.rs
@@ -0,0 +1,209 @@
+use crate::{config, utils};
+use serde::{de, Deserialize, Deserializer};
+
+// serde helper functions
+
+fn parse_id<'de, D>(d: D) -> std::result::Result<u32, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ let s = String::deserialize(d)?;
+ s.parse::<u32>().map_err(de::Error::custom)
+}
+
+fn parse_null_default<'de, D, T>(deserializer: D) -> Result<T, D::Error>
+where
+ T: Default + Deserialize<'de>,
+ D: Deserializer<'de>,
+{
+ let opt = Option::deserialize(deserializer)?;
+ Ok(opt.unwrap_or_default())
+}
+
+// API response structs
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all(deserialize = "camelCase"))]
+struct MatchResult {
+ value: String,
+ #[serde(default)]
+ matched_words: Vec<String>,
+}
+
+#[derive(Debug, Deserialize)]
+struct HighlightResultResponse {
+ title: Option<MatchResult>,
+ url: Option<MatchResult>,
+ author: Option<MatchResult>,
+}
+
+#[derive(Debug, Deserialize)]
+/// StoryResponse represents the story data received from HN_ALGOLIA APIs
+pub struct StoryResponse {
+ #[serde(default)]
+ #[serde(rename(deserialize = "objectID"))]
+ #[serde(deserialize_with = "parse_id")]
+ id: u32,
+
+ #[serde(default)]
+ children: Vec<CommentResponse>,
+
+ title: Option<String>,
+ author: Option<String>,
+ url: Option<String>,
+
+ #[serde(default)]
+ #[serde(deserialize_with = "parse_null_default")]
+ points: u32,
+ #[serde(default)]
+ #[serde(deserialize_with = "parse_null_default")]
+ num_comments: usize,
+
+ #[serde(rename(deserialize = "created_at_i"))]
+ time: u64,
+
+ // search result
+ #[serde(rename(deserialize = "_highlightResult"))]
+ highlight_result: Option<HighlightResultResponse>,
+}
+
+#[derive(Debug, Deserialize)]
+/// HNStoryResponse represents the story data received from the official HackerNews APIs
+pub struct HNStoryResponse {
+ #[serde(default)]
+ pub kids: Vec<u32>,
+}
+
+#[derive(Debug, Deserialize)]
+/// CommentResponse represents the comment data received from HN_ALGOLIA APIs
+pub struct CommentResponse {
+ id: u32,
+ #[serde(default)]
+ #[serde(deserialize_with = "parse_null_default")]
+ parent_id: u32,
+ #[serde(default)]
+ #[serde(deserialize_with = "parse_null_default")]
+ story_id: u32,
+
+ #[serde(default)]
+ children: Vec<CommentResponse>,
+
+ text: Option<String>,
+ author: Option<String>,
+
+ #[serde(rename(deserialize = "created_at_i"))]
+ time: u64,
+}
+
+#[derive(Debug, Deserialize)]
+/// StoriesResponse represents the stories data received from HN_ALGOLIA APIs
+pub struct StoriesResponse {
+ pub hits: Vec<StoryResponse>,
+}
+
+// parsed structs
+
+// The HN client gets Story/Comment data by calling HN Algolia APIs
+// and parsing the results into the corresponding structs
+
+/// HighlightResult represents the matched (highlighted) fields returned
+/// when searching for stories matching certain conditions
+#[derive(Debug, Clone)]
+pub struct HighlightResult {
+ pub title: String,
+ pub url: String,
+ pub author: String,
+}
+
+/// Story represents a Hacker News story
+#[derive(Debug, Clone)]
+pub struct Story {
+ pub id: u32,
+ pub title: String,
+ pub url: String,
+ pub author: String,
+ pub points: u32,
+ pub num_comments: usize,
+ pub time: u64,
+ pub highlight_result: HighlightResult,
+}
+
+/// Comment represents a Hacker News comment
+#[derive(Debug, Clone)]
+pub struct Comment {
+ pub id: u32,
+ pub story_id: u32,
+ pub parent_id: u32,
+ pub text: String,
+ pub author: String,
+ pub time: u64,
+ pub children: Vec<Comment>,
+}
+
+impl From<StoriesResponse> for Vec<Story> {
+ fn from(s: StoriesResponse) -> Vec<Story> {
+ s.hits
+ .into_iter()
+ .filter(|story| story.highlight_result.is_some() && story.title.is_some())
+ .map(|story| story.into())
+ .collect()
+ }
+}
+
+impl From<CommentResponse> for Comment {
+ fn from(c: CommentResponse) -> Self {
+ let children = c
+ .children
+ .into_iter()
+ .filter(|comment| comment.author.is_some() && comment.text.is_some())
+ .map(|comment| comment.into())
+ .collect();
+ let text: String = if !config::get_config().allow_unicode {
+ c.text
+ .unwrap()
+ .chars()
+ .filter(|c| utils::allow_unicode_char(c))
+ .collect()
+ } else {
+ c.text.unwrap()
+ };
+ Comment {
+ id: c.id,
+ story_id: c.story_id,
+ parent_id: c.parent_id,
+ author: c.author.unwrap(),
+ time: c.time,
+ text,
+ children,
+ }
+ }
+}
+
+impl From<StoryResponse> for Story {
+ fn from(s: StoryResponse) -> Self {
+ // the caller must ensure that `highlight_result` and its `title` field
+ // are not `None` before performing this conversion
+ let highlight_result_raw = s.highlight_result.unwrap();
+ let highlight_result = HighlightResult {
+ title: highlight_result_raw.title.unwrap().value,
+ url: match highlight_result_raw.url {
+ None => String::new(),
+ Some(url) => url.value,
+ },
+ author: match highlight_result_raw.author {
+ None => String::from("[deleted]"),
+ Some(author) => author.value,
+ },
+ };
+ Story {
+ title: s.title.unwrap(),
+ url: s.url.unwrap_or_default(),
+ author: s.author.unwrap_or_else(|| String::from("[deleted]")),
+ id: s.id,
+ points: s.points,
+ num_comments: s.num_comments,
+ time: s.time,
+ highlight_result,
+ }
+ }
+}
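
As a side note on the serde helpers at the top of this file, a hypothetical struct (not part of the commit) illustrates what `parse_null_default` does: a field that is JSON `null`, or absent, falls back to the type's default value instead of failing deserialization:

    #[derive(serde::Deserialize)]
    struct ExamplePoints {
        #[serde(default)]
        #[serde(deserialize_with = "parse_null_default")]
        points: u32,
    }
    // both `{"points": null}` and `{}` deserialize to ExamplePoints { points: 0 }
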
diff --git a/hackernews_tui/src/client/query.rs b/hackernews_tui/src/client/query.rs
new file mode 100644
index 0000000..79888ad
--- /dev/null
+++ b/hackernews_tui/src/client/query.rs
@@ -0,0 +1,95 @@
+use crate::prelude::*;
+use serde::Deserialize;
+
+#[derive(Debug, Clone, Copy, Default, Deserialize)]
+pub struct FilterInterval<T> {
+ start: Option<T>,
+ end: Option<T>,
+}
+
+impl<T: std::fmt::Display + Copy> FilterInterval<T> {
+ pub fn query(&self, field: &str) -> String {
+ format!(
+ "{}{}",
+ match self.start {
+ Some(x) => format!(",{}>={}", field, x),
+ None => "".to_string(),
+ },
+ match self.end {
+ Some(x) => format!(",{}<{}", field, x),
+ None => "".to_string(),
+ },
+ )
+ }
+
+ pub fn desc(&self, field: &str) -> String {
+ format!(
+ "{}: [{}:{}]",
+ field,
+ match self.start {
+ Some(x) => x.to_string(),
+ None => "".to_string(),
+ },
+ match self.end {
+ Some(x) => x.to_string(),
+ None => "".to_string(),
+ }
+ )
+ }
+}
+
+#[derive(Debug, Clone, Copy, Default, Deserialize)]
+/// `StoryNumericFilters` defines a set of numeric filters applied when retrieving stories
+pub struct StoryNumericFilters {
+ #[serde(default)]
+ elapsed_days_interval: FilterInterval<u32>,
+ #[serde(default)]
+ points_interval: FilterInterval<u32>,
+ #[serde(default)]
+ num_comments_interval: FilterInterval<usize>,
+}
+
+impl StoryNumericFilters {
+ fn from_elapsed_days_to_unix_time(elapsed_days: Option<u32>) -> Option<u64> {
+ match elapsed_days {
+ None => None,
+ Some(day_offset) => {
+ let current_time = std::time::SystemTime::now()
+ .duration_since(std::time::SystemTime::UNIX_EPOCH)
+ .unwrap()
+ .as_secs();
+ Some(current_time - from_day_offset_to_time_offset_in_secs(day_offset))
+ }
+ }
+ }
+
+ pub fn desc(&self) -> String {
+ format!(
+ "{}, {}, {}",
+ self.elapsed_days_interval.desc("elapsed_days"),
+ self.points_interval.desc("points"),
+ self.num_comments_interval.desc("num_comments")
+ )
+ }
+
+ pub fn query(&self) -> String {
+ // convert elapsed_days to unix time (in seconds); a larger day offset means an earlier timestamp, so the interval endpoints are swapped
+ let time_interval = FilterInterval {
+ end: Self::from_elapsed_days_to_unix_time(self.elapsed_days_interval.start),
+ start: Self::from_elapsed_days_to_unix_time(self.elapsed_days_interval.end),
+ };
+
+ let mut query = format!(
+ "{}{}{}",
+ time_interval.query("created_at_i"),
+ self.points_interval.query("points"),
+ self.num_comments_interval.query("num_comments")
+ );
+ if !query.is_empty() {
+ query.remove(0); // remove the leading ','
+ format!("&numericFilters={}", query)
+ } else {
+ "".to_string()
+ }
+ }
+}
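
For a concrete sense of the output format, suppose (hypothetically) that `points_interval.start = 100` and `num_comments_interval.end = 50` are the only filters set; `query()` then returns:

    &numericFilters=points>=100,num_comments<50

Each interval contributes comma-prefixed fragments, the leading comma is stripped, and the result is prefixed with `&numericFilters=`.
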
diff --git a/hackernews_tui/src/config.rs b/hackernews_tui/src/config.rs
deleted file mode 100644
index c342f5f..0000000
--- a/hackernews_tui/src/config.rs
+++ /dev/null
@@ -1,254 +0,0 @@
-use anyhow::Result;
-use config_parser2::*;
-use cursive::theme;
-use log::warn;
-use once_cell::sync::OnceCell;
-use serde::{de, Deserialize, Deserializer};
-
-use super::keybindings::*;
-
-#[derive(Debug, Deserialize, ConfigParse)]
-/// Config is a struct storing the application's configurations
-pub struct Config {
- pub allow_unicode: bool,
- pub page_scrolling: bool,
- pub scroll_offset: usize,
- pub url_open_command: String,
- pub article_parse_command: ArticleParseCommand,
- pub client: Client,
- pub theme: Theme,
- pub keymap: KeyMap,
-}
-
-#[derive(Debug, Deserialize, Clone, ConfigParse)]
-pub struct LazyLoadingComments {
- pub num_comments_init: usize,
- pub num_comments_after: usize,
-}
-
-#[derive(Debug, Deserialize, Clone, ConfigParse)]
-pub struct ArticleParseCommand {
- pub command: String,
- pub options: Vec<String>,
-}
-
-#[derive(Debug, Deserialize, ConfigParse)]
-pub struct StoryLimit {
- pub front_page: usize,
- pub story: usize,
- pub ask_hn: usize,
- pub show_hn: usize,
- pub job: usize,
- pub search: usize,
-}
-
-#[derive(Debug, Deserialize, ConfigParse)]
-pub struct Client {
- pub story_limit: StoryLimit,
- pub lazy_loading_comments: LazyLoadingComments,
- pub client_timeout: u64,
-}
-
-#[derive(Debug, Clone)]
-pub struct Color {
- pub color: theme::Color,
-}
-
-impl Color {
- fn parse(s: &str) -> Option<Self> {
- theme::Color::parse(s).map(|color| Color { color })
- }
-}
-
-impl StoryLimit {
- pub fn get_story_limit_by_tag(&self, tag: &str) -> usize {
- match tag {
- "front_page" => self.front_page,
- "story" => self.story,
- "job" => self.job,
- "ask_hn" => self.ask_hn,
- "show_hn" => self.show_hn,
- _ => panic!("unknown tag: {}", tag),
- }
- }
-}
-
-impl<'de> de::Deserialize<'de> for Color {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where
- D: Deserializer<'de>,
- {
- let s = String::deserialize(deserializer)?;
- match Color::parse(&s) {
- None => Err(de::Error::custom(format!("failed to parse color: {}", s))),
- Some(color) => Ok(color),
- }
- }
-}
-
-config_parser_impl!(Color);
-
-#[derive(Debug, Deserialize, Clone, ConfigParse)]
-pub struct Theme {
- // cursive's palette colors
- pub background: Color,
- pub view: Color,
- pub shadow: Color,
- pub primary: Color,
- pub secondary: Color,
- pub tertiary: Color,
- pub title_primary: Color,
- pub title_secondary: Color,
- pub highlight: Color,
- pub highlight_inactive: Color,
- pub highlight_text: Color,
-
- // additional custom colors
- pub link_text: Color,
- pub link_id_bg: Color,
- pub search_highlight_bg: Color,
- pub status_bar_bg: Color,
- pub code_block_bg: Color,
-}
-
-impl Theme {
- pub fn update_theme(&self, theme: &mut theme::Theme) {
- theme.palette.set_color("background", self.background.color);
- theme.palette.set_color("view", self.view.color);
- theme.palette.set_color("shadow", self.shadow.color);
- theme.palette.set_color("primary", self.primary.color);
- theme.palette.set_color("secondary", self.secondary.color);
- theme.palette.set_color("tertiary", self.tertiary.color);
- theme
- .palette
- .set_color("title_primary", self.title_primary.color);
- theme
- .palette
- .set_color("title_secondary", self.title_secondary.color);
- theme.palette.set_color("highlight", self.highlight.color);
- theme
- .palette
- .set_color("highlight_inactive", self.highlight_inactive.color);
- theme
- .palette
- .set_color("highlight_text", self.highlight_text.color);
- }
-}
-
-impl Config {
- // parse config struct from a file
- pub fn from_config_file(file_path: &str) -> Result<Self> {
- match std::fs::read_to_string(file_path) {
- // if cannot open the file, use the default configurations
- Err(err) => {
- warn!(
- "failed to open {}: {:#?}\n...Use the default configurations instead",
- file_path, err
- );
- Ok(Self::default())
- }
- Ok(config_str) => {
- let value = toml::from_str::<toml::Value>(&config_str)?;
- let mut config = Self::default();
- config.parse(value)?;
- Ok(config)
- }
- }
- }
-}
-
-impl Default for Theme {
- fn default() -> Self {
- Theme {
- background: Color::parse("#f6f6ef").unwrap(),
- shadow: Color::parse("#000000").unwrap(),
- view: Color::parse("#f6f6ef").unwrap(),
- primary: Color::parse("#4a4a48").unwrap(),
- secondary: Color::parse("#a5a5a5").unwrap(),
- tertiary: Color::parse("#ffffff").unwrap(),
- title_primary: Color::parse("#000000").unwrap(),
- title_secondary: Color::parse("#ffff00").unwrap(),
- highlight: Color::parse("#6c6c6c").unwrap(),
- highlight_inactive: Color::parse("#0000ff").unwrap(),
- highlight_text: Color::parse("#c3bbbb").unwrap(),
-
- link_text: Color::parse("#4fbbfd").unwrap(),
- link_id_bg: Color::parse("#ffff00").unwrap(),
- search_highlight_bg: Color::parse("#ffff00").unwrap(),
- status_bar_bg: Color::parse("#ff6600").unwrap(),
- code_block_bg: Color::parse("#c8c8c8").unwrap(),
- }
- }
-}
-
-impl Default for Config {
- fn default() -> Self {
- Config {
- allow_unicode: false,
- page_scrolling: true,
- scroll_offset: 3,
- url_open_command: "xdg-open".to_string(),
- article_parse_command: ArticleParseCommand {
- command: "mercury-parser".to_string(),
- options: vec!["--format".to_string(), "markdown".to_string()],
- },
- client: Client {
- lazy_loading_comments: LazyLoadingComments {
- num_comments_init: 5,
- num_comments_after: 10,
- },
- story_limit: StoryLimit {
- search: 10,
- front_page: 20,
- story: 20,
- ask_hn: 15,
- show_hn: 15,
- job: 15,
- },
- client_timeout: 32,
- },
- theme: Theme::default(),
- keymap: KeyMap::default(),
- }
- }
-}
-
-static CONFIG: OnceCell<Config> = OnceCell::new();
-
-pub fn init_config(config: Config) {
- CONFIG.set(config).unwrap_or_else(|_| {
- panic!("failed to set up the application's configurations");
- });
-}
-
-pub fn get_config() -> &'static Config {
- &CONFIG.get().unwrap()
-}
-
-pub fn get_config_theme() -> &'static Theme {
- &get_config().theme
-}
-
-pub fn get_custom_keymap() -> &'static CustomKeyMap {
- &get_config().keymap.custom_keymap
-}
-
-pub fn get_global_keymap() -> &'static GlobalKeyMap {
- &get_config().keymap.global_keymap
-}
-
-pub fn get_story_view_keymap() -> &'static StoryViewKeyMap {
- &get_config().keymap.story_view_keymap
-}
-
-pub fn get_search_view_keymap() -> &'static SearchViewKeyMap {
- &get_config().keymap.search_view_keymap
-}
-
-pub fn get_comment_view_keymap() -> &'static CommentViewKeyMap {
- &get_config().keymap.comment_view_keymap
-}
-
-pub fn get_article_view_keymap() -> &'static ArticleViewKeyMap {
- &get_config().keymap.article_view_keymap
-}
diff --git a/hackernews_tui/src/keybindings.rs b/hackernews_tui/src/config/keybindings.rs
index 8b7ce9d..432ad43 100644
--- a/hackernews_tui/src/keybindings.rs
+++ b/hackernews_tui/src/config/keybindings.rs
@@ -1,14 +1,11 @@
-use core::fmt;
-
 use config_parser2::*;
 use cursive::event::{self, Event, EventTrigger};
 use serde::{de, Deserialize, Deserializer};
-use crate::hn_client;
+use crate::client;
 #[derive(Debug, Clone, Deserialize, ConfigParse)]