summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/fail.rs10
-rw-r--r--src/file_browser.rs79
-rw-r--r--src/files.rs822
-rw-r--r--src/fscache.rs81
-rw-r--r--src/listview.rs290
-rw-r--r--src/preview.rs7
-rw-r--r--src/trait_ext.rs5
7 files changed, 721 insertions, 573 deletions
diff --git a/src/fail.rs b/src/fail.rs
index 36a88d4..0f05455 100644
--- a/src/fail.rs
+++ b/src/fail.rs
@@ -103,7 +103,9 @@ pub enum HError {
#[fail(display = "{}", _0)]
KeyBind(KeyBindError),
#[fail(display = "FileBrowser needs to know about all tab's files to run exec!")]
- FileBrowserNeedTabFiles
+ FileBrowserNeedTabFiles,
+ #[fail(display = "{}", _0)]
+ FileError(crate::files::FileError)
}
impl HError {
@@ -425,3 +427,9 @@ impl From<ini::ini::Error> for KeyBindError {
KeyBindError::IniError(Arc::new(err))
}
}
+
+impl From<crate::files::FileError> for HError {
+ fn from(err: crate::files::FileError) -> Self {
+ HError::FileError(err)
+ }
+}
diff --git a/src/file_browser.rs b/src/file_browser.rs
index 8c99409..3a84ee1 100644
--- a/src/file_browser.rs
+++ b/src/file_browser.rs
@@ -197,7 +197,7 @@ impl Tabbable for TabView<FileBrowser> {
tab.left_async_widget_mut().map(|async_w| {
async_w.widget.on_ready(move |mut w, _| {
w.as_mut()
- .map(|mut w| {
+ .map(|w| {
if w.content.show_hidden != show_hidden {
w.content.show_hidden = show_hidden;
w.content.recalculate_len();
@@ -211,7 +211,7 @@ impl Tabbable for TabView<FileBrowser> {
tab.main_async_widget_mut().map(|async_w| {
async_w.widget.on_ready(move |mut w, _| {
w.as_mut()
- .map(|mut w| {
+ .map(|w| {
if w.content.show_hidden != show_hidden {
w.content.show_hidden = show_hidden;
w.content.recalculate_len();
@@ -262,11 +262,9 @@ impl FileBrowser {
let cache = fs_cache.clone();
let main_widget = AsyncWidget::new(&core, move |stale| {
- let dir = File::new_from_path(&main_path, None)?;
+ let dir = File::new_from_path(&main_path)?;
let source = FileSource::Path(dir);
ListView::builder(core_m, source)
- .meta_all()
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()
@@ -275,11 +273,9 @@ impl FileBrowser {
let cache = fs_cache.clone();
if let Some(left_path) = left_path {
let left_widget = AsyncWidget::new(&core_l.clone(), move |stale| {
- let dir = File::new_from_path(&left_path, None)?;
+ let dir = File::new_from_path(&left_path)?;
let source = FileSource::Path(dir);
ListView::builder(core_l, source)
- // .meta_all()
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()
@@ -311,7 +307,7 @@ impl FileBrowser {
columns.refresh().log();
- let cwd = File::new_from_path(&cwd, None).unwrap();
+ let cwd = File::new_from_path(&cwd).unwrap();
let proc_view = ProcView::new(&core);
let bookmarks = BMPopup::new(&core);
@@ -363,8 +359,6 @@ impl FileBrowser {
};
ListView::builder(core, source)
- .meta_all()
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()
@@ -380,7 +374,6 @@ impl FileBrowser {
};
ListView::builder(core, source)
- .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()
@@ -479,6 +472,10 @@ impl FileBrowser {
}
pub fn main_widget_goto(&mut self, dir: &File) -> HResult<()> {
+ self.preview_widget_mut()
+ .map(|p| p.set_stale())
+ .ok();
+
let dir = dir.clone();
let cache = self.fs_cache.clone();
@@ -489,8 +486,6 @@ impl FileBrowser {
let main_async_widget = self.main_async_widget_mut()?;
main_async_widget.change_to(move |stale: &Stale, core| {
let view = ListView::builder(core, file_source)
- .meta_all()
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()?;
@@ -507,9 +502,7 @@ impl FileBrowser {
}).log();
}
- self.preview_widget_mut()
- .map(|p| p.set_stale())
- .ok();
+
Ok(())
}
@@ -527,8 +520,6 @@ impl FileBrowser {
let left_async_widget = self.left_async_widget_mut()?;
left_async_widget.change_to(move |stale, core| {
let view = ListView::builder(core, file_source)
- // .meta_all()
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()?;
@@ -559,8 +550,6 @@ impl FileBrowser {
self.main_async_widget_mut()?.change_to(move |stale, core| {
ListView::builder(core, file_source)
.select(main_selection)
- .meta_all()
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()
@@ -571,7 +560,6 @@ impl FileBrowser {
let cache = self.fs_cache.clone();
self.left_async_widget_mut()?.change_to(move |stale, core| {
ListView::builder(core, file_source)
- // .prerender()
.with_cache(cache)
.with_stale(stale.clone())
.build()
@@ -612,7 +600,7 @@ impl FileBrowser {
pub fn go_home(&mut self) -> HResult<()> {
let home = crate::paths::home_path().unwrap_or(PathBuf::from("~/"));
- let home = File::new_from_path(&home, None)?;
+ let home = File::new_from_path(&home)?;
self.main_widget_goto(&home)
}
@@ -649,7 +637,7 @@ impl FileBrowser {
pub fn goto_bookmark(&mut self) -> HResult<()> {
let path = self.get_boomark()?;
- let path = File::new_from_path(&PathBuf::from(path), None)?;
+ let path = File::new_from_path(&PathBuf::from(path))?;
self.main_widget_goto(&path)?;
Ok(())
}
@@ -700,23 +688,35 @@ impl FileBrowser {
let selection = self.cwd()?.clone();
- self.cwd.parent_as_file()
- .map(|dir| self.fs_cache
- .set_selection(dir.clone(), selection.clone())).log();
+        // Avoids iterating over the file list to find the file's position
+ if let Some(ref current_selection) = self.left_widget()?.current_item {
+ if current_selection.name == selection.name {
+ return Ok(());
+ }
+ }
+
self.left_widget_mut()?.select_file(&selection);
+ let selected_file = self.left_widget()?.selected_file();
+ self.cwd.parent_as_file()
+ .map(|dir| {
+ self.fs_cache
+ .set_selection(dir.clone(), selected_file.clone())
+ }).log();
+
+
Ok(())
}
pub fn take_main_files(&mut self) -> HResult<Files> {
- let mut w = self.main_widget_mut()?;
+ let w = self.main_widget_mut()?;
let files = std::mem::take(&mut w.content);
w.content.len = 0;
Ok(files)
}
pub fn take_left_files(&mut self) -> HResult<Files> {
- let mut w = self.left_widget_mut()?;
+ let w = self.left_widget_mut()?;
let files = std::mem::take(&mut w.content);
w.content.len = 0;
Ok(files)
@@ -879,7 +879,7 @@ impl FileBrowser {
let dir = self.core.minibuffer("cd")?;
let path = std::path::PathBuf::from(&dir);
- let dir = File::new_from_path(&path.canonicalize()?, None)?;
+ let dir = File::new_from_path(&path.canonicalize()?)?;
self.main_widget_goto(&dir)?;
Ok(())
@@ -931,11 +931,11 @@ impl FileBrowser {
let path = &paths[0];
if path.exists() {
if path.is_dir() {
- let dir = File::new_from_path(&path, None)?;
+ let dir = File::new_from_path(&path)?;
self.main_widget_goto(&dir).log();
} else if path.is_file() {
- let file = File::new_from_path(&path, None)?;
+ let file = File::new_from_path(&path)?;
let dir = file.parent_as_file()?;
self.main_widget_goto(&dir).log();
@@ -964,7 +964,7 @@ impl FileBrowser {
let dir_path = file_path.parent()?;
if self.cwd.path != dir_path {
- let file_dir = File::new_from_path(&dir_path, None);
+ let file_dir = File::new_from_path(&dir_path);
self.main_widget_goto_wait(&file_dir?).log();
}
@@ -1033,7 +1033,7 @@ impl FileBrowser {
if path.exists() {
if path.is_dir() {
- let dir = File::new_from_path(&path, None)?;
+ let dir = File::new_from_path(&path)?;
self.main_widget_goto(&dir).log();
}
else {
@@ -1231,12 +1231,15 @@ impl Widget for FileBrowser {
let file = self.selected_file()?;
let name = &file.name;
+ let fcolor = file.get_color();
+
let color = if file.is_dir() {
crate::term::highlight_color() }
- else if file.color.is_none() {
- crate::term::normal_color()
- } else {
- crate::term::from_lscolor(file.color.as_ref().unwrap())
+ else {
+ match fcolor {
+ Some(color) => color,
+ None => crate::term::normal_color()
+ }
};
let path = self.cwd.short_string();
diff --git a/src/files.rs b/src/files.rs
index de8dc29..43c603f 100644
--- a/src/files.rs
+++ b/src/files.rs
@@ -8,8 +8,10 @@ use std::sync::{Arc, Mutex, RwLock};
use std::sync::mpsc::Sender;
use std::hash::{Hash, Hasher};
use std::str::FromStr;
-use std::sync::atomic::{AtomicU32, Ordering};
+use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
+use failure;
+use failure::Fail;
use lscolors::LsColors;
use tree_magic;
use users::{get_current_username,
@@ -27,33 +29,61 @@ use pathbuftools::PathBufTools;
use async_value::{Async, Stale, StopIter};
use crate::fail::{HResult, HError, ErrorLog};
-use crate::dirty::{AsyncDirtyBit, DirtyBit, Dirtyable};
+use crate::dirty::{DirtyBit, Dirtyable};
use crate::widget::Events;
use crate::icon::Icons;
-use crate::fscache::FsEvent;
-
+use crate::fscache::{FsCache, FsEvent};
lazy_static! {
static ref COLORS: LsColors = LsColors::from_env().unwrap_or_default();
static ref TAGS: RwLock<(bool, Vec<PathBuf>)> = RwLock::new((false, vec![]));
static ref ICONS: Icons = Icons::new();
+ static ref IOPOOL: Mutex<Option<ThreadPool>> = Mutex::new(None);
+ static ref IOTICK: AtomicUsize = AtomicUsize::default();
+ static ref TICKING: AtomicBool = AtomicBool::new(false);
+}
+
+pub fn tick() {
+ IOTICK.fetch_add(1, Ordering::Relaxed);
+}
+
+pub fn get_tick() -> usize {
+ IOTICK.load(Ordering::Relaxed)
+}
+
+pub fn tick_str() -> &'static str {
+ // Using mod 5 for that nice nonlinear look
+ match get_tick() % 5 {
+ 0 => " ",
+ 1 => ". ",
+ 2 => ".. ",
+ _ => "..."
+ }
+}
+
+pub fn is_ticking() -> bool {
+ TICKING.load(Ordering::Acquire)
+}
+
+pub fn set_ticking(val: bool) {
+ TICKING.store(val, Ordering::Release);
+}
+
+#[derive(Fail, Debug, Clone)]
+pub enum FileError {
+ #[fail(display = "Metadata still pending!")]
+ MetaPending
}
-fn make_pool(sender: Option<Sender<Events>>) -> ThreadPool {
- let sender = Arc::new(Mutex::new(sender));
+pub fn get_pool() -> ThreadPool {
+ // Optimal number of threads depends on many things. This is a reasonable default.
+ const THREAD_NUM: usize = 8;
+
ThreadPoolBuilder::new()
- .num_threads(8)
- .exit_handler(move |thread_num| {
- if thread_num == 0 {
- if let Ok(lock) = sender.lock() {
- if let Some(sender) = lock.as_ref() {
- sender.send(Events::WidgetReady).ok();
- }
- }
- }
- })
+ .num_threads(THREAD_NUM)
+ .thread_name(|i| format!("hunter_iothread_{}", i))
.build()
- .expect("Failed to create thread pool")
+ .unwrap()
}
pub fn load_tags() -> HResult<()> {
@@ -100,22 +130,22 @@ pub fn tags_loaded() -> HResult<()> {
else { HError::tags_not_loaded() }
}
-
-#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+#[derive(Derivative)]
+#[derivative(PartialEq, Eq, Hash, Clone, Debug)]
pub struct RefreshPackage {
pub new_files: Option<Vec<File>>,
- pub new_buffer: Option<Vec<String>>,
pub new_len: usize,
+ #[derivative(Debug="ignore")]
+ #[derivative(PartialEq="ignore")]
+ #[derivative(Hash="ignore")]
+ pub jobs: Vec<Job>
}
impl RefreshPackage {
- fn new(mut files: Files,
- old_buffer: Vec<String>,
- events: Vec<FsEvent>,
- render_fn: impl Fn(&File) -> String) -> RefreshPackage {
+ fn new(mut files: Files, events: Vec<FsEvent>) -> RefreshPackage {
use FsEvent::*;
// If there is only a placeholder at this point, remove it now
@@ -137,15 +167,6 @@ impl RefreshPackage {
.map(|(i, file)| (file, i))
.collect();
-
- // Need to know which line of the ListView buffer belongs to which file
- let list_pos_map: HashMap<&File, usize> = static_files
- .iter_files()
- .enumerate()
- .take_while(|&(i, _)| i < old_buffer.len())
- .map(|(i, file)| (file, i))
- .collect();
-
// Save new files to add them all at once later
let mut new_files = Vec::with_capacity(event_count);
@@ -155,23 +176,35 @@ impl RefreshPackage {
// Save deletions to delete them efficiently later
let mut deleted_files = HashSet::with_capacity(event_count);
- for event in events.into_iter() {
+ // Stores jobs to asynchronously fetch metadata
+ let mut jobs = Vec::with_capacity(event_count);
+
+ let cache = &files.cache.take().unwrap();
+
+ // Drop would set this stale after the function returns
+ let stale = files.stale.take().unwrap();
+
+
+ for event in events.into_iter().stop_stale(stale.clone()) {
match event {
Create(mut file) => {
- file.meta_sync().log();
+ let job = file.prepare_meta_job(cache);
+ job.map(|j| jobs.push(j));
new_files.push(file);
}
Change(file) => {
if let Some(&fpos) = file_pos_map.get(&file) {
- files.files[fpos].meta_sync().log();
+ let job = files.files[fpos].refresh_meta_job();
+ jobs.push(job);
changed_files.insert(file);
}
}
Rename(old, new) => {
if let Some(&fpos) = file_pos_map.get(&old) {
files.files[fpos].rename(&new.path).log();
- files.files[fpos].meta_sync().log();
- }
+ let job = files.files[fpos].refresh_meta_job();
+ jobs.push(job);
+ }
}
Remove(file) => {
if let Some(_) = file_pos_map.get(&file) {
@@ -181,6 +214,15 @@ impl RefreshPackage {
}
}
+ // Bail out without further processing
+ if stale.is_stale().unwrap_or(true) {
+ return RefreshPackage {
+ new_files: None,
+ new_len: 0,
+ jobs: jobs
+ }
+ }
+
if deleted_files.len() > 0 {
files.files.retain(|file| !deleted_files.contains(file));
}
@@ -192,41 +234,28 @@ impl RefreshPackage {
files.recalculate_len();
files.sort();
- // Prerender new buffer in current thread
- let mut old_buffer = old_buffer;
-
- let new_buffer = files.iter_files()
- .map(|file| {
- match list_pos_map.get(&file) {
- Some(&old_pos) =>
- match changed_files.contains(&file) {
- true => render_fn(&file),
- false => std::mem::take(&mut old_buffer[old_pos])
- }
- None => render_fn(&file)
- }
- }).collect();
-
// Need to unpack this to prevent issue with recursive Files type
- // Also, if no files remain add placeholder and set len
- let (files, new_len, new_buffer) = if files.len() > 0 {
- (files.files, files.len, new_buffer)
+ // Also, if no files remain add placeholder and set len
+ let (files, new_len) = if files.len() > 0 {
+ (std::mem::take(&mut files.files), files.len)
} else {
let placeholder = File::new_placeholder(&files.directory.path).unwrap();
- let buffer = vec![render_fn(&placeholder)];
files.files.push(placeholder);
- (files.files, 1, buffer)
+ (std::mem::take(&mut files.files), 1)
};
-
RefreshPackage {
new_files: Some(files),
- new_buffer: Some(new_buffer),
- new_len: new_len
+ new_len: new_len,
+ jobs: jobs
}
}
}
+// Tuple that stores a path and "slots" to store metadata in
+pub type Job = (PathBuf,
+ Option<Arc<RwLock<Option<Metadata>>>>,
+ Option<Arc<(AtomicBool, AtomicUsize)>>);
#[derive(Derivative)]
#[derivative(PartialEq, Eq, Hash, Clone, Debug)]
@@ -250,6 +279,18 @@ pub struct Files {
pub filter: Option<String>,
pub filter_selected: bool,
pub dirty: DirtyBit,
+ #[derivative(Debug="ignore")]
+ #[derivative(PartialEq="ignore")]
+ #[derivative(Hash="ignore")]
+ pub jobs: Vec<Job>,
+ #[derivative(Debug="ignore")]
+ #[derivative(PartialEq="ignore")]
+ #[derivative(Hash="ignore")]
+ pub cache: Option<FsCache>,
+ #[derivative(Debug="ignore")]
+ #[derivative(PartialEq="ignore")]
+ #[derivative(Hash="ignore")]
+ pub stale: Option<Stale>
}
impl Index<usize> for Files {
@@ -292,83 +333,173 @@ impl Default for Files {
filter: None,
filter_selected: false,
dirty: DirtyBit::new(),
+ jobs: vec![],
+ cache: None,
+ stale: None
}
}
}
+// Stop processing stuff when Files is dropped
+impl Drop for Files {
+ fn drop(&mut self) {
+ self.stale
+ .as_ref()
+ .map(|s| s.set_stale());
+ }
+}
+
impl Files {
- pub fn new_from_path(path: &Path) -> HResult<Files> {
- let direntries: Result<Vec<_>, _> = std::fs::read_dir(&path)?.collect();
- let dirty_meta = AsyncDirtyBit::new();
- let tags = &TAGS.read().ok()?.1;
+ pub fn new_from_path_cancellable(path: &Path, stale: Stale) -> HResult<Files> {
+ let direntries: Vec<std::fs::DirEntry> = std::fs::read_dir(&path)?
+ .stop_stale(stale.clone())
+ .collect::<Result<Vec<std::fs::DirEntry>, _>>()?;
- let files: Vec<_> = direntries?
- .iter()
- .map(|file| {
- let name = file.file_name();
- let name = name.to_string_lossy();
- let path = file.path();
- let mut file = File::new(&name,
- path,
- Some(dirty_meta.clone()));
- file.set_tag_status(&tags);
- Some(file)
+ let nonhidden = AtomicUsize::default();
+
+ let direntries: Vec<_> = direntries
+ .into_par_iter()
+ .map(|f| {
+ let f = File::new_from_direntry(f);
+ // Fast check to avoid iterating twice
+ if f.name.as_bytes()[0] != b'.' {
+ nonhidden.fetch_add(1, Ordering::Relaxed);
+ }
+ f
})
.collect();
- let len = files.len();
+ if stale.is_stale()? {
+ HError::stale()?;
+ }
let mut files = Files::default();
- files.directory = File::new_from_path(&path, None)?;
- files.len = len;
+ files.directory = File::new_from_path(&path)?;
+ files.files = direntries;
+ files.len = nonhidden.load(Ordering::Relaxed);
+ files.stale = Some(stale);
Ok(files)
}
- pub fn new_from_path_cancellable(path: &Path,
- stale: Stale)
- -> HResult<Files> {
- let direntries: Result<Vec<_>, _> = std::fs::read_dir(&path)?.collect();
- let dirty = DirtyBit::new();
- let dirty_meta = AsyncDirtyBit::new();
+ pub fn enqueue_jobs(&mut self, n: usize) {
+ let pool = get_pool();
+ let from = self.meta_upto.unwrap_or(0);
+ self.meta_upto = Some(from + n);
+
+ let mut jobs =
+ pool.install(|| {
+ let c = match self.cache.clone() {
+ Some(cache) => cache,
+ None => return vec![]
+ };
+
+ self.iter_files_mut()
+ .skip(from)
+ .take(n)
+ // To turn into IndexedParallelIter
+ .collect::<Vec<&mut File>>()
+ .into_par_iter()
+ .filter_map(|f| f.prepare_meta_job(&c))
+ .collect::<Vec<_>>()
+ });
- let files: Vec<_> = direntries?
- .into_iter()
- .stop_stale(stale.clone())
- .par_bridge()
- .map(|file| {
- let file = File::new_from_direntry(file,
- Some(dirty_meta.clone()));
- file
- })
- .collect();
+ self.jobs.append(&mut jobs);
+ }
- if stale.is_stale()? {
- return Err(crate::fail::HError::StalePreviewError {
- file: path.to_string_lossy().to_string()
- })?;
- }
+ pub fn run_jobs(&mut self, sender: Sender<Events>) {
+ use std::time::Duration;
+ let jobs = std::mem::take(&mut self.jobs);
+ let stale = self.stale.clone()
+ .unwrap_or(Stale::new());
- let mut files = Files {
- directory: File::new_from_path(&path, None)?,
- files: files,
- len: 0,
- pending_events: Arc::new(RwLock::new(vec![])),
- refresh: None,
- meta_upto: None,
- sort: SortBy::Name,
- dirs_first: true,
- reverse: false,
- show_hidden: false,
- filter: None,
- filter_selected: false,
- dirty: dirty,
- };
+ if jobs.len() == 0 { return; }
- files.recalculate_len();
+ std::thread::spawn(move || {
+ let pool = get_pool();
+ let jobs_left = AtomicUsize::new(jobs.len());
+ let jobs_left = &jobs_left;
+ let stale = &stale;
- Ok(files)
+ let ticker = move || {
+ // Gently slow down refreshes
+ let backoff = Duration::from_millis(10);
+ let mut cooldown = Duration::from_millis(10);
+
+ loop {
+ // Send refresh event before sleeping
+ sender.send(crate::widget::Events::WidgetReady)
+ .unwrap();
+ std::thread::sleep(cooldown);
+
+ // Slow down up to 1 second
+ if cooldown < Duration::from_secs(1) {
+ cooldown += backoff;
+ }
+
+ // All jobs done?
+ if jobs_left.load(Ordering::Relaxed) == 0 {
+ // Refresh one last time
+ sender.send(crate::widget::Events::WidgetReady)
+ .unwrap();
+ crate::files::set_ticking(false);
+ return;
+ }
+
+ crate::files::tick();
+ }
+ };
+
+        // Wrap in Option so it can be taken without consuming, since Sender can't be shared
+ let mut ticker = Some(ticker);
+
+ // Finally this returns the ticker function as an Option
+ let mut ticker = move || {
+ // Only return ticker if no one's ticking
+ match !crate::files::is_ticking() {
+ true => {
+ crate::files::set_ticking(true);
+ ticker.take()
+ }
+ false => None
+ }
+ };
+
+ pool.scope_fifo(move |s| {
+ // Noop with other pool running ticker
+ ticker().map(|t| s.spawn_fifo(move |_| t()));
+
+ for (path, mslot, dirsize) in jobs.into_iter().stop_stale(stale.clone())
+ {
+ s.spawn_fifo(move |_| {
+ if let Some(mslot) = mslot {
+ if let Ok(meta) = std::fs::symlink_metadata(&path) {
+ *mslot.write().unwrap() = Some(meta);
+ }
+ }
+
+ if let Some(dirsize) = dirsize {
+ std::fs::read_dir(&path)
+ .map(|dirs| {
+ let size = dirs.count();
+ dirsize.0.store(true, Ordering::Relaxed);
+ dirsize.1.store(size, Ordering::Relaxed);
+ }).map_err(|e| {
+ dirsize.0.store(true, Ordering::Relaxed);
+ dirsize.1.store(0, Ordering::Relaxed);
+ HError::from(e)
+ }).log();
+ }
+
+ // Ticker will only stop after this reaches 0
+ jobs_left.fetch_sub(1, Ordering::Relaxed);
+ });
+
+ ticker().map(|t| s.spawn_fifo(move |_| t()));
+ }
+ });
+ });
}
pub fn recalculate_len(&mut self) {
@@ -444,12 +575,14 @@ impl Files {
}
#[allow(trivial_bounds)]
- pub fn into_iter_files(self) -> impl Iterator<Item=File> {
- let filter = self.filter;
+ pub fn into_iter_files(mut self) -> impl Iterator<Item=File> {
+ let filter = std::mem::take(&mut self.filter);
let filter_selected = self.filter_selected;
let show_hidden = self.show_hidden;
- self.files
+ let files = std::mem::take(&mut self.files);
+
+ files
.into_iter()
.filter(move |f|
f.kind == Kind::Placeholder ||
@@ -457,86 +590,122 @@ impl Files {
!f.name.contains(filter.as_ref().unwrap())) &&
(!filter_selected || f.selected))
.filter(move |f| !(!show_hidden && f.name.starts_with(".")))
+ // Just stuff self in there so drop() doesn't get called immediately
+ .filter(move |_| { &self; true })
}
- pub fn sort(&mut self) {
+ #[allow(trivial_bounds)]
+ pub fn take_into_iter_files(&mut self) -> impl Iterator<Item=File> {
+ let filter = self.filter.clone();
+ let filter_selected = self.filter_selected;
+ let show_hidden = self.show_hidden;
+
+ let files = std::mem::take(&mut self.files);
+ self.files.clear();
+
+ files.into_iter()
+ .filter(move |f|
+ f.kind == Kind::Placeholder ||
+ !(filter.is_some() &&
+ !f.name.contains(filter.as_ref().unwrap())) &&
+ (!filter_selected || f.selected))
+ .filter(move |f| !(!show_hidden && f.name.starts_with(".")))
+ }
+
+ #[allow(trivial_bounds)]
+ pub fn sorter(&self) -> impl Fn(&File, &File) -> std::cmp::Ordering {
use std::cmp::Ordering::*;
- let dirs_first = self.dirs_first;
-
- match self.sort {
- SortBy::Name => self
- .files
- .par_sort_unstable_by(|a, b| {
- if dirs_first {
- match (a.is_dir(), b.is_dir()) {
- (true, false) => Less,
- (false, true) => Greater,
- _ => compare_str(&a.name, &b.name),
- }
- } else {
- compare_str(&a.name, &b.name)
+ let dirs_first = self.dirs_first.clone();
+ let sort = self.sort.clone();
+
+ let dircmp = move |a: &File, b: &File| {
+ match (a.is_dir(), b.is_dir()) {
+ (true, false) if dirs_first => Less,
+ (false, true) if dirs_first => Greater,
+ _ => Equal
+ }
+ };
+
+
+ let reverse = self.reverse;
+ let namecmp = move |a: &File, b: &File| {
+ let (a, b) = match reverse {
+ true => (b, a),
+ false => (a, b),
+ };
+
+ compare_str(&a.name, &b.name)
+ };
+
+ let reverse = self.reverse;
+ let sizecmp = move |a: &File, b: &File| {
+ let (a, b) = match reverse {
+ true => (b, a),
+ false => (a, b),
+ };
+
+ match (a.meta(), b.meta()) {
+ (Some(a_meta), Some(b_meta)) => {
+ let a_meta = a_meta.as_ref().unwrap();
+ let b_meta = b_meta.as_ref().unwrap();
+ match a_meta.size() == b_meta.size() {
+ true => compare_str(&b.name, &a.name),
+ false => b_meta.size().cmp(&a_meta.size())
}
- }),
- SortBy::Size => {
- if self.meta_upto < Some(self.len()) {
- self.meta_all_sync().log();
}
+ _ => Equal
+ }
+ };
- self.files.par_sort_unstable_by(|a, b| {
- if dirs_first {
- match (a.is_dir(), b.is_dir()) {
- (true, false) => return Less,
- (false, true) => return Greater,
- _ => {}
- }
- }
+ let reverse = self.reverse;
+ let timecmp = move |a: &File, b: &File| {
+ let (a, b) = match reverse {
+ true => (b, a),
+ false => (a, b),
+ };
- match (a.meta(), b.meta()) {
- (Some(a_meta), Some(b_meta)) => {
- match a_meta.size() == b_meta.size() {
- true => compare_str(&b.name, &a.name),
- false => b_meta.size()
- .cmp(&a_meta.size())
- }
- }
- _ => Equal
+ match (a.meta(), b.meta()) {
+ (Some(a_meta), Some(b_meta)) => {
+ let a_meta = a_meta.as_ref().unwrap();
+ let b_meta = b_meta.as_ref().unwrap();
+ match a_meta.mtime() == b_meta.mtime() {
+ true => compare_str(&b.name, &a.name),
+ false => b_meta.mtime().cmp(&a_meta.mtime())
}
- })
- }
- SortBy::MTime => {
- if self.meta_upto < Some(self.len()) {
- self.meta_all_sync().log();
}
+ _ => Equal
+ }
+ };
- self.files.par_sort_unstable_by(|a, b| {
- if dirs_first {
- match (a.is_dir(), b.is_dir()) {
- (true, false) => return Less,
- (false, true) => return Greater,
- _ => {}
- }
- }
- match (a.meta(), b.meta()) {
- (Some(a_meta), Some(b_meta)) => {
- match a_meta.mtime() == b_meta.mtime() {
- true => compare_str(&b.name, &a.name),
- false => b_meta.mtime()
- .cmp(&a_meta.mtime())
- }
- }
- _ => Equal
- }
- })
+ move |a, b| match sort {
+ SortBy::Name => {
+ match dircmp(a, b) {
+ Equal => namecmp(a, b),
+ ord @ _ => ord
+ }
+ },
+ SortBy::Size => {
+ match dircmp(a, b) {
+ Equal => sizecmp(a, b),
+ ord @ _ => ord
+ }
+ }
+ SortBy::MTime => {
+ match dircmp(a, b) {
+ Equal => timecmp(a, b),
+ ord @ _ => ord
+ }
}
}
+ }
- // This could