Diffstat (limited to 'helix-term')
-rw-r--r--  helix-term/Cargo.toml                    3
-rw-r--r--  helix-term/src/application.rs           12
-rw-r--r--  helix-term/src/commands.rs             295
-rw-r--r--  helix-term/src/commands/typed.rs        38
-rw-r--r--  helix-term/src/ui/fuzzy_match.rs       239
-rw-r--r--  helix-term/src/ui/fuzzy_match/test.rs   47
-rw-r--r--  helix-term/src/ui/menu.rs               51
-rw-r--r--  helix-term/src/ui/mod.rs               184
-rw-r--r--  helix-term/src/ui/picker.rs            545
-rw-r--r--  helix-term/tests/integration.rs          1
-rw-r--r--  helix-term/tests/test/picker.rs         80
11 files changed, 501 insertions, 994 deletions
diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml
index 29c2eea7..5c7fd0de 100644
--- a/helix-term/Cargo.toml
+++ b/helix-term/Cargo.toml
@@ -24,6 +24,7 @@ path = "src/main.rs"
[dependencies]
helix-core = { version = "0.6", path = "../helix-core" }
+helix-event = { version = "0.6", path = "../helix-event" }
helix-view = { version = "0.6", path = "../helix-view" }
helix-lsp = { version = "0.6", path = "../helix-lsp" }
helix-dap = { version = "0.6", path = "../helix-dap" }
@@ -49,7 +50,7 @@ chrono = { version = "0.4", default-features = false, features = ["clock"] }
log = "0.4"
# File picker
-fuzzy-matcher = "0.3"
+nucleo.workspace = true
ignore = "0.4"
# markdown doc rendering
pulldown-cmark = { version = "0.9", default-features = false }
diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs
index 8d5b1ceb..0d40fa66 100644
--- a/helix-term/src/application.rs
+++ b/helix-term/src/application.rs
@@ -257,16 +257,8 @@ impl Application {
scroll: None,
};
- // Acquire mutable access to the redraw_handle lock
- // to ensure that there are no tasks running that want to block rendering
- drop(cx.editor.redraw_handle.1.write().await);
+ helix_event::start_frame();
cx.editor.needs_redraw = false;
- {
- // exhaust any leftover redraw notifications
- let notify = cx.editor.redraw_handle.0.notified();
- tokio::pin!(notify);
- notify.enable();
- }
let area = self
.terminal
@@ -590,7 +582,7 @@ impl Application {
EditorEvent::LanguageServerMessage((id, call)) => {
self.handle_language_server_message(call, id).await;
// limit render calls for fast language server messages
- self.editor.redraw_handle.0.notify_one();
+ helix_event::request_redraw();
}
EditorEvent::DebuggerEvent(payload) => {
let needs_render = self.editor.handle_debugger_message(payload).await;
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index 1f88079e..31ea7581 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -43,7 +43,6 @@ use helix_view::{
};
use anyhow::{anyhow, bail, ensure, Context as _};
-use fuzzy_matcher::FuzzyMatcher;
use insert::*;
use movement::Movement;
@@ -60,7 +59,7 @@ use crate::{
};
use crate::job::{self, Jobs};
-use futures_util::{stream::FuturesUnordered, StreamExt, TryStreamExt};
+use futures_util::{stream::FuturesUnordered, TryStreamExt};
use std::{collections::HashMap, fmt, future::Future};
use std::{collections::HashSet, num::NonZeroUsize};
@@ -75,7 +74,6 @@ use serde::de::{self, Deserialize, Deserializer};
use grep_regex::RegexMatcherBuilder;
use grep_searcher::{sinks, BinaryDetection, SearcherBuilder};
use ignore::{DirEntry, WalkBuilder, WalkState};
-use tokio_stream::wrappers::UnboundedReceiverStream;
pub type OnKeyCallback = Box<dyn FnOnce(&mut Context, KeyEvent)>;
@@ -1715,8 +1713,8 @@ fn select_regex(cx: &mut Context) {
"select:".into(),
Some(reg),
ui::completers::none,
- move |editor, regex, event| {
- let (view, doc) = current!(editor);
+ move |cx, regex, event| {
+ let (view, doc) = current!(cx.editor);
if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
return;
}
@@ -1737,8 +1735,8 @@ fn split_selection(cx: &mut Context) {
"split:".into(),
Some(reg),
ui::completers::none,
- move |editor, regex, event| {
- let (view, doc) = current!(editor);
+ move |cx, regex, event| {
+ let (view, doc) = current!(cx.editor);
if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
return;
}
@@ -1902,14 +1900,14 @@ fn searcher(cx: &mut Context, direction: Direction) {
.map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
.collect()
},
- move |editor, regex, event| {
+ move |cx, regex, event| {
if event == PromptEvent::Validate {
- editor.registers.last_search_register = reg;
+ cx.editor.registers.last_search_register = reg;
} else if event != PromptEvent::Update {
return;
}
search_impl(
- editor,
+ cx.editor,
&contents,
&regex,
Movement::Move,
@@ -2078,13 +2076,11 @@ fn global_search(cx: &mut Context) {
}
}
- let (all_matches_sx, all_matches_rx) = tokio::sync::mpsc::unbounded_channel::<FileResult>();
let config = cx.editor.config();
let smart_case = config.search.smart_case;
let file_picker_config = config.file_picker.clone();
let reg = cx.register.unwrap_or('/');
-
let completions = search_completions(cx, Some(reg));
ui::regex_prompt(
cx,
@@ -2097,166 +2093,173 @@ fn global_search(cx: &mut Context) {
.map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
.collect()
},
- move |editor, regex, event| {
+ move |cx, regex, event| {
if event != PromptEvent::Validate {
return;
}
- editor.registers.last_search_register = reg;
+ cx.editor.registers.last_search_register = reg;
- let documents: Vec<_> = editor
+ let current_path = doc_mut!(cx.editor).path().cloned();
+ let documents: Vec<_> = cx
+ .editor
.documents()
- .map(|doc| (doc.path(), doc.text()))
+ .map(|doc| (doc.path().cloned(), doc.text().to_owned()))
.collect();
if let Ok(matcher) = RegexMatcherBuilder::new()
.case_smart(smart_case)
.build(regex.as_str())
{
- let searcher = SearcherBuilder::new()
- .binary_detection(BinaryDetection::quit(b'\x00'))
- .build();
-
let search_root = helix_loader::current_working_dir();
if !search_root.exists() {
- editor.set_error("Current working directory does not exist");
+ cx.editor
+ .set_error("Current working directory does not exist");
return;
}
+ let (picker, injector) = Picker::stream(current_path);
+
let dedup_symlinks = file_picker_config.deduplicate_links;
let absolute_root = search_root
.canonicalize()
.unwrap_or_else(|_| search_root.clone());
-
- WalkBuilder::new(search_root)
- .hidden(file_picker_config.hidden)
- .parents(file_picker_config.parents)
- .ignore(file_picker_config.ignore)
- .follow_links(file_picker_config.follow_symlinks)
- .git_ignore(file_picker_config.git_ignore)
- .git_global(file_picker_config.git_global)
- .git_exclude(file_picker_config.git_exclude)
- .max_depth(file_picker_config.max_depth)
- .filter_entry(move |entry| {
- filter_picker_entry(entry, &absolute_root, dedup_symlinks)
- })
- .build_parallel()
- .run(|| {
- let mut searcher = searcher.clone();
- let matcher = matcher.clone();
- let all_matches_sx = all_matches_sx.clone();
- let documents = &documents;
- Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
- let entry = match entry {
- Ok(entry) => entry,
- Err(_) => return WalkState::Continue,
- };
-
- match entry.file_type() {
- Some(entry) if entry.is_file() => {}
- // skip everything else
- _ => return WalkState::Continue,
- };
-
- let sink = sinks::UTF8(|line_num, _| {
- all_matches_sx
- .send(FileResult::new(entry.path(), line_num as usize - 1))
- .unwrap();
-
- Ok(true)
- });
- let doc = documents.iter().find(|&(doc_path, _)| {
- doc_path.map_or(false, |doc_path| doc_path == entry.path())
- });
-
- let result = if let Some((_, doc)) = doc {
- // there is already a buffer for this file
- // search the buffer instead of the file because it's faster
- // and captures new edits without requireing a save
- if searcher.multi_line_with_matcher(&matcher) {
- // in this case a continous buffer is required
- // convert the rope to a string
- let text = doc.to_string();
- searcher.search_slice(&matcher, text.as_bytes(), sink)
+ let injector_ = injector.clone();
+
+ std::thread::spawn(move || {
+ let searcher = SearcherBuilder::new()
+ .binary_detection(BinaryDetection::quit(b'\x00'))
+ .build();
+ WalkBuilder::new(search_root)
+ .hidden(file_picker_config.hidden)
+ .parents(file_picker_config.parents)
+ .ignore(file_picker_config.ignore)
+ .follow_links(file_picker_config.follow_symlinks)
+ .git_ignore(file_picker_config.git_ignore)
+ .git_global(file_picker_config.git_global)
+ .git_exclude(file_picker_config.git_exclude)
+ .max_depth(file_picker_config.max_depth)
+ .filter_entry(move |entry| {
+ filter_picker_entry(entry, &absolute_root, dedup_symlinks)
+ })
+ .build_parallel()
+ .run(|| {
+ let mut searcher = searcher.clone();
+ let matcher = matcher.clone();
+ let injector = injector_.clone();
+ let documents = &documents;
+ Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
+ let entry = match entry {
+ Ok(entry) => entry,
+ Err(_) => return WalkState::Continue,
+ };
+
+ match entry.file_type() {
+ Some(entry) if entry.is_file() => {}
+ // skip everything else
+ _ => return WalkState::Continue,
+ };
+
+ let mut stop = false;
+ let sink = sinks::UTF8(|line_num, _| {
+ stop = injector
+ .push(FileResult::new(entry.path(), line_num as usize - 1))
+ .is_err();
+
+ Ok(!stop)
+ });
+ let doc = documents.iter().find(|&(doc_path, _)| {
+ doc_path
+ .as_ref()
+ .map_or(false, |doc_path| doc_path == entry.path())
+ });
+
+ let result = if let Some((_, doc)) = doc {
+ // there is already a buffer for this file
+ // search the buffer instead of the file because it's faster
+ // and captures new edits without requiring a save
+ if searcher.multi_line_with_matcher(&matcher) {
+ // in this case a continuous buffer is required
+ // convert the rope to a string
+ let text = doc.to_string();
+ searcher.search_slice(&matcher, text.as_bytes(), sink)
+ } else {
+ searcher.search_reader(
+ &matcher,
+ RopeReader::new(doc.slice(..)),
+ sink,
+ )
+ }
} else {
- searcher.search_reader(
- &matcher,
- RopeReader::new(doc.slice(..)),
- sink,
- )
+ searcher.search_path(&matcher, entry.path(), sink)
+ };
+
+ if let Err(err) = result {
+ log::error!(
+ "Global search error: {}, {}",
+ entry.path().display(),
+ err
+ );
}
- } else {
- searcher.search_path(&matcher, entry.path(), sink)
- };
-
- if let Err(err) = result {
- log::error!(
- "Global search error: {}, {}",
- entry.path().display(),
- err
- );
- }
- WalkState::Continue
- })
- });
+ if stop {
+ WalkState::Quit
+ } else {
+ WalkState::Continue
+ }
+ })
+ });
+ });
+
+ cx.jobs.callback(async move {
+ let call = move |_: &mut Editor, compositor: &mut Compositor| {
+ let picker = Picker::with_stream(
+ picker,
+ injector,
+ move |cx, FileResult { path, line_num }, action| {
+ let doc = match cx.editor.open(path, action) {
+ Ok(id) => doc_mut!(cx.editor, &id),
+ Err(e) => {
+ cx.editor.set_error(format!(
+ "Failed to open file '{}': {}",
+ path.display(),
+ e
+ ));
+ return;
+ }
+ };
+
+ let line_num = *line_num;
+ let view = view_mut!(cx.editor);
+ let text = doc.text();
+ if line_num >= text.len_lines() {
+ cx.editor.set_error(
+ "The line you jumped to does not exist anymore because the file has changed.",
+ );
+ return;
+ }
+ let start = text.line_to_char(line_num);
+ let end = text.line_to_char((line_num + 1).min(text.len_lines()));
+
+ doc.set_selection(view.id, Selection::single(start, end));
+ if action.align_view(view, doc.id()) {
+ align_view(doc, view, Align::Center);
+ }
+ },
+ )
+ .with_preview(
+ |_editor, FileResult { path, line_num }| {
+ Some((path.clone().into(), Some((*line_num, *line_num))))
+ },
+ );
+ compositor.push(Box::new(overlaid(picker)))
+ };
+ Ok(Callback::EditorCompositor(Box::new(call)))
+ })
} else {
// Otherwise do nothing
// log::warn!("Global Search Invalid Pattern")
}
},
);
-
- let current_path = doc_mut!(cx.editor).path().cloned();
-
- let show_picker = async move {
- let all_matches: Vec<FileResult> =
- UnboundedReceiverStream::new(all_matches_rx).collect().await;
- let call: job::Callback = Callback::EditorCompositor(Box::new(
- move |editor: &mut Editor, compositor: &mut Compositor| {
- if all_matches.is_empty() {
- if !editor.is_err() {
- editor.set_status("No matches found");
- }
- return;
- }
-
- let picker = Picker::new(
- all_matches,
- current_path,
- move |cx, FileResult { path, line_num }, action| {
- let doc = match cx.editor.open(path, action) {
- Ok(id) => doc_mut!(cx.editor, &id),
- Err(e) => {
- cx.editor.set_error(format!(
- "Failed to open file '{}': {}",
- path.display(),
- e
- ));
- return;
- }
- };
- let line_num = *line_num;
- let view = view_mut!(cx.editor);
- let text = doc.text();
- if line_num >= text.len_lines() {
- cx.editor.set_error("The line you jumped to does not exist anymore because the file has changed.");
- return;
- }
- let start = text.line_to_char(line_num);
- let end = text.line_to_char((line_num + 1).min(text.len_lines()));
-
- doc.set_selection(view.id, Selection::single(start, end));
- if action.align_view(view, doc.id()){
- align_view(doc, view, Align::Center);
- }
- }).with_preview(|_editor, FileResult { path, line_num }| {
- Some((path.clone().into(), Some((*line_num, *line_num))))
- });
- compositor.push(Box::new(overlaid(picker)));
- },
- ));
- Ok(call)
- };
- cx.jobs.callback(show_picker);
}
enum Extend {
@@ -4310,8 +4313,8 @@ fn keep_or_remove_selections_impl(cx: &mut Context, remove: bool) {
if remove { "remove:" } else { "keep:" }.into(),
Some(reg),
ui::completers::none,
- move |editor, regex, event| {
- let (view, doc) = current!(editor);
+ move |cx, regex, event| {
+ let (view, doc) = current!(cx.editor);
if !matches!(event, PromptEvent::Update | PromptEvent::Validate) {
return;
}
diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs
index a7eb2244..0e1d9431 100644
--- a/helix-term/src/commands/typed.rs
+++ b/helix-term/src/commands/typed.rs
@@ -5,6 +5,7 @@ use crate::job::Job;
use super::*;
+use helix_core::fuzzy::fuzzy_match;
use helix_core::{encoding, shellwords::Shellwords};
use helix_view::document::DEFAULT_LANGUAGE_NAME;
use helix_view::editor::{Action, CloseError, ConfigEvent};
@@ -1265,12 +1266,10 @@ fn reload(
}
let scrolloff = cx.editor.config().scrolloff;
- let redraw_handle = cx.editor.redraw_handle.clone();
let (view, doc) = current!(cx.editor);
- doc.reload(view, &cx.editor.diff_providers, redraw_handle)
- .map(|_| {
- view.ensure_cursor_in_view(doc, scrolloff);
- })?;
+ doc.reload(view, &cx.editor.diff_providers).map(|_| {
+ view.ensure_cursor_in_view(doc, scrolloff);
+ })?;
if let Some(path) = doc.path() {
cx.editor
.language_servers
@@ -1316,8 +1315,7 @@ fn reload_all(
// Ensure that the view is synced with the document's history.
view.sync_changes(doc);
- let redraw_handle = cx.editor.redraw_handle.clone();
- doc.reload(view, &cx.editor.diff_providers, redraw_handle)?;
+ doc.reload(view, &cx.editor.diff_providers)?;
if let Some(path) = doc.path() {
cx.editor
.language_servers
@@ -2902,28 +2900,18 @@ pub(super) fn command_mode(cx: &mut Context) {
":".into(),
Some(':'),
|editor: &Editor, input: &str| {
- static FUZZY_MATCHER: Lazy<fuzzy_matcher::skim::SkimMatcherV2> =
- Lazy::new(fuzzy_matcher::skim::SkimMatcherV2::default);
-
let shellwords = Shellwords::from(input);
let words = shellwords.words();
if words.is_empty() || (words.len() == 1 && !shellwords.ends_with_whitespace()) {
- // If the command has not been finished yet, complete commands.
- let mut matches: Vec<_> = typed::TYPABLE_COMMAND_LIST
- .iter()
- .filter_map(|command| {
- FUZZY_MATCHER
- .fuzzy_match(command.name, input)
- .map(|score| (command.name, score))
- })
- .collect();
-
- matches.sort_unstable_by_key(|(_file, score)| std::cmp::Reverse(*score));
- matches
- .into_iter()
- .map(|(name, _)| (0.., name.into()))
- .collect()
+ fuzzy_match(
+ input,
+ TYPABLE_COMMAND_LIST.iter().map(|command| command.name),
+ false,
+ )
+ .into_iter()
+ .map(|(name, _)| (0.., name.into()))
+ .collect()
} else {
// Otherwise, use the command's completer and the last shellword
// as completion input.
diff --git a/helix-term/src/ui/fuzzy_match.rs b/helix-term/src/ui/fuzzy_match.rs
deleted file mode 100644
index 22dc3a7f..00000000
--- a/helix-term/src/ui/fuzzy_match.rs
+++ /dev/null
@@ -1,239 +0,0 @@
-use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
-use fuzzy_matcher::FuzzyMatcher;
-
-#[cfg(test)]
-mod test;
-
-struct QueryAtom {
- kind: QueryAtomKind,
- atom: String,
- ignore_case: bool,
- inverse: bool,
-}
-impl QueryAtom {
- fn new(atom: &str) -> Option<QueryAtom> {
- let mut atom = atom.to_string();
- let inverse = atom.starts_with('!');
- if inverse {
- atom.remove(0);
- }
-
- let mut kind = match atom.chars().next() {
- Some('^') => QueryAtomKind::Prefix,
- Some('\'') => QueryAtomKind::Substring,
- _ if inverse => QueryAtomKind::Substring,
- _ => QueryAtomKind::Fuzzy,
- };
-
- if atom.starts_with(['^', '\'']) {
- atom.remove(0);
- }
-
- if atom.is_empty() {
- return None;
- }
-
- if atom.ends_with('$') && !atom.ends_with("\\$") {
- atom.pop();
- kind = if kind == QueryAtomKind::Prefix {
- QueryAtomKind::Exact
- } else {
- QueryAtomKind::Postfix
- }
- }
-
- Some(QueryAtom {
- kind,
- atom: atom.replace('\\', ""),
- // not ideal but fuzzy_matches only knows ascii uppercase so more consistent
- // to behave the same
- ignore_case: kind != QueryAtomKind::Fuzzy
- && atom.chars().all(|c| c.is_ascii_lowercase()),
- inverse,
- })
- }
-
- fn indices(&self, matcher: &Matcher, item: &str, indices: &mut Vec<usize>) -> bool {
- // for inverse there are no indices to return
- // just return whether we matched
- if self.inverse {
- return self.matches(matcher, item);
- }
- let buf;
- let item = if self.ignore_case {
- buf = item.to_ascii_lowercase();
- &buf
- } else {
- item
- };
- let off = match self.kind {
- QueryAtomKind::Fuzzy => {
- if let Some((_, fuzzy_indices)) = matcher.fuzzy_indices(item, &self.atom) {
- indices.extend_from_slice(&fuzzy_indices);
- return true;
- } else {
- return false;
- }
- }
- QueryAtomKind::Substring => {
- if let Some(off) = item.find(&self.atom) {
- off
- } else {
- return false;
- }
- }
- QueryAtomKind::Prefix if item.starts_with(&self.atom) => 0,
- QueryAtomKind::Postfix if item.ends_with(&self.atom) => item.len() - self.atom.len(),
- QueryAtomKind::Exact if item == self.atom => 0,
- _ => return false,
- };
-
- indices.extend(off..(off + self.atom.len()));
- true
- }
-
- fn matches(&self, matcher: &Matcher, item: &str) -> bool {
- let buf;
- let item = if self.ignore_case {
- buf = item.to_ascii_lowercase();
- &buf
- } else {
- item
- };
- let mut res = match self.kind {
- QueryAtomKind::Fuzzy => matcher.fuzzy_match(item, &self.atom).is_some(),
- QueryAtomKind::Substring => item.contains(&self.atom),
- QueryAtomKind::Prefix => item.starts_with(&self.atom),
- QueryAtomKind::Postfix => item.ends_with(&self.atom),
- QueryAtomKind::Exact => item == self.atom,
- };
- if self.inverse {
- res = !res;
- }
- res
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-enum QueryAtomKind {
- /// Item is a fuzzy match of this behaviour
- ///
- /// Usage: `foo`
- Fuzzy,
- /// Item contains query atom as a continuous substring
- ///
- /// Usage `'foo`
- Substring,
- /// Item starts with query atom
- ///
- /// Usage: `^foo`
- Prefix,
- /// Item ends with query atom
- ///
- /// Usage: `foo$`
- Postfix,
- /// Item is equal to query atom
- ///
- /// Usage `^foo$`
- Exact,
-}
-
-#[derive(Default)]
-pub struct FuzzyQuery {
- first_fuzzy_atom: Option<String>,
- query_atoms: Vec<QueryAtom>,
-}
-
-fn query_atoms(query: &str) -> impl Iterator<Item = &str> + '_ {
- let mut saw_backslash = false;
- query.split(move |c| {
- saw_backslash = match c {
- ' ' if !saw_backslash => return true,
- '\\' => true,
- _ => false,
- };
- false
- })
-}
-
-impl FuzzyQuery {
- pub fn refine(&self, query: &str, old_query: &str) -> (FuzzyQuery, bool) {
- // TODO: we could be a lot smarter about this
- let new_query = Self::new(query);
- let mut is_refinement = query.starts_with(old_query);
-
- // if the last atom is an inverse atom adding more text to it
- // will actually increase the number of matches and we can not refine
- // the matches.
- if is_refinement && !self.query_atoms.is_empty() {
- let last_idx = self.query_atoms.len() - 1;
- if self.query_atoms[last_idx].inverse
- && self.query_atoms[last_idx].atom != new_query.query_atoms[last_idx].atom
- {
- is_refinement = false;
- }
- }
-
- (new_query, is_refinement)
- }
-
- pub fn new(query: &str) -> FuzzyQuery {
- let mut first_fuzzy_query = None;
- let query_atoms = query_atoms(query)
- .filter_map(|atom| {
- let atom = QueryAtom::new(atom)?;
- if atom.kind == QueryAtomKind::Fuzzy && first_fuzzy_query.is_none() {
- first_fuzzy_query = Some(atom.atom);
- None
- } else {
- Some(atom)
- }
- })
- .collect();
- FuzzyQuery {
- first_fuzzy_atom: first_fuzzy_query,
- query_atoms,
- }
- }
-
- pub fn fuzzy_match(&self, item: &str, matcher: &Matcher) -> Option<i64> {
- // use the rank of the first fuzzzy query for the rank, because merging ranks is not really possible
- // this behaviour matches fzf and skim
- let score = self
- .first_fuzzy_atom
- .as_ref()
- .map_or(Some(0), |atom| matcher.fuzzy_match(item, atom))?;
- if self
- .query_atoms
- .iter()
- .any(|atom| !atom.matches(matcher, item))
- {
- return None;
- }
- Some(score)
- }
-
- pub fn fuzzy_indices(&self, item: &str, matcher: &Matcher) -> Option<(i64, Vec<usize>)> {
- let (score, mut indices) = self.first_fuzzy_atom.as_ref().map_or_else(
- || Some((0, Vec::new())),
- |atom| matcher.fuzzy_indices(item, atom),
- )?;
-
- // fast path for the common case of just a single atom
- if self.query_atoms.is_empty() {
- return Some((score, indices));
- }
-
- for atom in &self.query_atoms {
- if !atom.indices(matcher, item, &mut indices) {
- return None;
- }
- }
-
- // deadup and remove duplicate matches
- indices.sort_unstable();
- indices.dedup();
-
- Some((score, indices))
- }
-}
diff --git a/helix-term/src/ui/fuzzy_match/test.rs b/helix-term/src/ui/fuzzy_match/test.rs
deleted file mode 100644
index 5df79eeb..00000000
--- a/helix-term/src/ui/fuzzy_match/test.rs
+++ /dev/null
@@ -1,47 +0,0 @@
-use crate::ui::fuzzy_match::FuzzyQuery;
-use crate::ui::fuzzy_match::Matcher;
-
-fn run_test<'a>(query: &str, items: &'a [&'a str]) -> Vec<String> {
- let query = FuzzyQuery::new(query);
- let matcher = Matcher::default();
- items
- .iter()
- .filter_map(|item| {
- let (_, indices) = query.fuzzy_indices(item, &matcher)?;
- let matched_string = indices
- .iter()
- .map(|&pos| item.chars().nth(pos).unwrap())
- .collect();
- Some(matched_string)
- })
- .collect()
-}
-
-#[test]
-fn match_single_value() {
- let matches = run_test("foo", &["foobar", "foo", "bar"]);
- assert_eq!(matches, &["foo", "foo"])
-}
-
-#[test]
-fn match_multiple_values() {
- let matches = run_test(
- "foo bar",
- &["foo bar", "foo bar", "bar foo", "bar", "foo"],
- );
- assert_eq!(matches, &["foobar", "foobar", "barfoo"])
-}
-
-#[test]
-fn space_escape() {
- let matches = run_test(r"foo\ bar", &["bar foo", "foo bar", "foobar"]);
- assert_eq!(matches, &["foo bar"])
-}
-
-#[test]
-fn trim() {
- let matches = run_test(r" foo bar ", &["bar foo", "foo bar", "foobar"]);
- assert_eq!(matches, &["barfoo", "foobar", "foobar"]);
- let matches = run_test(r" foo bar\ ", &["bar foo", "foo bar", "foobar"]);
- assert_eq!(matches, &["bar foo"])
-}
diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs
index c73e7bed..8eeb41ee 100644
--- a/helix-term/src/ui/menu.rs
+++ b/helix-term/src/ui/menu.rs
@@ -1,22 +1,22 @@
-use std::{borrow::Cow, path::PathBuf};
+use std::{borrow::Cow, cmp::Reverse, path::PathBuf};
use crate::{
compositor::{Callback, Component, Compositor, Context, Event, EventResult},
ctrl, key, shift,
};
+use helix_core::fuzzy::MATCHER;
+use nucleo::pattern::{AtomKind, CaseMatching, Pattern};
+use nucleo::{Config, Utf32Str};
use tui::{buffer::Buffer as Surface, widgets::Table};
pub use tui::widgets::{Cell, Row};
-use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
-use fuzzy_matcher::FuzzyMatcher;
-
use helix_view::{editor::SmartTabConfig, graphics::Rect, Editor};
use tui::layout::Constraint;
-pub trait Item {
+pub trait Item: Sync + Send + 'static {
/// Additional editor state that is used for label calculation.
- type Data;
+ type Data: Sync + Send + 'static;
fn format(&self, data: &Self::Data) -> Row;
@@ -51,9 +51,8 @@ pub struct Menu<T: Item> {
cursor: Option<usize>,
- matcher: Box<Matcher>,
/// (index, score)
- matches: Vec<(usize, i64)>,
+ matches: Vec<(u32, u32)>,
widths: Vec<Constraint>,
@@ -75,11 +74,10 @@ impl<T: Item> Menu<T> {
editor_data: <T as Item>::Data,
callback_fn: impl Fn(&mut Editor, Option<&T>, MenuEvent) + 'static,
) -> Self {
- let matches = (0..options.len()).map(|i| (i, 0)).collect();
+ let matches = (0..options.len() as u32).map(|i| (i, 0)).collect();
Self {
options,
editor_data,
- matcher: Box::new(Matcher::default().ignore_case()),
matches,
cursor: None,
widths: Vec::new(),
@@ -94,20 +92,19 @@ impl<T: Item> Menu<T> {
pub fn score(&mut self, pattern: &str) {
// reuse the matches allocation
self.matches.clear();
- self.matches.extend(
- self.options
- .iter()
- .enumerate()
- .filter_map(|(index, option)| {
- let text = option.filter_text(&self.editor_data);
- // TODO: using fuzzy_indices could give us the char idx for match highlighting
- self.matcher
- .fuzzy_match(&text, pattern)
- .map(|score| (index, score))
- }),
- );
- // Order of equal elements needs to be preserved as LSP preselected items come in order of high to low priority
- self.matches.sort_by_key(|(_, score)| -score);
+ let mut matcher = MATCHER.lock();
+ matcher.config = Config::DEFAULT;
+ let pattern = Pattern::new(pattern, CaseMatching::Ignore, AtomKind::Fuzzy);
+ let mut buf = Vec::new();
+ let matches = self.options.iter().enumerate().filter_map(|(i, option)| {
+ let text = option.filter_text(&self.editor_data);
+ pattern
+ .score(Utf32Str::new(&text, &mut buf), &mut matcher)
+ .map(|score| (i as u32, score))
+ });
+ self.matches.extend(matches);
+ self.matches
+ .sort_unstable_by_key(|&(i, score)| (Reverse(score), i));
// reset cursor position
self.cursor = None;
@@ -201,7 +198,7 @@ impl<T: Item> Menu<T> {
self.cursor.and_then(|cursor| {
self.matches
.get(cursor)
- .map(|(index, _score)| &self.options[*index])
+ .map(|(index, _score)| &self.options[*index as usize])
})
}
@@ -209,7 +206,7 @@ impl<T: Item> Menu<T> {
self.cursor.and_then(|cursor| {
self.matches
.get(cursor)
- .map(|(index, _score)| &mut self.options[*index])
+ .map(|(index, _score)| &mut self.options[*index as usize])
})
}
@@ -332,7 +329,7 @@ impl<T: Item + 'static> Component for Menu<T> {
.iter()
.map(|(index, _score)| {
// (index, self.options.get(*index).unwrap()) // get_unchecked
- &self.options[*index] // get_unchecked
+ &self.options[*index as usize] // get_unchecked
})
.collect();
diff --git a/helix-term/src/ui/mod.rs b/helix-term/src/ui/mod.rs
index 2d15fb32..3ca3d24d 100644
--- a/helix-term/src/ui/mod.rs
+++ b/helix-term/src/ui/mod.rs
@@ -1,7 +1,6 @@
mod completion;
mod document;
pub(crate) mod editor;
-mod fuzzy_match;
mod info;
pub mod lsp;
mod markdown;
@@ -64,7 +63,7 @@ pub fn regex_prompt(
prompt: std::borrow::Cow<'static, str>,
history_register: Option<char>,
completion_fn: impl FnMut(&Editor, &str) -> Vec<prompt::Completion> + 'static,
- fun: impl Fn(&mut Editor, Regex, PromptEvent) + 'static,
+ fun: impl Fn(&mut crate::compositor::Context, Regex, PromptEvent) + 'static,
) {
let (view, doc) = current!(cx.editor);
let doc_id = view.doc;
@@ -111,7 +110,7 @@ pub fn regex_prompt(
view.jumps.push((doc_id, snapshot.clone()));
}
- fun(cx.editor, regex, event);
+ fun(cx, regex, event);
let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, config.scrolloff);
@@ -174,6 +173,7 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
.git_ignore(config.file_picker.git_ignore)
.git_global(config.file_picker.git_global)
.git_exclude(config.file_picker.git_exclude)
+ .sort_by_file_name(|name1, name2| name1.cmp(name2))
.max_depth(config.file_picker.max_depth)
.filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks));
@@ -190,32 +190,16 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
.build()
.expect("failed to build excluded_types");
walk_builder.types(excluded_types);
-
- // We want files along with their modification date for sorting
let files = walk_builder.build().filter_map(|entry| {
let entry = entry.ok()?;
- // This is faster than entry.path().is_dir() since it uses cached fs::Metadata fetched by ignore/walkdir
- if entry.file_type()?.is_file() {
- Some(entry.into_path())
- } else {
- None
+ if !entry.file_type()?.is_file() {
+ return None;
}
+ Some(entry.into_path())
});
-
- // Cap the number of files if we aren't in a git project, preventing
- // hangs when using the picker in your home directory
- let mut files: Vec<PathBuf> = if root.join(".git").exists() {
- files.collect()
- } else {
- // const MAX: usize = 8192;
- const MAX: usize = 100_000;
- files.take(MAX).collect()
- };
- files.sort();
-
log::debug!("file_picker init {:?}", Instant::now().duration_since(now));
- Picker::new(files, root, move |cx, path: &PathBuf, action| {
+ let picker = Picker::new(Vec::new(), root, move |cx, path: &PathBuf, action| {
if let Err(e) = cx.editor.open(path, action) {
let err = if let Some(err) = e.source() {
format!("{}", err)
@@ -225,20 +209,27 @@ pub fn file_picker(root: PathBuf, config: &helix_view::editor::Config) -> Picker
cx.editor.set_error(err);
}
})
- .with_preview(|_editor, path| Some((path.clone().into(), None)))
+ .with_preview(|_editor, path| Some((path.clone().into(), None)));
+ let injector = picker.injector();
+ std::thread::spawn(move || {
+ for file in files {
+ if injector.push(file).is_err() {
+ break;
+ }
+ }
+ });
+ picker
}
pub mod completers {
use crate::ui::prompt::Completion;
- use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
- use fuzzy_matcher::FuzzyMatcher;
+ use helix_core::fuzzy::fuzzy_match;
use helix_core::syntax::LanguageServerFeature;
use helix_view::document::SCRATCH_BUFFER_NAME;
use helix_view::theme;
use helix_view::{editor::Config, Editor};
use once_cell::sync::Lazy;
use std::borrow::Cow;
- use std::cmp::Reverse;
pub type Completer = fn(&Editor, &str) -> Vec<Completion>;
@@ -247,31 +238,16 @@ pub mod completers {
}
pub fn buffer(editor: &Editor, input: &str) -> Vec<Completion> {
- let mut names: Vec<_> = editor
- .documents
- .values()
- .map(|doc| {
- let name = doc
- .relative_path()
- .map(|p| p.display().to_string())
- .unwrap_or_else(|| String::from(SCRATCH_BUFFER_NAME));
- ((0..), Cow::from(name))
- })
- .collect();
-
- let matcher = Matcher::default();
-
- let mut matches: Vec<_> = names
- .into_iter()
- .filter_map(|(_range, name)| {
- matcher.fuzzy_match(&name, input).map(|score| (name, score))
- })
- .collect();
-
- matches.sort_unstable_by_key(|(_file, score)| Reverse(*score));
- names = matches.into_iter().map(|(name, _)| ((0..), name)).collect();
+ let names = editor.documents.values().map(|doc| {
+ doc.relative_path()
+ .map(|p| p.display().to_string().into())
+ .unwrap_or_else(|| Cow::from(SCRATCH_BUFFER_NAME))
+ });
- names
+ fuzzy_match(input, names, true)
+ .into_iter()
+ .map(|(name, _)| ((0..), name))
+ .collect()
}
pub fn theme(_editor: &Editor, input: &str) -> Vec<Completion> {
@@ -284,26 +260,10 @@ pub mod completers {
names.sort();
names.dedup();
- let mut names: Vec<_> = names
+ fuzzy_match(input, names, false)
.into_iter()
- .map(|name| ((0..), Cow::from(name)))
- .collect();
-
- let matcher = Matcher::default();
-
- let mut matches: Vec<_> = names
- .into_iter()
- .filter_map(|(_range, name)| {
- matcher.fuzzy_match(&name, input).map(|score| (name, score))
- })
- .collect();
-
- matches.sort_unstable_by(|(name1, score1), (name2, score2)| {
- (Reverse(*score1), name1).cmp(&(Reverse(*score2), name2))
- });
- names = matches.into_iter().map(|(name, _)| ((0..), name)).collect();
-
- names
+ .map(|(name, _)| ((0..), name.into()))
+ .collect()
}
/// Recursive function to get all keys from this value and add them to vec
@@ -330,15 +290,7 @@ pub mod completers {
keys
});
- let matcher = Matcher::default();
-
- let mut matches: Vec<_> = KEYS
- .iter()
- .filter_map(|name| matcher.fuzzy_match(name, input).map(|score| (name, score)))
- .collect();
-
- matches.sort_unstable_by_key(|(_file, score)| Reverse(*score));
- matches
+ fuzzy_match(input, &*KEYS, false)
.into_iter()
.map(|(name, _)| ((0..), name.into()))
.collect()
@@ -365,8 +317,6 @@ pub mod completers {
}
pub fn language(editor: &Editor, input: &str) -> Vec<Completion> {
- let matcher = Matcher::default();
-
let text: String = "text".into();
let language_ids = editor
@@ -375,27 +325,13 @@ pub mod completers {
.map(|config| &config.language_id)
.chain(std::iter::once(&text));
- let mut matches: Vec<_> = language_ids
- .filter_map(|language_id| {
- matcher
- .fuzzy_match(language_id, input)
- .map(|score| (language_id, score))
- })
- .collect();
-
- matches.sort_unstable_by(|(language1, score1), (language2, score2)| {
- (Reverse(*score1), language1).cmp(&(Reverse(*score2), language2))
- });
-
- matches
+ fuzzy_match(input, language_ids, false)
.into_iter()
- .map(|(language, _score)| ((0..), language.clone().into()))
+ .map(|(name, _)| ((0..), name.to_owned().into()))
.collect()
}
pub fn lsp_workspace_command(editor: &Editor, input: &str) -> Vec<Completion> {
- let matcher = Matcher::default();
-
let Some(options) = doc!(editor)
.language_servers_with_feature(LanguageServerFeature::WorkspaceCommand)
.find_map(|ls| ls.capabilities().execute_command_provider.as_ref())
@@ -403,23 +339,9 @@ pub mod completers {
return vec![];
};
- let mut matches: Vec<_> = options
- .commands
- .iter()
- .filter_map(|command| {
- matcher
- .fuzzy_match(command, input)
- .map(|score| (command, score))
- })
- .collect();
-
- matches.sort_unstable_by(|(command1, score1), (command2, score2)| {
- (Reverse(*score1), command1).cmp(&(Reverse(*score2), command2))
- });
-
- matches
+ fuzzy_match(input, &options.commands, false)
.into_iter()
- .map(|(command, _score)| ((0..), command.clone().into()))
+ .map(|(name, _)| ((0..), name.to_owned().into()))
.collect()
}
@@ -500,7 +422,7 @@ pub mod completers {
let end = input.len()..;
- let mut files: Vec<_> = WalkBuilder::new(&dir)
+ let files = WalkBuilder::new(&dir)
.hidden(false)
.follow_links(false) // We're scanning over depth 1
.git_ignore(git_ignore)
@@ -532,43 +454,25 @@ pub mod completers {
path.push("");
}
- let path = path.to_str()?.to_owned();
- Some((end.clone(), Cow::from(path)))
+ let path = path.into_os_string().into_string().ok()?;
+ Some(Cow::from(path))
})
}) // TODO: unwrap or skip
- .filter(|(_, path)| !path.is_empty()) // TODO
- .collect();
+ .filter(|path| !path.is_empty());
// if empty, return a list of dirs and files in current dir
if let Some(file_name) = file_name {
- let matcher = Matcher::default();
-
- // inefficient, but we need to calculate the scores, filter out None, then sort.
- let mut matches: Vec<_> = files
- .into_iter()
- .filter_map(|(_range, file)| {
- matcher
- .fuzzy_match(&file, &file_name)
- .map(|score| (file, score))
- })
- .collect();
-
let range = (input.len().saturating_sub(file_name.len()))..;
-
- matches.sort_unstable_by(|(file1, score1), (file2, score2)| {
- (Reverse(*score1), file1).cmp(&(Reverse(*score2), file2))
- });
-
- files = matches
+ fuzzy_match(&file_name, files, true)
.into_iter()
- .map(|(file, _)| (range.clone(), file))
- .collect();
+ .map(|(name, _)| (range.clone(), name))
+ .collect()
// TODO: complete to longest common match
} else {
+ let mut files: Vec<_> = files.map(|file| (end.clone(), file)).collect();
files.sort_unstable_by(|(_, path1), (_, path2)| path1.cmp(path2));
+ files
}
-
- files
}
}
diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs
index b134eb47..3073a697 100644
--- a/helix-term/src/ui/picker.rs
+++ b/helix-term/src/ui/picker.rs
@@ -7,11 +7,12 @@ use crate::{
ui::{
self,
document::{render_document, LineDecoration, LinePos, TextRenderer},
- fuzzy_match::FuzzyQuery,
EditorView,
},
};
use futures_util::{future::BoxFuture, FutureExt};
+use nucleo::pattern::CaseMatching;
+use nucleo::{Config, Nucleo, Utf32String};
use tui::{
buffer::Buffer as Surface,
layout::Constraint,
@@ -19,16 +20,23 @@ use tui::{
widgets::{Block, BorderType, Borders, Cell, Table},
};
-use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
use tui::widgets::Widget;
-use std::cmp::{self, Ordering};
-use std::{collections::HashMap, io::Read, path::PathBuf};
+use std::{
+ collections::HashMap,
+ io::Read,
+ path::PathBuf,
+ sync::{
+ atomic::{self, AtomicBool},
+ Arc,
+ },
+};
use crate::ui::{Prompt, PromptEvent};
use helix_core::{
- char_idx_at_visual_offset, movement::Direction, text_annotations::TextAnnotations,
- unicode::segmentation::UnicodeSegmentation, Position, Syntax,
+ char_idx_at_visual_offset, fuzzy::MATCHER, movement::Direction,
+ text_annotations::TextAnnotations, unicode::segmentation::UnicodeSegmentation, Position,
+ Syntax,
};
use helix_view::{
editor::Action,
@@ -114,20 +122,71 @@ impl Preview<'_, '_> {
}
}
+fn item_to_nucleo<T: Item>(item: T, editor_data: &T::Data) -> Option<(T, Utf32String)> {
+ let row = item.format(editor_data);
+ let mut cells = row.cells.iter();
+ let mut text = String::with_capacity(row.cell_text().map(|cell| cell.len()).sum());
+ let cell = cells.next()?;
+ if let Some(cell) = cell.content.lines.first() {
+ for span in &cell.0 {
+ text.push_str(&span.content);
+ }
+ }
+
+ for cell in cells {
+ text.push(' ');
+ if let Some(cell) = cell.content.lines.first() {
+ for span in &cell.0 {
+ text.push_str(&span.content);
+ }
+ }
+ }
+ Some((item, text.into()))
+}
+
+pub struct Injector<T: Item> {
+ dst: nucleo::Injector<T>,
+ editor_data: Arc<T::Data>,
+ shutown: Arc<AtomicBool>,
+}
+
+impl<T: Item> Clone for Injector<T> {
+ fn clone(&self) -> Self {
+ Injector {
+ dst: self.dst.clone(),
+ editor_data: self.editor_data.clone(),
+ shutown: Arc::new(AtomicBool::new(false)),
+ }
+ }
+}
+
+pub struct InjectorShutdown;
+
+impl<T: Item> Injector<T> {
+ pub fn push(&self, item: T) -> Result<(), InjectorShutdown> {
+ if self.shutown.load(atomic::Ordering::Relaxed) {
+ return Err(InjectorShutdown);
+ }
+
+ if let Some((item, matcher_text)) = item_to_nucleo(item, &self.editor_data) {
+ self.dst.push(item, |dst| dst[0] = matcher_text);
+ }
+ Ok(())
+ }
+}
+
pub struct Picker<T: Item> {
- options: Vec<T>,
- editor_data: T::Data,
- // filter: String,
- matcher: Box<Matcher>,
- matches: Vec<PickerMatch>,
+ editor_data: Arc<T::Data>,
+ shutdown: Arc<AtomicBool>,
+ matcher: Nucleo<T>,
/// Current height of the completions box
completion_height: u16,
- cursor: usize,
- // pattern: String,
+ cursor: u32,
prompt: Prompt,
- previous_pattern: (String, FuzzyQuery),
+ previous_pattern: String,
+
/// Whether to show the preview panel (default true)
show_preview: bool,
/// Constraints for tabular formatting
@@ -144,11 +203,60 @@ pub struct Picker<T: Item> {
}
impl<T: Item + 'static> Picker<T> {
+ pub fn stream(editor_data: T::Data) -> (Nucleo<T>, Injector<T>) {
+ let matcher = Nucleo::new(
+ Config::DEFAULT,
+ Arc::new(helix_event::request_redraw),
+ None,
+ 1,
+ );
+ let streamer = Injector {
+ dst: matcher.injector(),
+ editor_data: Arc::new(editor_data),
+ shutown: Arc::new(AtomicBool::new(false)),
+ };
+ (matcher, streamer)
+ }
+
pub fn new(
options: Vec<T>,
editor_data: T::Data,
callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
) -> Self {
+ let matcher = Nucleo::new(
+ Config::DEFAULT,
+ Arc::new(helix_event::request_redraw),
+ None,
+ 1,
+ );
+ let injector = matcher.injector();
+ for item in options {
+ if let Some((item, matcher_text)) = item_to_nucleo(item, &editor_data) {
+ injector.push(item, |dst| dst[0] = matcher_text);
+ }
+ }
+ Self::with(
+ matcher,
+ Arc::new(editor_data),
+ Arc::new(AtomicBool::new(false)),
+ callback_fn,
+ )
+ }
+
+ pub fn with_stream(
+ matcher: Nucleo<T>,
+ injector: Injector<T>,
+ callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
+ ) -> Self {
+ Self::with(matcher, injector.editor_data, injector.shutown, callback_fn)
+ }
+
+ fn with(
+ matcher: Nucleo<T>,
+ editor_data: Arc<T::Data>,
+ shutdown: Arc<AtomicBool>,
+ callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
+ ) -> Self {
let prompt = Prompt::new(
"".into(),
None,
@@ -156,14 +264,13 @@ impl<T: Item + 'static> Picker<T> {
|_editor: &mut Context, _pattern: &str, _event: PromptEvent| {},
);
- let mut picker = Self {
- options,
+ Self {
+ matcher,
editor_data,
- matcher: Box::default(),
- matches: Vec::new(),
+ shutdown,
cursor: 0,
prompt,
- previous_pattern: (String::new(), FuzzyQuery::default()),
+ previous_pattern: String::new(),
truncate_start: true,
show_preview: true,
callback_fn: Box::new(callback_fn),
@@ -172,24 +279,15 @@ impl<T: Item + 'static> Picker<T> {
preview_cache: HashMap::new(),
read_buffer: Vec::with_capacity(1024),
file_fn: None,
- };
-
- picker.calculate_column_widths();
-
- // scoring on empty input
- // TODO: just reuse score()
- picker
- .matches
- .extend(picker.options.iter().enumerate().map(|(index, option)| {
- let text = option.filter_text(&picker.editor_data);
- PickerMatch {
- index,
- score: 0,
- len: text.chars().count(),
- }
- }));
+ }
+ }
- picker
+ pub fn injector(&self) -> Injector<T> {
+ Injector {
+ dst: self.matcher.injector(),
+ editor_data: self.editor_data.clone(),
+ shutown: self.shutdown.clone(),
+ }
}
pub fn truncate_start(mut self, truncate_start: bool) -> Self {
@@ -202,122 +300,25 @@ impl<T: Item + 'static> Picker<T> {
preview_fn: impl Fn(&Editor, &T) -> Option<FileLocation> + 'static,
) -> Self {
self.file_fn = Some(Box::new(preview_fn));
+ // assumption: if we have a preview we are matching paths... If this is ever
+ // not true this could be a separate builder function
+ self.matcher.update_config(Config::DEFAULT.match_paths());
self
}
pub fn set_options(&mut self, new_options: Vec<T>) {
- self.options = new_options;
- self.cursor = 0;
- self.force_score();
- self.calculate_column_widths();
- }
-
- /// Calculate the width constraints using the maximum widths of each column
- /// for the current options.
- fn calculate_column_widths(&mut self) {
- let n = self
- .options
- .first()
- .map(|option| option.format(&self.editor_data).cells.len())
- .unwrap_or_default();
- let max_lens = self.options.iter().fold(vec![0; n], |mut acc, option| {
- let row = option.format(&self.editor_data);
- // maintain max for each column
- for (acc, cell) in acc.iter_mut().zip(row.cells.iter()) {
- let width = cell.content.width();
- if width > *acc {
- *acc = width;
- }
+ self.matcher.restart(false);
+ let injector = self.matcher.injector();
+ for item in new_options {
+ if let Some((item, matcher_text)) = item_to_nucleo(item, &self.editor_data) {
+ injector.push(item, |dst| dst[0] = matcher_text);
}
- acc
- });
- self.widths = max_lens
- .into_iter()
- .map(|len| Constraint::Length(len as u16))
- .collect();
- }
-
- pub fn score(&mut self) {
- let pattern = self.prompt.line();
-
- if pattern == &self.previous_pattern.0 {
- return;
}
-
- let (query, is_refined) = self
- .previous_pattern
- .1
- .refine(pattern, &self.previous_pattern.0);
-
- if pattern.is_empty() {
- // Fast path for no pattern.
- self.matches.clear();
- self.matches
- .extend(self.options.iter().enumerate().map(|(index, option)| {
- let text = option.filter_text(&self.editor_data);
- PickerMatch {
- index,
- score: 0,
- len: text.chars().count(),
- }
- }));
- } else if is_refined {
- // optimization: if the pattern is a more specific version of the previous one
- // then we can score the filtered set.
- self.matches.retain_mut(|pmatch| {
- let option = &self.options[pmatch.index];
- let text = option.sort_text(&self.editor_data);
-
- match query.fuzzy_match(&text, &self.matcher) {
- Some(s) => {
- // Update the score
- pmatch.score = s;
- true
- }
- None => false,
- }
- });
-
- self.matches.sort_unstable();
- } else {
- self.force_score();
- }
-
- // reset cursor position
- self.cursor = 0;
- let pattern = self.prompt.line();
- self.previous_pattern.0.clone_from(pattern);
- self.previous_pattern.1 = query;
- }
-
- pub fn force_score(&mut self) {
- let pattern = self.prompt.line();
-
- let query = FuzzyQuery::new(pattern);
- self.matches.clear();
- self.matches.extend(
- self.options
- .iter()
- .enumerate()
- .filter_map(|(index, option)| {
- let text = option.filter_text(&self.editor_data);
-
- query
- .fuzzy_match(&text, &self.matcher)
- .map(|score| PickerMatch {
- index,
- score,
- len: text.chars().count(),
- })
- }),
- );
-
- self.matches.sort_unstable();
}
/// Move the cursor by a number of lines, either down (`Forward`) or up (`Backward`)
- pub fn move_by(&mut self, amount: usize, direction: Direction) {
- let len = self.matches.len();
+ pub fn move_by(&mut self, amount: u32, direction: Direction) {
+ let len = self.matcher.snapshot().matched_item_count();
if len == 0 {
// No results, can't move.
@@ -336,12 +337,12 @@ impl<T: Item + 'static> Picker<T> {
/// Move the cursor down by exactly one page. After the last page comes the first page.
pub fn page_up(&mut self) {
- self.move_by(self.completion_height as usize, Direction::Backward);
+ self.move_by(self.completion_height as u32, Direction::Backward);
}
/// Move the cursor up by exactly one page. After the first page comes the last page.
pub fn page_down(&mut self) {
- self.move_by(self.completion_height as usize, Direction::Forward);
+ self.move_by(self.completion_height as u32, Direction::Forward);
}
/// Move the cursor to the first entry
@@ -351,13 +352,18 @@ impl<T: Item + 'static> Picker<T> {
/// Move the cursor to the last entry
pub fn to_end(&mut self) {
- self.cursor = self.matches.len().saturating_sub(1);
+ self.cursor = self
+ .matcher
+ .snapshot()
+ .matched_item_count()
+ .saturating_sub(1);
}
pub fn selection(&self) -> Option<&T> {
- self.matches
- .get(self.cursor)
- .map(|pmatch| &self.options[pmatch.index])
+ self.matcher
+ .snapshot()
+ .get_matched_item(self.cursor)
+ .map(|item| item.data)
}
pub fn toggle_preview(&mut self) {
@@ -366,8 +372,17 @@ impl<T: Item + 'static> Picker<T> {
fn prompt_handle_event(&mut self, event: &Event, cx: &mut Context) -> EventResult {
if let EventResult::Consumed(_) = self.prompt.handle_event(event, cx) {
- // TODO: recalculate only if pattern changed
- self.score();
+ let pattern = self.prompt.line();
+ // TODO: better track how the pattern has changed
+ if pattern != &self.previous_pattern {
+ self.matcher.pattern.reparse(
+ 0,
+ pattern,
+ CaseMatching::Smart,
+ pattern.starts_with(&self.previous_pattern),
+ );
+ self.previous_pattern = pattern.clone();
+ }
}
EventResult::Consumed(None)
}
@@ -411,12 +426,9 @@ impl<T: Item + 'static> Picker<T> {
(size, _) if size > MAX_FILE_SIZE_FOR_PREVIEW => {
CachedPreview::LargeFile
}
- _ => {
- // TODO: enable syntax highlighting; blocked by async rendering
- Document::open(path, None, None, editor.config.clone())
- .map(|doc| CachedPreview::Document(Box::new(doc)))
- .unwrap_or(CachedPreview::NotFound)
- }
+ _ => Document::open(path, None, None, editor.config.clone())
+ .map(|doc| CachedPreview::Document(Box::new(doc)))
+ .unwrap_or(CachedPreview::NotFound),
},
)
.unwrap_or(CachedPreview::NotFound);
@@ -495,6 +507,14 @@ impl<T: Item + 'static> Picker<T> {
}
fn render_picker(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
+ let status = self.matcher.tick(10);
+ let snapshot = self.matcher.snapshot();
+ if status.changed {
+ self.cursor = self
+ .cursor
+ .min(snapshot.matched_item_count().saturating_sub(1))
+ }
+
let text_style = cx.editor.theme.get("ui.text");
let selected = cx.editor.theme.get("ui.text.focus");
let highlight_style = cx.editor.theme.get("special").add_modifier(Modifier::BOLD);
@@ -515,8 +535,15 @@ impl<T: Item + 'static> Picker<T> {
// -- Render the input bar:
let area = inner.clip_left(1).with_height(1);
+ // render the prompt first since it will clear its background
+ self.prompt.render(area, surface, cx);
- let count = format!("{}/{}", self.matches.len(), self.options.len());
+ let count = format!(
+ "{}{}/{}",
+ if status.running { "(running) " } else { "" },
+ snapshot.matched_item_count(),
+ snapshot.item_count(),
+ );
surface.set_stringn(
(area.x + area.width).saturating_sub(count.len() as u16 + 1),
area.y,
@@ -525,8 +552,6 @@ impl<T: Item + 'static> Picker<T> {
text_style,
);
- self.prompt.render(area, surface, cx);
-
// -- Separator
let sep_style = cx.editor.theme.get("ui.background.separator");
let borders = BorderType::line_symbols(BorderType::Plain);
@@ -539,106 +564,89 @@ impl<T: Item + 'static> Picker<T> {
// -- Render the contents:
// subtract area of prompt from top
let inner = inner.clip_top(2);
-
- let rows = inner.height;
- let offset = self.cursor - (self.cursor % std::cmp::max(1, rows as usize));
+ let rows = inner.height as u32;
+ let offset = self.cursor - (self.cursor % std::cmp::max(1, rows));
let cursor = self.cursor.saturating_sub(offset);
+ let end = offset
+ .saturating_add(rows)
+ .min(snapshot.matched_item_count());
+ let mut indices = Vec::new();
+ let mut matcher = MATCHER.lock();
+ matcher.config = Config::DEFAULT;
+ if self.file_fn.is_some() {
+ matcher.config.set_match_paths()
+ }
- let options = self
- .matches
- .iter()
- .skip(offset)
- .take(rows as usize)
- .map(|pmatch| &self.options[pmatch.index])
- .map(|option| option.format(&self.editor_data))
- .map(|mut row| {
- const TEMP_CELL_SEP: &str = " ";
-
- let line = row.cell_text().fold(String::new(), |mut s, frag| {
- s.push_str(&frag);
- s.push_str(TEMP_CELL_SEP);
- s
- });
-
- // Items are filtered by using the text returned by menu::Item::filter_text
- // but we do highlighting here using the text in Row and therefore there
- // might be inconsistencies. This is the best we can do since only the
- // text in Row is displayed to the end user.
- let (_score, highlights) = FuzzyQuery::new(self.prompt.line())
- .fuzzy_indices(&line, &self.matcher)
- .unwrap_or_default();
-
- let highlight_byte_ranges: Vec<_> = line
- .char_indices()
- .enumerate()
- .filter_map(|(char_idx, (byte_offset, ch))| {
- highlights
- .contains(&char_idx)
- .then(|| byte_offset..byte_offset + ch.len_utf8())
- })
- .collect();
-
- // The starting byte index of the current (iterating) cell
- let mut cell_start_byte_offset = 0;
- for cell in row.cells.iter_mut() {
- let spans = match cell.content.lines.get(0) {
- Some(s) => s,
- None => {
- cell_start_byte_offset += TEMP_CELL_SEP.len();
- continue;
- }
- };
+ let options = snapshot.matched_items(offset..end).map(|item| {
+ snapshot.pattern().column_pattern(0).indices(
+ item.matcher_columns[0].slice(..),
+ &mut matcher,
+ &mut indices,
+ );
+ indices.sort_unstable();
+ indices.dedup();
+ let mut row = item.data.format(&self.editor_data);
+
+ let mut grapheme_idx = 0u32;
+ let mut indices = indices.drain(..);
+ let mut next_highlight_idx = indices.next().unwrap_or(u32::MAX);
+ if self.widths.len() < row.cells.len() {
+ self.widths.resize(row.cells.len(), Constraint::Length(0));
+ }
+ let mut widths = self.widths.iter_mut();
+ for cell in &mut row.cells {
+ let Some(Constraint::Length(max_width)) = widths.next() else {
+ unreachable!();
+ };
- let mut cell_len = 0;
-
- let graphemes_with_style: Vec<_> = spans
- .0
- .iter()
- .flat_map(|span| {
- span.content
- .grapheme_indices(true)
- .zip(std::iter::repeat(span.style))
- })
- .map(|((grapheme_byte_offset, grapheme), style)| {
- cell_len += grapheme.len();
- let start = cell_start_byte_offset;
-
- let grapheme_byte_range =
- grapheme_byte_offset..grapheme_byte_offset + grapheme.len();
-
- if highlight_byte_ranges.iter().any(|hl_rng| {
- hl_rng.start >= start + grapheme_byte_range.start
- && hl_rng.end <= start + grapheme_byte_range.end
- }) {
- (grapheme, style.patch(highlight_style))
- } else {
- (grapheme, style)
- }
- })
- .collect();
-
- let mut span_list: Vec<(String, Style)> = Vec::new();
- for (grapheme, style) in graphemes_with_style {
- if span_list.last().map(|(_, sty)| sty) == Some(&style) {
- let (string, _) = span_list.last_mut().unwrap();
- string.push_str(grapheme);
+ // merge index highlights on top of existing highlights
+ let mut span_list = Vec::new();
+ let mut current_span = String::new();
+ let mut current_style = Style::default();
+ let mut width = 0;
+
+ let spans: &[Span] = cell.content.lines.first().map_or(&[], |it| it.0.as_slice());
+ for span in spans {
+ // this looks like a bug at first glance: we are iterating
+ // graphemes but treating them as char indices. The reason this
+ // is correct is that nucleo will only ever consider the first char
+ // of a grapheme (and discard the rest of the grapheme), so the indices
+ // returned by nucleo are essentially grapheme indices
+ for grapheme in span.content.graphemes(true) {
+ let style = if grapheme_idx == next_highlight_idx {
+ next_highlight_idx = indices.next().unwrap_or(u32::MAX);
+ span.style.patch(highlight_style)
} else {
- span_list.push((String::from(grapheme), style))
+ span.style
+ };
+ if style != current_style {
+ if !current_span.is_empty() {
+ span_list.push(Span::styled(current_span, current_style))
+ }
+ current_span = String::new();
+ current_style = style;
}
+ current_span.push_str(grapheme);
+ grapheme_idx += 1;
}
+ width += span.width();
+ }
- let spans: Vec<Span> = span_list
- .into_iter()
- .map(|(string, style)| Span::styled(string, style))
- .collect();
- let spans: Spans = spans.into();
- *cell = Cell::from(spans);
+ span_list.push(Span::styled(current_span, current_style));
+ if width as u16 > *max_width {
+ *max_width = width as u16;
+ }
+ *cell = Cell::from(Spans::from(span_list));
- cell_start_byte_offset += cell_len + TEMP_CELL_SEP.len();
+ // spacer
+ if grapheme_idx == next_highlight_idx {
+ next_highlight_idx = indices.next().unwrap_or(u32::MAX);
}
+ grapheme_idx += 1;
+ }
- row
- });
+ row
+ });
let table = Table::new(options)
.style(text_style)
@@ -654,7 +662,7 @@ impl<T: Item + 'static> Picker<T> {
surface,
&mut TableState {
offset: 0,
- selected: Some(cursor),
+ selected: Some(cursor as usize),
},
self.truncate_start,
);
@@ -755,7 +763,7 @@ impl<T: Item + 'static> Picker<T> {
}
}
-impl<T: Item + 'static> Component for Picker<T> {
+impl<T: Item + 'static + Send + Sync> Component for Picker<T> {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
// +---------+ +---------+
// |prompt | |preview |
@@ -875,29 +883,10 @@ impl<T: Item + 'static> Component for Picker<T> {
Some((width, height))
}
}
-
-#[derive(PartialEq, Eq, Debug)]
-struct PickerMatch {
- score: i64,
- index: usize,
- len: usize,
-}
-
-impl PickerMatch {
- fn key(&self) -> impl Ord {
- (cmp::Reverse(self.score), self.len, self.index)
- }
-}
-
-impl PartialOrd for PickerMatch {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for PickerMatch {
- fn cmp(&self, other: &Self) -> Ordering {
- self.key().cmp(&other.key())
+impl<T: Item> Drop for Picker<T> {
+ fn drop(&mut self) {
+ // ensure we cancel any ongoing background threads streaming into the picker
+ self.shutdown.store(true, atomic::Ordering::Relaxed)
}
}
@@ -910,13 +899,13 @@ pub type DynQueryCallback<T> =
/// A picker that updates its contents via a callback whenever the
/// query string changes. Useful for live grep, workspace symbols, etc.
-pub struct DynamicPicker<T: ui::menu::Item + Send> {
+pub struct DynamicPicker<T: ui::menu::Item + Send + Sync> {
file_picker: Picker<T>,
query_callback: DynQueryCallback<T>,
query: String,
}
-impl<T: ui::menu::Item + Send> DynamicPicker<T> {
+impl<T: ui::menu::Item + Send + Sync> DynamicPicker<T> {
pub const ID: &'static str = "dynamic-picker";
pub fn new(file_picker: Picker<T>, query_callback: DynQueryCallback<T>) -> Self {
@@ -928,7 +917,7 @@ impl<T: ui::menu::Item + Send> DynamicPicker<T> {
}
}
-impl<T: Item + Send + 'static> Component for DynamicPicker<T> {
+impl<T: Item + Send + Sync + 'static> Component for DynamicPicker<T> {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
self.file_picker.render(area, surface, cx);
}
diff --git a/helix-term/tests/integration.rs b/helix-term/tests/integration.rs
index 9c0e6bbc..35214bcb 100644
--- a/helix-term/tests/integration.rs
+++ b/helix-term/tests/integration.rs
@@ -20,7 +20,6 @@ mod test {
mod commands;
mod languages;
mod movement;
- mod picker;
mod prompt;
mod splits;
}
diff --git a/helix-term/tests/test/picker.rs b/helix-term/tests/test/picker.rs
deleted file mode 100644
index 89e6531f..00000000
--- a/helix-term/tests/test/picker.rs
+++ /dev/null
@@ -1,80 +0,0 @@
-use std::fs;
-
-use helix_core::{path::get_canonicalized_path, Range};
-use helix_loader::{current_working_dir, set_current_working_dir};
-use helix_view::{current_ref, editor::Action};
-use tempfile::{Builder, TempDir};
-
-use super::*;
-
-#[tokio::test(flavor = "multi_thread")]
-async fn test_picker_alt_ret() -> anyhow::Result<()> {
- // Create two files, open the first and run a global search for a word
- // from the second file. Press <alt-ret> to have helix open the second file in the
- // new buffer, but not change focus. Then check whether the word is highlighted
- // correctly and the view of the first file has not changed.
- let tmp_dir = TempDir::new()?;
- set_current_working_dir(tmp_dir.path().into())?;
-
- let mut app = AppBuilder::new().build()?;
-
- log::debug!(
- "set current working directory to {:?}",
- current_working_dir()
- );
-
- // Add prefix so helix doesn't hide these files in a picker
- let files = [
- Builder::new().prefix("1").tempfile_in(&tmp_dir)?,
- Builder::new().prefix("2").tempfile_in(&tmp_dir)?,
- ];
- let paths = files
- .iter()
- .map(|f| get_canonicalized_path(f.path()))
- .collect::<Vec<_>>();
-
- fs::write(&paths[0], "1\n2\n3\n4")?;
- fs::write(&paths[1], "first\nsecond")?;
-
- log::debug!(
- "created and wrote two temporary files: {:?} & {:?}",
- paths[0],
- paths[1]
- );
-
- // Manually open to save the offset, otherwise we won't be able to change the state in the Fn trait
- app.editor.open(files[0].path(), Action::Replace)?;
- let view_offset = current_ref!(app.editor).0.offset;
-
- test_key_sequences(
- &mut app,
- vec![
- (Some("<space>/"), None),
- (Some("second<ret>"), None),
- (
- Some("<A-ret><esc>"),
- Some(&|app| {
- let (view, doc) = current_ref!(app.editor);
- assert_eq!(doc.path().unwrap(), &paths[0]);
- let select_ranges = doc.selection(view.id).ranges();
- assert_eq!(select_ranges[0], Range::new(0, 1));
- assert_eq!(view.offset, view_offset);
- }),
- ),
- (
- Some(":buffer<minus>next<ret>"),
- Some(&|app| {
- let (view, doc) = current_ref!(app.editor);
- assert_eq!(doc.path().unwrap(), &paths[1]);
- let select_ranges = doc.selection(view.id).ranges();
- assert_eq!(select_ranges.len(), 1);
- assert_eq!(select_ranges[0], Range::new(6, 12));
- }),
- ),
- ],
- false,
- )
- .await?;
-
- Ok(())
-}