path: root/helix-core/src
author     Gabriel Dinner-David    2024-02-27 13:36:25 +0000
committer  GitHub                  2024-02-27 13:36:25 +0000
commit     26b3dc29be886c5a2ed1a5caaaf09eb730829c3e (patch)
tree       7235f7dc402daaa9b3f5260d9a3f8aa166592ec6 /helix-core/src
parent     f46a09ab4f945273c7baf32e58438b501914fabb (diff)
toggling of block comments (#4718)
Diffstat (limited to 'helix-core/src')
-rw-r--r--  helix-core/src/comment.rs  270
-rw-r--r--  helix-core/src/indent.rs     4
-rw-r--r--  helix-core/src/lib.rs        3
-rw-r--r--  helix-core/src/syntax.rs    67
4 files changed, 335 insertions, 9 deletions
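
The patch below adds block-comment toggling to helix-core and generalizes per-language comment configuration (a list of line-comment tokens plus optional block-comment token pairs). As a minimal usage sketch, assuming only the helix-core API introduced in this diff, the new toggle_block_comments round-trips a whole-document selection:

    // Sketch only, not part of this commit: driving the new API from code
    // that depends on helix-core. BlockCommentToken::default() is the
    // "/*" .. "*/" pair added in this patch.
    use helix_core::{comment::toggle_block_comments, syntax::BlockCommentToken, Rope, Selection};

    fn main() {
        let mut doc = Rope::from("fn main() {}\n");
        // Select the whole document.
        let selection = Selection::single(0, doc.len_chars());
        let tokens = [BlockCommentToken::default()];

        // First toggle wraps the selection in a block comment...
        let transaction = toggle_block_comments(&doc, &selection, &tokens);
        transaction.apply(&mut doc);
        assert_eq!(doc, "/* fn main() {} */\n");

        // ...and a second toggle removes it again.
        let selection = Selection::single(0, doc.len_chars());
        let transaction = toggle_block_comments(&doc, &selection, &tokens);
        transaction.apply(&mut doc);
        assert_eq!(doc, "fn main() {}\n");
    }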
diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs
index 9c7e50f3..536b710a 100644
--- a/helix-core/src/comment.rs
+++ b/helix-core/src/comment.rs
@@ -1,9 +1,12 @@
//! This module contains the functionality to toggle comments on lines over the selection
//! using the comment character defined in the user's `languages.toml`
+use smallvec::SmallVec;
+
use crate::{
- find_first_non_whitespace_char, Change, Rope, RopeSlice, Selection, Tendril, Transaction,
+ syntax::BlockCommentToken, Change, Range, Rope, RopeSlice, Selection, Tendril, Transaction,
};
+use helix_stdx::rope::RopeSliceExt;
use std::borrow::Cow;
/// Given text, a comment token, and a set of line indices, returns the following:
@@ -22,12 +25,12 @@ fn find_line_comment(
) -> (bool, Vec<usize>, usize, usize) {
let mut commented = true;
let mut to_change = Vec::new();
- let mut min = usize::MAX; // minimum col for find_first_non_whitespace_char
+ let mut min = usize::MAX; // minimum col for first_non_whitespace_char
let mut margin = 1;
let token_len = token.chars().count();
for line in lines {
let line_slice = text.line(line);
- if let Some(pos) = find_first_non_whitespace_char(line_slice) {
+ if let Some(pos) = line_slice.first_non_whitespace_char() {
let len = line_slice.len_chars();
if pos < min {
@@ -94,6 +97,222 @@ pub fn toggle_line_comments(doc: &Rope, selection: &Selection, token: Option<&st
Transaction::change(doc, changes.into_iter())
}
+#[derive(Debug, PartialEq, Eq)]
+pub enum CommentChange {
+ Commented {
+ range: Range,
+ start_pos: usize,
+ end_pos: usize,
+ start_margin: bool,
+ end_margin: bool,
+ start_token: String,
+ end_token: String,
+ },
+ Uncommented {
+ range: Range,
+ start_pos: usize,
+ end_pos: usize,
+ start_token: String,
+ end_token: String,
+ },
+ Whitespace {
+ range: Range,
+ },
+}
+
+pub fn find_block_comments(
+ tokens: &[BlockCommentToken],
+ text: RopeSlice,
+ selection: &Selection,
+) -> (bool, Vec<CommentChange>) {
+ let mut commented = true;
+ let mut only_whitespace = true;
+ let mut comment_changes = Vec::with_capacity(selection.len());
+ let default_tokens = tokens.first().cloned().unwrap_or_default();
+ // TODO: check if this can be removed on MSRV bump
+ #[allow(clippy::redundant_clone)]
+ let mut start_token = default_tokens.start.clone();
+ #[allow(clippy::redundant_clone)]
+ let mut end_token = default_tokens.end.clone();
+
+ let mut tokens = tokens.to_vec();
+ // sort the tokens by length, so longer tokens will match first
+ tokens.sort_by(|a, b| {
+ if a.start.len() == b.start.len() {
+ b.end.len().cmp(&a.end.len())
+ } else {
+ b.start.len().cmp(&a.start.len())
+ }
+ });
+ for range in selection {
+ let selection_slice = range.slice(text);
+ if let (Some(start_pos), Some(end_pos)) = (
+ selection_slice.first_non_whitespace_char(),
+ selection_slice.last_non_whitespace_char(),
+ ) {
+ let mut line_commented = false;
+ let mut after_start = 0;
+ let mut before_end = 0;
+ let len = (end_pos + 1) - start_pos;
+
+ for BlockCommentToken { start, end } in &tokens {
+ let start_len = start.chars().count();
+ let end_len = end.chars().count();
+ after_start = start_pos + start_len;
+ before_end = end_pos.saturating_sub(end_len);
+
+ if len >= start_len + end_len {
+ let start_fragment = selection_slice.slice(start_pos..after_start);
+ let end_fragment = selection_slice.slice(before_end + 1..end_pos + 1);
+
+ // block commented with these tokens
+ if start_fragment == start.as_str() && end_fragment == end.as_str() {
+ start_token = start.to_string();
+ end_token = end.to_string();
+ line_commented = true;
+ break;
+ }
+ }
+ }
+
+ if !line_commented {
+ comment_changes.push(CommentChange::Uncommented {
+ range: *range,
+ start_pos,
+ end_pos,
+ start_token: default_tokens.start.clone(),
+ end_token: default_tokens.end.clone(),
+ });
+ commented = false;
+ } else {
+ comment_changes.push(CommentChange::Commented {
+ range: *range,
+ start_pos,
+ end_pos,
+ start_margin: selection_slice
+ .get_char(after_start)
+ .map_or(false, |c| c == ' '),
+ end_margin: after_start != before_end
+ && selection_slice
+ .get_char(before_end)
+ .map_or(false, |c| c == ' '),
+ start_token: start_token.to_string(),
+ end_token: end_token.to_string(),
+ });
+ }
+ only_whitespace = false;
+ } else {
+ comment_changes.push(CommentChange::Whitespace { range: *range });
+ }
+ }
+ if only_whitespace {
+ commented = false;
+ }
+ (commented, comment_changes)
+}
+
+#[must_use]
+pub fn create_block_comment_transaction(
+ doc: &Rope,
+ selection: &Selection,
+ commented: bool,
+ comment_changes: Vec<CommentChange>,
+) -> (Transaction, SmallVec<[Range; 1]>) {
+ let mut changes: Vec<Change> = Vec::with_capacity(selection.len() * 2);
+ let mut ranges: SmallVec<[Range; 1]> = SmallVec::with_capacity(selection.len());
+ let mut offs = 0;
+ for change in comment_changes {
+ if commented {
+ if let CommentChange::Commented {
+ range,
+ start_pos,
+ end_pos,
+ start_token,
+ end_token,
+ start_margin,
+ end_margin,
+ } = change
+ {
+ let from = range.from();
+ changes.push((
+ from + start_pos,
+ from + start_pos + start_token.len() + start_margin as usize,
+ None,
+ ));
+ changes.push((
+ from + end_pos - end_token.len() - end_margin as usize + 1,
+ from + end_pos + 1,
+ None,
+ ));
+ }
+ } else {
+ // uncommented so manually map ranges through changes
+ match change {
+ CommentChange::Uncommented {
+ range,
+ start_pos,
+ end_pos,
+ start_token,
+ end_token,
+ } => {
+ let from = range.from();
+ changes.push((
+ from + start_pos,
+ from + start_pos,
+ Some(Tendril::from(format!("{} ", start_token))),
+ ));
+ changes.push((
+ from + end_pos + 1,
+ from + end_pos + 1,
+ Some(Tendril::from(format!(" {}", end_token))),
+ ));
+
+ let offset = start_token.chars().count() + end_token.chars().count() + 2;
+ ranges.push(
+ Range::new(from + offs, from + offs + end_pos + 1 + offset)
+ .with_direction(range.direction()),
+ );
+ offs += offset;
+ }
+ CommentChange::Commented { range, .. } | CommentChange::Whitespace { range } => {
+ ranges.push(Range::new(range.from() + offs, range.to() + offs));
+ }
+ }
+ }
+ }
+ (Transaction::change(doc, changes.into_iter()), ranges)
+}
+
+#[must_use]
+pub fn toggle_block_comments(
+ doc: &Rope,
+ selection: &Selection,
+ tokens: &[BlockCommentToken],
+) -> Transaction {
+ let text = doc.slice(..);
+ let (commented, comment_changes) = find_block_comments(tokens, text, selection);
+ let (mut transaction, ranges) =
+ create_block_comment_transaction(doc, selection, commented, comment_changes);
+ if !commented {
+ transaction = transaction.with_selection(Selection::new(ranges, selection.primary_index()));
+ }
+ transaction
+}
+
+pub fn split_lines_of_selection(text: RopeSlice, selection: &Selection) -> Selection {
+ let mut ranges = SmallVec::new();
+ for range in selection.ranges() {
+ let (line_start, line_end) = range.line_range(text.slice(..));
+ let mut pos = text.line_to_char(line_start);
+ for line in text.slice(pos..text.line_to_char(line_end + 1)).lines() {
+ let start = pos;
+ pos += line.len_chars();
+ ranges.push(Range::new(start, pos));
+ }
+ }
+ Selection::new(ranges, 0)
+}
+
#[cfg(test)]
mod test {
use super::*;
@@ -149,4 +368,49 @@ mod test {
// TODO: account for uncommenting with uneven comment indentation
}
+
+ #[test]
+ fn test_find_block_comments() {
+ // three lines 5 characters.
+ let mut doc = Rope::from("1\n2\n3");
+ // select whole document
+ let selection = Selection::single(0, doc.len_chars());
+
+ let text = doc.slice(..);
+
+ let res = find_block_comments(&[BlockCommentToken::default()], text, &selection);
+
+ assert_eq!(
+ res,
+ (
+ false,
+ vec![CommentChange::Uncommented {
+ range: Range::new(0, 5),
+ start_pos: 0,
+ end_pos: 4,
+ start_token: "/*".to_string(),
+ end_token: "*/".to_string(),
+ }]
+ )
+ );
+
+ // comment
+ let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
+ transaction.apply(&mut doc);
+
+ assert_eq!(doc, "/* 1\n2\n3 */");
+
+ // uncomment
+ let selection = Selection::single(0, doc.len_chars());
+ let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
+ transaction.apply(&mut doc);
+ assert_eq!(doc, "1\n2\n3");
+
+ // don't panic when there is just a space in comment
+ doc = Rope::from("/* */");
+ let selection = Selection::single(0, doc.len_chars());
+ let transaction = toggle_block_comments(&doc, &selection, &[BlockCommentToken::default()]);
+ transaction.apply(&mut doc);
+ assert_eq!(doc, "");
+ }
}
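
As a complement to the test above, a small sketch (not part of this commit) of how find_block_comments classifies a selection that is already block-commented, including the space margins it records next to each token:

    use helix_core::{
        comment::{find_block_comments, CommentChange},
        syntax::BlockCommentToken,
        Range, Rope, Selection,
    };

    fn main() {
        let doc = Rope::from("/* hello */");
        let selection = Selection::single(0, doc.len_chars());

        let (commented, changes) =
            find_block_comments(&[BlockCommentToken::default()], doc.slice(..), &selection);

        // The selection is already wrapped in the default tokens, with a space
        // margin after "/*" and before "*/".
        assert!(commented);
        assert_eq!(
            changes,
            vec![CommentChange::Commented {
                range: Range::new(0, 11),
                start_pos: 0,
                end_pos: 10,
                start_margin: true,
                end_margin: true,
                start_token: "/*".to_string(),
                end_token: "*/".to_string(),
            }]
        );
    }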
diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs
index c29bb3a0..2a0a3876 100644
--- a/helix-core/src/indent.rs
+++ b/helix-core/src/indent.rs
@@ -1,10 +1,10 @@
use std::{borrow::Cow, collections::HashMap};
+use helix_stdx::rope::RopeSliceExt;
use tree_sitter::{Query, QueryCursor, QueryPredicateArg};
use crate::{
chars::{char_is_line_ending, char_is_whitespace},
- find_first_non_whitespace_char,
graphemes::{grapheme_width, tab_width_at},
syntax::{IndentationHeuristic, LanguageConfiguration, RopeProvider, Syntax},
tree_sitter::Node,
@@ -970,7 +970,7 @@ pub fn indent_for_newline(
let mut num_attempts = 0;
for line_idx in (0..=line_before).rev() {
let line = text.line(line_idx);
- let first_non_whitespace_char = match find_first_non_whitespace_char(line) {
+ let first_non_whitespace_char = match line.first_non_whitespace_char() {
Some(i) => i,
None => {
continue;
diff --git a/helix-core/src/lib.rs b/helix-core/src/lib.rs
index 94802eba..1abd90d1 100644
--- a/helix-core/src/lib.rs
+++ b/helix-core/src/lib.rs
@@ -37,9 +37,6 @@ pub mod unicode {
pub use helix_loader::find_workspace;
-pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
- line.chars().position(|ch| !ch.is_whitespace())
-}
mod rope_reader;
pub use rope_reader::RopeReader;
diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs
index 0d8559ca..3b224e1b 100644
--- a/helix-core/src/syntax.rs
+++ b/helix-core/src/syntax.rs
@@ -99,7 +99,19 @@ pub struct LanguageConfiguration {
pub shebangs: Vec<String>, // interpreter(s) associated with language
#[serde(default)]
pub roots: Vec<String>, // these indicate project roots <.git, Cargo.toml>
- pub comment_token: Option<String>,
+ #[serde(
+ default,
+ skip_serializing,
+ deserialize_with = "from_comment_tokens",
+ alias = "comment-token"
+ )]
+ pub comment_tokens: Option<Vec<String>>,
+ #[serde(
+ default,
+ skip_serializing,
+ deserialize_with = "from_block_comment_tokens"
+ )]
+ pub block_comment_tokens: Option<Vec<BlockCommentToken>>,
pub text_width: Option<usize>,
pub soft_wrap: Option<SoftWrap>,
@@ -240,6 +252,59 @@ impl<'de> Deserialize<'de> for FileType {
}
}
+fn from_comment_tokens<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ #[derive(Deserialize)]
+ #[serde(untagged)]
+ enum CommentTokens {
+ Multiple(Vec<String>),
+ Single(String),
+ }
+ Ok(
+ Option::<CommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
+ CommentTokens::Single(val) => vec![val],
+ CommentTokens::Multiple(vals) => vals,
+ }),
+ )
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct BlockCommentToken {
+ pub start: String,
+ pub end: String,
+}
+
+impl Default for BlockCommentToken {
+ fn default() -> Self {
+ BlockCommentToken {
+ start: "/*".to_string(),
+ end: "*/".to_string(),
+ }
+ }
+}
+
+fn from_block_comment_tokens<'de, D>(
+ deserializer: D,
+) -> Result<Option<Vec<BlockCommentToken>>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ #[derive(Deserialize)]
+ #[serde(untagged)]
+ enum BlockCommentTokens {
+ Multiple(Vec<BlockCommentToken>),
+ Single(BlockCommentToken),
+ }
+ Ok(
+ Option::<BlockCommentTokens>::deserialize(deserializer)?.map(|tokens| match tokens {
+ BlockCommentTokens::Single(val) => vec![val],
+ BlockCommentTokens::Multiple(vals) => vals,
+ }),
+ )
+}
+
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "kebab-case")]
pub enum LanguageServerFeature {