From 3307f44ce20a71273614f9b30dafb08822e557a1 Mon Sep 17 00:00:00 2001 From: Blaž Hrastnik Date: Fri, 10 Dec 2021 19:23:34 +0900 Subject: ui: popup: Don't allow scrolling past the end of content --- helix-term/src/ui/markdown.rs | 5 ----- 1 file changed, 5 deletions(-) (limited to 'helix-term/src/ui/markdown.rs') diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index ca8303dd..46657fb9 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -241,11 +241,6 @@ impl Component for Markdown { } else if content_width > text_width { text_width = content_width; } - - if height >= viewport.1 { - height = viewport.1; - break; - } } Some((text_width + padding, height)) -- cgit v1.2.3-70-g09d2 From 96d4ca5f7349f2df6c3e9a949242a0e49b2ce853 Mon Sep 17 00:00:00 2001 From: Skyler Hawthorne Date: Mon, 17 Jan 2022 20:41:44 -0500 Subject: Dependabot/cargo/pulldown cmark 0.9.1 (#1533) * build(deps): bump pulldown-cmark from 0.8.0 to 0.9.1 Bumps [pulldown-cmark](https://github.com/raphlinus/pulldown-cmark) from 0.8.0 to 0.9.1. - [Release notes](https://github.com/raphlinus/pulldown-cmark/releases) - [Commits](https://github.com/raphlinus/pulldown-cmark/compare/v0.8.0...v0.9.1) --- updated-dependencies: - dependency-name: pulldown-cmark dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * cmark 0.9 fixes Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>--- Cargo.lock | 4 ++-- helix-term/Cargo.toml | 2 +- helix-term/src/ui/markdown.rs | 6 ++++-- 3 files changed, 7 insertions(+), 5 deletions(-) (limited to 'helix-term/src/ui/markdown.rs') diff --git a/Cargo.lock b/Cargo.lock index 8e4d941b..dbafd95c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -758,9 +758,9 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.8.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8" +checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6" dependencies = [ "bitflags", "memchr", diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml index 28b4fe2a..e379b369 100644 --- a/helix-term/Cargo.toml +++ b/helix-term/Cargo.toml @@ -46,7 +46,7 @@ log = "0.4" fuzzy-matcher = "0.3" ignore = "0.4" # markdown doc rendering -pulldown-cmark = { version = "0.8", default-features = false } +pulldown-cmark = { version = "0.9", default-features = false } # file type detection content_inspector = "0.2.4" diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 46657fb9..1f0fc38c 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -77,7 +77,9 @@ fn parse<'a>( Event::End(tag) => { tags.pop(); match tag { - Tag::Heading(_) | Tag::Paragraph | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { + Tag::Heading(_, _, _) + | Tag::Paragraph + | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { // whenever code block or paragraph closes, new line let spans = std::mem::take(&mut spans); if !spans.is_empty() { @@ -158,7 +160,7 @@ fn parse<'a>( lines.push(Spans::from(span)); } } - } else if let Some(Tag::Heading(_)) = tags.last() { + } else if let Some(Tag::Heading(_, _, _)) = tags.last() { let mut span = to_span(text); span.style = heading_style; spans.push(span); -- cgit v1.2.3-70-g09d2 From 6728e4449038e9481b72251441182d508c165a9c Mon Sep 17 00:00:00 2001 From: Blaž Hrastnik Date: Sun, 7 Nov 2021 00:21:03 +0900 Subject: 
syntax: Split parsing and highlighting --- Cargo.lock | 1 + helix-core/Cargo.toml | 1 + helix-core/src/indent.rs | 2 +- helix-core/src/syntax.rs | 1126 +++++++++++++++++++---------------------- helix-term/src/ui/editor.rs | 27 +- helix-term/src/ui/markdown.rs | 15 +- helix-term/src/ui/picker.rs | 9 +- helix-view/src/document.rs | 13 +- helix-view/src/editor.rs | 9 +- 9 files changed, 541 insertions(+), 662 deletions(-) (limited to 'helix-term/src/ui/markdown.rs') diff --git a/Cargo.lock b/Cargo.lock index 4e18a995..3c4d64f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -381,6 +381,7 @@ dependencies = [ "serde", "serde_json", "similar", + "slotmap", "smallvec", "tendril", "toml", diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml index 6d694fb3..9056b1f6 100644 --- a/helix-core/Cargo.toml +++ b/helix-core/Cargo.toml @@ -22,6 +22,7 @@ unicode-segmentation = "1.8" unicode-width = "0.1" unicode-general-category = "0.4" # slab = "0.4.2" +slotmap = "1.0" tree-sitter = "0.20" once_cell = "1.9" arc-swap = "1" diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs index 1fc2b8a5..ac2a1208 100644 --- a/helix-core/src/indent.rs +++ b/helix-core/src/indent.rs @@ -454,7 +454,7 @@ where let language_config = loader.language_config_for_scope("source.rust").unwrap(); let highlight_config = language_config.highlight_config(&[]).unwrap(); - let syntax = Syntax::new(&doc, highlight_config.clone()); + let syntax = Syntax::new(&doc, highlight_config.clone(), std::sync::Arc::new(loader)); let text = doc.slice(..); let tab_width = 4; diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs index c7a3e1cc..858b9bdf 100644 --- a/helix-core/src/syntax.rs +++ b/helix-core/src/syntax.rs @@ -9,6 +9,7 @@ use crate::{ pub use helix_syntax::get_language; use arc_swap::ArcSwap; +use slotmap::{DefaultKey as LayerId, HopSlotMap}; use std::{ borrow::Cow, @@ -388,9 +389,9 @@ thread_local! { #[derive(Debug)] pub struct Syntax { - config: Arc, - - root_layer: LanguageLayer, + layers: HopSlotMap, + root: LayerId, + loader: Arc, } fn byte_range_to_str(range: std::ops::Range, source: RopeSlice) -> Cow { @@ -400,38 +401,36 @@ fn byte_range_to_str(range: std::ops::Range, source: RopeSlice) -> Cow, - ) -> Self { - let root_layer = LanguageLayer { tree: None }; + pub fn new(source: &Rope, config: Arc, loader: Arc) -> Self { + let root_layer = LanguageLayer { + tree: None, + config, + depth: 0, + ranges: vec![Range { + start_byte: 0, + end_byte: usize::MAX, + start_point: Point::new(0, 0), + end_point: Point::new(usize::MAX, usize::MAX), + }], + }; // track markers of injections // track scope_descriptor: a Vec of scopes for item in tree + let mut layers = HopSlotMap::default(); + let root = layers.insert(root_layer); + let mut syntax = Self { // grammar, - config, - root_layer, + root, + layers, + loader, }; - // update root layer - PARSER.with(|ts_parser| { - // TODO: handle the returned `Result` properly. 
- let _ = syntax.root_layer.parse( - &mut ts_parser.borrow_mut(), - &syntax.config, - source, - 0, - vec![Range { - start_byte: 0, - end_byte: usize::MAX, - start_point: Point::new(0, 0), - end_point: Point::new(usize::MAX, usize::MAX), - }], - ); - }); + syntax + .update(source, source, &ChangeSet::new(&source)) + .unwrap(); + syntax } @@ -441,30 +440,197 @@ impl Syntax { source: &Rope, changeset: &ChangeSet, ) -> Result<(), Error> { + use std::collections::VecDeque; + let mut queue = VecDeque::new(); + // let source = source.slice(..); + let injection_callback = |language: &str| { + self.loader + .language_configuration_for_injection_string(language) + .and_then(|language_config| { + // TODO: get these theme.scopes from somewhere, probably make them settable on Loader + let scopes = &[ + "attribute", + "constant", + "function.builtin", + "function", + "keyword", + "operator", + "property", + "punctuation", + "punctuation.bracket", + "punctuation.delimiter", + "string", + "string.special", + "tag", + "type", + "type.builtin", + "variable", + "variable.builtin", + "variable.parameter", + ]; + language_config.highlight_config( + &scopes + .iter() + .map(|scope| scope.to_string()) + .collect::>(), + ) + }) + }; + + queue.push_back(self.root); + + // HAXX: for now, clear all layers except root so they get re-parsed + self.layers.retain(|id, _| id == self.root); + + // Workaround for Syntax::new() with empty changeset + if !changeset.is_empty() { + // TODO: do this in a recursive way + // Notify the tree about all the changes + let edits = generate_edits(old_source.slice(..), changeset); + let tree = self.layers[self.root].tree.as_mut().unwrap(); + for edit in edits.iter().rev() { + // apply the edits in reverse. If we applied them in order then edit 1 would disrupt + // the positioning of edit 2 + tree.edit(edit); + } + } + PARSER.with(|ts_parser| { - self.root_layer.update( - &mut ts_parser.borrow_mut(), - &self.config, - old_source, - source, - changeset, - ) - }) + let ts_parser = &mut ts_parser.borrow_mut(); + let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new); + // TODO: might need to set cursor range + + while let Some(layer_id) = queue.pop_front() { + // Re-parse the tree. + self.layers[layer_id].parse(ts_parser, source)?; + + let source = source.slice(..); + let layer = &self.layers[layer_id]; + + // Process injections. + let matches = cursor.matches( + &layer.config.injections_query, + layer.tree().root_node(), + RopeProvider(source), + ); + let mut injections = Vec::new(); + for mat in matches { + let (language_name, content_node, include_children) = injection_for_match( + &layer.config, + &layer.config.injections_query, + &mat, + source, + ); + + // Explicitly remove this match so that none of its other captures will remain + // in the stream of captures. + mat.remove(); // TODO: is this still necessary? + + // If a language is found with the given name, then add a new language layer + // to the highlighted document. + if let (Some(language_name), Some(content_node)) = (language_name, content_node) + { + if let Some(config) = (injection_callback)(&language_name) { + let ranges = + intersect_ranges(&layer.ranges, &[content_node], include_children); + + if !ranges.is_empty() { + log::info!("{} {:?}", language_name, ranges); + injections.push((config, ranges)); + } + } + } + } - // TODO: deal with injections and update them too + // Process combined injections. 
+ if let Some(combined_injections_query) = &layer.config.combined_injections_query { + let mut injections_by_pattern_index = + vec![(None, Vec::new(), false); combined_injections_query.pattern_count()]; + let matches = cursor.matches( + combined_injections_query, + layer.tree().root_node(), + RopeProvider(source), + ); + for mat in matches { + let entry = &mut injections_by_pattern_index[mat.pattern_index]; + let (language_name, content_node, include_children) = injection_for_match( + &layer.config, + combined_injections_query, + &mat, + source, + ); + if language_name.is_some() { + entry.0 = language_name; + } + if let Some(content_node) = content_node { + entry.1.push(content_node); + } + entry.2 = include_children; + } + for (lang_name, content_nodes, includes_children) in injections_by_pattern_index + { + if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) { + if let Some(config) = (injection_callback)(&lang_name) { + let ranges = intersect_ranges( + &layer.ranges, + &content_nodes, + includes_children, + ); + if !ranges.is_empty() { + injections.push((config, ranges)); + } + } + } + } + } + + let depth = layer.depth + 1; + // TODO: can't inline this since matches borrows self.layers + for (config, ranges) in injections { + let layer_id = self.layers.insert(LanguageLayer { + tree: None, + config, + depth, + ranges, + }); + queue.push_back(layer_id); + } + } + + // Return the cursor back in the pool. + ts_parser.cursors.push(cursor); + + Ok(()) // so we can use the try operator + })?; + + Ok(()) } // fn buffer_changed -> call layer.update(range, new_text) on root layer and then all marker layers - // call this on transaction.apply() -> buffer_changed(changes) - // - // fn parse(language, old_tree, ranges) - // + pub fn tree(&self) -> &Tree { - self.root_layer.tree() + self.layers[self.root].tree() } + + // root: Tree + // injections: Vec<(Tree, Range marker)> + + // handle updates that go over a part of the layer by truncating them to start/end appropriately + + // injections tracked by marker: + // if marker areas match it's fine and update + // if not found add new layer + // if length 0 then area got removed, clean up the layer // - // + // layer update: + // if range.len = 0 then remove the layer + // for change in changes { tree.edit(change) } + // tree = parser.parse(.., tree, ..) + // calculate affected range and update injections + // injection update: + // look for existing injections + // if present, range = (first injection start, last injection end) // Highlighting @@ -474,61 +640,76 @@ impl Syntax { source: RopeSlice<'a>, range: Option>, cancellation_flag: Option<&'a AtomicUsize>, - injection_callback: impl FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, ) -> impl Iterator> + 'a { - // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which - // prevents them from being moved. But both of these values are really just - // pointers, so it's actually ok to move them. - - // reuse a cursor from the pool if possible - let mut cursor = PARSER.with(|ts_parser| { - let highlighter = &mut ts_parser.borrow_mut(); - highlighter.cursors.pop().unwrap_or_else(QueryCursor::new) + let mut layers = self + .layers + .iter() + .map(|(_, layer)| { + // Reuse a cursor from the pool if available. 
+ let mut cursor = PARSER.with(|ts_parser| { + let highlighter = &mut ts_parser.borrow_mut(); + highlighter.cursors.pop().unwrap_or_else(QueryCursor::new) + }); + + // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which + // prevents them from being moved. But both of these values are really just + // pointers, so it's actually ok to move them. + let cursor_ref = + unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) }; + + // if reusing cursors & no range this resets to whole range + // TODO: handle intersect (range & layer.range) + // cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX)); + cursor_ref.set_byte_range(0..usize::MAX); + + let captures = cursor_ref + .captures( + &layer.config.query, + layer.tree().root_node(), + RopeProvider(source), + ) + .peekable(); + + HighlightIterLayer { + highlight_end_stack: Vec::new(), + scope_stack: vec![LocalScope { + inherits: false, + range: 0..usize::MAX, + local_defs: Vec::new(), + }], + cursor, + _tree: None, + captures, + config: layer.config.as_ref(), // TODO: just reuse + depth: layer.depth, // TODO: just reuse + ranges: layer.ranges.clone(), + } + }) + .collect::>(); + + log::info!("--"); + + // HAXX: arrange layers by byte range, with deeper layers positioned first + layers.sort_by_key(|layer| { + ( + layer.ranges.first().cloned(), + std::cmp::Reverse(layer.depth), + ) }); - let tree_ref = self.tree(); - let cursor_ref = unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) }; - let query_ref = &self.config.query; - let config_ref = self.config.as_ref(); - - // if reusing cursors & no range this resets to whole range - cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX)); - - let captures = cursor_ref - .captures(query_ref, tree_ref.root_node(), RopeProvider(source)) - .peekable(); - - // manually craft the root layer based on the existing tree - let layer = HighlightIterLayer { - highlight_end_stack: Vec::new(), - scope_stack: vec![LocalScope { - inherits: false, - range: 0..usize::MAX, - local_defs: Vec::new(), - }], - cursor, - depth: 0, - _tree: None, - captures, - config: config_ref, - ranges: vec![Range { - start_byte: 0, - end_byte: usize::MAX, - start_point: Point::new(0, 0), - end_point: Point::new(usize::MAX, usize::MAX), - }], - }; let mut result = HighlightIter { source, byte_offset: range.map_or(0, |r| r.start), // TODO: simplify - injection_callback, cancellation_flag, iter_count: 0, - layers: vec![layer], + layers, next_event: None, last_highlight_range: None, }; result.sort_layers(); + for layer in &result.layers { + log::info!("> {:?} {:?}", layer.depth, layer.ranges); // <- for some reason layers are reversed here + } result } // on_tokenize @@ -556,234 +737,155 @@ impl Syntax { pub struct LanguageLayer { // mode // grammar - // depth + pub config: Arc, pub(crate) tree: Option, + pub ranges: Vec, + pub depth: usize, } impl LanguageLayer { - // pub fn new() -> Self { - // Self { tree: None } - // } - pub fn tree(&self) -> &Tree { // TODO: no unwrap self.tree.as_ref().unwrap() } - fn parse( - &mut self, - ts_parser: &mut TsParser, - config: &HighlightConfiguration, - source: &Rope, - _depth: usize, - ranges: Vec, - ) -> Result<(), Error> { - if ts_parser.parser.set_included_ranges(&ranges).is_ok() { - ts_parser - .parser - .set_language(config.language) - .map_err(|_| Error::InvalidLanguage)?; - - // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) }; - let tree = ts_parser - .parser - .parse_with( - &mut |byte, _| { - if byte <= 
source.len_bytes() { - let (chunk, start_byte, _, _) = source.chunk_at_byte(byte); - chunk[byte - start_byte..].as_bytes() - } else { - // out of range - &[] - } - }, - self.tree.as_ref(), - ) - .ok_or(Error::Cancelled)?; - - self.tree = Some(tree) - } + fn parse(&mut self, ts_parser: &mut TsParser, source: &Rope) -> Result<(), Error> { + ts_parser.parser.set_included_ranges(&self.ranges).unwrap(); + + ts_parser + .parser + .set_language(self.config.language) + .map_err(|_| Error::InvalidLanguage)?; + + // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) }; + let tree = ts_parser + .parser + .parse_with( + &mut |byte, _| { + if byte <= source.len_bytes() { + let (chunk, start_byte, _, _) = source.chunk_at_byte(byte); + chunk[byte - start_byte..].as_bytes() + } else { + // out of range + &[] + } + }, + self.tree.as_ref(), + ) + .ok_or(Error::Cancelled)?; + // unsafe { ts_parser.parser.set_cancellation_flag(None) }; + self.tree = Some(tree); Ok(()) } +} - pub(crate) fn generate_edits( - old_text: RopeSlice, - changeset: &ChangeSet, - ) -> Vec { - use Operation::*; - let mut old_pos = 0; +pub(crate) fn generate_edits( + old_text: RopeSlice, + changeset: &ChangeSet, +) -> Vec { + use Operation::*; + let mut old_pos = 0; - let mut edits = Vec::new(); + let mut edits = Vec::new(); - let mut iter = changeset.changes.iter().peekable(); + let mut iter = changeset.changes.iter().peekable(); - // TODO; this is a lot easier with Change instead of Operation. + // TODO; this is a lot easier with Change instead of Operation. - fn point_at_pos(text: RopeSlice, pos: usize) -> (usize, Point) { - let byte = text.char_to_byte(pos); // <- attempted to index past end - let line = text.char_to_line(pos); - let line_start_byte = text.line_to_byte(line); - let col = byte - line_start_byte; + fn point_at_pos(text: RopeSlice, pos: usize) -> (usize, Point) { + let byte = text.char_to_byte(pos); // <- attempted to index past end + let line = text.char_to_line(pos); + let line_start_byte = text.line_to_byte(line); + let col = byte - line_start_byte; - (byte, Point::new(line, col)) - } + (byte, Point::new(line, col)) + } - fn traverse(point: Point, text: &Tendril) -> Point { - let Point { - mut row, - mut column, - } = point; - - // TODO: there should be a better way here. - let mut chars = text.chars().peekable(); - while let Some(ch) = chars.next() { - if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) { - row += 1; - column = 0; - } else { - column += 1; - } + fn traverse(point: Point, text: &Tendril) -> Point { + let Point { + mut row, + mut column, + } = point; + + // TODO: there should be a better way here. + let mut chars = text.chars().peekable(); + while let Some(ch) = chars.next() { + if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) { + row += 1; + column = 0; + } else { + column += 1; } - Point { row, column } } + Point { row, column } + } - while let Some(change) = iter.next() { - let len = match change { - Delete(i) | Retain(i) => *i, - Insert(_) => 0, - }; - let mut old_end = old_pos + len; + while let Some(change) = iter.next() { + let len = match change { + Delete(i) | Retain(i) => *i, + Insert(_) => 0, + }; + let mut old_end = old_pos + len; + + match change { + Retain(_) => {} + Delete(_) => { + let (start_byte, start_position) = point_at_pos(old_text, old_pos); + let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end); + + // TODO: Position also needs to be byte based... 
+ // let byte = char_to_byte(old_pos) + // let line = char_to_line(old_pos) + // let line_start_byte = line_to_byte() + // Position::new(line, line_start_byte - byte) + + // deletion + edits.push(tree_sitter::InputEdit { + start_byte, // old_pos to byte + old_end_byte, // old_end to byte + new_end_byte: start_byte, // old_pos to byte + start_position, // old pos to coords + old_end_position, // old_end to coords + new_end_position: start_position, // old pos to coords + }); + } + Insert(s) => { + let (start_byte, start_position) = point_at_pos(old_text, old_pos); - match change { - Retain(_) => {} - Delete(_) => { - let (start_byte, start_position) = point_at_pos(old_text, old_pos); + // a subsequent delete means a replace, consume it + if let Some(Delete(len)) = iter.peek() { + old_end = old_pos + len; let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end); - // TODO: Position also needs to be byte based... - // let byte = char_to_byte(old_pos) - // let line = char_to_line(old_pos) - // let line_start_byte = line_to_byte() - // Position::new(line, line_start_byte - byte) + iter.next(); - // deletion + // replacement edits.push(tree_sitter::InputEdit { - start_byte, // old_pos to byte - old_end_byte, // old_end to byte - new_end_byte: start_byte, // old_pos to byte - start_position, // old pos to coords - old_end_position, // old_end to coords - new_end_position: start_position, // old pos to coords + start_byte, // old_pos to byte + old_end_byte, // old_end to byte + new_end_byte: start_byte + s.len(), // old_pos to byte + s.len() + start_position, // old pos to coords + old_end_position, // old_end to coords + new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) + }); + } else { + // insert + edits.push(tree_sitter::InputEdit { + start_byte, // old_pos to byte + old_end_byte: start_byte, // same + new_end_byte: start_byte + s.len(), // old_pos + s.len() + start_position, // old pos to coords + old_end_position: start_position, // same + new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) }); - } - Insert(s) => { - let (start_byte, start_position) = point_at_pos(old_text, old_pos); - - // a subsequent delete means a replace, consume it - if let Some(Delete(len)) = iter.peek() { - old_end = old_pos + len; - let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end); - - iter.next(); - - // replacement - edits.push(tree_sitter::InputEdit { - start_byte, // old_pos to byte - old_end_byte, // old_end to byte - new_end_byte: start_byte + s.len(), // old_pos to byte + s.len() - start_position, // old pos to coords - old_end_position, // old_end to coords - new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) - }); - } else { - // insert - edits.push(tree_sitter::InputEdit { - start_byte, // old_pos to byte - old_end_byte: start_byte, // same - new_end_byte: start_byte + s.len(), // old_pos + s.len() - start_position, // old pos to coords - old_end_position: start_position, // same - new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over) - }); - } } } - old_pos = old_end; - } - edits - } - - fn update( - &mut self, - ts_parser: &mut TsParser, - config: &HighlightConfiguration, - old_source: &Rope, - source: &Rope, - changeset: &ChangeSet, - ) -> Result<(), Error> { - if changeset.is_empty() { - return Ok(()); } - - let edits = Self::generate_edits(old_source.slice(..), changeset); - - // 
Notify the tree about all the changes - for edit in edits.iter().rev() { - // apply the edits in reverse. If we applied them in order then edit 1 would disrupt - // the positioning of edit 2 - self.tree.as_mut().unwrap().edit(edit); - } - - self.parse( - ts_parser, - config, - source, - 0, - // TODO: what to do about this range on update - vec![Range { - start_byte: 0, - end_byte: usize::MAX, - start_point: Point::new(0, 0), - end_point: Point::new(usize::MAX, usize::MAX), - }], - ) + old_pos = old_end; } - - // fn highlight_iter() -> same as Mode but for this layer. Mode composits these - // fn buffer_changed - // fn update(range) - // fn update_injections() + edits } -// -- refactored from tree-sitter-highlight to be able to retain state -// TODO: add seek() to iter - -// problem: any time a layer is updated it must update it's injections on the parent (potentially -// removing some from use) -// can't modify to vec and exist in it at the same time since that would violate borrows -// maybe we can do with an arena -// maybe just caching on the top layer and nevermind the injections for now? -// -// Grammar { -// layers: Vec> to prevent memory moves when vec is modified -// } -// injections tracked by marker: -// if marker areas match it's fine and update -// if not found add new layer -// if length 0 then area got removed, clean up the layer -// -// layer update: -// if range.len = 0 then remove the layer -// for change in changes { tree.edit(change) } -// tree = parser.parse(.., tree, ..) -// calculate affected range and update injections -// injection update: -// look for existing injections -// if present, range = (first injection start, last injection end) -// -// For now cheat and just throw out non-root layers if they exist. This should still improve -// parsing in majority of cases. - use std::sync::atomic::{AtomicUsize, Ordering}; use std::{iter, mem, ops, str, usize}; use tree_sitter::{ @@ -820,8 +922,8 @@ pub enum HighlightEvent { pub struct HighlightConfiguration { pub language: Grammar, pub query: Query, + injections_query: Query, combined_injections_query: Option, - locals_pattern_index: usize, highlights_pattern_index: usize, highlight_indices: ArcSwap>>, non_local_variable_patterns: Vec, @@ -848,13 +950,9 @@ struct LocalScope<'a> { } #[derive(Debug)] -struct HighlightIter<'a, F> -where - F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, -{ +struct HighlightIter<'a> { source: RopeSlice<'a>, byte_offset: usize, - injection_callback: F, cancellation_flag: Option<&'a AtomicUsize>, layers: Vec>, iter_count: usize, @@ -894,8 +992,8 @@ struct HighlightIterLayer<'a> { config: &'a HighlightConfiguration, highlight_end_stack: Vec, scope_stack: Vec>, - ranges: Vec, depth: usize, + ranges: Vec, // TEMP } impl<'a> fmt::Debug for HighlightIterLayer<'a> { @@ -927,38 +1025,32 @@ impl HighlightConfiguration { ) -> Result { // Concatenate the query strings, keeping track of the start offset of each section. let mut query_source = String::new(); - query_source.push_str(injection_query); - let locals_query_offset = query_source.len(); query_source.push_str(locals_query); let highlights_query_offset = query_source.len(); query_source.push_str(highlights_query); // Construct a single query by concatenating the three query strings, but record the // range of pattern indices that belong to each individual string. 
- let mut query = Query::new(language, &query_source)?; - let mut locals_pattern_index = 0; + let query = Query::new(language, &query_source)?; let mut highlights_pattern_index = 0; for i in 0..(query.pattern_count()) { let pattern_offset = query.start_byte_for_pattern(i); if pattern_offset < highlights_query_offset { - if pattern_offset < highlights_query_offset { - highlights_pattern_index += 1; - } - if pattern_offset < locals_query_offset { - locals_pattern_index += 1; - } + highlights_pattern_index += 1; } } + let mut injections_query = Query::new(language, injection_query)?; + // Construct a separate query just for dealing with the 'combined injections'. // Disable the combined injection patterns in the main query. let mut combined_injections_query = Query::new(language, injection_query)?; let mut has_combined_queries = false; - for pattern_index in 0..locals_pattern_index { - let settings = query.property_settings(pattern_index); + for pattern_index in 0..injections_query.pattern_count() { + let settings = injections_query.property_settings(pattern_index); if settings.iter().any(|s| &*s.key == "injection.combined") { has_combined_queries = true; - query.disable_pattern(pattern_index); + injections_query.disable_pattern(pattern_index); } else { combined_injections_query.disable_pattern(pattern_index); } @@ -990,8 +1082,6 @@ impl HighlightConfiguration { for (i, name) in query.capture_names().iter().enumerate() { let i = Some(i as u32); match name.as_str() { - "injection.content" => injection_content_capture_index = i, - "injection.language" => injection_language_capture_index = i, "local.definition" => local_def_capture_index = i, "local.definition-value" => local_def_value_capture_index = i, "local.reference" => local_ref_capture_index = i, @@ -1000,12 +1090,21 @@ impl HighlightConfiguration { } } + for (i, name) in injections_query.capture_names().iter().enumerate() { + let i = Some(i as u32); + match name.as_str() { + "injection.content" => injection_content_capture_index = i, + "injection.language" => injection_language_capture_index = i, + _ => {} + } + } + let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]); Ok(Self { language, query, + injections_query, combined_injections_query, - locals_pattern_index, highlights_pattern_index, highlight_indices, non_local_variable_patterns, @@ -1070,238 +1169,6 @@ impl HighlightConfiguration { } impl<'a> HighlightIterLayer<'a> { - /// Create a new 'layer' of highlighting for this document. - /// - /// In the even that the new layer contains "combined injections" (injections where multiple - /// disjoint ranges are parsed as one syntax tree), these will be eagerly processed and - /// added to the returned vector. 
- fn new Option<&'a HighlightConfiguration> + 'a>( - source: RopeSlice<'a>, - cancellation_flag: Option<&'a AtomicUsize>, - injection_callback: &mut F, - mut config: &'a HighlightConfiguration, - mut depth: usize, - mut ranges: Vec, - ) -> Result, Error> { - let mut result = Vec::with_capacity(1); - let mut queue = Vec::new(); - loop { - // --> Tree parsing part - - PARSER.with(|ts_parser| { - let highlighter = &mut ts_parser.borrow_mut(); - - if highlighter.parser.set_included_ranges(&ranges).is_ok() { - highlighter - .parser - .set_language(config.language) - .map_err(|_| Error::InvalidLanguage)?; - - unsafe { highlighter.parser.set_cancellation_flag(cancellation_flag) }; - let tree = highlighter - .parser - .parse_with( - &mut |byte, _| { - if byte <= source.len_bytes() { - let (chunk, start_byte, _, _) = source.chunk_at_byte(byte); - chunk[byte - start_byte..].as_bytes() - } else { - // out of range - &[] - } - }, - None, - ) - .ok_or(Error::Cancelled)?; - unsafe { highlighter.parser.set_cancellation_flag(None) }; - let mut cursor = highlighter.cursors.pop().unwrap_or_else(QueryCursor::new); - - // Process combined injections. - if let Some(combined_injections_query) = &config.combined_injections_query { - let mut injections_by_pattern_index = vec![ - (None, Vec::new(), false); - combined_injections_query - .pattern_count() - ]; - let matches = cursor.matches( - combined_injections_query, - tree.root_node(), - RopeProvider(source), - ); - for mat in matches { - let entry = &mut injections_by_pattern_index[mat.pattern_index]; - let (language_name, content_node, include_children) = - injection_for_match( - config, - combined_injections_query, - &mat, - source, - ); - if language_name.is_some() { - entry.0 = language_name; - } - if let Some(content_node) = content_node { - entry.1.push(content_node); - } - entry.2 = include_children; - } - for (lang_name, content_nodes, includes_children) in - injections_by_pattern_index - { - if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) - { - if let Some(next_config) = (injection_callback)(&lang_name) { - let ranges = Self::intersect_ranges( - &ranges, - &content_nodes, - includes_children, - ); - if !ranges.is_empty() { - queue.push((next_config, depth + 1, ranges)); - } - } - } - } - } - - // --> Highlighting query part - - // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which - // prevents them from being moved. But both of these values are really just - // pointers, so it's actually ok to move them. - let tree_ref = unsafe { mem::transmute::<_, &'static Tree>(&tree) }; - let cursor_ref = - unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) }; - let captures = cursor_ref - .captures(&config.query, tree_ref.root_node(), RopeProvider(source)) - .peekable(); - - result.push(HighlightIterLayer { - highlight_end_stack: Vec::new(), - scope_stack: vec![LocalScope { - inherits: false, - range: 0..usize::MAX, - local_defs: Vec::new(), - }], - cursor, - depth, - _tree: Some(tree), - captures, - config, - ranges, - }); - } - - Ok(()) // so we can use the try operator - })?; - - if queue.is_empty() { - break; - } - - let (next_config, next_depth, next_ranges) = queue.remove(0); - config = next_config; - depth = next_depth; - ranges = next_ranges; - } - - Ok(result) - } - - // Compute the ranges that should be included when parsing an injection. - // This takes into account three things: - // * `parent_ranges` - The ranges must all fall within the *current* layer's ranges. 
- // * `nodes` - Every injection takes place within a set of nodes. The injection ranges - // are the ranges of those nodes. - // * `includes_children` - For some injections, the content nodes' children should be - // excluded from the nested document, so that only the content nodes' *own* content - // is reparsed. For other injections, the content nodes' entire ranges should be - // reparsed, including the ranges of their children. - fn intersect_ranges( - parent_ranges: &[Range], - nodes: &[Node], - includes_children: bool, - ) -> Vec { - let mut cursor = nodes[0].walk(); - let mut result = Vec::new(); - let mut parent_range_iter = parent_ranges.iter(); - let mut parent_range = parent_range_iter - .next() - .expect("Layers should only be constructed with non-empty ranges vectors"); - for node in nodes.iter() { - let mut preceding_range = Range { - start_byte: 0, - start_point: Point::new(0, 0), - end_byte: node.start_byte(), - end_point: node.start_position(), - }; - let following_range = Range { - start_byte: node.end_byte(), - start_point: node.end_position(), - end_byte: usize::MAX, - end_point: Point::new(usize::MAX, usize::MAX), - }; - - for excluded_range in node - .children(&mut cursor) - .filter_map(|child| { - if includes_children { - None - } else { - Some(child.range()) - } - }) - .chain([following_range].iter().cloned()) - { - let mut range = Range { - start_byte: preceding_range.end_byte, - start_point: preceding_range.end_point, - end_byte: excluded_range.start_byte, - end_point: excluded_range.start_point, - }; - preceding_range = excluded_range; - - if range.end_byte < parent_range.start_byte { - continue; - } - - while parent_range.start_byte <= range.end_byte { - if parent_range.end_byte > range.start_byte { - if range.start_byte < parent_range.start_byte { - range.start_byte = parent_range.start_byte; - range.start_point = parent_range.start_point; - } - - if parent_range.end_byte < range.end_byte { - if range.start_byte < parent_range.end_byte { - result.push(Range { - start_byte: range.start_byte, - start_point: range.start_point, - end_byte: parent_range.end_byte, - end_point: parent_range.end_point, - }); - } - range.start_byte = parent_range.end_byte; - range.start_point = parent_range.end_point; - } else { - if range.start_byte < range.end_byte { - result.push(range); - } - break; - } - } - - if let Some(next_range) = parent_range_iter.next() { - parent_range = next_range; - } else { - return result; - } - } - } - } - result - } - // First, sort scope boundaries by their byte offset in the document. At a // given position, emit scope endings before scope beginnings. Finally, emit // scope boundaries from deeper layers first. @@ -1327,10 +1194,101 @@ impl<'a> HighlightIterLayer<'a> { } } -impl<'a, F> HighlightIter<'a, F> -where - F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, -{ +// Compute the ranges that should be included when parsing an injection. +// This takes into account three things: +// * `parent_ranges` - The ranges must all fall within the *current* layer's ranges. +// * `nodes` - Every injection takes place within a set of nodes. The injection ranges +// are the ranges of those nodes. +// * `includes_children` - For some injections, the content nodes' children should be +// excluded from the nested document, so that only the content nodes' *own* content +// is reparsed. For other injections, the content nodes' entire ranges should be +// reparsed, including the ranges of their children. 
+fn intersect_ranges( + parent_ranges: &[Range], + nodes: &[Node], + includes_children: bool, +) -> Vec { + let mut cursor = nodes[0].walk(); + let mut result = Vec::new(); + let mut parent_range_iter = parent_ranges.iter(); + let mut parent_range = parent_range_iter + .next() + .expect("Layers should only be constructed with non-empty ranges vectors"); + for node in nodes.iter() { + let mut preceding_range = Range { + start_byte: 0, + start_point: Point::new(0, 0), + end_byte: node.start_byte(), + end_point: node.start_position(), + }; + let following_range = Range { + start_byte: node.end_byte(), + start_point: node.end_position(), + end_byte: usize::MAX, + end_point: Point::new(usize::MAX, usize::MAX), + }; + + for excluded_range in node + .children(&mut cursor) + .filter_map(|child| { + if includes_children { + None + } else { + Some(child.range()) + } + }) + .chain([following_range].iter().cloned()) + { + let mut range = Range { + start_byte: preceding_range.end_byte, + start_point: preceding_range.end_point, + end_byte: excluded_range.start_byte, + end_point: excluded_range.start_point, + }; + preceding_range = excluded_range; + + if range.end_byte < parent_range.start_byte { + continue; + } + + while parent_range.start_byte <= range.end_byte { + if parent_range.end_byte > range.start_byte { + if range.start_byte < parent_range.start_byte { + range.start_byte = parent_range.start_byte; + range.start_point = parent_range.start_point; + } + + if parent_range.end_byte < range.end_byte { + if range.start_byte < parent_range.end_byte { + result.push(Range { + start_byte: range.start_byte, + start_point: range.start_point, + end_byte: parent_range.end_byte, + end_point: parent_range.end_point, + }); + } + range.start_byte = parent_range.end_byte; + range.start_point = parent_range.end_point; + } else { + if range.start_byte < range.end_byte { + result.push(range); + } + break; + } + } + + if let Some(next_range) = parent_range_iter.next() { + parent_range = next_range; + } else { + return result; + } + } + } + } + result +} + +impl<'a> HighlightIter<'a> { fn emit_event( &mut self, offset: usize, @@ -1361,6 +1319,12 @@ where i += 1; continue; } + } else { + let layer = self.layers.remove(i + 1); + PARSER.with(|ts_parser| { + let highlighter = &mut ts_parser.borrow_mut(); + highlighter.cursors.push(layer.cursor); + }); } break; } @@ -1377,30 +1341,9 @@ where } } } - - fn insert_layer(&mut self, mut layer: HighlightIterLayer<'a>) { - if let Some(sort_key) = layer.sort_key() { - let mut i = 1; - while i < self.layers.len() { - if let Some(sort_key_i) = self.layers[i].sort_key() { - if sort_key_i > sort_key { - self.layers.insert(i, layer); - return; - } - i += 1; - } else { - self.layers.remove(i); - } - } - self.layers.push(layer); - } - } } -impl<'a, F> Iterator for HighlightIter<'a, F> -where - F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a, -{ +impl<'a> Iterator for HighlightIter<'a> { type Item = Result; fn next(&mut self) -> Option { @@ -1460,55 +1403,12 @@ where layer.highlight_end_stack.pop(); return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd)); } else { - // return self.emit_event(self.source.len(), None); - return None; + return self.emit_event(self.source.len_bytes(), None); }; let (mut match_, capture_index) = layer.captures.next().unwrap(); let mut capture = match_.captures[capture_index]; - // If this capture represents an injection, then process the injection. 
- if match_.pattern_index < layer.config.locals_pattern_index { - let (language_name, content_node, include_children) = - injection_for_match(layer.config, &layer.config.query, &match_, self.source); - - // Explicitly remove this match so that none of its other captures will remain - // in the stream of captures. - match_.remove(); - - // If a language is found with the given name, then add a new language layer - // to the highlighted document. - if let (Some(language_name), Some(content_node)) = (language_name, content_node) { - if let Some(config) = (self.injection_callback)(&language_name) { - let ranges = HighlightIterLayer::intersect_ranges( - &self.layers[0].ranges, - &[content_node], - include_children, - ); - if !ranges.is_empty() { - match HighlightIterLayer::new( - self.source, - self.cancellation_flag, - &mut self.injection_callback, - config, - self.layers[0].depth + 1, - ranges, - ) { - Ok(layers) => { - for layer in layers { - self.insert_layer(layer); - } - } - Err(e) => return Some(Err(e)), - } - } - } - } - - self.sort_layers(); - continue 'main; - } - // Remove from the local scope stack any local scopes that have already ended. while range.start > layer.scope_stack.last().unwrap().range.end { layer.scope_stack.pop(); @@ -1703,14 +1603,6 @@ fn injection_for_match<'a>( (language_name, content_node, include_children) } -// fn shrink_and_clear(vec: &mut Vec, capacity: usize) { -// if vec.len() > capacity { -// vec.truncate(capacity); -// vec.shrink_to_fit(); -// } -// vec.clear(); -// } - pub struct Merge { iter: I, spans: Box)>>, @@ -1877,6 +1769,8 @@ mod test { .map(String::from) .collect(); + let loader = Loader::new(Configuration { language: vec![] }); + let language = get_language(&crate::RUNTIME_DIR, "Rust").unwrap(); let config = HighlightConfiguration::new( language, @@ -1899,7 +1793,7 @@ mod test { fn main() {} ", ); - let syntax = Syntax::new(&source, Arc::new(config)); + let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader)); let tree = syntax.tree(); let root = tree.root_node(); assert_eq!(root.kind(), "source_file"); @@ -1926,7 +1820,7 @@ mod test { &doc, vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(), ); - let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes()); + let edits = generate_edits(doc.slice(..), transaction.changes()); // transaction.apply(&mut state); assert_eq!( @@ -1955,7 +1849,7 @@ mod test { let mut doc = Rope::from("fn test() {}"); let transaction = Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter()); - let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes()); + let edits = generate_edits(doc.slice(..), transaction.changes()); transaction.apply(&mut doc); assert_eq!(doc, "fn test(a: u32) {}"); diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs index c8c94e14..acf6b8d8 100644 --- a/helix-term/src/ui/editor.rs +++ b/helix-term/src/ui/editor.rs @@ -68,13 +68,12 @@ impl EditorView { surface: &mut Surface, theme: &Theme, is_focused: bool, - loader: &syntax::Loader, config: &helix_view::editor::Config, ) { let inner = view.inner_area(); let area = view.area; - let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, loader); + let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme); let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme)); let highlights: Box> = if is_focused { Box::new(syntax::merge( @@ -121,8 +120,7 @@ impl EditorView { doc: 
&'doc Document, offset: Position, height: u16, - theme: &Theme, - loader: &syntax::Loader, + _theme: &Theme, ) -> Box + 'doc> { let text = doc.text().slice(..); let last_line = std::cmp::min( @@ -142,25 +140,8 @@ impl EditorView { // TODO: range doesn't actually restrict source, just highlight range let highlights = match doc.syntax() { Some(syntax) => { - let scopes = theme.scopes(); syntax - .highlight_iter(text.slice(..), Some(range), None, |language| { - loader.language_configuration_for_injection_string(language) - .and_then(|language_config| { - let config = language_config.highlight_config(scopes)?; - let config_ref = config.as_ref(); - // SAFETY: the referenced `HighlightConfiguration` behind - // the `Arc` is guaranteed to remain valid throughout the - // duration of the highlight. - let config_ref = unsafe { - std::mem::transmute::< - _, - &'static syntax::HighlightConfiguration, - >(config_ref) - }; - Some(config_ref) - }) - }) + .highlight_iter(text.slice(..), Some(range), None) .map(|event| event.unwrap()) .collect() // TODO: we collect here to avoid holding the lock, fix later } @@ -1070,7 +1051,6 @@ impl Component for EditorView { for (view, is_focused) in cx.editor.tree.views() { let doc = cx.editor.document(view.doc).unwrap(); - let loader = &cx.editor.syn_loader; self.render_view( doc, view, @@ -1078,7 +1058,6 @@ impl Component for EditorView { surface, &cx.editor.theme, is_focused, - loader, &cx.editor.config, ); } diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 1f0fc38c..00da2c11 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -38,7 +38,7 @@ impl Markdown { fn parse<'a>( contents: &'a str, theme: Option<&Theme>, - loader: &syntax::Loader, + loader: Arc, ) -> tui::text::Text<'a> { // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. 
This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; @@ -98,14 +98,13 @@ fn parse<'a>( let syntax = loader .language_configuration_for_injection_string(language) .and_then(|config| config.highlight_config(theme.scopes())) - .map(|config| Syntax::new(&rope, config)); + .map(|config| Syntax::new(&rope, config, loader.clone())); if let Some(syntax) = syntax { // if we have a syntax available, highlight_iter and generate spans let mut highlights = Vec::new(); - for event in syntax.highlight_iter(rope.slice(..), None, None, |_| None) - { + for event in syntax.highlight_iter(rope.slice(..), None, None) { match event.unwrap() { HighlightEvent::HighlightStart(span) => { highlights.push(span); @@ -211,7 +210,11 @@ impl Component for Markdown { fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { use tui::widgets::{Paragraph, Widget, Wrap}; - let text = parse(&self.contents, Some(&cx.editor.theme), &self.config_loader); + let text = parse( + &self.contents, + Some(&cx.editor.theme), + self.config_loader.clone(), + ); let par = Paragraph::new(text) .wrap(Wrap { trim: false }) @@ -229,7 +232,7 @@ impl Component for Markdown { if padding >= viewport.1 || padding >= viewport.0 { return None; } - let contents = parse(&self.contents, None, &self.config_loader); + let contents = parse(&self.contents, None, self.config_loader.clone()); // TODO: account for tab width let max_text_width = (viewport.0 - padding).min(120); let mut text_width = 0; diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs index a061513c..00236050 100644 --- 
a/helix-term/src/ui/picker.rs +++ b/helix-term/src/ui/picker.rs @@ -221,13 +221,8 @@ impl Component for FilePicker { let offset = Position::new(first_line, 0); - let highlights = EditorView::doc_syntax_highlights( - doc, - offset, - area.height, - &cx.editor.theme, - &cx.editor.syn_loader, - ); + let highlights = + EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme); EditorView::render_text_highlights( doc, offset, diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs index 579349ee..0ac8e80b 100644 --- a/helix-view/src/document.rs +++ b/helix-view/src/document.rs @@ -359,7 +359,7 @@ impl Document { path: &Path, encoding: Option<&'static encoding::Encoding>, theme: Option<&Theme>, - config_loader: Option<&syntax::Loader>, + config_loader: Option>, ) -> Result { // Open the file if it exists, otherwise assume it is a new file (and thus empty). let (rope, encoding) = if path.exists() { @@ -498,12 +498,12 @@ impl Document { } /// Detect the programming language based on the file type. - pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: &syntax::Loader) { + pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: Arc) { if let Some(path) = &self.path { let language_config = config_loader .language_config_for_file_name(path) .or_else(|| config_loader.language_config_for_shebang(self.text())); - self.set_language(theme, language_config); + self.set_language(theme, language_config, Some(config_loader)); } } @@ -579,11 +579,12 @@ impl Document { &mut self, theme: Option<&Theme>, language_config: Option>, + loader: Option>, ) { - if let Some(language_config) = language_config { + if let (Some(language_config), Some(loader)) = (language_config, loader) { let scopes = theme.map(|theme| theme.scopes()).unwrap_or(&[]); if let Some(highlight_config) = language_config.highlight_config(scopes) { - let syntax = Syntax::new(&self.text, highlight_config); + let syntax = Syntax::new(&self.text, highlight_config, loader); self.syntax = Some(syntax); // TODO: config.configure(scopes) is now delayed, is that ok? } @@ -605,7 +606,7 @@ impl Document { ) { let language_config = config_loader.language_config_for_scope(scope); - self.set_language(theme, language_config); + self.set_language(theme, language_config, Some(config_loader)); } /// Set the LSP. 
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs index 7406b475..9ad1558b 100644 --- a/helix-view/src/editor.rs +++ b/helix-view/src/editor.rs @@ -283,7 +283,7 @@ impl Editor { /// Refreshes the language server for a given document pub fn refresh_language_server(&mut self, doc_id: DocumentId) -> Option<()> { let doc = self.documents.get_mut(&doc_id)?; - doc.detect_language(Some(&self.theme), &self.syn_loader); + doc.detect_language(Some(&self.theme), self.syn_loader.clone()); Self::launch_language_server(&mut self.language_servers, doc) } @@ -462,7 +462,12 @@ impl Editor { let id = if let Some(id) = id { id } else { - let mut doc = Document::open(&path, None, Some(&self.theme), Some(&self.syn_loader))?; + let mut doc = Document::open( + &path, + None, + Some(&self.theme), + Some(self.syn_loader.clone()), + )?; let _ = Self::launch_language_server(&mut self.language_servers, &mut doc); -- cgit v1.2.3-70-g09d2 From d49e5323f9230f3195d3ee4c5e682cd6d8c2cb1a Mon Sep 17 00:00:00 2001 From: CossonLeo Date: Mon, 24 Jan 2022 09:41:25 +0800 Subject: Use markup scopes for the Markdown component (#1363) --- book/src/themes.md | 12 ++ helix-term/src/commands.rs | 3 +- helix-term/src/ui/completion.rs | 9 +- helix-term/src/ui/markdown.rs | 313 +++++++++++++++++++++------------------- 4 files changed, 184 insertions(+), 153 deletions(-) (limited to 'helix-term/src/ui/markdown.rs') diff --git a/book/src/themes.md b/book/src/themes.md index 1325de8c..9abcfe8c 100644 --- a/book/src/themes.md +++ b/book/src/themes.md @@ -190,6 +190,18 @@ We use a similar set of scopes as These scopes are used for theming the editor interface. +- `markup` + - `normal` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + - `heading` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + - `raw` + - `inline` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + | Key | Notes | | --- | --- | diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index fc0db6ed..7144ebb9 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -5455,7 +5455,8 @@ fn hover(cx: &mut Context) { // skip if contents empty - let contents = ui::Markdown::new(contents, editor.syn_loader.clone()); + let contents = + ui::Markdown::new(contents, editor.syn_loader.clone()).style_group("hover"); let popup = Popup::new("hover", contents); if let Some(doc_popup) = compositor.find_id("hover") { *doc_popup = popup; diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index c9ed3b4a..35afe81e 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -304,6 +304,9 @@ impl Component for Completion { let cursor_pos = doc.selection(view.id).primary().cursor(text); let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width()); let cursor_pos = (coords.row - view.offset.row) as u16; + + let markdown_ui = + |content, syn_loader| Markdown::new(content, syn_loader).style_group("completion"); let mut markdown_doc = match &option.documentation { Some(lsp::Documentation::String(contents)) | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { @@ -311,7 +314,7 @@ impl Component for Completion { value: contents, })) => { // TODO: convert to wrapped text - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```\n{}", language, @@ -326,7 +329,7 @@ impl Component for Completion { value: contents, })) => { // TODO: set language based on doc scope - Markdown::new( + 
markdown_ui( format!( "```{}\n{}\n```\n{}", language, @@ -340,7 +343,7 @@ impl Component for Completion { // TODO: copied from above // TODO: set language based on doc scope - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```", language, diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 00da2c11..003266d3 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -21,6 +21,10 @@ pub struct Markdown { contents: String, config_loader: Arc, + + text_style: String, + block_style: String, + heading_style: String, } // TODO: pre-render and self reference via Pin @@ -31,121 +35,139 @@ impl Markdown { Self { contents, config_loader, + text_style: "markup.normal".into(), + block_style: "markup.raw.inline".into(), + heading_style: "markup.heading".into(), } } -} -fn parse<'a>( - contents: &'a str, - theme: Option<&Theme>, - loader: Arc, -) -> tui::text::Text<'a> { - // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} - // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. 
This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; - - let mut options = Options::empty(); - options.insert(Options::ENABLE_STRIKETHROUGH); - let parser = Parser::new_ext(contents, options); - - // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda - let mut tags = Vec::new(); - let mut spans = Vec::new(); - let mut lines = Vec::new(); - - fn to_span(text: pulldown_cmark::CowStr) -> Span { - use std::ops::Deref; - Span::raw::>(match text { - CowStr::Borrowed(s) => s.into(), - CowStr::Boxed(s) => s.to_string().into(), - CowStr::Inlined(s) => s.deref().to_owned().into(), - }) + pub fn style_group(mut self, suffix: &str) -> Self { + self.text_style = format!("markup.normal.{}", suffix); + self.block_style = format!("markup.raw.inline.{}", suffix); + self.heading_style = format!("markup.heading.{}", suffix); + self } - let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default(); - - // TODO: use better scopes for these, `markup.raw.block`, `markup.heading` - let code_style = theme - .map(|theme| theme.get("ui.text.focus")) - .unwrap_or_default(); // white - let heading_style = theme - .map(|theme| theme.get("ui.linenr.selected")) - .unwrap_or_default(); // lilac - - for event in parser { - match event { - Event::Start(tag) => tags.push(tag), - Event::End(tag) => { - tags.pop(); - match tag { - Tag::Heading(_, _, _) - | Tag::Paragraph - | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { - // whenever code block or paragraph closes, new line - let spans = std::mem::take(&mut spans); - if !spans.is_empty() { - lines.push(Spans::from(spans)); + fn parse(&self, theme: Option<&Theme>) -> tui::text::Text<'_> { + // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) 
->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} + // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; + + let mut options = Options::empty(); + options.insert(Options::ENABLE_STRIKETHROUGH); + let parser = Parser::new_ext(&self.contents, options); + + // TODO: if possible, 
render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda + let mut tags = Vec::new(); + let mut spans = Vec::new(); + let mut lines = Vec::new(); + + fn to_span(text: pulldown_cmark::CowStr) -> Span { + use std::ops::Deref; + Span::raw::>(match text { + CowStr::Borrowed(s) => s.into(), + CowStr::Boxed(s) => s.to_string().into(), + CowStr::Inlined(s) => s.deref().to_owned().into(), + }) + } + + macro_rules! get_theme { + ($s1: expr) => { + theme + .map(|theme| theme.try_get($s1.as_str())) + .flatten() + .unwrap_or_default() + }; + } + let text_style = get_theme!(self.text_style); + let code_style = get_theme!(self.block_style); + let heading_style = get_theme!(self.heading_style); + + for event in parser { + match event { + Event::Start(tag) => tags.push(tag), + Event::End(tag) => { + tags.pop(); + match tag { + Tag::Heading(_, _, _) + | Tag::Paragraph + | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { + // whenever code block or paragraph closes, new line + let spans = std::mem::take(&mut spans); + if !spans.is_empty() { + lines.push(Spans::from(spans)); + } + lines.push(Spans::default()); } - lines.push(Spans::default()); + _ => (), } - _ => (), } - } - Event::Text(text) => { - // TODO: temp workaround - if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { - if let Some(theme) = theme { - let rope = Rope::from(text.as_ref()); - let syntax = loader - .language_configuration_for_injection_string(language) - .and_then(|config| config.highlight_config(theme.scopes())) - .map(|config| Syntax::new(&rope, config, loader.clone())); - - if let Some(syntax) = syntax { - // if we have a syntax available, highlight_iter and generate spans - let mut highlights = Vec::new(); - - for event in syntax.highlight_iter(rope.slice(..), None, None) { - match event.unwrap() { - HighlightEvent::HighlightStart(span) => { - highlights.push(span); - } - HighlightEvent::HighlightEnd => { - highlights.pop(); - } - HighlightEvent::Source { start, end } => { - let style = match highlights.first() { - Some(span) => theme.get(&theme.scopes()[span.0]), - None => text_style, - }; - - // TODO: replace tabs with indentation - - let mut slice = &text[start..end]; - // TODO: do we need to handle all unicode line endings - // here, or is just '\n' okay? 
- while let Some(end) = slice.find('\n') { - // emit span up to newline - let text = &slice[..end]; - let text = text.replace('\t', " "); // replace tabs - let span = Span::styled(text, style); - spans.push(span); - - // truncate slice to after newline - slice = &slice[end + 1..]; - - // make a new line - let spans = std::mem::take(&mut spans); - lines.push(Spans::from(spans)); + Event::Text(text) => { + // TODO: temp workaround + if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { + if let Some(theme) = theme { + let rope = Rope::from(text.as_ref()); + let syntax = self + .config_loader + .language_configuration_for_injection_string(language) + .and_then(|config| config.highlight_config(theme.scopes())) + .map(|config| { + Syntax::new(&rope, config, self.config_loader.clone()) + }); + + if let Some(syntax) = syntax { + // if we have a syntax available, highlight_iter and generate spans + let mut highlights = Vec::new(); + + for event in syntax.highlight_iter(rope.slice(..), None, None) { + match event.unwrap() { + HighlightEvent::HighlightStart(span) => { + highlights.push(span); } + HighlightEvent::HighlightEnd => { + highlights.pop(); + } + HighlightEvent::Source { start, end } => { + let style = match highlights.first() { + Some(span) => theme.get(&theme.scopes()[span.0]), + None => text_style, + }; + + // TODO: replace tabs with indentation + + let mut slice = &text[start..end]; + // TODO: do we need to handle all unicode line endings + // here, or is just '\n' okay? + while let Some(end) = slice.find('\n') { + // emit span up to newline + let text = &slice[..end]; + let text = text.replace('\t', " "); // replace tabs + let span = Span::styled(text, style); + spans.push(span); + + // truncate slice to after newline + slice = &slice[end + 1..]; + + // make a new line + let spans = std::mem::take(&mut spans); + lines.push(Spans::from(spans)); + } - // if there's anything left, emit it too - if !slice.is_empty() { - let span = - Span::styled(slice.replace('\t', " "), style); - spans.push(span); + // if there's anything left, emit it too + if !slice.is_empty() { + let span = Span::styled( + slice.replace('\t', " "), + style, + ); + spans.push(span); + } } } } + } else { + for line in text.lines() { + let span = Span::styled(line.to_string(), code_style); + lines.push(Spans::from(span)); + } } } else { for line in text.lines() { @@ -153,68 +175,60 @@ fn parse<'a>( lines.push(Spans::from(span)); } } + } else if let Some(Tag::Heading(_, _, _)) = tags.last() { + let mut span = to_span(text); + span.style = heading_style; + spans.push(span); } else { - for line in text.lines() { - let span = Span::styled(line.to_string(), code_style); - lines.push(Spans::from(span)); - } + let mut span = to_span(text); + span.style = text_style; + spans.push(span); } - } else if let Some(Tag::Heading(_, _, _)) = tags.last() { - let mut span = to_span(text); - span.style = heading_style; - spans.push(span); - } else { + } + Event::Code(text) | Event::Html(text) => { let mut span = to_span(text); - span.style = text_style; + span.style = code_style; spans.push(span); } + Event::SoftBreak | Event::HardBreak => { + // let spans = std::mem::replace(&mut spans, Vec::new()); + // lines.push(Spans::from(spans)); + spans.push(Span::raw(" ")); + } + Event::Rule => { + let mut span = Span::raw("---"); + span.style = code_style; + lines.push(Spans::from(span)); + lines.push(Spans::default()); + } + // TaskListMarker(bool) true if checked + _ => { + log::warn!("unhandled markdown event {:?}", 
event); + } } - Event::Code(text) | Event::Html(text) => { - let mut span = to_span(text); - span.style = code_style; - spans.push(span); - } - Event::SoftBreak | Event::HardBreak => { - // let spans = std::mem::replace(&mut spans, Vec::new()); - // lines.push(Spans::from(spans)); - spans.push(Span::raw(" ")); - } - Event::Rule => { - let mut span = Span::raw("---"); - span.style = code_style; - lines.push(Spans::from(span)); - lines.push(Spans::default()); - } - // TaskListMarker(bool) true if checked - _ => { - log::warn!("unhandled markdown event {:?}", event); - } + // build up a vec of Paragraph tui widgets } - // build up a vec of Paragraph tui widgets - } - if !spans.is_empty() { - lines.push(Spans::from(spans)); - } + if !spans.is_empty() { + lines.push(Spans::from(spans)); + } - // if last line is empty, remove it - if let Some(line) = lines.last() { - if line.0.is_empty() { - lines.pop(); + // if last line is empty, remove it + if let Some(line) = lines.last() { + if line.0.is_empty() { + lines.pop(); + } } - } - Text::from(lines) + Text::from(lines) + } } + impl Component for Markdown { fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { use tui::widgets::{Paragraph, Widget, Wrap}; - let text = parse( - &self.contents, - Some(&cx.editor.theme), - self.config_loader.clone(), - ); + let text = self.parse(Some(&cx.editor.theme)); let par = Paragraph::new(text) .wrap(Wrap { trim: false }) @@ -232,7 +246,8 @@ impl Component for Markdown { if padding >= viewport.1 || padding >= viewport.0 { return None; } - let contents = parse(&self.contents, None, self.config_loader.clone()); + let contents = self.parse(None); + // TODO: account for tab width let max_text_width = (viewport.0 - padding).min(120); let mut text_width = 0; -- cgit v1.2.3-70-g09d2 From e7f5ec55617f9f6cddfd78b3c42960f757705f2b Mon Sep 17 00:00:00 2001 From: Blaž Hrastnik Date: Mon, 7 Feb 2022 10:28:05 +0900 Subject: fix: There is no such thing as markup.normal, use ui.text --- helix-term/src/ui/markdown.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) (limited to 'helix-term/src/ui/markdown.rs') diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 003266d3..6a7b641a 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -22,7 +22,6 @@ pub struct Markdown { config_loader: Arc, - text_style: String, block_style: String, heading_style: String, } @@ -35,14 +34,12 @@ impl Markdown { Self { contents, config_loader, - text_style: "markup.normal".into(), block_style: "markup.raw.inline".into(), heading_style: "markup.heading".into(), } } pub fn style_group(mut self, suffix: &str) -> Self { - self.text_style = format!("markup.normal.{}", suffix); self.block_style = format!("markup.raw.inline.{}", suffix); self.heading_style = format!("markup.heading.{}", suffix); self @@ -78,7 +75,7 @@ impl Markdown { .unwrap_or_default() }; } - let text_style = get_theme!(self.text_style); + let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default(); let code_style = get_theme!(self.block_style); let heading_style = get_theme!(self.heading_style); -- cgit v1.2.3-70-g09d2
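
The two commits above move the Markdown component's styling from hard-coded editor scopes (`ui.text.focus` for raw blocks, `ui.linenr.selected` for headings) to `markup.*` scopes that each popup can narrow via the new `style_group` builder, with the follow-up commit keeping plain `ui.text` for body text instead of `markup.normal`. The snippet below is a minimal, self-contained sketch of that scope-selection logic, not the actual helix-term code: `Theme` here is a toy `HashMap` standing in for the real theme type, `resolve` is a hypothetical helper that mirrors the `try_get(...).unwrap_or_default()` fallback visible in the diff, and the colour names are made up for illustration.

```rust
use std::collections::HashMap;

/// Toy stand-in for a theme: scope name -> colour name.
/// (The real helix Theme resolves scope names to `Style` values.)
type Theme = HashMap<&'static str, &'static str>;

/// Simplified mirror of the Markdown component after #1363 and the
/// follow-up fix: body text always uses `ui.text`, while raw blocks and
/// headings use `markup.*` scopes that can be suffixed per UI element.
struct Markdown {
    block_style: String,
    heading_style: String,
}

impl Markdown {
    fn new() -> Self {
        Self {
            block_style: "markup.raw.inline".into(),
            heading_style: "markup.heading".into(),
        }
    }

    /// Narrow the scopes to one UI element, e.g. "hover" or "completion",
    /// in the same way as `Markdown::style_group` in the diff above.
    fn style_group(mut self, suffix: &str) -> Self {
        self.block_style = format!("markup.raw.inline.{}", suffix);
        self.heading_style = format!("markup.heading.{}", suffix);
        self
    }
}

/// Hypothetical helper (not helix API) mirroring the macro in the diff:
/// an unknown scope resolves to a default style rather than erroring.
fn resolve<'a>(theme: &'a Theme, scope: &str) -> &'a str {
    theme.get(scope).copied().unwrap_or("default")
}

fn main() {
    // A theme that defines the generic scopes plus one hover override.
    let theme: Theme = HashMap::from([
        ("ui.text", "white"),
        ("markup.heading", "lilac"),
        ("markup.raw.inline", "grey"),
        ("markup.heading.hover", "yellow"),
    ]);

    let hover = Markdown::new().style_group("hover");

    // `markup.heading.hover` is themed, so the suffixed scope is used ...
    assert_eq!(resolve(&theme, &hover.heading_style), "yellow");
    // ... while the un-themed `markup.raw.inline.hover` falls back to default.
    assert_eq!(resolve(&theme, &hover.block_style), "default");
    // Body text always goes through `ui.text`, per the follow-up commit.
    assert_eq!(resolve(&theme, "ui.text"), "white");

    println!("scope resolution behaves as expected");
}
```

Assuming `try_get` is an exact lookup (as the `unwrap_or_default` fallback in the macro suggests), a missing suffixed scope renders with the default style rather than inheriting from the unsuffixed one, so a theme that wants distinct hover or completion colours defines keys such as `markup.heading.hover` or `markup.raw.inline.completion` explicitly, which is why the themes.md hunk lists them per popup.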