From 29c053e84e2624feb786f520ebae4c752bc23279 Mon Sep 17 00:00:00 2001 From: Kirawi Date: Wed, 8 Dec 2021 02:11:18 -0500 Subject: Only use a single documentation popup (#1241) --- helix-term/src/ui/completion.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'helix-term/src/ui/completion.rs') diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index dd782d29..fcd63199 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -168,7 +168,7 @@ impl Completion { } }; }); - let popup = Popup::new(menu); + let popup = Popup::new("completion", menu); let mut completion = Self { popup, start_offset, -- cgit v1.2.3-70-g09d2 From e1889261381cfc7a5980d84b3d6bd061c56e3ba2 Mon Sep 17 00:00:00 2001 From: Midnight Exigent Date: Tue, 14 Dec 2021 02:14:23 +0100 Subject: Fix panic when scrolling through completion popup (#1260) * fix(completion_popup): Fixes #1256 * Update helix-term/src/ui/completion.rs Co-authored-by: Blaž Hrastnik --- helix-term/src/ui/completion.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'helix-term/src/ui/completion.rs') diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index fcd63199..a55201ff 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -328,8 +328,8 @@ impl Component for Completion { let y = popup_y; if let Some((rel_width, rel_height)) = markdown_doc.required_size((width, height)) { - width = rel_width; - height = rel_height; + width = rel_width.min(width); + height = rel_height.min(height); } Rect::new(x, y, width, height) } else { -- cgit v1.2.3-70-g09d2 From 5d7b5db8ab284e0c2a41e6fbda08857f87406780 Mon Sep 17 00:00:00 2001 From: Gabriel Berto Date: Sat, 25 Dec 2021 07:00:57 -0300 Subject: Resolve completion item (#1315) Co-authored-by: Gabriel Berto --- helix-lsp/src/client.rs | 8 ++++++++ helix-term/src/ui/completion.rs | 40 ++++++++++++++++++++++++++++++++++++++-- 2 files changed, 46 insertions(+), 2 deletions(-) (limited to 'helix-term/src/ui/completion.rs') diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs index f1de8752..43804daa 100644 --- a/helix-lsp/src/client.rs +++ b/helix-lsp/src/client.rs @@ -556,6 +556,14 @@ impl Client { self.call::(params) } + pub async fn resolve_completion_item( + &self, + completion_item: lsp::CompletionItem, + ) -> Result { + self.request::(completion_item) + .await + } + pub fn text_document_signature_help( &self, text_document: lsp::TextDocumentIdentifier, diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index a55201ff..274330c0 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -154,8 +154,19 @@ impl Completion { ); doc.apply(&transaction, view.id); - if let Some(additional_edits) = &item.additional_text_edits { - // gopls uses this to add extra imports + // apply additional edits, mostly used to auto import unqualified types + let resolved_additional_text_edits = if item.additional_text_edits.is_some() { + None + } else { + Completion::resolve_completion_item(doc, item.clone()) + .and_then(|item| item.additional_text_edits) + }; + + if let Some(additional_edits) = item + .additional_text_edits + .as_ref() + .or_else(|| resolved_additional_text_edits.as_ref()) + { if !additional_edits.is_empty() { let transaction = util::generate_transaction_from_edits( doc.text(), @@ -181,6 +192,31 @@ impl Completion { completion } + fn resolve_completion_item( + doc: &Document, + completion_item: 
lsp::CompletionItem, + ) -> Option { + let language_server = doc.language_server()?; + let completion_resolve_provider = language_server + .capabilities() + .completion_provider + .as_ref()? + .resolve_provider; + if completion_resolve_provider != Some(true) { + return None; + } + + let future = language_server.resolve_completion_item(completion_item); + let response = helix_lsp::block_on(future); + match response { + Ok(completion_item) => Some(completion_item), + Err(err) => { + log::error!("execute LSP command: {}", err); + None + } + } + } + pub fn recompute_filter(&mut self, editor: &Editor) { // recompute menu based on matches let menu = self.popup.contents_mut(); -- cgit v1.2.3-70-g09d2 From e7eab95b943ff15396c5d512a9c95650ab98a902 Mon Sep 17 00:00:00 2001 From: Blaž Hrastnik Date: Fri, 14 Jan 2022 12:25:59 +0900 Subject: Update to rust 1.58, fix a bunch of optional lints --- flake.lock | 24 ++++++++++++------------ helix-core/src/increment/date_time.rs | 32 ++++++++++++++++---------------- helix-term/src/commands.rs | 35 ++++++++++++++++------------------- helix-term/src/job.rs | 14 +++++++------- helix-term/src/keymap.rs | 8 ++++---- helix-term/src/ui/completion.rs | 2 +- helix-tui/src/widgets/block.rs | 10 +++++----- helix-tui/src/widgets/paragraph.rs | 2 +- helix-view/src/info.rs | 4 ++-- 9 files changed, 64 insertions(+), 67 deletions(-) (limited to 'helix-term/src/ui/completion.rs') diff --git a/flake.lock b/flake.lock index 606a72f3..94e443e3 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "devshell": { "locked": { - "lastModified": 1640301433, - "narHash": "sha256-eplae8ZNiEmxbOgwUn9IihaJfEUxoUilkwciRPGskYE=", + "lastModified": 1641980203, + "narHash": "sha256-RiWJ3+6V267Ji+P54K1Xrj1Nsah9BfG/aLfIhqgVyBY=", "owner": "numtide", "repo": "devshell", - "rev": "f87fb932740abe1c1b46f6edd8a36ade17881666", + "rev": "d897c1ddb4eab66cc2b783c7868d78555b9880ad", "type": "github" }, "original": { @@ -41,11 +41,11 @@ ] }, "locked": { - "lastModified": 1641449444, - "narHash": "sha256-InqsyCVafPqXmK7YqUfFVpb6eVYJWUWbYXEvey0J+3c=", + "lastModified": 1642054253, + "narHash": "sha256-kHh9VmaB7gbS6pheheC4x0uT84LEmhfbsbWEQJgU2E4=", "owner": "yusdacra", "repo": "nix-cargo-integration", - "rev": "d1aa15a832db331a97082b7f8c7da737a6789c77", + "rev": "f8fa9af990195a3f63fe2dde84aa187e193da793", "type": "github" }, "original": { @@ -56,11 +56,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1639699734, - "narHash": "sha256-tlX6WebGmiHb2Hmniff+ltYp+7dRfdsBxw9YczLsP60=", + "lastModified": 1641887635, + "narHash": "sha256-kDGpufwzVaiGe5e1sBUBPo9f1YN+nYHJlYqCaVpZTQQ=", "owner": "nixos", "repo": "nixpkgs", - "rev": "03ec468b14067729a285c2c7cfa7b9434a04816c", + "rev": "b2737d4980a17cc2b7d600d7d0b32fd7333aca88", "type": "github" }, "original": { @@ -99,11 +99,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 1639880499, - "narHash": "sha256-/BibDmFwgWuuTUkNVO6YlvuTSWM9dpBvlZoTAPs7ORI=", + "lastModified": 1642128126, + "narHash": "sha256-av8JUACdrTfQYl/ftZJvKpZEmZfa0avCq7tt5Usdoq0=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "c6c83589ae048af20d93d01eb07a4176012093d0", + "rev": "ce4ef6f2d74f2b68f7547df1de22d1b0037ce4ad", "type": "github" }, "original": { diff --git a/helix-core/src/increment/date_time.rs b/helix-core/src/increment/date_time.rs index e3cfe107..1703c3ba 100644 --- a/helix-core/src/increment/date_time.rs +++ b/helix-core/src/increment/date_time.rs @@ -195,82 +195,82 @@ struct DateField { impl DateField { fn from_specifier(specifier: &str) -> Option { 
match specifier { - "Y" => Some(DateField { + "Y" => Some(Self { regex: r"\d{4}", unit: DateUnit::Years, max_len: 5, }), - "y" => Some(DateField { + "y" => Some(Self { regex: r"\d\d", unit: DateUnit::Years, max_len: 2, }), - "m" => Some(DateField { + "m" => Some(Self { regex: r"[0-1]\d", unit: DateUnit::Months, max_len: 2, }), - "d" => Some(DateField { + "d" => Some(Self { regex: r"[0-3]\d", unit: DateUnit::Days, max_len: 2, }), - "-d" => Some(DateField { + "-d" => Some(Self { regex: r"[1-3]?\d", unit: DateUnit::Days, max_len: 2, }), - "a" => Some(DateField { + "a" => Some(Self { regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat", unit: DateUnit::Days, max_len: 3, }), - "A" => Some(DateField { + "A" => Some(Self { regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday", unit: DateUnit::Days, max_len: 9, }), - "b" | "h" => Some(DateField { + "b" | "h" => Some(Self { regex: r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec", unit: DateUnit::Months, max_len: 3, }), - "B" => Some(DateField { + "B" => Some(Self { regex: r"January|February|March|April|May|June|July|August|September|October|November|December", unit: DateUnit::Months, max_len: 9, }), - "H" => Some(DateField { + "H" => Some(Self { regex: r"[0-2]\d", unit: DateUnit::Hours, max_len: 2, }), - "M" => Some(DateField { + "M" => Some(Self { regex: r"[0-5]\d", unit: DateUnit::Minutes, max_len: 2, }), - "S" => Some(DateField { + "S" => Some(Self { regex: r"[0-5]\d", unit: DateUnit::Seconds, max_len: 2, }), - "I" => Some(DateField { + "I" => Some(Self { regex: r"[0-1]\d", unit: DateUnit::Hours, max_len: 2, }), - "-I" => Some(DateField { + "-I" => Some(Self { regex: r"1?\d", unit: DateUnit::Hours, max_len: 2, }), - "P" => Some(DateField { + "P" => Some(Self { regex: r"am|pm", unit: DateUnit::AmPm, max_len: 2, }), - "p" => Some(DateField { + "p" => Some(Self { regex: r"AM|PM", unit: DateUnit::AmPm, max_len: 2, diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index c14216c0..9ac12931 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -173,7 +173,7 @@ macro_rules! static_commands { impl MappableCommand { pub fn execute(&self, cx: &mut Context) { match &self { - MappableCommand::Typable { name, args, doc: _ } => { + Self::Typable { name, args, doc: _ } => { let args: Vec> = args.iter().map(Cow::from).collect(); if let Some(command) = cmd::TYPABLE_COMMAND_MAP.get(name.as_str()) { let mut cx = compositor::Context { @@ -186,21 +186,21 @@ impl MappableCommand { } } } - MappableCommand::Static { fun, .. } => (fun)(cx), + Self::Static { fun, .. } => (fun)(cx), } } pub fn name(&self) -> &str { match &self { - MappableCommand::Typable { name, .. } => name, - MappableCommand::Static { name, .. } => name, + Self::Typable { name, .. } => name, + Self::Static { name, .. } => name, } } pub fn doc(&self) -> &str { match &self { - MappableCommand::Typable { doc, .. } => doc, - MappableCommand::Static { doc, .. } => doc, + Self::Typable { doc, .. } => doc, + Self::Static { doc, .. 
} => doc, } } @@ -3494,11 +3494,9 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { match op { ResourceOp::Create(op) => { let path = op.uri.to_file_path().unwrap(); - let ignore_if_exists = if let Some(options) = &op.options { + let ignore_if_exists = op.options.as_ref().map_or(false, |options| { !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) - } else { - false - }; + }); if ignore_if_exists && path.exists() { Ok(()) } else { @@ -3508,11 +3506,12 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { ResourceOp::Delete(op) => { let path = op.uri.to_file_path().unwrap(); if path.is_dir() { - let recursive = if let Some(options) = &op.options { - options.recursive.unwrap_or(false) - } else { - false - }; + let recursive = op + .options + .as_ref() + .and_then(|options| options.recursive) + .unwrap_or(false); + if recursive { fs::remove_dir_all(&path) } else { @@ -3527,11 +3526,9 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> { ResourceOp::Rename(op) => { let from = op.old_uri.to_file_path().unwrap(); let to = op.new_uri.to_file_path().unwrap(); - let ignore_if_exists = if let Some(options) = &op.options { + let ignore_if_exists = op.options.as_ref().map_or(false, |options| { !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false) - } else { - false - }; + }); if ignore_if_exists && to.exists() { Ok(()) } else { diff --git a/helix-term/src/job.rs b/helix-term/src/job.rs index 4fa38174..f5a0a425 100644 --- a/helix-term/src/job.rs +++ b/helix-term/src/job.rs @@ -22,8 +22,8 @@ pub struct Jobs { } impl Job { - pub fn new> + Send + 'static>(f: F) -> Job { - Job { + pub fn new> + Send + 'static>(f: F) -> Self { + Self { future: f.map(|r| r.map(|()| None)).boxed(), wait: false, } @@ -31,22 +31,22 @@ impl Job { pub fn with_callback> + Send + 'static>( f: F, - ) -> Job { - Job { + ) -> Self { + Self { future: f.map(|r| r.map(Some)).boxed(), wait: false, } } - pub fn wait_before_exiting(mut self) -> Job { + pub fn wait_before_exiting(mut self) -> Self { self.wait = true; self } } impl Jobs { - pub fn new() -> Jobs { - Jobs::default() + pub fn new() -> Self { + Self::default() } pub fn spawn> + Send + 'static>(&mut self, f: F) { diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs index c4bd25ed..79a06206 100644 --- a/helix-term/src/keymap.rs +++ b/helix-term/src/keymap.rs @@ -344,7 +344,7 @@ pub struct Keymap { impl Keymap { pub fn new(root: KeyTrie) -> Self { - Keymap { + Self { root, state: Vec::new(), sticky: None, @@ -368,7 +368,7 @@ impl Keymap { /// key cancels pending keystrokes. If there are no pending keystrokes but a /// sticky node is in use, it will be cleared. 
pub fn get(&mut self, key: KeyEvent) -> KeymapResult { - if let key!(Esc) = key { + if key!(Esc) == key { if !self.state.is_empty() { return KeymapResult::new( // Note that Esc is not included here @@ -477,7 +477,7 @@ impl DerefMut for Keymaps { } impl Default for Keymaps { - fn default() -> Keymaps { + fn default() -> Self { let normal = keymap!({ "Normal mode" "h" | "left" => move_char_left, "j" | "down" => move_line_down, @@ -784,7 +784,7 @@ impl Default for Keymaps { "C-x" => completion, "C-r" => insert_register, }); - Keymaps(hashmap!( + Self(hashmap!( Mode::Normal => Keymap::new(normal), Mode::Select => Keymap::new(select), Mode::Insert => Keymap::new(insert), diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index 274330c0..c9ed3b4a 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -158,7 +158,7 @@ impl Completion { let resolved_additional_text_edits = if item.additional_text_edits.is_some() { None } else { - Completion::resolve_completion_item(doc, item.clone()) + Self::resolve_completion_item(doc, item.clone()) .and_then(|item| item.additional_text_edits) }; diff --git a/helix-tui/src/widgets/block.rs b/helix-tui/src/widgets/block.rs index d43ad41d..26223c3e 100644 --- a/helix-tui/src/widgets/block.rs +++ b/helix-tui/src/widgets/block.rs @@ -15,12 +15,12 @@ pub enum BorderType { } impl BorderType { - pub fn line_symbols(border_type: BorderType) -> line::Set { + pub fn line_symbols(border_type: Self) -> line::Set { match border_type { - BorderType::Plain => line::NORMAL, - BorderType::Rounded => line::ROUNDED, - BorderType::Double => line::DOUBLE, - BorderType::Thick => line::THICK, + Self::Plain => line::NORMAL, + Self::Rounded => line::ROUNDED, + Self::Double => line::DOUBLE, + Self::Thick => line::THICK, } } } diff --git a/helix-tui/src/widgets/paragraph.rs b/helix-tui/src/widgets/paragraph.rs index 8bafbb9a..4e839162 100644 --- a/helix-tui/src/widgets/paragraph.rs +++ b/helix-tui/src/widgets/paragraph.rs @@ -166,7 +166,7 @@ impl<'a> Widget for Paragraph<'a> { Box::new(WordWrapper::new(&mut styled, text_area.width, trim)) } else { let mut line_composer = Box::new(LineTruncator::new(&mut styled, text_area.width)); - if let Alignment::Left = self.alignment { + if self.alignment == Alignment::Left { line_composer.set_horizontal_offset(self.scroll.1); } line_composer diff --git a/helix-view/src/info.rs b/helix-view/src/info.rs index b5a002fa..73856154 100644 --- a/helix-view/src/info.rs +++ b/helix-view/src/info.rs @@ -16,7 +16,7 @@ pub struct Info { } impl Info { - pub fn new(title: &str, body: Vec<(&str, BTreeSet)>) -> Info { + pub fn new(title: &str, body: Vec<(&str, BTreeSet)>) -> Self { let body = body .into_iter() .map(|(desc, events)| { @@ -38,7 +38,7 @@ impl Info { ); } - Info { + Self { title: title.to_string(), width: text.lines().map(|l| l.width()).max().unwrap() as u16, height: body.len() as u16, -- cgit v1.2.3-70-g09d2 From d49e5323f9230f3195d3ee4c5e682cd6d8c2cb1a Mon Sep 17 00:00:00 2001 From: CossonLeo Date: Mon, 24 Jan 2022 09:41:25 +0800 Subject: Use markup scopes for the Markdown component (#1363) --- book/src/themes.md | 12 ++ helix-term/src/commands.rs | 3 +- helix-term/src/ui/completion.rs | 9 +- helix-term/src/ui/markdown.rs | 313 +++++++++++++++++++++------------------- 4 files changed, 184 insertions(+), 153 deletions(-) (limited to 'helix-term/src/ui/completion.rs') diff --git a/book/src/themes.md b/book/src/themes.md index 1325de8c..9abcfe8c 100644 --- a/book/src/themes.md +++ 
b/book/src/themes.md @@ -190,6 +190,18 @@ We use a similar set of scopes as These scopes are used for theming the editor interface. +- `markup` + - `normal` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + - `heading` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + - `raw` + - `inline` + - `completion` - for completion doc popup ui + - `hover` - for hover popup ui + | Key | Notes | | --- | --- | diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index fc0db6ed..7144ebb9 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -5455,7 +5455,8 @@ fn hover(cx: &mut Context) { // skip if contents empty - let contents = ui::Markdown::new(contents, editor.syn_loader.clone()); + let contents = + ui::Markdown::new(contents, editor.syn_loader.clone()).style_group("hover"); let popup = Popup::new("hover", contents); if let Some(doc_popup) = compositor.find_id("hover") { *doc_popup = popup; diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs index c9ed3b4a..35afe81e 100644 --- a/helix-term/src/ui/completion.rs +++ b/helix-term/src/ui/completion.rs @@ -304,6 +304,9 @@ impl Component for Completion { let cursor_pos = doc.selection(view.id).primary().cursor(text); let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width()); let cursor_pos = (coords.row - view.offset.row) as u16; + + let markdown_ui = + |content, syn_loader| Markdown::new(content, syn_loader).style_group("completion"); let mut markdown_doc = match &option.documentation { Some(lsp::Documentation::String(contents)) | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { @@ -311,7 +314,7 @@ impl Component for Completion { value: contents, })) => { // TODO: convert to wrapped text - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```\n{}", language, @@ -326,7 +329,7 @@ impl Component for Completion { value: contents, })) => { // TODO: set language based on doc scope - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```\n{}", language, @@ -340,7 +343,7 @@ impl Component for Completion { // TODO: copied from above // TODO: set language based on doc scope - Markdown::new( + markdown_ui( format!( "```{}\n{}\n```", language, diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs index 00da2c11..003266d3 100644 --- a/helix-term/src/ui/markdown.rs +++ b/helix-term/src/ui/markdown.rs @@ -21,6 +21,10 @@ pub struct Markdown { contents: String, config_loader: Arc, + + text_style: String, + block_style: String, + heading_style: String, } // TODO: pre-render and self reference via Pin @@ -31,121 +35,139 @@ impl Markdown { Self { contents, config_loader, + text_style: "markup.normal".into(), + block_style: "markup.raw.inline".into(), + heading_style: "markup.heading".into(), } } -} -fn parse<'a>( - contents: &'a str, - theme: Option<&Theme>, - loader: Arc, -) -> tui::text::Text<'a> { - // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} - // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. 
This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; - - let mut options = Options::empty(); - options.insert(Options::ENABLE_STRIKETHROUGH); - let parser = Parser::new_ext(contents, options); - - // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda - let mut tags = Vec::new(); - let mut spans = Vec::new(); - let mut lines = Vec::new(); - - fn to_span(text: pulldown_cmark::CowStr) -> Span { - use std::ops::Deref; - Span::raw::>(match text { - CowStr::Borrowed(s) => s.into(), - CowStr::Boxed(s) => s.to_string().into(), - 
CowStr::Inlined(s) => s.deref().to_owned().into(), - }) + pub fn style_group(mut self, suffix: &str) -> Self { + self.text_style = format!("markup.normal.{}", suffix); + self.block_style = format!("markup.raw.inline.{}", suffix); + self.heading_style = format!("markup.heading.{}", suffix); + self } - let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default(); - - // TODO: use better scopes for these, `markup.raw.block`, `markup.heading` - let code_style = theme - .map(|theme| theme.get("ui.text.focus")) - .unwrap_or_default(); // white - let heading_style = theme - .map(|theme| theme.get("ui.linenr.selected")) - .unwrap_or_default(); // lilac - - for event in parser { - match event { - Event::Start(tag) => tags.push(tag), - Event::End(tag) => { - tags.pop(); - match tag { - Tag::Heading(_, _, _) - | Tag::Paragraph - | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { - // whenever code block or paragraph closes, new line - let spans = std::mem::take(&mut spans); - if !spans.is_empty() { - lines.push(Spans::from(spans)); + fn parse(&self, theme: Option<&Theme>) -> tui::text::Text<'_> { + // // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}} + // let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec` on the left-hand side. 
This is because\nwe could collect into, for example, a [`VecDeque`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```"; + + let mut options = Options::empty(); + options.insert(Options::ENABLE_STRIKETHROUGH); + let parser = Parser::new_ext(&self.contents, options); + + // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda + let mut tags = Vec::new(); + let mut spans = Vec::new(); + let mut lines = Vec::new(); + + fn to_span(text: pulldown_cmark::CowStr) -> Span { + use std::ops::Deref; + Span::raw::>(match text { + CowStr::Borrowed(s) => s.into(), + CowStr::Boxed(s) => s.to_string().into(), + CowStr::Inlined(s) => s.deref().to_owned().into(), + }) + } + + macro_rules! 
get_theme { + ($s1: expr) => { + theme + .map(|theme| theme.try_get($s1.as_str())) + .flatten() + .unwrap_or_default() + }; + } + let text_style = get_theme!(self.text_style); + let code_style = get_theme!(self.block_style); + let heading_style = get_theme!(self.heading_style); + + for event in parser { + match event { + Event::Start(tag) => tags.push(tag), + Event::End(tag) => { + tags.pop(); + match tag { + Tag::Heading(_, _, _) + | Tag::Paragraph + | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { + // whenever code block or paragraph closes, new line + let spans = std::mem::take(&mut spans); + if !spans.is_empty() { + lines.push(Spans::from(spans)); + } + lines.push(Spans::default()); } - lines.push(Spans::default()); + _ => (), } - _ => (), } - } - Event::Text(text) => { - // TODO: temp workaround - if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { - if let Some(theme) = theme { - let rope = Rope::from(text.as_ref()); - let syntax = loader - .language_configuration_for_injection_string(language) - .and_then(|config| config.highlight_config(theme.scopes())) - .map(|config| Syntax::new(&rope, config, loader.clone())); - - if let Some(syntax) = syntax { - // if we have a syntax available, highlight_iter and generate spans - let mut highlights = Vec::new(); - - for event in syntax.highlight_iter(rope.slice(..), None, None) { - match event.unwrap() { - HighlightEvent::HighlightStart(span) => { - highlights.push(span); - } - HighlightEvent::HighlightEnd => { - highlights.pop(); - } - HighlightEvent::Source { start, end } => { - let style = match highlights.first() { - Some(span) => theme.get(&theme.scopes()[span.0]), - None => text_style, - }; - - // TODO: replace tabs with indentation - - let mut slice = &text[start..end]; - // TODO: do we need to handle all unicode line endings - // here, or is just '\n' okay? - while let Some(end) = slice.find('\n') { - // emit span up to newline - let text = &slice[..end]; - let text = text.replace('\t', " "); // replace tabs - let span = Span::styled(text, style); - spans.push(span); - - // truncate slice to after newline - slice = &slice[end + 1..]; - - // make a new line - let spans = std::mem::take(&mut spans); - lines.push(Spans::from(spans)); + Event::Text(text) => { + // TODO: temp workaround + if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { + if let Some(theme) = theme { + let rope = Rope::from(text.as_ref()); + let syntax = self + .config_loader + .language_configuration_for_injection_string(language) + .and_then(|config| config.highlight_config(theme.scopes())) + .map(|config| { + Syntax::new(&rope, config, self.config_loader.clone()) + }); + + if let Some(syntax) = syntax { + // if we have a syntax available, highlight_iter and generate spans + let mut highlights = Vec::new(); + + for event in syntax.highlight_iter(rope.slice(..), None, None) { + match event.unwrap() { + HighlightEvent::HighlightStart(span) => { + highlights.push(span); } + HighlightEvent::HighlightEnd => { + highlights.pop(); + } + HighlightEvent::Source { start, end } => { + let style = match highlights.first() { + Some(span) => theme.get(&theme.scopes()[span.0]), + None => text_style, + }; + + // TODO: replace tabs with indentation + + let mut slice = &text[start..end]; + // TODO: do we need to handle all unicode line endings + // here, or is just '\n' okay? 
+ while let Some(end) = slice.find('\n') { + // emit span up to newline + let text = &slice[..end]; + let text = text.replace('\t', " "); // replace tabs + let span = Span::styled(text, style); + spans.push(span); + + // truncate slice to after newline + slice = &slice[end + 1..]; + + // make a new line + let spans = std::mem::take(&mut spans); + lines.push(Spans::from(spans)); + } - // if there's anything left, emit it too - if !slice.is_empty() { - let span = - Span::styled(slice.replace('\t', " "), style); - spans.push(span); + // if there's anything left, emit it too + if !slice.is_empty() { + let span = Span::styled( + slice.replace('\t', " "), + style, + ); + spans.push(span); + } } } } + } else { + for line in text.lines() { + let span = Span::styled(line.to_string(), code_style); + lines.push(Spans::from(span)); + } } } else { for line in text.lines() { @@ -153,68 +175,60 @@ fn parse<'a>( lines.push(Spans::from(span)); } } + } else if let Some(Tag::Heading(_, _, _)) = tags.last() { + let mut span = to_span(text); + span.style = heading_style; + spans.push(span); } else { - for line in text.lines() { - let span = Span::styled(line.to_string(), code_style); - lines.push(Spans::from(span)); - } + let mut span = to_span(text); + span.style = text_style; + spans.push(span); } - } else if let Some(Tag::Heading(_, _, _)) = tags.last() { - let mut span = to_span(text); - span.style = heading_style; - spans.push(span); - } else { + } + Event::Code(text) | Event::Html(text) => { let mut span = to_span(text); - span.style = text_style; + span.style = code_style; spans.push(span); } + Event::SoftBreak | Event::HardBreak => { + // let spans = std::mem::replace(&mut spans, Vec::new()); + // lines.push(Spans::from(spans)); + spans.push(Span::raw(" ")); + } + Event::Rule => { + let mut span = Span::raw("---"); + span.style = code_style; + lines.push(Spans::from(span)); + lines.push(Spans::default()); + } + // TaskListMarker(bool) true if checked + _ => { + log::warn!("unhandled markdown event {:?}", event); + } } - Event::Code(text) | Event::Html(text) => { - let mut span = to_span(text); - span.style = code_style; - spans.push(span); - } - Event::SoftBreak | Event::HardBreak => { - // let spans = std::mem::replace(&mut spans, Vec::new()); - // lines.push(Spans::from(spans)); - spans.push(Span::raw(" ")); - } - Event::Rule => { - let mut span = Span::raw("---"); - span.style = code_style; - lines.push(Spans::from(span)); - lines.push(Spans::default()); - } - // TaskListMarker(bool) true if checked - _ => { - log::warn!("unhandled markdown event {:?}", event); - } + // build up a vec of Paragraph tui widgets } - // build up a vec of Paragraph tui widgets - } - if !spans.is_empty() { - lines.push(Spans::from(spans)); - } + if !spans.is_empty() { + lines.push(Spans::from(spans)); + } - // if last line is empty, remove it - if let Some(line) = lines.last() { - if line.0.is_empty() { - lines.pop(); + // if last line is empty, remove it + if let Some(line) = lines.last() { + if line.0.is_empty() { + lines.pop(); + } } - } - Text::from(lines) + Text::from(lines) + } } + impl Component for Markdown { fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { use tui::widgets::{Paragraph, Widget, Wrap}; - let text = parse( - &self.contents, - Some(&cx.editor.theme), - self.config_loader.clone(), - ); + let text = self.parse(Some(&cx.editor.theme)); let par = Paragraph::new(text) .wrap(Wrap { trim: false }) @@ -232,7 +246,8 @@ impl Component for Markdown { if padding >= viewport.1 || 
padding >= viewport.0 { return None; } - let contents = parse(&self.contents, None, self.config_loader.clone()); + let contents = self.parse(None); + // TODO: account for tab width let max_text_width = (viewport.0 - padding).min(120); let mut text_width = 0; -- cgit v1.2.3-70-g09d2
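
The core of "Resolve completion item (#1315)" above is a capability-gated fallback: additional text edits (typically auto-imports) are taken from the completion item itself when the server sent them up front, and are otherwise fetched lazily via `completionItem/resolve`, guarded by the server's advertised `resolveProvider` capability and falling back to the unresolved item on any error. The sketch below mirrors that flow with simplified stand-in types; none of these structs or methods are the real helix-lsp / lsp-types definitions, and the synchronous `resolve` stands in for the async request plus `helix_lsp::block_on` used in the actual client.

```rust
// Minimal sketch of the resolve-and-fallback pattern; all types here are
// simplified stand-ins for illustration, not the helix-lsp API.

#[derive(Clone)]
struct CompletionItem {
    label: String,
    // e.g. an auto-import edit the server may only provide on resolve
    additional_text_edits: Option<Vec<String>>,
}

struct ServerCapabilities {
    resolve_provider: Option<bool>,
}

struct Client {
    capabilities: ServerCapabilities,
}

impl Client {
    // Stand-in for the `completionItem/resolve` round trip.
    fn resolve(&self, item: CompletionItem) -> Result<CompletionItem, String> {
        Ok(CompletionItem {
            additional_text_edits: Some(vec!["use std::collections::HashMap;".into()]),
            ..item
        })
    }
}

/// Only ask the server to resolve when it advertises `resolveProvider`;
/// on any failure, keep the unresolved item (i.e. return None here).
fn resolve_if_supported(client: &Client, item: &CompletionItem) -> Option<CompletionItem> {
    if client.capabilities.resolve_provider != Some(true) {
        return None;
    }
    client.resolve(item.clone()).ok()
}

fn main() {
    let client = Client {
        capabilities: ServerCapabilities {
            resolve_provider: Some(true),
        },
    };
    let item = CompletionItem {
        label: "HashMap".into(),
        additional_text_edits: None,
    };

    // Prefer edits already attached to the item; otherwise try a late resolve,
    // mirroring the `additional_text_edits.is_some()` check in the patch.
    let edits = item.additional_text_edits.clone().or_else(|| {
        resolve_if_supported(&client, &item).and_then(|resolved| resolved.additional_text_edits)
    });
    println!("{}: additional edits to apply: {:?}", item.label, edits);
}
```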