-rw-r--r--  .github/ISSUE_TEMPLATE/bug_report.md | 1
-rw-r--r--  .gitmodules | 16
-rw-r--r--  Cargo.lock | 85
-rw-r--r--  Cargo.toml | 1
-rw-r--r--  TODO.md | 13
-rw-r--r--  base16_theme.toml | 11
-rw-r--r--  book/src/generated/lang-support.md | 5
-rw-r--r--  book/src/generated/typable-cmd.md | 1
-rw-r--r--  book/src/keymap.md | 7
-rw-r--r--  book/src/usage.md | 2
-rw-r--r--  flake.lock | 24
-rw-r--r--  flake.nix | 81
-rw-r--r--  helix-core/Cargo.toml | 3
-rw-r--r--  helix-core/src/auto_pairs.rs | 255
-rw-r--r--  helix-core/src/diff.rs | 4
-rw-r--r--  helix-core/src/graphemes.rs | 86
-rw-r--r--  helix-core/src/increment/date_time.rs | 32
-rw-r--r--  helix-core/src/indent.rs | 2
-rw-r--r--  helix-core/src/object.rs | 90
-rw-r--r--  helix-core/src/syntax.rs | 1237
m---------  helix-syntax/languages/tree-sitter-elm | 0
m---------  helix-syntax/languages/tree-sitter-git-config | 0
m---------  helix-syntax/languages/tree-sitter-go | 0
m---------  helix-syntax/languages/tree-sitter-graphql | 0
m---------  helix-syntax/languages/tree-sitter-lean | 0
m---------  helix-syntax/languages/tree-sitter-php | 0
m---------  helix-syntax/languages/tree-sitter-zig | 0
-rw-r--r--  helix-term/Cargo.toml | 2
-rw-r--r--  helix-term/src/application.rs | 8
-rw-r--r--  helix-term/src/commands.rs | 146
-rw-r--r--  helix-term/src/job.rs | 14
-rw-r--r--  helix-term/src/keymap.rs | 19
-rw-r--r--  helix-term/src/ui/completion.rs | 2
-rw-r--r--  helix-term/src/ui/editor.rs | 45
-rw-r--r--  helix-term/src/ui/markdown.rs | 21
-rw-r--r--  helix-term/src/ui/menu.rs | 2
-rw-r--r--  helix-term/src/ui/picker.rs | 23
-rw-r--r--  helix-term/src/ui/prompt.rs | 2
-rw-r--r--  helix-tui/src/backend/test.rs | 3
-rw-r--r--  helix-tui/src/buffer.rs | 96
-rw-r--r--  helix-tui/src/widgets/block.rs | 26
-rw-r--r--  helix-tui/src/widgets/paragraph.rs | 4
-rw-r--r--  helix-view/Cargo.toml | 2
-rw-r--r--  helix-view/src/document.rs | 48
-rw-r--r--  helix-view/src/editor.rs | 19
-rw-r--r--  helix-view/src/graphics.rs | 8
-rw-r--r--  helix-view/src/info.rs | 4
-rw-r--r--  languages.toml | 71
-rw-r--r--  runtime/queries/bash/injections.scm | 2
-rw-r--r--  runtime/queries/c-sharp/injections.scm | 2
-rw-r--r--  runtime/queries/css/injections.scm | 2
-rw-r--r--  runtime/queries/dart/injections.scm | 2
-rw-r--r--  runtime/queries/elm/highlights.scm | 83
-rw-r--r--  runtime/queries/elm/injections.scm | 4
-rw-r--r--  runtime/queries/elm/locals.scm | 14
-rw-r--r--  runtime/queries/elm/tags.scm | 19
-rw-r--r--  runtime/queries/git-config/highlights.scm | 27
-rw-r--r--  runtime/queries/go/highlights.scm | 6
-rw-r--r--  runtime/queries/go/injections.scm | 2
-rw-r--r--  runtime/queries/graphql/highlights.scm | 163
-rw-r--r--  runtime/queries/haskell/injections.scm | 2
-rw-r--r--  runtime/queries/html/injections.scm | 3
-rw-r--r--  runtime/queries/java/injections.scm | 2
-rw-r--r--  runtime/queries/javascript/injections.scm | 8
-rw-r--r--  runtime/queries/julia/injections.scm | 6
-rw-r--r--  runtime/queries/latex/injections.scm | 2
-rw-r--r--  runtime/queries/lean/folds.scm | 15
-rw-r--r--  runtime/queries/lean/highlights.scm | 217
-rw-r--r--  runtime/queries/lean/injections.scm | 2
-rw-r--r--  runtime/queries/lean/locals.scm | 5
-rw-r--r--  runtime/queries/lua/injections.scm | 2
-rw-r--r--  runtime/queries/make/injections.scm | 2
-rw-r--r--  runtime/queries/ocaml-interface/injections.scm | 2
-rw-r--r--  runtime/queries/ocaml/injections.scm | 2
-rw-r--r--  runtime/queries/perl/injections.scm | 2
-rw-r--r--  runtime/queries/php/highlights.scm | 6
-rw-r--r--  runtime/queries/php/injections.scm | 3
-rw-r--r--  runtime/queries/protobuf/injections.scm | 2
-rw-r--r--  runtime/queries/ruby/injections.scm | 2
-rw-r--r--  runtime/queries/scala/injections.scm | 3
-rw-r--r--  runtime/queries/toml/injections.scm | 2
-rw-r--r--  runtime/queries/tsx/injections.scm | 1
-rw-r--r--  runtime/queries/typescript/injections.scm | 1
-rw-r--r--  runtime/queries/vue/injections.scm | 3
-rw-r--r--  runtime/queries/wgsl/injections.scm | 2
-rw-r--r--  runtime/queries/yaml/injections.scm | 2
-rw-r--r--  runtime/queries/zig/highlights.scm | 5
-rw-r--r--  runtime/queries/zig/indents.toml | 3
-rw-r--r--  runtime/queries/zig/injections.scm | 2
-rw-r--r--  runtime/themes/base16_default_dark.toml | 9
-rw-r--r--  runtime/themes/base16_default_light.toml | 9
-rw-r--r--  runtime/themes/base16_terminal.toml | 11
-rw-r--r--  runtime/themes/bogster.toml | 10
-rw-r--r--  runtime/themes/dark_plus.toml | 10
-rw-r--r--  runtime/themes/dracula.toml | 11
-rw-r--r--  runtime/themes/everforest_dark.toml | 10
-rw-r--r--  runtime/themes/everforest_light.toml | 10
-rw-r--r--  runtime/themes/gruvbox.toml | 8
-rw-r--r--  runtime/themes/gruvbox_light.toml | 96
-rw-r--r--  runtime/themes/ingrid.toml | 10
-rw-r--r--  runtime/themes/monokai.toml | 10
-rw-r--r--  runtime/themes/monokai_pro.toml | 8
-rw-r--r--  runtime/themes/monokai_pro_machine.toml | 8
-rw-r--r--  runtime/themes/monokai_pro_octagon.toml | 8
-rw-r--r--  runtime/themes/monokai_pro_ristretto.toml | 8
-rw-r--r--  runtime/themes/monokai_pro_spectrum.toml | 8
-rw-r--r--  runtime/themes/nord.toml | 10
-rw-r--r--  runtime/themes/onedark.toml | 2
-rw-r--r--  runtime/themes/rose_pine.toml | 2
-rw-r--r--  runtime/themes/rose_pine_dawn.toml | 2
-rw-r--r--  runtime/themes/solarized_dark.toml | 10
-rw-r--r--  runtime/themes/solarized_light.toml | 10
-rw-r--r--  runtime/themes/spacebones_light.toml | 10
113 files changed, 2380 insertions(+), 1066 deletions(-)
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 958407bb..41b00230 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -17,6 +17,7 @@ Please search on the issue tracker before creating one. -->
### Environment
- Platform: <!-- macOS / Windows / Linux -->
+- Terminal emulator:
- Helix version: <!-- 'hx -V' if using a release, 'git describe' if building from master -->
<details><summary>~/.cache/helix/helix.log</summary>
diff --git a/.gitmodules b/.gitmodules
index e6d9d654..780d1b63 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -190,6 +190,10 @@
path = helix-syntax/languages/tree-sitter-git-rebase
url = https://github.com/the-mikedavis/tree-sitter-git-rebase.git
shallow = true
+[submodule "helix-syntax/languages/tree-sitter-lean"]
+ path = helix-syntax/languages/tree-sitter-lean
+ url = https://github.com/Julian/tree-sitter-lean
+ shallow = true
[submodule "helix-syntax/languages/tree-sitter-regex"]
path = helix-syntax/languages/tree-sitter-regex
url = https://github.com/tree-sitter/tree-sitter-regex.git
@@ -198,3 +202,15 @@
path = helix-syntax/languages/tree-sitter-make
url = https://github.com/alemuller/tree-sitter-make
shallow = true
+[submodule "helix-syntax/languages/tree-sitter-git-config"]
+ path = helix-syntax/languages/tree-sitter-git-config
+ url = https://github.com/the-mikedavis/tree-sitter-git-config.git
+ shallow = true
+[submodule "helix-syntax/languages/tree-sitter-graphql"]
+ path = helix-syntax/languages/tree-sitter-graphql
+ url = https://github.com/bkegley/tree-sitter-graphql
+ shallow = true
+[submodule "helix-syntax/languages/tree-sitter-elm"]
+ path = helix-syntax/languages/tree-sitter-elm
+ url = https://github.com/elm-tooling/tree-sitter-elm
+ shallow = true
diff --git a/Cargo.lock b/Cargo.lock
index 1a01da82..6e5b8ff5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.52"
+version = "1.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84450d0b4a8bd1ba4144ce8ce718fbc5d071358b1e5384bace6536b3d1f2d5b3"
+checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0"
[[package]]
name = "arc-swap"
@@ -101,9 +101,9 @@ dependencies = [
[[package]]
name = "clipboard-win"
-version = "4.2.2"
+version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3db8340083d28acb43451166543b98c838299b7e0863621be53a338adceea0ed"
+checksum = "1951fb8aa063a2ee18b4b4d217e4aa2ec9cc4f2430482983f607fa10cd36d7aa"
dependencies = [
"error-code",
"str-buf",
@@ -121,9 +121,9 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
-version = "0.8.5"
+version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
+checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120"
dependencies = [
"cfg-if",
"lazy_static",
@@ -202,9 +202,9 @@ dependencies = [
[[package]]
name = "error-code"
-version = "2.3.0"
+version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5115567ac25674e0043e472be13d14e537f37ea8aa4bdc4aef0c89add1db1ff"
+checksum = "64f18991e7bf11e7ffee451b5318b5c1a73c52d0d0ada6e5a3017c8c1ced6a21"
dependencies = [
"libc",
"str-buf",
@@ -303,9 +303,9 @@ dependencies = [
[[package]]
name = "getrandom"
-version = "0.2.3"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
+checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c"
dependencies = [
"cfg-if",
"libc",
@@ -381,6 +381,7 @@ dependencies = [
"serde",
"serde_json",
"similar",
+ "slotmap",
"smallvec",
"tendril",
"toml",
@@ -560,15 +561,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.104"
+version = "0.2.113"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b2f96d100e1cf1929e7719b7edb3b90ab5298072638fccd77be9ce942ecdfce"
+checksum = "eef78b64d87775463c549fbd80e19249ef436ea3bf1de2a1eb7e717ec7fab1e9"
[[package]]
name = "libloading"
-version = "0.7.2"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52"
+checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
dependencies = [
"cfg-if",
"winapi",
@@ -737,9 +738,9 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]]
name = "pin-project-lite"
-version = "0.2.7"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443"
+checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c"
[[package]]
name = "pin-utils"
@@ -749,18 +750,18 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "proc-macro2"
-version = "1.0.30"
+version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"
+checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
dependencies = [
"unicode-xid",
]
[[package]]
name = "pulldown-cmark"
-version = "0.8.0"
+version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8"
+checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6"
dependencies = [
"bitflags",
"memchr",
@@ -778,9 +779,9 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.10"
+version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
+checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
dependencies = [
"proc-macro2",
]
@@ -856,9 +857,9 @@ dependencies = [
[[package]]
name = "ryu"
-version = "1.0.5"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
[[package]]
name = "same-file"
@@ -877,18 +878,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.133"
+version = "1.0.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97565067517b60e2d1ea8b268e59ce036de907ac523ad83a0475da04e818989a"
+checksum = "2cf9235533494ea2ddcdb794665461814781c53f19d87b76e571a1c35acbad2b"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.133"
+version = "1.0.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed201699328568d8d08208fdd080e3ff594e6c422e438b6705905da01005d537"
+checksum = "8dcde03d87d4c973c04be249e7d8f0b35db1c848c487bd43032808e59dd8328d"
dependencies = [
"proc-macro2",
"quote",
@@ -897,9 +898,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.74"
+version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee2bb9cd061c5865d345bb02ca49fcef1391741b672b54a0bf7b679badec3142"
+checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085"
dependencies = [
"itoa",
"ryu",
@@ -949,9 +950,9 @@ dependencies = [
[[package]]
name = "signal-hook-tokio"
-version = "0.3.0"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6c5d32165ff8b94e68e7b3bdecb1b082e958c22434b363482cfb89dcd6f3ff8"
+checksum = "213241f76fb1e37e27de3b6aa1b068a2c333233b59cca6634f634b80a27ecf1e"
dependencies = [
"futures-core",
"libc",
@@ -982,9 +983,9 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.7.0"
+version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
+checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
[[package]]
name = "str-buf"
@@ -994,9 +995,9 @@ checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a"
[[package]]
name = "syn"
-version = "1.0.80"
+version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"
+checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b"
dependencies = [
"proc-macro2",
"quote",
@@ -1054,9 +1055,9 @@ dependencies = [
[[package]]
name = "tinyvec"
-version = "1.5.0"
+version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7"
+checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2"
dependencies = [
"tinyvec_macros",
]
@@ -1119,9 +1120,9 @@ dependencies = [
[[package]]
name = "tree-sitter"
-version = "0.20.2"
+version = "0.20.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c36be3222512d85a112491ae0cc280a38076022414f00b64582da1b7565ffd82"
+checksum = "a2355eeb5e7d836fe4cf144555855dffb04f395e5f20a15af8c53d1e1bcbd0bf"
dependencies = [
"cc",
"regex",
@@ -1196,9 +1197,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "version_check"
-version = "0.9.3"
+version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "walkdir"
diff --git a/Cargo.toml b/Cargo.toml
index 8c3ee671..76e3ae51 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -18,3 +18,4 @@ split-debuginfo = "unpacked"
[profile.release]
lto = "thin"
+# debug = true
diff --git a/TODO.md b/TODO.md
index 80a9be05..ab94cf9a 100644
--- a/TODO.md
+++ b/TODO.md
@@ -1,25 +1,12 @@
-- tree sitter:
- - markdown
- - regex
- - kotlin
- - clojure
- - erlang
-
- [ ] completion isIncomplete support
-
-1
- [ ] respect view fullscreen flag
- [ ] Implement marks (superset of Selection/Range)
- [ ] = for auto indent line/selection
-- [ ] :x for closing buffers
- [ ] lsp: signature help
2
-- [ ] macro recording
-- [ ] extend selection (treesitter select parent node) (replaces viw, vi(, va( etc )
-- [ ] selection align
- [ ] store some state between restarts: file positions, prompt history
- [ ] highlight matched characters in picker
diff --git a/base16_theme.toml b/base16_theme.toml
index bb60a3ea..42e02a98 100644
--- a/base16_theme.toml
+++ b/base16_theme.toml
@@ -11,7 +11,7 @@
"ui.statusline" = { fg = "black", bg = "white" }
"ui.statusline.inactive" = { fg = "gray", bg = "white" }
"ui.help" = { modifiers = ["reversed"] }
-"ui.cursor" = { modifiers = ["reversed"] }
+"ui.cursor" = { fg = "white", modifiers = ["reversed"] }
"variable" = "red"
"constant.numeric" = "yellow"
"constant" = "yellow"
@@ -29,6 +29,15 @@
"namespace" = "magenta"
"ui.help" = { fg = "white", bg = "black" }
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = "green"
"diff.delta" = "yellow"
"diff.minus" = "red"
diff --git a/book/src/generated/lang-support.md b/book/src/generated/lang-support.md
index c093144f..e4070117 100644
--- a/book/src/generated/lang-support.md
+++ b/book/src/generated/lang-support.md
@@ -10,18 +10,23 @@
| dart | ✓ | | ✓ | `dart` |
| dockerfile | ✓ | | | `docker-langserver` |
| elixir | ✓ | | | `elixir-ls` |
+| elm | ✓ | | | `elm-language-server` |
| fish | ✓ | ✓ | ✓ | |
| git-commit | ✓ | | | |
+| git-config | ✓ | | | |
| git-diff | ✓ | | | |
| git-rebase | ✓ | | | |
| glsl | ✓ | | ✓ | |
| go | ✓ | ✓ | ✓ | `gopls` |
+| graphql | ✓ | | | |
+| haskell | ✓ | | | `haskell-language-server-wrapper` |
| html | ✓ | | | |
| java | ✓ | | | |
| javascript | ✓ | | ✓ | `typescript-language-server` |
| json | ✓ | | ✓ | |
| julia | ✓ | | | `julia` |
| latex | ✓ | | | |
+| lean | ✓ | | | `lean` |
| ledger | ✓ | | | |
| llvm | ✓ | ✓ | ✓ | |
| llvm-mir | ✓ | ✓ | ✓ | |
diff --git a/book/src/generated/typable-cmd.md b/book/src/generated/typable-cmd.md
index 65b2dc5f..aed75cbd 100644
--- a/book/src/generated/typable-cmd.md
+++ b/book/src/generated/typable-cmd.md
@@ -45,3 +45,4 @@
| `:set-option`, `:set` | Set a config option at runtime |
| `:sort` | Sort ranges in selection. |
| `:rsort` | Sort ranges in selection in reverse order. |
+| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |
diff --git a/book/src/keymap.md b/book/src/keymap.md
index 70ec13b3..905ec48f 100644
--- a/book/src/keymap.md
+++ b/book/src/keymap.md
@@ -25,6 +25,7 @@
| `f` | Find next char | `find_next_char` |
| `T` | Find 'till previous char | `till_prev_char` |
| `F` | Find previous char | `find_prev_char` |
+| `G` | Go to line number `<n>` | `goto_line` |
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
| `Home` | Move to the start of the line | `goto_line_start` |
| `End` | Move to the end of the line | `goto_line_end` |
@@ -118,6 +119,10 @@
| `Alt-K` | Remove selections matching the regex | `remove_selections` |
| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` |
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
+| `Alt-k` | Expand selection to parent syntax node | `expand_selection` |
+| `Alt-j` | Shrink syntax tree object selection | `shrink_selection` |
+| `Alt-h` | Select previous sibling node in syntax tree | `select_prev_sibling` |
+| `Alt-l` | Select next sibling node in syntax tree | `select_next_sibling` |
### Search
@@ -261,8 +266,6 @@ Mappings in the style of [vim-unimpaired](https://github.com/tpope/vim-unimpaire
| `]D` | Go to last diagnostic in document (**LSP**) | `goto_last_diag` |
| `[space` | Add newline above | `add_newline_above` |
| `]space` | Add newline below | `add_newline_below` |
-| `]o` | Expand syntax tree object selection. | `expand_selection` |
-| `[o` | Shrink syntax tree object selection. | `shrink_selection` |
## Insert Mode
diff --git a/book/src/usage.md b/book/src/usage.md
index cf7d9d48..a76bfafc 100644
--- a/book/src/usage.md
+++ b/book/src/usage.md
@@ -42,7 +42,7 @@ helix. The keymappings have been inspired from [vim-sandwich](https://github.com
`ms` acts on a selection, so select the text first and use `ms<char>`. `mr` and `md` work
on the closest pairs found and selections are not required; use counts to act in outer pairs.
-It can also act on multiple seletions (yay!). For example, to change every occurance of `(use)` to `[use]`:
+It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`:
- `%` to select the whole file
- `s` to split the selections on a search term
diff --git a/flake.lock b/flake.lock
index acd10f76..94e443e3 100644
--- a/flake.lock
+++ b/flake.lock
@@ -2,11 +2,11 @@
"nodes": {
"devshell": {
"locked": {
- "lastModified": 1639692811,
- "narHash": "sha256-wOOBH0fVsfNqw/5ZWRoKspyesoXBgiwEOUBH4c7JKEo=",
+ "lastModified": 1641980203,
+ "narHash": "sha256-RiWJ3+6V267Ji+P54K1Xrj1Nsah9BfG/aLfIhqgVyBY=",
"owner": "numtide",
"repo": "devshell",
- "rev": "d3a1f5bec3632b33346865b1c165bf2420bb2f52",
+ "rev": "d897c1ddb4eab66cc2b783c7868d78555b9880ad",
"type": "github"
},
"original": {
@@ -41,11 +41,11 @@
]
},
"locked": {
- "lastModified": 1639807801,
- "narHash": "sha256-y32tMq1LTRVbMW3QN5i98iOQjQt2QSsif3ayUkD1o3g=",
+ "lastModified": 1642054253,
+ "narHash": "sha256-kHh9VmaB7gbS6pheheC4x0uT84LEmhfbsbWEQJgU2E4=",
"owner": "yusdacra",
"repo": "nix-cargo-integration",
- "rev": "b5bbaa4f5239e6f0619846f9a5380f07baa853d3",
+ "rev": "f8fa9af990195a3f63fe2dde84aa187e193da793",
"type": "github"
},
"original": {
@@ -56,11 +56,11 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1639699734,
- "narHash": "sha256-tlX6WebGmiHb2Hmniff+ltYp+7dRfdsBxw9YczLsP60=",
+ "lastModified": 1641887635,
+ "narHash": "sha256-kDGpufwzVaiGe5e1sBUBPo9f1YN+nYHJlYqCaVpZTQQ=",
"owner": "nixos",
"repo": "nixpkgs",
- "rev": "03ec468b14067729a285c2c7cfa7b9434a04816c",
+ "rev": "b2737d4980a17cc2b7d600d7d0b32fd7333aca88",
"type": "github"
},
"original": {
@@ -99,11 +99,11 @@
"nixpkgs": "nixpkgs_2"
},
"locked": {
- "lastModified": 1639880499,
- "narHash": "sha256-/BibDmFwgWuuTUkNVO6YlvuTSWM9dpBvlZoTAPs7ORI=",
+ "lastModified": 1642128126,
+ "narHash": "sha256-av8JUACdrTfQYl/ftZJvKpZEmZfa0avCq7tt5Usdoq0=",
"owner": "oxalica",
"repo": "rust-overlay",
- "rev": "c6c83589ae048af20d93d01eb07a4176012093d0",
+ "rev": "ce4ef6f2d74f2b68f7547df1de22d1b0037ce4ad",
"type": "github"
},
"original": {
diff --git a/flake.nix b/flake.nix
index cbf10c97..0d22c5c1 100644
--- a/flake.nix
+++ b/flake.nix
@@ -20,50 +20,63 @@
# Set default package to helix-term release build
defaultOutputs = { app = "hx"; package = "helix"; };
overrides = {
- crateOverrides = common: _: {
- helix-term = prev: {
- # link languages and theme toml files since helix-term expects them (for tests)
- preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} ..";
- buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
- };
+ crateOverrides = common: _: rec {
# link languages and theme toml files since helix-view expects them
- helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} .."; };
- helix-syntax = _prev: {
+ helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; };
+ helix-syntax = prev: {
+ src =
+ let
+ pkgs = common.pkgs;
+ helix = pkgs.fetchgit {
+ url = "https://github.com/helix-editor/helix.git";
+ rev = "a8fd33ac012a79069ef1409503a2edcf3a585153";
+ fetchSubmodules = true;
+ sha256 = "sha256-5AtOC55ttWT+7RYMboaFxpGZML51ix93wAkYJTt+8JI=";
+ };
+ in
+ pkgs.runCommand prev.src.name { } ''
+ mkdir -p $out
+ ln -s ${prev.src}/* $out
+ ln -sf ${helix}/helix-syntax/languages $out
+ '';
preConfigure = "mkdir -p ../runtime/grammars";
postInstall = "cp -r ../runtime $out/runtime";
};
- };
- mainBuild = common: prev:
- let
- inherit (common) pkgs lib;
- helixSyntax = lib.buildCrate {
- root = self;
- memberName = "helix-syntax";
- defaultCrateOverrides = {
- helix-syntax = common.crateOverrides.helix-syntax;
+ helix-term = prev:
+ let
+ inherit (common) pkgs lib;
+ helixSyntax = lib.buildCrate {
+ root = self;
+ memberName = "helix-syntax";
+ defaultCrateOverrides = {
+ helix-syntax = helix-syntax;
+ };
+ release = false;
};
- release = false;
+ runtimeDir = pkgs.runCommand "helix-runtime" { } ''
+ mkdir -p $out
+ ln -s ${common.root}/runtime/* $out
+ ln -sf ${helixSyntax}/runtime/grammars $out
+ '';
+ in
+ {
+ # link languages and theme toml files since helix-term expects them (for tests)
+ preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} ..";
+ buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
+ nativeBuildInputs = [ pkgs.makeWrapper ];
+ postFixup = ''
+ if [ -f "$out/bin/hx" ]; then
+ wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
+ fi
+ '';
};
- runtimeDir = pkgs.runCommand "helix-runtime" { } ''
- mkdir -p $out
- ln -s ${common.root}/runtime/* $out
- ln -sf ${helixSyntax}/runtime/grammars $out
- '';
- in
- lib.optionalAttrs (common.memberName == "helix-term") {
- nativeBuildInputs = [ pkgs.makeWrapper ];
- postFixup = ''
- if [ -f "$out/bin/hx" ]; then
- wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
- fi
- '';
- };
+ };
shell = common: prev: {
- packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin ]);
+ packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin cargo-flamegraph ]);
env = prev.env ++ [
{ name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; }
{ name = "RUST_BACKTRACE"; value = "1"; }
- { name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native"; }
+ { name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"; }
];
};
};
diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml
index 3c11ec76..9056b1f6 100644
--- a/helix-core/Cargo.toml
+++ b/helix-core/Cargo.toml
@@ -16,12 +16,13 @@ include = ["src/**/*", "README.md"]
helix-syntax = { version = "0.6", path = "../helix-syntax" }
ropey = "1.3"
-smallvec = "1.7"
+smallvec = "1.8"
tendril = "0.4.2"
unicode-segmentation = "1.8"
unicode-width = "0.1"
unicode-general-category = "0.4"
# slab = "0.4.2"
+slotmap = "1.0"
tree-sitter = "0.20"
once_cell = "1.9"
arc-swap = "1"
diff --git a/helix-core/src/auto_pairs.rs b/helix-core/src/auto_pairs.rs
index 1b3de6ea..9d1152bc 100644
--- a/helix-core/src/auto_pairs.rs
+++ b/helix-core/src/auto_pairs.rs
@@ -1,7 +1,9 @@
//! When typing the opening character of one of the possible pairs defined below,
//! this module provides the functionality to insert the paired closing character.
-use crate::{movement::Direction, Range, Rope, Selection, Tendril, Transaction};
+use crate::{
+ graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction,
+};
use log::debug;
use smallvec::SmallVec;
@@ -63,31 +65,132 @@ fn prev_char(doc: &Rope, pos: usize) -> Option<char> {
doc.get_char(pos - 1)
}
+fn is_single_grapheme(doc: &Rope, range: &Range) -> bool {
+ let mut graphemes = RopeGraphemes::new(doc.slice(range.from()..range.to()));
+ let first = graphemes.next();
+ let second = graphemes.next();
+ debug!("first: {:#?}, second: {:#?}", first, second);
+ first.is_some() && second.is_none()
+}
+
/// calculate what the resulting range should be for an auto pair insertion
fn get_next_range(
+ doc: &Rope,
start_range: &Range,
offset: usize,
typed_char: char,
len_inserted: usize,
) -> Range {
- let end_head = start_range.head + offset + typed_char.len_utf8();
+ // When the character under the cursor changes due to complete pair
+ // insertion, we must look backward a grapheme and then add the length
+ // of the insertion to put the resulting cursor in the right place, e.g.
+ //
+ // foo[\r\n] - anchor: 3, head: 5
+ // foo([)]\r\n - anchor: 4, head: 5
+ //
+ // foo[\r\n] - anchor: 3, head: 5
+ // foo'[\r\n] - anchor: 4, head: 6
+ //
+ // foo([)]\r\n - anchor: 4, head: 5
+ // foo()[\r\n] - anchor: 5, head: 7
+ //
+ // [foo]\r\n - anchor: 0, head: 3
+ // [foo(])\r\n - anchor: 0, head: 5
+
+ // inserting at the very end of the document after the last newline
+ if start_range.head == doc.len_chars() && start_range.anchor == doc.len_chars() {
+ return Range::new(
+ start_range.anchor + offset + typed_char.len_utf8(),
+ start_range.head + offset + typed_char.len_utf8(),
+ );
+ }
+
+ let single_grapheme = is_single_grapheme(doc, start_range);
+ let doc_slice = doc.slice(..);
+
+ // just skip over graphemes
+ if len_inserted == 0 {
+ let end_anchor = if single_grapheme {
+ graphemes::next_grapheme_boundary(doc_slice, start_range.anchor) + offset
+
+ // even for backward inserts with multiple grapheme selections,
+ // we want the anchor to stay where it is so that the relative
+ // selection does not change, e.g.:
+ //
+ // foo([) wor]d -> insert ) -> foo()[ wor]d
+ } else {
+ start_range.anchor + offset
+ };
+
+ return Range::new(
+ end_anchor,
+ graphemes::next_grapheme_boundary(doc_slice, start_range.head) + offset,
+ );
+ }
+
+ // trivial case: only inserted a single-char opener, just move the selection
+ if len_inserted == 1 {
+ let end_anchor = if single_grapheme || start_range.direction() == Direction::Backward {
+ start_range.anchor + offset + typed_char.len_utf8()
+ } else {
+ start_range.anchor + offset
+ };
+
+ return Range::new(
+ end_anchor,
+ start_range.head + offset + typed_char.len_utf8(),
+ );
+ }
+
+ // If the head = 0, then we must be in insert mode with a backward
+ // cursor, which implies the head will just move
+ let end_head = if start_range.head == 0 || start_range.direction() == Direction::Backward {
+ start_range.head + offset + typed_char.len_utf8()
+ } else {
+ // We must have a forward cursor, which means we must move to the
+ // other end of the grapheme to get to where the new characters
+ // are inserted, then move the head to where it should be
+ let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head);
+ debug!(
+ "prev_bound: {}, offset: {}, len_inserted: {}",
+ prev_bound, offset, len_inserted
+ );
+ prev_bound + offset + len_inserted
+ };
let end_anchor = match (start_range.len(), start_range.direction()) {
// if we have a zero width cursor, it shifts to the same number
(0, _) => end_head,
- // if we are inserting for a regular one-width cursor, the anchor
- // moves with the head
+ // If we are inserting for a regular one-width cursor, the anchor
+ // moves with the head. This is the fast path for ASCII.
(1, Direction::Forward) => end_head - 1,
(1, Direction::Backward) => end_head + 1,
- // if we are appending, the anchor stays where it is; only offset
- // for multiple range insertions
- (_, Direction::Forward) => start_range.anchor + offset,
+ (_, Direction::Forward) => {
+ if single_grapheme {
+ graphemes::prev_grapheme_boundary(doc.slice(..), start_range.head)
+ + typed_char.len_utf8()
- // when we are inserting in front of a selection, we need to move
- // the anchor over by however many characters were inserted overall
- (_, Direction::Backward) => start_range.anchor + offset + len_inserted,
+ // if we are appending, the anchor stays where it is; only offset
+ // for multiple range insertions
+ } else {
+ start_range.anchor + offset
+ }
+ }
+
+ (_, Direction::Backward) => {
+ if single_grapheme {
+ // if we're backward, then the head is at the first char
+ // of the typed char, so we need to add the length of
+ // the closing char
+ graphemes::prev_grapheme_boundary(doc.slice(..), start_range.anchor) + len_inserted
+ } else {
+ // when we are inserting in front of a selection, we need to move
+ // the anchor over by however many characters were inserted overall
+ start_range.anchor + offset + len_inserted
+ }
+ }
};
Range::new(end_anchor, end_head)
@@ -122,7 +225,7 @@ fn handle_open(
}
};
- let next_range = get_next_range(start_range, offs, open, len_inserted);
+ let next_range = get_next_range(doc, start_range, offs, open, len_inserted);
end_ranges.push(next_range);
offs += len_inserted;
@@ -152,7 +255,7 @@ fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) ->
(cursor, cursor, Some(Tendril::from_char(close)))
};
- let next_range = get_next_range(start_range, offs, close, len_inserted);
+ let next_range = get_next_range(doc, start_range, offs, close, len_inserted);
end_ranges.push(next_range);
offs += len_inserted;
@@ -202,7 +305,7 @@ fn handle_same(
(cursor, cursor, Some(pair))
};
- let next_range = get_next_range(start_range, offs, token, len_inserted);
+ let next_range = get_next_range(doc, start_range, offs, token, len_inserted);
end_ranges.push(next_range);
offs += len_inserted;
@@ -219,6 +322,8 @@ mod test {
use super::*;
use smallvec::smallvec;
+ const LINE_END: &'static str = crate::DEFAULT_LINE_ENDING.as_str();
+
fn differing_pairs() -> impl Iterator<Item = &'static (char, char)> {
PAIRS.iter().filter(|(open, close)| open != close)
}
@@ -270,12 +375,59 @@ mod test {
#[test]
fn test_insert_blank() {
test_hooks_with_pairs(
- &Rope::new(),
+ &Rope::from(LINE_END),
&Selection::single(1, 0),
PAIRS,
- |open, close| format!("{}{}", open, close),
+ |open, close| format!("{}{}{}", open, close, LINE_END),
&Selection::single(2, 1),
);
+
+ let empty_doc = Rope::from(format!("{line_end}{line_end}", line_end = LINE_END));
+
+ test_hooks_with_pairs(
+ &empty_doc,
+ &Selection::single(empty_doc.len_chars(), LINE_END.len()),
+ PAIRS,
+ |open, close| {
+ format!(
+ "{line_end}{open}{close}{line_end}",
+ open = open,
+ close = close,
+ line_end = LINE_END
+ )
+ },
+ &Selection::single(LINE_END.len() + 2, LINE_END.len() + 1),
+ );
+ }
+
+ #[test]
+ fn test_insert_before_multi_code_point_graphemes() {
+ test_hooks_with_pairs(
+ &Rope::from(format!("hello 👨‍👩‍👧‍👦 goodbye{}", LINE_END)),
+ &Selection::single(13, 6),
+ PAIRS,
+ |open, _| format!("hello {}👨‍👩‍👧‍👦 goodbye{}", open, LINE_END),
+ &Selection::single(14, 7),
+ );
+ }
+
+ #[test]
+ fn test_insert_at_end_of_document() {
+ test_hooks_with_pairs(
+ &Rope::from(LINE_END),
+ &Selection::single(LINE_END.len(), LINE_END.len()),
+ PAIRS,
+ |open, close| format!("{}{}{}", LINE_END, open, close),
+ &Selection::single(LINE_END.len() + 1, LINE_END.len() + 1),
+ );
+
+ test_hooks_with_pairs(
+ &Rope::from(format!("foo{}", LINE_END)),
+ &Selection::single(3 + LINE_END.len(), 3 + LINE_END.len()),
+ PAIRS,
+ |open, close| format!("foo{}{}{}", LINE_END, open, close),
+ &Selection::single(LINE_END.len() + 4, LINE_END.len() + 4),
+ );
}
/// [] -> append ( -> ([])
@@ -283,11 +435,20 @@ mod test {
fn test_append_blank() {
test_hooks_with_pairs(
// this is what happens when you have a totally blank document and then append
- &Rope::from("\n\n"),
- &Selection::single(0, 2),
+ &Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)),
+ // before inserting the pair, the cursor covers all of both empty lines
+ &Selection::single(0, LINE_END.len() * 2),
PAIRS,
- |open, close| format!("\n{}{}\n", open, close),
- &Selection::single(0, 3),
+ |open, close| {
+ format!(
+ "{line_end}{open}{close}{line_end}",
+ line_end = LINE_END,
+ open = open,
+ close = close
+ )
+ },
+ // after inserting pair, the cursor covers the first new line and the open char
+ &Selection::single(0, LINE_END.len() + 2),
);
}
@@ -329,6 +490,18 @@ mod test {
);
}
+ /// foo[] -> append to end of line ( -> foo([])
+ #[test]
+ fn test_append_single_cursor() {
+ test_hooks_with_pairs(
+ &Rope::from(format!("foo{}", LINE_END)),
+ &Selection::single(3, 3 + LINE_END.len()),
+ differing_pairs(),
+ |open, close| format!("foo{}{}{}", open, close, LINE_END),
+ &Selection::single(4, 5),
+ );
+ }
+
/// fo[o] fo[o(])
/// fo[o] -> append ( -> fo[o(])
/// fo[o] fo[o(])
@@ -355,18 +528,18 @@ mod test {
);
}
- /// ([]) -> insert ) -> ()[]
+ /// ([)] -> insert ) -> ()[]
#[test]
fn test_insert_close_inside_pair() {
for (open, close) in PAIRS {
- let doc = Rope::from(format!("{}{}", open, close));
+ let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
test_hooks(
&doc,
&Selection::single(2, 1),
*close,
&doc,
- &Selection::single(3, 2),
+ &Selection::single(2 + LINE_END.len(), 2),
);
}
}
@@ -375,14 +548,14 @@ mod test {
#[test]
fn test_append_close_inside_pair() {
for (open, close) in PAIRS {
- let doc = Rope::from(format!("{}{}\n", open, close));
+ let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
test_hooks(
&doc,
&Selection::single(0, 2),
*close,
&doc,
- &Selection::single(0, 3),
+ &Selection::single(0, 2 + LINE_END.len()),
);
}
}
@@ -564,6 +737,20 @@ mod test {
)
}
+ /// foo([) wor]d -> insert ) -> foo()[ wor]d
+ #[test]
+ fn test_insert_close_inside_pair_trailing_word_with_selection() {
+ for (open, close) in differing_pairs() {
+ test_hooks(
+ &Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
+ &Selection::single(9, 4),
+ *close,
+ &Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
+ &Selection::single(9, 5),
+ )
+ }
+ }
+
/// we want pairs that are *not* the same char to be inserted after
/// a non-pair char, for cases like functions, but for pairs that are
/// the same char, we want to *not* insert a pair to handle cases like "I'm"
@@ -572,7 +759,7 @@ mod test {
/// word[] -> insert ' -> word'[]
#[test]
fn test_insert_open_after_non_pair() {
- let doc = Rope::from("word");
+ let doc = Rope::from(format!("word{}", LINE_END));
let sel = Selection::single(5, 4);
let expected_sel = Selection::single(6, 5);
@@ -580,7 +767,7 @@ mod test {
&doc,
&sel,
differing_pairs(),
- |open, close| format!("word{}{}", open, close),
+ |open, close| format!("word{}{}{}", open, close, LINE_END),
&expected_sel,
);
@@ -588,22 +775,8 @@ mod test {
&doc,
&sel,
matching_pairs(),
- |open, _| format!("word{}", open),
+ |open, _| format!("word{}{}", open, LINE_END),
&expected_sel,
);
}
-
- /// appending with only a cursor should stay a cursor
- ///
- /// [] -> append to end "foo -> "foo[]"
- #[test]
- fn test_append_single_cursor() {
- test_hooks_with_pairs(
- &Rope::from("\n"),
- &Selection::single(0, 1),
- PAIRS,
- |open, close| format!("{}{}\n", open, close),
- &Selection::single(1, 2),
- );
- }
}
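
The grapheme handling above is the crux of this auto-pairs change: the cursor math can no longer assume one user-perceived character equals one `char`, which is why `get_next_range` now consults grapheme boundaries and why the new `test_insert_before_multi_code_point_graphemes` test uses a family emoji. A minimal standalone sketch of that distinction, using the `unicode-segmentation` crate that `helix-core` already depends on (the string is arbitrary and simply mirrors the test fixture):

```rust
use unicode_segmentation::UnicodeSegmentation;

fn main() {
    // The family emoji is one grapheme cluster built from seven Unicode
    // scalar values (four person emoji joined by zero-width joiners).
    let text = "hello 👨‍👩‍👧‍👦 goodbye";

    // Counting `char`s overshoots what the user perceives as characters...
    println!("chars: {}", text.chars().count()); // 21

    // ...while grapheme clusters match perceived characters, so cursor
    // movement and pair insertion must step by grapheme boundaries.
    println!("graphemes: {}", text.graphemes(true).count()); // 15
}
```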
diff --git a/helix-core/src/diff.rs b/helix-core/src/diff.rs
index a83db333..3349213d 100644
--- a/helix-core/src/diff.rs
+++ b/helix-core/src/diff.rs
@@ -11,10 +11,6 @@ pub fn compare_ropes(old: &Rope, new: &Rope) -> Transaction {
// A timeout is set so after 1 seconds, the algorithm will start
// approximating. This is especially important for big `Rope`s or
// `Rope`s that are extremely dissimilar to each other.
- //
- // Note: Ignore the clippy warning, as the trait bounds of
- // `Transaction::change()` require an iterator implementing
- // `ExactIterator`.
let mut config = similar::TextDiff::configure();
config.timeout(std::time::Duration::from_secs(1));
diff --git a/helix-core/src/graphemes.rs b/helix-core/src/graphemes.rs
index c6398875..aa898684 100644
--- a/helix-core/src/graphemes.rs
+++ b/helix-core/src/graphemes.rs
@@ -120,6 +120,43 @@ pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -
chunk_char_idx + tmp
}
+#[must_use]
+pub fn nth_next_grapheme_boundary_byte(slice: RopeSlice, mut byte_idx: usize, n: usize) -> usize {
+ // Bounds check
+ debug_assert!(byte_idx <= slice.len_bytes());
+
+ // Get the chunk with our byte index in it.
+ let (mut chunk, mut chunk_byte_idx, mut _chunk_char_idx, _) = slice.chunk_at_byte(byte_idx);
+
+ // Set up the grapheme cursor.
+ let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
+
+ // Find the nth next grapheme cluster boundary.
+ for _ in 0..n {
+ loop {
+ match gc.next_boundary(chunk, chunk_byte_idx) {
+ Ok(None) => return slice.len_bytes(),
+ Ok(Some(n)) => {
+ byte_idx = n;
+ break;
+ }
+ Err(GraphemeIncomplete::NextChunk) => {
+ chunk_byte_idx += chunk.len();
+ let (a, _, _c, _) = slice.chunk_at_byte(chunk_byte_idx);
+ chunk = a;
+ // chunk_char_idx = c;
+ }
+ Err(GraphemeIncomplete::PreContext(n)) => {
+ let ctx_chunk = slice.chunk_at_byte(n - 1).0;
+ gc.provide_context(ctx_chunk, n - ctx_chunk.len());
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ byte_idx
+}
+
/// Finds the next grapheme boundary after the given char position.
#[must_use]
#[inline(always)]
@@ -127,6 +164,13 @@ pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize {
nth_next_grapheme_boundary(slice, char_idx, 1)
}
+/// Finds the next grapheme boundary after the given byte position.
+#[must_use]
+#[inline(always)]
+pub fn next_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> usize {
+ nth_next_grapheme_boundary_byte(slice, byte_idx, 1)
+}
+
/// Returns the passed char index if it's already a grapheme boundary,
/// or the next grapheme boundary char index if not.
#[must_use]
@@ -151,6 +195,23 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize
}
}
+/// Returns the passed byte index if it's already a grapheme boundary,
+/// or the next grapheme boundary byte index if not.
+#[must_use]
+#[inline]
+pub fn ensure_grapheme_boundary_next_byte(slice: RopeSlice, byte_idx: usize) -> usize {
+ if byte_idx == 0 {
+ byte_idx
+ } else {
+ // TODO: optimize so we're not constructing grapheme cursor twice
+ if is_grapheme_boundary_byte(slice, byte_idx) {
+ byte_idx
+ } else {
+ next_grapheme_boundary_byte(slice, byte_idx)
+ }
+ }
+}
+
/// Returns whether the given char position is a grapheme boundary.
#[must_use]
pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
@@ -179,6 +240,31 @@ pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
}
}
+/// Returns whether the given byte position is a grapheme boundary.
+#[must_use]
+pub fn is_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> bool {
+ // Bounds check
+ debug_assert!(byte_idx <= slice.len_bytes());
+
+ // Get the chunk with our byte index in it.
+ let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
+
+ // Set up the grapheme cursor.
+ let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
+
+ // Determine if the given position is a grapheme cluster boundary.
+ loop {
+ match gc.is_boundary(chunk, chunk_byte_idx) {
+ Ok(n) => return n,
+ Err(GraphemeIncomplete::PreContext(n)) => {
+ let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
+ gc.provide_context(ctx_chunk, ctx_byte_start);
+ }
+ Err(_) => unreachable!(),
+ }
+ }
+}
+
/// An iterator over the graphemes of a `RopeSlice`.
#[derive(Clone)]
pub struct RopeGraphemes<'a> {
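
The byte-oriented helpers added above mirror the existing char-index versions and let callers work directly with tree-sitter's byte offsets. A hedged usage sketch, assuming the functions are reachable as public items of `helix_core::graphemes` as the hunk suggests; the rope contents and indices are made up for illustration:

```rust
use helix_core::graphemes::{ensure_grapheme_boundary_next_byte, is_grapheme_boundary_byte};
use helix_core::Rope;

fn main() {
    // 'a' is 1 byte; the family emoji is a single grapheme spanning 25 bytes.
    let text = Rope::from("a👨‍👩‍👧‍👦b");
    let slice = text.slice(..);

    // Byte 5 is a valid char boundary (right after the first emoji scalar)
    // but falls inside the grapheme cluster, so it is not a grapheme boundary.
    assert!(!is_grapheme_boundary_byte(slice, 5));

    // Snapping forward lands on the next grapheme boundary, just before 'b'.
    assert_eq!(ensure_grapheme_boundary_next_byte(slice, 5), 26);
}
```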
diff --git a/helix-core/src/increment/date_time.rs b/helix-core/src/increment/date_time.rs
index e3cfe107..1703c3ba 100644
--- a/helix-core/src/increment/date_time.rs
+++ b/helix-core/src/increment/date_time.rs
@@ -195,82 +195,82 @@ struct DateField {
impl DateField {
fn from_specifier(specifier: &str) -> Option<Self> {
match specifier {
- "Y" => Some(DateField {
+ "Y" => Some(Self {
regex: r"\d{4}",
unit: DateUnit::Years,
max_len: 5,
}),
- "y" => Some(DateField {
+ "y" => Some(Self {
regex: r"\d\d",
unit: DateUnit::Years,
max_len: 2,
}),
- "m" => Some(DateField {
+ "m" => Some(Self {
regex: r"[0-1]\d",
unit: DateUnit::Months,
max_len: 2,
}),
- "d" => Some(DateField {
+ "d" => Some(Self {
regex: r"[0-3]\d",
unit: DateUnit::Days,
max_len: 2,
}),
- "-d" => Some(DateField {
+ "-d" => Some(Self {
regex: r"[1-3]?\d",
unit: DateUnit::Days,
max_len: 2,
}),
- "a" => Some(DateField {
+ "a" => Some(Self {
regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat",
unit: DateUnit::Days,
max_len: 3,
}),
- "A" => Some(DateField {
+ "A" => Some(Self {
regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday",
unit: DateUnit::Days,
max_len: 9,
}),
- "b" | "h" => Some(DateField {
+ "b" | "h" => Some(Self {
regex: r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec",
unit: DateUnit::Months,
max_len: 3,
}),
- "B" => Some(DateField {
+ "B" => Some(Self {
regex: r"January|February|March|April|May|June|July|August|September|October|November|December",
unit: DateUnit::Months,
max_len: 9,
}),
- "H" => Some(DateField {
+ "H" => Some(Self {
regex: r"[0-2]\d",
unit: DateUnit::Hours,
max_len: 2,
}),
- "M" => Some(DateField {
+ "M" => Some(Self {
regex: r"[0-5]\d",
unit: DateUnit::Minutes,
max_len: 2,
}),
- "S" => Some(DateField {
+ "S" => Some(Self {
regex: r"[0-5]\d",
unit: DateUnit::Seconds,
max_len: 2,
}),
- "I" => Some(DateField {
+ "I" => Some(Self {
regex: r"[0-1]\d",
unit: DateUnit::Hours,
max_len: 2,
}),
- "-I" => Some(DateField {
+ "-I" => Some(Self {
regex: r"1?\d",
unit: DateUnit::Hours,
max_len: 2,
}),
- "P" => Some(DateField {
+ "P" => Some(Self {
regex: r"am|pm",
unit: DateUnit::AmPm,
max_len: 2,
}),
- "p" => Some(DateField {
+ "p" => Some(Self {
regex: r"AM|PM",
unit: DateUnit::AmPm,
max_len: 2,
diff --git a/helix-core/src/indent.rs b/helix-core/src/indent.rs
index 1fc2b8a5..ac2a1208 100644
--- a/helix-core/src/indent.rs
+++ b/helix-core/src/indent.rs
@@ -454,7 +454,7 @@ where
let language_config = loader.language_config_for_scope("source.rust").unwrap();
let highlight_config = language_config.highlight_config(&[]).unwrap();
- let syntax = Syntax::new(&doc, highlight_config.clone());
+ let syntax = Syntax::new(&doc, highlight_config.clone(), std::sync::Arc::new(loader));
let text = doc.slice(..);
let tab_width = 4;
diff --git a/helix-core/src/object.rs b/helix-core/src/object.rs
index 3363e20b..b06f4144 100644
--- a/helix-core/src/object.rs
+++ b/helix-core/src/object.rs
@@ -1,56 +1,72 @@
use crate::{Range, RopeSlice, Selection, Syntax};
+use tree_sitter::Node;
-pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: &Selection) -> Selection {
- let tree = syntax.tree();
-
- selection.clone().transform(|range| {
- let from = text.char_to_byte(range.from());
- let to = text.char_to_byte(range.to());
+pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
+ select_node_impl(syntax, text, selection, |descendant, from, to| {
+ if descendant.start_byte() == from && descendant.end_byte() == to {
+ descendant.parent()
+ } else {
+ Some(descendant)
+ }
+ })
+}
- // find parent of a descendant that matches the range
- let parent = match tree
- .root_node()
- .descendant_for_byte_range(from, to)
- .and_then(|node| {
- if node.start_byte() == from && node.end_byte() == to {
- node.parent()
- } else {
- Some(node)
- }
- }) {
- Some(parent) => parent,
- None => return range,
- };
+pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
+ select_node_impl(syntax, text, selection, |descendant, _from, _to| {
+ descendant.child(0).or(Some(descendant))
+ })
+}
- let from = text.byte_to_char(parent.start_byte());
- let to = text.byte_to_char(parent.end_byte());
+pub fn select_sibling<F>(
+ syntax: &Syntax,
+ text: RopeSlice,
+ selection: Selection,
+ sibling_fn: &F,
+) -> Selection
+where
+ F: Fn(Node) -> Option<Node>,
+{
+ select_node_impl(syntax, text, selection, |descendant, _from, _to| {
+ find_sibling_recursive(descendant, sibling_fn)
+ })
+}
- if range.head < range.anchor {
- Range::new(to, from)
- } else {
- Range::new(from, to)
- }
+fn find_sibling_recursive<F>(node: Node, sibling_fn: F) -> Option<Node>
+where
+ F: Fn(Node) -> Option<Node>,
+{
+ sibling_fn(node).or_else(|| {
+ node.parent()
+ .and_then(|node| find_sibling_recursive(node, sibling_fn))
})
}
-pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: &Selection) -> Selection {
+fn select_node_impl<F>(
+ syntax: &Syntax,
+ text: RopeSlice,
+ selection: Selection,
+ select_fn: F,
+) -> Selection
+where
+ F: Fn(Node, usize, usize) -> Option<Node>,
+{
let tree = syntax.tree();
- selection.clone().transform(|range| {
+ selection.transform(|range| {
let from = text.char_to_byte(range.from());
let to = text.char_to_byte(range.to());
- let descendant = match tree.root_node().descendant_for_byte_range(from, to) {
- // find first child, if not possible, fallback to the node that contains selection
- Some(descendant) => match descendant.child(0) {
- Some(child) => child,
- None => descendant,
- },
+ let node = match tree
+ .root_node()
+ .descendant_for_byte_range(from, to)
+ .and_then(|node| select_fn(node, from, to))
+ {
+ Some(node) => node,
None => return range,
};
- let from = text.byte_to_char(descendant.start_byte());
- let to = text.byte_to_char(descendant.end_byte());
+ let from = text.byte_to_char(node.start_byte());
+ let to = text.byte_to_char(node.end_byte());
if range.head < range.anchor {
Range::new(to, from)
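
With the traversal folded into `select_node_impl`, the sibling commands wired up elsewhere in this commit (the `Alt-h`/`Alt-l` bindings in the keymap hunk) only need to hand `select_sibling` a closure over tree-sitter's own sibling accessors. A sketch of what such call sites could look like; the actual wrappers live in `helix-term/src/commands.rs`, which is not shown in this excerpt, so the function names and signatures below are illustrative only:

```rust
use helix_core::{object, RopeSlice, Selection, Syntax};
use tree_sitter::Node;

// Hypothetical wrappers: the real commands also thread the editor context
// through; only the object-layer call is of interest here.
fn select_next_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
    object::select_sibling(syntax, text, selection, &|node| Node::next_sibling(&node))
}

fn select_prev_sibling(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
    object::select_sibling(syntax, text, selection, &|node| Node::prev_sibling(&node))
}
```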
diff --git a/helix-core/src/syntax.rs b/helix-core/src/syntax.rs
index 5d37c219..d6ec7610 100644
--- a/helix-core/src/syntax.rs
+++ b/helix-core/src/syntax.rs
@@ -8,12 +8,13 @@ use crate::{
pub use helix_syntax::get_language;
-use arc_swap::ArcSwap;
+use arc_swap::{ArcSwap, Guard};
+use slotmap::{DefaultKey as LayerId, HopSlotMap};
use std::{
borrow::Cow,
cell::RefCell,
- collections::{HashMap, HashSet},
+ collections::{HashMap, HashSet, VecDeque},
fmt,
path::Path,
sync::Arc,
@@ -95,6 +96,7 @@ pub struct LanguageServerConfiguration {
#[serde(default)]
#[serde(skip_serializing_if = "Vec::is_empty")]
pub args: Vec<String>,
+ pub language_id: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -207,12 +209,9 @@ impl LanguageConfiguration {
&highlights_query,
&injections_query,
&locals_query,
- );
+ )
+ .unwrap(); // TODO: avoid panic
- let config = match config {
- Ok(config) => config,
- Err(err) => panic!("{}", err),
- }; // TODO: avoid panic
config.configure(scopes);
Some(Arc::new(config))
}
@@ -262,12 +261,16 @@ impl LanguageConfiguration {
}
}
+// Expose loader as Lazy<> global since it's always static?
+
#[derive(Debug)]
pub struct Loader {
// highlight_names ?
language_configs: Vec<Arc<LanguageConfiguration>>,
language_config_ids_by_file_type: HashMap<String, usize>, // Vec<usize>
language_config_ids_by_shebang: HashMap<String, usize>,
+
+ scopes: ArcSwap<Vec<String>>,
}
impl Loader {
@@ -276,6 +279,7 @@ impl Loader {
language_configs: Vec::new(),
language_config_ids_by_file_type: HashMap::new(),
language_config_ids_by_shebang: HashMap::new(),
+ scopes: ArcSwap::from_pointee(Vec::new()),
};
for config in config.language {
@@ -361,8 +365,22 @@ impl Loader {
}
None
}
- pub fn language_configs_iter(&self) -> impl Iterator<Item = &Arc<LanguageConfiguration>> {
- self.language_configs.iter()
+
+ pub fn set_scopes(&self, scopes: Vec<String>) {
+ self.scopes.store(Arc::new(scopes));
+
+ // Reconfigure existing grammars
+ for config in self
+ .language_configs
+ .iter()
+ .filter(|cfg| cfg.is_highlight_initialized())
+ {
+ config.reconfigure(&self.scopes());
+ }
+ }
+
+ pub fn scopes(&self) -> Guard<Arc<Vec<String>>> {
+ self.scopes.load()
}
}
@@ -371,12 +389,6 @@ pub struct TsParser {
cursors: Vec<QueryCursor>,
}
-impl fmt::Debug for TsParser {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("TsParser").finish()
- }
-}
-
// could also just use a pool, or a single instance?
thread_local! {
pub static PARSER: RefCell<TsParser> = RefCell::new(TsParser {
@@ -387,9 +399,9 @@ thread_local! {
#[derive(Debug)]
pub struct Syntax {
- config: Arc<HighlightConfiguration>,
-
- root_layer: LanguageLayer,
+ layers: HopSlotMap<LayerId, LanguageLayer>,
+ root: LayerId,
+ loader: Arc<Loader>,
}
fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<str> {
@@ -399,38 +411,34 @@ fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<st
}
impl Syntax {
- // buffer, grammar, config, grammars, sync_timeout?
- pub fn new(
- /*language: Lang,*/ source: &Rope,
- config: Arc<HighlightConfiguration>,
- ) -> Self {
- let root_layer = LanguageLayer { tree: None };
+ pub fn new(source: &Rope, config: Arc<HighlightConfiguration>, loader: Arc<Loader>) -> Self {
+ let root_layer = LanguageLayer {
+ tree: None,
+ config,
+ depth: 0,
+ ranges: vec![Range {
+ start_byte: 0,
+ end_byte: usize::MAX,
+ start_point: Point::new(0, 0),
+ end_point: Point::new(usize::MAX, usize::MAX),
+ }],
+ };
- // track markers of injections
// track scope_descriptor: a Vec of scopes for item in tree
+ let mut layers = HopSlotMap::default();
+ let root = layers.insert(root_layer);
+
let mut syntax = Self {
- // grammar,
- config,
- root_layer,
+ root,
+ layers,
+ loader,
};
- // update root layer
- PARSER.with(|ts_parser| {
- // TODO: handle the returned `Result` properly.
- let _ = syntax.root_layer.parse(
- &mut ts_parser.borrow_mut(),
- &syntax.config,
- source,
- 0,
- vec![Range {
- start_byte: 0,
- end_byte: usize::MAX,
- start_point: Point::new(0, 0),
- end_point: Point::new(usize::MAX, usize::MAX),
- }],
- );
- });
+ syntax
+ .update(source, source, &ChangeSet::new(source))
+ .unwrap();
+
syntax
}
@@ -440,32 +448,255 @@ impl Syntax {
source: &Rope,
changeset: &ChangeSet,
) -> Result<(), Error> {
+ let mut queue = VecDeque::new();
+ queue.push_back(self.root);
+
+ let scopes = self.loader.scopes.load();
+ let injection_callback = |language: &str| {
+ self.loader
+ .language_configuration_for_injection_string(language)
+ .and_then(|language_config| language_config.highlight_config(&scopes))
+ };
+
+ // Convert the changeset into tree sitter edits.
+ let edits = generate_edits(old_source, changeset);
+
+ // Use the edits to update all layers markers
+ if !edits.is_empty() {
+ fn point_add(a: Point, b: Point) -> Point {
+ if b.row > 0 {
+ Point::new(a.row.saturating_add(b.row), b.column)
+ } else {
+ Point::new(0, a.column.saturating_add(b.column))
+ }
+ }
+ fn point_sub(a: Point, b: Point) -> Point {
+ if a.row > b.row {
+ Point::new(a.row.saturating_sub(b.row), a.column)
+ } else {
+ Point::new(0, a.column.saturating_sub(b.column))
+ }
+ }
+
+ for layer in &mut self.layers.values_mut() {
+ // The root layer always covers the whole range (0..usize::MAX)
+ if layer.depth == 0 {
+ continue;
+ }
+
+ for range in &mut layer.ranges {
+ // Roughly based on https://github.com/tree-sitter/tree-sitter/blob/ddeaa0c7f534268b35b4f6cb39b52df082754413/lib/src/subtree.c#L691-L720
+ for edit in edits.iter().rev() {
+ let is_pure_insertion = edit.old_end_byte == edit.start_byte;
+
+ // if edit is after range, skip
+ if edit.start_byte > range.end_byte {
+ // TODO: || (is_noop && edit.start_byte == range.end_byte)
+ continue;
+ }
+
+ // if edit is before range, shift entire range by len
+ if edit.old_end_byte < range.start_byte {
+ range.start_byte =
+ edit.new_end_byte + (range.start_byte - edit.old_end_byte);
+ range.start_point = point_add(
+ edit.new_end_position,
+ point_sub(range.start_point, edit.old_end_position),
+ );
+
+ range.end_byte = edit
+ .new_end_byte
+ .saturating_add(range.end_byte - edit.old_end_byte);
+ range.end_point = point_add(
+ edit.new_end_position,
+ point_sub(range.end_point, edit.old_end_position),
+ );
+ }
+ // if the edit starts in the space before and extends into the range
+ else if edit.start_byte < range.start_byte {
+ range.start_byte = edit.new_end_byte;
+ range.start_point = edit.new_end_position;
+
+ range.end_byte = range
+ .end_byte
+ .saturating_sub(edit.old_end_byte)
+ .saturating_add(edit.new_end_byte);
+ range.end_point = point_add(
+ edit.new_end_position,
+ point_sub(range.end_point, edit.old_end_position),
+ );
+ }
+ // If the edit is an insertion at the start of the tree, shift
+ else if edit.start_byte == range.start_byte && is_pure_insertion {
+ range.start_byte = edit.new_end_byte;
+ range.start_point = edit.new_end_position;
+ } else {
+ range.end_byte = range
+ .end_byte
+ .saturating_sub(edit.old_end_byte)
+ .saturating_add(edit.new_end_byte);
+ range.end_point = point_add(
+ edit.new_end_position,
+ point_sub(range.end_point, edit.old_end_position),
+ );
+ }
+ }
+ }
+ }
+ }
+
PARSER.with(|ts_parser| {
- self.root_layer.update(
- &mut ts_parser.borrow_mut(),
- &self.config,
- old_source,
- source,
- changeset,
- )
- })
+ let ts_parser = &mut ts_parser.borrow_mut();
+ let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new);
+ // TODO: might need to set cursor range
+ cursor.set_byte_range(0..usize::MAX);
- // TODO: deal with injections and update them too
- }
+ let source_slice = source.slice(..);
- // fn buffer_changed -> call layer.update(range, new_text) on root layer and then all marker layers
+ let mut touched = HashSet::new();
- // call this on transaction.apply() -> buffer_changed(changes)
- //
- // fn parse(language, old_tree, ranges)
- //
- pub fn tree(&self) -> &Tree {
- self.root_layer.tree()
+ // TODO: we should be able to avoid editing & parsing layers with ranges earlier in the document before the edit
+
+ while let Some(layer_id) = queue.pop_front() {
+ // Mark the layer as touched
+ touched.insert(layer_id);
+
+ let layer = &mut self.layers[layer_id];
+
+ // If a tree already exists, notify it of changes.
+ if let Some(tree) = &mut layer.tree {
+ for edit in edits.iter().rev() {
+ // Apply the edits in reverse.
+ // If we applied them in order then edit 1 would disrupt the positioning of edit 2.
+ tree.edit(edit);
+ }
+ }
+
+ // Re-parse the tree.
+ layer.parse(&mut ts_parser.parser, source)?;
+
+ // Switch to an immutable borrow.
+ let layer = &self.layers[layer_id];
+
+ // Process injections.
+ let matches = cursor.matches(
+ &layer.config.injections_query,
+ layer.tree().root_node(),
+ RopeProvider(source_slice),
+ );
+ let mut injections = Vec::new();
+ for mat in matches {
+ let (language_name, content_node, include_children) = injection_for_match(
+ &layer.config,
+ &layer.config.injections_query,
+ &mat,
+ source_slice,
+ );
+
+ // Explicitly remove this match so that none of its other captures will remain
+ // in the stream of captures.
+ mat.remove();
+
+ // If a language is found with the given name, then add a new language layer
+ // to the highlighted document.
+ if let (Some(language_name), Some(content_node)) = (language_name, content_node)
+ {
+ if let Some(config) = (injection_callback)(&language_name) {
+ let ranges =
+ intersect_ranges(&layer.ranges, &[content_node], include_children);
+
+ if !ranges.is_empty() {
+ injections.push((config, ranges));
+ }
+ }
+ }
+ }
+
+ // Process combined injections.
+ if let Some(combined_injections_query) = &layer.config.combined_injections_query {
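+ // One (language, content nodes, include_children) entry per pattern, so all matches of a combined-injection pattern are parsed as a single injected document.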
+ let mut injections_by_pattern_index =
+ vec![(None, Vec::new(), false); combined_injections_query.pattern_count()];
+ let matches = cursor.matches(
+ combined_injections_query,
+ layer.tree().root_node(),
+ RopeProvider(source_slice),
+ );
+ for mat in matches {
+ let entry = &mut injections_by_pattern_index[mat.pattern_index];
+ let (language_name, content_node, include_children) = injection_for_match(
+ &layer.config,
+ combined_injections_query,
+ &mat,
+ source_slice,
+ );
+ if language_name.is_some() {
+ entry.0 = language_name;
+ }
+ if let Some(content_node) = content_node {
+ entry.1.push(content_node);
+ }
+ entry.2 = include_children;
+ }
+ for (lang_name, content_nodes, includes_children) in injections_by_pattern_index
+ {
+ if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) {
+ if let Some(config) = (injection_callback)(&lang_name) {
+ let ranges = intersect_ranges(
+ &layer.ranges,
+ &content_nodes,
+ includes_children,
+ );
+ if !ranges.is_empty() {
+ injections.push((config, ranges));
+ }
+ }
+ }
+ }
+ }
+
+ let depth = layer.depth + 1;
+ // TODO: can't inline this since matches borrows self.layers
+ for (config, ranges) in injections {
+ // Find an existing layer
+ let layer = self
+ .layers
+ .iter_mut()
+ .find(|(_, layer)| {
+ layer.depth == depth && // TODO: track parent id instead
+ layer.config.language == config.language && layer.ranges == ranges
+ })
+ .map(|(id, _layer)| id);
+
+ // ...or insert a new one.
+ let layer_id = layer.unwrap_or_else(|| {
+ self.layers.insert(LanguageLayer {
+ tree: None,
+ config,
+ depth,
+ ranges,
+ })
+ });
+
+ queue.push_back(layer_id);
+ }
+
+ // TODO: pre-process local scopes at this time, rather than highlight?
+ // would solve problems with locals not working across boundaries
+ }
+
+ // Return the cursor to the pool.
+ ts_parser.cursors.push(cursor);
+
+ // Remove all untouched layers
+ self.layers.retain(|id, _| touched.contains(&id));
+
+ Ok(())
+ })
}
- //
- // <!--update_for_injection(grammar)-->
- // Highlighting
+ pub fn tree(&self) -> &Tree {
+ self.layers[self.root].tree()
+ }
/// Iterate over the highlighted regions for a given slice of source code.
pub fn highlight_iter<'a>(
@@ -473,65 +704,76 @@ impl Syntax {
source: RopeSlice<'a>,
range: Option<std::ops::Range<usize>>,
cancellation_flag: Option<&'a AtomicUsize>,
- injection_callback: impl FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
) -> impl Iterator<Item = Result<HighlightEvent, Error>> + 'a {
- // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
- // prevents them from being moved. But both of these values are really just
- // pointers, so it's actually ok to move them.
-
- // reuse a cursor from the pool if possible
- let mut cursor = PARSER.with(|ts_parser| {
- let highlighter = &mut ts_parser.borrow_mut();
- highlighter.cursors.pop().unwrap_or_else(QueryCursor::new)
+ let mut layers = self
+ .layers
+ .iter()
+ .filter_map(|(_, layer)| {
+ // TODO: if range doesn't overlap layer range, skip it
+
+ // Reuse a cursor from the pool if available.
+ let mut cursor = PARSER.with(|ts_parser| {
+ let highlighter = &mut ts_parser.borrow_mut();
+ highlighter.cursors.pop().unwrap_or_else(QueryCursor::new)
+ });
+
+ // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
+ // prevents them from being moved. But both of these values are really just
+ // pointers, so it's actually ok to move them.
+ let cursor_ref =
+ unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
+
+ // If the cursor is being reused and no range was given, this resets it to the whole document.
+ cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
+
+ let mut captures = cursor_ref
+ .captures(
+ &layer.config.query,
+ layer.tree().root_node(),
+ RopeProvider(source),
+ )
+ .peekable();
+
+ // If there are no captures, skip the layer
+ captures.peek()?;
+
+ Some(HighlightIterLayer {
+ highlight_end_stack: Vec::new(),
+ scope_stack: vec![LocalScope {
+ inherits: false,
+ range: 0..usize::MAX,
+ local_defs: Vec::new(),
+ }],
+ cursor,
+ _tree: None,
+ captures,
+ config: layer.config.as_ref(), // TODO: just reuse `layer`
+ depth: layer.depth, // TODO: just reuse `layer`
+ ranges: &layer.ranges, // TODO: temp
+ })
+ })
+ .collect::<Vec<_>>();
+
+ // HAXX: arrange layers by byte range, with deeper layers positioned first
+ layers.sort_by_key(|layer| {
+ (
+ layer.ranges.first().cloned(),
+ std::cmp::Reverse(layer.depth),
+ )
});
- let tree_ref = self.tree();
- let cursor_ref = unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
- let query_ref = &self.config.query;
- let config_ref = self.config.as_ref();
-
- // if reusing cursors & no range this resets to whole range
- cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
-
- let captures = cursor_ref
- .captures(query_ref, tree_ref.root_node(), RopeProvider(source))
- .peekable();
-
- // manually craft the root layer based on the existing tree
- let layer = HighlightIterLayer {
- highlight_end_stack: Vec::new(),
- scope_stack: vec![LocalScope {
- inherits: false,
- range: 0..usize::MAX,
- local_defs: Vec::new(),
- }],
- cursor,
- depth: 0,
- _tree: None,
- captures,
- config: config_ref,
- ranges: vec![Range {
- start_byte: 0,
- end_byte: usize::MAX,
- start_point: Point::new(0, 0),
- end_point: Point::new(usize::MAX, usize::MAX),
- }],
- };
let mut result = HighlightIter {
source,
- byte_offset: range.map_or(0, |r| r.start), // TODO: simplify
- injection_callback,
+ byte_offset: range.map_or(0, |r| r.start),
cancellation_flag,
iter_count: 0,
- layers: vec![layer],
+ layers,
next_event: None,
last_highlight_range: None,
};
result.sort_layers();
result
}
- // on_tokenize
- // on_change_highlighting
// Commenting
// comment_strings_for_pos
@@ -543,246 +785,157 @@ impl Syntax {
// indent_level_for_line
// TODO: Folding
-
- // Syntax APIs
- // get_syntax_node_containing_range ->
- // ...
- // get_syntax_node_at_pos
- // buffer_range_for_scope_at_pos
}
#[derive(Debug)]
pub struct LanguageLayer {
// mode
// grammar
- // depth
+ pub config: Arc<HighlightConfiguration>,
pub(crate) tree: Option<Tree>,
+ pub ranges: Vec<Range>,
+ pub depth: usize,
}
impl LanguageLayer {
- // pub fn new() -> Self {
- // Self { tree: None }
- // }
-
pub fn tree(&self) -> &Tree {
// TODO: no unwrap
self.tree.as_ref().unwrap()
}
- fn parse(
- &mut self,
- ts_parser: &mut TsParser,
- config: &HighlightConfiguration,
- source: &Rope,
- _depth: usize,
- ranges: Vec<Range>,
- ) -> Result<(), Error> {
- if ts_parser.parser.set_included_ranges(&ranges).is_ok() {
- ts_parser
- .parser
- .set_language(config.language)
- .map_err(|_| Error::InvalidLanguage)?;
-
- // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) };
- let tree = ts_parser
- .parser
- .parse_with(
- &mut |byte, _| {
- if byte <= source.len_bytes() {
- let (chunk, start_byte, _, _) = source.chunk_at_byte(byte);
- chunk[byte - start_byte..].as_bytes()
- } else {
- // out of range
- &[]
- }
- },
- self.tree.as_ref(),
- )
- .ok_or(Error::Cancelled)?;
+ fn parse(&mut self, parser: &mut Parser, source: &Rope) -> Result<(), Error> {
+ parser.set_included_ranges(&self.ranges).unwrap();
- self.tree = Some(tree)
- }
+ parser
+ .set_language(self.config.language)
+ .map_err(|_| Error::InvalidLanguage)?;
+
+ // unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) };
+ let tree = parser
+ .parse_with(
+ &mut |byte, _| {
+ if byte <= source.len_bytes() {
+ let (chunk, start_byte, _, _) = source.chunk_at_byte(byte);
+ chunk[byte - start_byte..].as_bytes()
+ } else {
+ // out of range
+ &[]
+ }
+ },
+ self.tree.as_ref(),
+ )
+ .ok_or(Error::Cancelled)?;
+ // unsafe { ts_parser.parser.set_cancellation_flag(None) };
+ self.tree = Some(tree);
Ok(())
}
+}
- pub(crate) fn generate_edits(
- old_text: RopeSlice,
- changeset: &ChangeSet,
- ) -> Vec<tree_sitter::InputEdit> {
- use Operation::*;
- let mut old_pos = 0;
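+/// Convert a `ChangeSet` over `old_text` into the `tree_sitter::InputEdit`s needed to keep a syntax tree in sync.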
+pub(crate) fn generate_edits(
+ old_text: &Rope,
+ changeset: &ChangeSet,
+) -> Vec<tree_sitter::InputEdit> {
+ use Operation::*;
+ let mut old_pos = 0;
- let mut edits = Vec::new();
+ let mut edits = Vec::new();
- let mut iter = changeset.changes.iter().peekable();
+ if changeset.changes.is_empty() {
+ return edits;
+ }
- // TODO; this is a lot easier with Change instead of Operation.
+ let mut iter = changeset.changes.iter().peekable();
- fn point_at_pos(text: RopeSlice, pos: usize) -> (usize, Point) {
- let byte = text.char_to_byte(pos); // <- attempted to index past end
- let line = text.char_to_line(pos);
- let line_start_byte = text.line_to_byte(line);
- let col = byte - line_start_byte;
+ // TODO: this is a lot easier with Change instead of Operation.
- (byte, Point::new(line, col))
- }
+ fn point_at_pos(text: &Rope, pos: usize) -> (usize, Point) {
+ let byte = text.char_to_byte(pos); // <- attempted to index past end
+ let line = text.char_to_line(pos);
+ let line_start_byte = text.line_to_byte(line);
+ let col = byte - line_start_byte;
- fn traverse(point: Point, text: &Tendril) -> Point {
- let Point {
- mut row,
- mut column,
- } = point;
-
- // TODO: there should be a better way here.
- let mut chars = text.chars().peekable();
- while let Some(ch) = chars.next() {
- if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) {
- row += 1;
- column = 0;
- } else {
- column += 1;
- }
+ (byte, Point::new(line, col))
+ }
+
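+ // Advance `point` across the inserted text: line endings move to the next row and reset the column.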
+ fn traverse(point: Point, text: &Tendril) -> Point {
+ let Point {
+ mut row,
+ mut column,
+ } = point;
+
+ // TODO: there should be a better way here.
+ let mut chars = text.chars().peekable();
+ while let Some(ch) = chars.next() {
+ if char_is_line_ending(ch) && !(ch == '\r' && chars.peek() == Some(&'\n')) {
+ row += 1;
+ column = 0;
+ } else {
+ column += 1;
}
- Point { row, column }
}
+ Point { row, column }
+ }
- while let Some(change) = iter.next() {
- let len = match change {
- Delete(i) | Retain(i) => *i,
- Insert(_) => 0,
- };
- let mut old_end = old_pos + len;
+ while let Some(change) = iter.next() {
+ let len = match change {
+ Delete(i) | Retain(i) => *i,
+ Insert(_) => 0,
+ };
+ let mut old_end = old_pos + len;
+
+ match change {
+ Retain(_) => {}
+ Delete(_) => {
+ let (start_byte, start_position) = point_at_pos(old_text, old_pos);
+ let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end);
+
+ // deletion
+ edits.push(tree_sitter::InputEdit {
+ start_byte, // old_pos to byte
+ old_end_byte, // old_end to byte
+ new_end_byte: start_byte, // old_pos to byte
+ start_position, // old pos to coords
+ old_end_position, // old_end to coords
+ new_end_position: start_position, // old pos to coords
+ });
+ }
+ Insert(s) => {
+ let (start_byte, start_position) = point_at_pos(old_text, old_pos);
- match change {
- Retain(_) => {}
- Delete(_) => {
- let (start_byte, start_position) = point_at_pos(old_text, old_pos);
+ // a subsequent delete means a replace, consume it
+ if let Some(Delete(len)) = iter.peek() {
+ old_end = old_pos + len;
let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end);
- // TODO: Position also needs to be byte based...
- // let byte = char_to_byte(old_pos)
- // let line = char_to_line(old_pos)
- // let line_start_byte = line_to_byte()
- // Position::new(line, line_start_byte - byte)
+ iter.next();
- // deletion
+ // replacement
edits.push(tree_sitter::InputEdit {
- start_byte, // old_pos to byte
- old_end_byte, // old_end to byte
- new_end_byte: start_byte, // old_pos to byte
- start_position, // old pos to coords
- old_end_position, // old_end to coords
- new_end_position: start_position, // old pos to coords
+ start_byte, // old_pos to byte
+ old_end_byte, // old_end to byte
+ new_end_byte: start_byte + s.len(), // old_pos to byte + s.len()
+ start_position, // old pos to coords
+ old_end_position, // old_end to coords
+ new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over)
+ });
+ } else {
+ // insert
+ edits.push(tree_sitter::InputEdit {
+ start_byte, // old_pos to byte
+ old_end_byte: start_byte, // same
+ new_end_byte: start_byte + s.len(), // old_pos + s.len()
+ start_position, // old pos to coords
+ old_end_position: start_position, // same
+ new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over)
});
- }
- Insert(s) => {
- let (start_byte, start_position) = point_at_pos(old_text, old_pos);
-
- // a subsequent delete means a replace, consume it
- if let Some(Delete(len)) = iter.peek() {
- old_end = old_pos + len;
- let (old_end_byte, old_end_position) = point_at_pos(old_text, old_end);
-
- iter.next();
-
- // replacement
- edits.push(tree_sitter::InputEdit {
- start_byte, // old_pos to byte
- old_end_byte, // old_end to byte
- new_end_byte: start_byte + s.len(), // old_pos to byte + s.len()
- start_position, // old pos to coords
- old_end_position, // old_end to coords
- new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over)
- });
- } else {
- // insert
- edits.push(tree_sitter::InputEdit {
- start_byte, // old_pos to byte
- old_end_byte: start_byte, // same
- new_end_byte: start_byte + s.len(), // old_pos + s.len()
- start_position, // old pos to coords
- old_end_position: start_position, // same
- new_end_position: traverse(start_position, s), // old pos + chars, newlines matter too (iter over)
- });
- }
}
}
- old_pos = old_end;
- }
- edits
- }
-
- fn update(
- &mut self,
- ts_parser: &mut TsParser,
- config: &HighlightConfiguration,
- old_source: &Rope,
- source: &Rope,
- changeset: &ChangeSet,
- ) -> Result<(), Error> {
- if changeset.is_empty() {
- return Ok(());
}
-
- let edits = Self::generate_edits(old_source.slice(..), changeset);
-
- // Notify the tree about all the changes
- for edit in edits.iter().rev() {
- // apply the edits in reverse. If we applied them in order then edit 1 would disrupt
- // the positioning of edit 2
- self.tree.as_mut().unwrap().edit(edit);
- }
-
- self.parse(
- ts_parser,
- config,
- source,
- 0,
- // TODO: what to do about this range on update
- vec![Range {
- start_byte: 0,
- end_byte: usize::MAX,
- start_point: Point::new(0, 0),
- end_point: Point::new(usize::MAX, usize::MAX),
- }],
- )
+ old_pos = old_end;
}
-
- // fn highlight_iter() -> same as Mode but for this layer. Mode composits these
- // fn buffer_changed
- // fn update(range)
- // fn update_injections()
+ edits
}
-// -- refactored from tree-sitter-highlight to be able to retain state
-// TODO: add seek() to iter
-
-// problem: any time a layer is updated it must update it's injections on the parent (potentially
-// removing some from use)
-// can't modify to vec and exist in it at the same time since that would violate borrows
-// maybe we can do with an arena
-// maybe just caching on the top layer and nevermind the injections for now?
-//
-// Grammar {
-// layers: Vec<Box<Layer>> to prevent memory moves when vec is modified
-// }
-// injections tracked by marker:
-// if marker areas match it's fine and update
-// if not found add new layer
-// if length 0 then area got removed, clean up the layer
-//
-// layer update:
-// if range.len = 0 then remove the layer
-// for change in changes { tree.edit(change) }
-// tree = parser.parse(.., tree, ..)
-// calculate affected range and update injections
-// injection update:
-// look for existing injections
-// if present, range = (first injection start, last injection end)
-//
-// For now cheat and just throw out non-root layers if they exist. This should still improve
-// parsing in majority of cases.
-
use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize};
use tree_sitter::{
@@ -819,8 +972,8 @@ pub enum HighlightEvent {
pub struct HighlightConfiguration {
pub language: Grammar,
pub query: Query,
+ injections_query: Query,
combined_injections_query: Option<Query>,
- locals_pattern_index: usize,
highlights_pattern_index: usize,
highlight_indices: ArcSwap<Vec<Option<Highlight>>>,
non_local_variable_patterns: Vec<bool>,
@@ -847,13 +1000,9 @@ struct LocalScope<'a> {
}
#[derive(Debug)]
-struct HighlightIter<'a, F>
-where
- F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
-{
+struct HighlightIter<'a> {
source: RopeSlice<'a>,
byte_offset: usize,
- injection_callback: F,
cancellation_flag: Option<&'a AtomicUsize>,
layers: Vec<HighlightIterLayer<'a>>,
iter_count: usize,
@@ -893,8 +1042,8 @@ struct HighlightIterLayer<'a> {
config: &'a HighlightConfiguration,
highlight_end_stack: Vec<usize>,
scope_stack: Vec<LocalScope<'a>>,
- ranges: Vec<Range>,
depth: usize,
+ ranges: &'a [Range],
}
impl<'a> fmt::Debug for HighlightIterLayer<'a> {
@@ -926,38 +1075,32 @@ impl HighlightConfiguration {
) -> Result<Self, QueryError> {
// Concatenate the query strings, keeping track of the start offset of each section.
let mut query_source = String::new();
- query_source.push_str(injection_query);
- let locals_query_offset = query_source.len();
query_source.push_str(locals_query);
let highlights_query_offset = query_source.len();
query_source.push_str(highlights_query);
// Construct a single query by concatenating the three query strings, but record the
// range of pattern indices that belong to each individual string.
- let mut query = Query::new(language, &query_source)?;
- let mut locals_pattern_index = 0;
+ let query = Query::new(language, &query_source)?;
let mut highlights_pattern_index = 0;
for i in 0..(query.pattern_count()) {
let pattern_offset = query.start_byte_for_pattern(i);
if pattern_offset < highlights_query_offset {
- if pattern_offset < highlights_query_offset {
- highlights_pattern_index += 1;
- }
- if pattern_offset < locals_query_offset {
- locals_pattern_index += 1;
- }
+ highlights_pattern_index += 1;
}
}
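+ // Injections are matched with a dedicated query; only the locals and highlights queries are concatenated above.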
+ let mut injections_query = Query::new(language, injection_query)?;
+
// Construct a separate query just for dealing with the 'combined injections'.
// Disable the combined injection patterns in the main query.
let mut combined_injections_query = Query::new(language, injection_query)?;
let mut has_combined_queries = false;
- for pattern_index in 0..locals_pattern_index {
- let settings = query.property_settings(pattern_index);
+ for pattern_index in 0..injections_query.pattern_count() {
+ let settings = injections_query.property_settings(pattern_index);
if settings.iter().any(|s| &*s.key == "injection.combined") {
has_combined_queries = true;
- query.disable_pattern(pattern_index);
+ injections_query.disable_pattern(pattern_index);
} else {
combined_injections_query.disable_pattern(pattern_index);
}
@@ -989,8 +1132,6 @@ impl HighlightConfiguration {
for (i, name) in query.capture_names().iter().enumerate() {
let i = Some(i as u32);
match name.as_str() {
- "injection.content" => injection_content_capture_index = i,
- "injection.language" => injection_language_capture_index = i,
"local.definition" => local_def_capture_index = i,
"local.definition-value" => local_def_value_capture_index = i,
"local.reference" => local_ref_capture_index = i,
@@ -999,12 +1140,21 @@ impl HighlightConfiguration {
}
}
+ for (i, name) in injections_query.capture_names().iter().enumerate() {
+ let i = Some(i as u32);
+ match name.as_str() {
+ "injection.content" => injection_content_capture_index = i,
+ "injection.language" => injection_language_capture_index = i,
+ _ => {}
+ }
+ }
+
let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]);
Ok(Self {
language,
query,
+ injections_query,
combined_injections_query,
- locals_pattern_index,
highlights_pattern_index,
highlight_indices,
non_local_variable_patterns,
@@ -1069,238 +1219,6 @@ impl HighlightConfiguration {
}
impl<'a> HighlightIterLayer<'a> {
- /// Create a new 'layer' of highlighting for this document.
- ///
- /// In the even that the new layer contains "combined injections" (injections where multiple
- /// disjoint ranges are parsed as one syntax tree), these will be eagerly processed and
- /// added to the returned vector.
- fn new<F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a>(
- source: RopeSlice<'a>,
- cancellation_flag: Option<&'a AtomicUsize>,
- injection_callback: &mut F,
- mut config: &'a HighlightConfiguration,
- mut depth: usize,
- mut ranges: Vec<Range>,
- ) -> Result<Vec<Self>, Error> {
- let mut result = Vec::with_capacity(1);
- let mut queue = Vec::new();
- loop {
- // --> Tree parsing part
-
- PARSER.with(|ts_parser| {
- let highlighter = &mut ts_parser.borrow_mut();
-
- if highlighter.parser.set_included_ranges(&ranges).is_ok() {
- highlighter
- .parser
- .set_language(config.language)
- .map_err(|_| Error::InvalidLanguage)?;
-
- unsafe { highlighter.parser.set_cancellation_flag(cancellation_flag) };
- let tree = highlighter
- .parser
- .parse_with(
- &mut |byte, _| {
- if byte <= source.len_bytes() {
- let (chunk, start_byte, _, _) = source.chunk_at_byte(byte);
- chunk[byte - start_byte..].as_bytes()
- } else {
- // out of range
- &[]
- }
- },
- None,
- )
- .ok_or(Error::Cancelled)?;
- unsafe { highlighter.parser.set_cancellation_flag(None) };
- let mut cursor = highlighter.cursors.pop().unwrap_or_else(QueryCursor::new);
-
- // Process combined injections.
- if let Some(combined_injections_query) = &config.combined_injections_query {
- let mut injections_by_pattern_index = vec![
- (None, Vec::new(), false);
- combined_injections_query
- .pattern_count()
- ];
- let matches = cursor.matches(
- combined_injections_query,
- tree.root_node(),
- RopeProvider(source),
- );
- for mat in matches {
- let entry = &mut injections_by_pattern_index[mat.pattern_index];
- let (language_name, content_node, include_children) =
- injection_for_match(
- config,
- combined_injections_query,
- &mat,
- source,
- );
- if language_name.is_some() {
- entry.0 = language_name;
- }
- if let Some(content_node) = content_node {
- entry.1.push(content_node);
- }
- entry.2 = include_children;
- }
- for (lang_name, content_nodes, includes_children) in
- injections_by_pattern_index
- {
- if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty())
- {
- if let Some(next_config) = (injection_callback)(&lang_name) {
- let ranges = Self::intersect_ranges(
- &ranges,
- &content_nodes,
- includes_children,
- );
- if !ranges.is_empty() {
- queue.push((next_config, depth + 1, ranges));
- }
- }
- }
- }
- }
-
- // --> Highlighting query part
-
- // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
- // prevents them from being moved. But both of these values are really just
- // pointers, so it's actually ok to move them.
- let tree_ref = unsafe { mem::transmute::<_, &'static Tree>(&tree) };
- let cursor_ref =
- unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
- let captures = cursor_ref
- .captures(&config.query, tree_ref.root_node(), RopeProvider(source))
- .peekable();
-
- result.push(HighlightIterLayer {
- highlight_end_stack: Vec::new(),
- scope_stack: vec![LocalScope {
- inherits: false,
- range: 0..usize::MAX,
- local_defs: Vec::new(),
- }],
- cursor,
- depth,
- _tree: Some(tree),
- captures,
- config,
- ranges,
- });
- }
-
- Ok(()) // so we can use the try operator
- })?;
-
- if queue.is_empty() {
- break;
- }
-
- let (next_config, next_depth, next_ranges) = queue.remove(0);
- config = next_config;
- depth = next_depth;
- ranges = next_ranges;
- }
-
- Ok(result)
- }
-
- // Compute the ranges that should be included when parsing an injection.
- // This takes into account three things:
- // * `parent_ranges` - The ranges must all fall within the *current* layer's ranges.
- // * `nodes` - Every injection takes place within a set of nodes. The injection ranges
- // are the ranges of those nodes.
- // * `includes_children` - For some injections, the content nodes' children should be
- // excluded from the nested document, so that only the content nodes' *own* content
- // is reparsed. For other injections, the content nodes' entire ranges should be
- // reparsed, including the ranges of their children.
- fn intersect_ranges(
- parent_ranges: &[Range],
- nodes: &[Node],
- includes_children: bool,
- ) -> Vec<Range> {
- let mut cursor = nodes[0].walk();
- let mut result = Vec::new();
- let mut parent_range_iter = parent_ranges.iter();
- let mut parent_range = parent_range_iter
- .next()
- .expect("Layers should only be constructed with non-empty ranges vectors");
- for node in nodes.iter() {
- let mut preceding_range = Range {
- start_byte: 0,
- start_point: Point::new(0, 0),
- end_byte: node.start_byte(),
- end_point: node.start_position(),
- };
- let following_range = Range {
- start_byte: node.end_byte(),
- start_point: node.end_position(),
- end_byte: usize::MAX,
- end_point: Point::new(usize::MAX, usize::MAX),
- };
-
- for excluded_range in node
- .children(&mut cursor)
- .filter_map(|child| {
- if includes_children {
- None
- } else {
- Some(child.range())
- }
- })
- .chain([following_range].iter().cloned())
- {
- let mut range = Range {
- start_byte: preceding_range.end_byte,
- start_point: preceding_range.end_point,
- end_byte: excluded_range.start_byte,
- end_point: excluded_range.start_point,
- };
- preceding_range = excluded_range;
-
- if range.end_byte < parent_range.start_byte {
- continue;
- }
-
- while parent_range.start_byte <= range.end_byte {
- if parent_range.end_byte > range.start_byte {
- if range.start_byte < parent_range.start_byte {
- range.start_byte = parent_range.start_byte;
- range.start_point = parent_range.start_point;
- }
-
- if parent_range.end_byte < range.end_byte {
- if range.start_byte < parent_range.end_byte {
- result.push(Range {
- start_byte: range.start_byte,
- start_point: range.start_point,
- end_byte: parent_range.end_byte,
- end_point: parent_range.end_point,
- });
- }
- range.start_byte = parent_range.end_byte;
- range.start_point = parent_range.end_point;
- } else {
- if range.start_byte < range.end_byte {
- result.push(range);
- }
- break;
- }
- }
-
- if let Some(next_range) = parent_range_iter.next() {
- parent_range = next_range;
- } else {
- return result;
- }
- }
- }
- }
- result
- }
-
// First, sort scope boundaries by their byte offset in the document. At a
// given position, emit scope endings before scope beginnings. Finally, emit
// scope boundaries from deeper layers first.
@@ -1326,10 +1244,101 @@ impl<'a> HighlightIterLayer<'a> {
}
}
-impl<'a, F> HighlightIter<'a, F>
-where
- F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
-{
+// Compute the ranges that should be included when parsing an injection.
+// This takes into account three things:
+// * `parent_ranges` - The ranges must all fall within the *current* layer's ranges.
+// * `nodes` - Every injection takes place within a set of nodes. The injection ranges
+// are the ranges of those nodes.
+// * `includes_children` - For some injections, the content nodes' children should be
+// excluded from the nested document, so that only the content nodes' *own* content
+// is reparsed. For other injections, the content nodes' entire ranges should be
+// reparsed, including the ranges of their children.
+fn intersect_ranges(
+ parent_ranges: &[Range],
+ nodes: &[Node],
+ includes_children: bool,
+) -> Vec<Range> {
+ let mut cursor = nodes[0].walk();
+ let mut result = Vec::new();
+ let mut parent_range_iter = parent_ranges.iter();
+ let mut parent_range = parent_range_iter
+ .next()
+ .expect("Layers should only be constructed with non-empty ranges vectors");
+ for node in nodes.iter() {
+ let mut preceding_range = Range {
+ start_byte: 0,
+ start_point: Point::new(0, 0),
+ end_byte: node.start_byte(),
+ end_point: node.start_position(),
+ };
+ let following_range = Range {
+ start_byte: node.end_byte(),
+ start_point: node.end_position(),
+ end_byte: usize::MAX,
+ end_point: Point::new(usize::MAX, usize::MAX),
+ };
+
+ for excluded_range in node
+ .children(&mut cursor)
+ .filter_map(|child| {
+ if includes_children {
+ None
+ } else {
+ Some(child.range())
+ }
+ })
+ .chain([following_range].iter().cloned())
+ {
+ let mut range = Range {
+ start_byte: preceding_range.end_byte,
+ start_point: preceding_range.end_point,
+ end_byte: excluded_range.start_byte,
+ end_point: excluded_range.start_point,
+ };
+ preceding_range = excluded_range;
+
+ if range.end_byte < parent_range.start_byte {
+ continue;
+ }
+
+ while parent_range.start_byte <= range.end_byte {
+ if parent_range.end_byte > range.start_byte {
+ if range.start_byte < parent_range.start_byte {
+ range.start_byte = parent_range.start_byte;
+ range.start_point = parent_range.start_point;
+ }
+
+ if parent_range.end_byte < range.end_byte {
+ if range.start_byte < parent_range.end_byte {
+ result.push(Range {
+ start_byte: range.start_byte,
+ start_point: range.start_point,
+ end_byte: parent_range.end_byte,
+ end_point: parent_range.end_point,
+ });
+ }
+ range.start_byte = parent_range.end_byte;
+ range.start_point = parent_range.end_point;
+ } else {
+ if range.start_byte < range.end_byte {
+ result.push(range);
+ }
+ break;
+ }
+ }
+
+ if let Some(next_range) = parent_range_iter.next() {
+ parent_range = next_range;
+ } else {
+ return result;
+ }
+ }
+ }
+ }
+ result
+}
+
+impl<'a> HighlightIter<'a> {
fn emit_event(
&mut self,
offset: usize,
@@ -1360,6 +1369,12 @@ where
i += 1;
continue;
}
+ } else {
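+ // Remove the exhausted layer and return its query cursor to the pool for reuse.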
+ let layer = self.layers.remove(i + 1);
+ PARSER.with(|ts_parser| {
+ let highlighter = &mut ts_parser.borrow_mut();
+ highlighter.cursors.push(layer.cursor);
+ });
}
break;
}
@@ -1376,30 +1391,9 @@ where
}
}
}
-
- fn insert_layer(&mut self, mut layer: HighlightIterLayer<'a>) {
- if let Some(sort_key) = layer.sort_key() {
- let mut i = 1;
- while i < self.layers.len() {
- if let Some(sort_key_i) = self.layers[i].sort_key() {
- if sort_key_i > sort_key {
- self.layers.insert(i, layer);
- return;
- }
- i += 1;
- } else {
- self.layers.remove(i);
- }
- }
- self.layers.push(layer);
- }
- }
}
-impl<'a, F> Iterator for HighlightIter<'a, F>
-where
- F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
-{
+impl<'a> Iterator for HighlightIter<'a> {
type Item = Result<HighlightEvent, Error>;
fn next(&mut self) -> Option<Self::Item> {
@@ -1459,55 +1453,12 @@ where
layer.highlight_end_stack.pop();
return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd));
} else {
- // return self.emit_event(self.source.len(), None);
- return None;
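+ // No captures remain in this layer: emit the rest of the source text before finishing.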
+ return self.emit_event(self.source.len_bytes(), None);
};
let (mut match_, capture_index) = layer.captures.next().unwrap();
let mut capture = match_.captures[capture_index];
- // If this capture represents an injection, then process the injection.
- if match_.pattern_index < layer.config.locals_pattern_index {
- let (language_name, content_node, include_children) =
- injection_for_match(layer.config, &layer.config.query, &match_, self.source);
-
- // Explicitly remove this match so that none of its other captures will remain
- // in the stream of captures.
- match_.remove();
-
- // If a language is found with the given name, then add a new language layer
- // to the highlighted document.
- if let (Some(language_name), Some(content_node)) = (language_name, content_node) {
- if let Some(config) = (self.injection_callback)(&language_name) {
- let ranges = HighlightIterLayer::intersect_ranges(
- &self.layers[0].ranges,
- &[content_node],
- include_children,
- );
- if !ranges.is_empty() {
- match HighlightIterLayer::new(
- self.source,
- self.cancellation_flag,
- &mut self.injection_callback,
- config,
- self.layers[0].depth + 1,
- ranges,
- ) {
- Ok(layers) => {
- for layer in layers {
- self.insert_layer(layer);
- }
- }
- Err(e) => return Some(Err(e)),
- }
- }
- }
- }
-
- self.sort_layers();
- continue 'main;
- }
-
// Remove from the local scope stack any local scopes that have already ended.
while range.start > layer.scope_stack.last().unwrap().range.end {
layer.scope_stack.pop();
@@ -1702,14 +1653,6 @@ fn injection_for_match<'a>(
(language_name, content_node, include_children)
}
-// fn shrink_and_clear<T>(vec: &mut Vec<T>, capacity: usize) {
-// if vec.len() > capacity {
-// vec.truncate(capacity);
-// vec.shrink_to_fit();
-// }
-// vec.clear();
-// }
-
pub struct Merge<I> {
iter: I,
spans: Box<dyn Iterator<Item = (usize, std::ops::Range<usize>)>>,
@@ -1876,6 +1819,8 @@ mod test {
.map(String::from)
.collect();
+ let loader = Loader::new(Configuration { language: vec![] });
+
let language = get_language(&crate::RUNTIME_DIR, "Rust").unwrap();
let config = HighlightConfiguration::new(
language,
@@ -1898,7 +1843,7 @@ mod test {
fn main() {}
",
);
- let syntax = Syntax::new(&source, Arc::new(config));
+ let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader));
let tree = syntax.tree();
let root = tree.root_node();
assert_eq!(root.kind(), "source_file");
@@ -1925,7 +1870,7 @@ mod test {
&doc,
vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(),
);
- let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes());
+ let edits = generate_edits(&doc, transaction.changes());
// transaction.apply(&mut state);
assert_eq!(
@@ -1954,7 +1899,7 @@ mod test {
let mut doc = Rope::from("fn test() {}");
let transaction =
Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter());
- let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes());
+ let edits = generate_edits(&doc, transaction.changes());
transaction.apply(&mut doc);
assert_eq!(doc, "fn test(a: u32) {}");
diff --git a/helix-syntax/languages/tree-sitter-elm b/helix-syntax/languages/tree-sitter-elm
new file mode 160000
+Subproject bd50ccf66b42c55252ac8efc1086af4ac6bab8c
diff --git a/helix-syntax/languages/tree-sitter-git-config b/helix-syntax/languages/tree-sitter-git-config
new file mode 160000
+Subproject 0e4f0baf90b57e5aeb62dcdbf03062c6315d43e
diff --git a/helix-syntax/languages/tree-sitter-go b/helix-syntax/languages/tree-sitter-go
-Subproject 2a83dfdd759a632651f852aa4dc0af2525fae5c
+Subproject 0fa917a7022d1cd2e9b779a6a8fc5dc7fad69c7
diff --git a/helix-syntax/languages/tree-sitter-graphql b/helix-syntax/languages/tree-sitter-graphql
new file mode 160000
+Subproject 5e66e961eee421786bdda8495ed1db045e06b5f
diff --git a/helix-syntax/languages/tree-sitter-lean b/helix-syntax/languages/tree-sitter-lean
new file mode 160000
+Subproject d98426109258b266e1e92358c5f11716d2e8f63
diff --git a/helix-syntax/languages/tree-sitter-php b/helix-syntax/languages/tree-sitter-php
-Subproject 0d63eaf94e8d6c0694551b016c802787e61b3fb
+Subproject 57f855461aeeca73bd4218754fb26b5ac143f98
diff --git a/helix-syntax/languages/tree-sitter-zig b/helix-syntax/languages/tree-sitter-zig
-Subproject 1f27fd1dfe7f352408f01b4894c7825f3a1d6c4
+Subproject 93331b8bd8b4ebee2b575490b2758f16ad4e9f3
diff --git a/helix-term/Cargo.toml b/helix-term/Cargo.toml
index 28b4fe2a..e379b369 100644
--- a/helix-term/Cargo.toml
+++ b/helix-term/Cargo.toml
@@ -46,7 +46,7 @@ log = "0.4"
fuzzy-matcher = "0.3"
ignore = "0.4"
# markdown doc rendering
-pulldown-cmark = { version = "0.8", default-features = false }
+pulldown-cmark = { version = "0.9", default-features = false }
# file type detection
content_inspector = "0.2.4"
diff --git a/helix-term/src/application.rs b/helix-term/src/application.rs
index c7202feb..c376aefa 100644
--- a/helix-term/src/application.rs
+++ b/helix-term/src/application.rs
@@ -358,12 +358,8 @@ impl Application {
// trigger textDocument/didOpen for docs that are already open
for doc in docs {
- // TODO: extract and share with editor.open
- let language_id = doc
- .language()
- .and_then(|s| s.split('.').last()) // source.rust
- .map(ToOwned::to_owned)
- .unwrap_or_default();
+ let language_id =
+ doc.language_id().map(ToOwned::to_owned).unwrap_or_default();
tokio::spawn(language_server.text_document_did_open(
doc.url().unwrap(),
diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs
index 5c26a5b2..6123b7d2 100644
--- a/helix-term/src/commands.rs
+++ b/helix-term/src/commands.rs
@@ -11,6 +11,7 @@ use helix_core::{
object, pos_at_coords,
regex::{self, Regex, RegexBuilder},
search, selection, shellwords, surround, textobject,
+ tree_sitter::Node,
unicode::width::UnicodeWidthChar,
LineEnding, Position, Range, Rope, RopeGraphemes, RopeSlice, Selection, SmallVec, Tendril,
Transaction,
@@ -173,7 +174,7 @@ macro_rules! static_commands {
impl MappableCommand {
pub fn execute(&self, cx: &mut Context) {
match &self {
- MappableCommand::Typable { name, args, doc: _ } => {
+ Self::Typable { name, args, doc: _ } => {
let args: Vec<Cow<str>> = args.iter().map(Cow::from).collect();
if let Some(command) = cmd::TYPABLE_COMMAND_MAP.get(name.as_str()) {
let mut cx = compositor::Context {
@@ -186,21 +187,21 @@ impl MappableCommand {
}
}
}
- MappableCommand::Static { fun, .. } => (fun)(cx),
+ Self::Static { fun, .. } => (fun)(cx),
}
}
pub fn name(&self) -> &str {
match &self {
- MappableCommand::Typable { name, .. } => name,
- MappableCommand::Static { name, .. } => name,
+ Self::Typable { name, .. } => name,
+ Self::Static { name, .. } => name,
}
}
pub fn doc(&self) -> &str {
match &self {
- MappableCommand::Typable { doc, .. } => doc,
- MappableCommand::Static { doc, .. } => doc,
+ Self::Typable { doc, .. } => doc,
+ Self::Static { doc, .. } => doc,
}
}
@@ -363,6 +364,8 @@ impl MappableCommand {
rotate_selection_contents_backward, "Rotate selections contents backward",
expand_selection, "Expand selection to parent syntax node",
shrink_selection, "Shrink selection to previously expanded syntax node",
+ select_next_sibling, "Select the next sibling in the syntax tree",
+ select_prev_sibling, "Select the previous sibling in the syntax tree",
jump_forward, "Jump forward on jumplist",
jump_backward, "Jump backward on jumplist",
save_selection, "Save the current selection to the jumplist",
@@ -2278,7 +2281,7 @@ pub mod cmd {
force: bool,
) -> anyhow::Result<()> {
let mut errors = String::new();
-
+ let jobs = &mut cx.jobs;
// save all documents
for doc in &mut cx.editor.documents.values_mut() {
if doc.path().is_none() {
@@ -2286,9 +2289,23 @@ pub mod cmd {
continue;
}
- // TODO: handle error.
- let handle = doc.save();
- cx.jobs.add(Job::new(handle).wait_before_exiting());
+ if !doc.is_modified() {
+ continue;
+ }
+
+ let fmt = doc.auto_format().map(|fmt| {
+ let shared = fmt.shared();
+ let callback = make_format_callback(
+ doc.id(),
+ doc.version(),
+ Modified::SetUnmodified,
+ shared.clone(),
+ );
+ jobs.callback(callback);
+ shared
+ });
+ let future = doc.format_and_save(fmt);
+ jobs.add(Job::new(future).wait_before_exiting());
}
if quit {
@@ -2748,6 +2765,46 @@ pub mod cmd {
Ok(())
}
+ fn tree_sitter_subtree(
+ cx: &mut compositor::Context,
+ _args: &[Cow<str>],
+ _event: PromptEvent,
+ ) -> anyhow::Result<()> {
+ let (view, doc) = current!(cx.editor);
+
+ if let Some(syntax) = doc.syntax() {
+ let primary_selection = doc.selection(view.id).primary();
+ let text = doc.text();
+ let from = text.char_to_byte(primary_selection.from());
+ let to = text.char_to_byte(primary_selection.to());
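+ // Find the smallest syntax node that spans the primary selection.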
+ if let Some(selected_node) = syntax
+ .tree()
+ .root_node()
+ .descendant_for_byte_range(from, to)
+ {
+ let contents = format!("```tsq\n{}\n```", selected_node.to_sexp());
+
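+ // Display the S-expression in the shared "hover" popup, replacing any popup already shown there.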
+ let callback = async move {
+ let call: job::Callback =
+ Box::new(move |editor: &mut Editor, compositor: &mut Compositor| {
+ let contents = ui::Markdown::new(contents, editor.syn_loader.clone());
+ let popup = Popup::new("hover", contents);
+ if let Some(doc_popup) = compositor.find_id("hover") {
+ *doc_popup = popup;
+ } else {
+ compositor.push(Box::new(popup));
+ }
+ });
+ Ok(call)
+ };
+
+ cx.jobs.callback(callback);
+ }
+ }
+
+ Ok(())
+ }
+
pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
TypableCommand {
name: "quit",
@@ -3064,6 +3121,13 @@ pub mod cmd {
fun: sort_reverse,
completer: None,
},
+ TypableCommand {
+ name: "tree-sitter-subtree",
+ aliases: &["ts-subtree"],
+ doc: "Display tree sitter subtree under cursor, primarily for debugging queries.",
+ fun: tree_sitter_subtree,
+ completer: None,
+ },
];
pub static TYPABLE_COMMAND_MAP: Lazy<HashMap<&'static str, &'static TypableCommand>> =
@@ -3239,8 +3303,8 @@ fn buffer_picker(cx: &mut Context) {
.map(|(_, doc)| new_meta(doc))
.collect(),
BufferMeta::format,
- |editor: &mut Editor, meta, _action| {
- editor.switch(meta.id, Action::Replace);
+ |editor: &mut Editor, meta, action| {
+ editor.switch(meta.id, action);
},
|editor, meta| {
let doc = &editor.documents.get(&meta.id)?;
@@ -3479,11 +3543,9 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> {
match op {
ResourceOp::Create(op) => {
let path = op.uri.to_file_path().unwrap();
- let ignore_if_exists = if let Some(options) = &op.options {
+ let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
!options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
- } else {
- false
- };
+ });
if ignore_if_exists && path.exists() {
Ok(())
} else {
@@ -3493,11 +3555,12 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> {
ResourceOp::Delete(op) => {
let path = op.uri.to_file_path().unwrap();
if path.is_dir() {
- let recursive = if let Some(options) = &op.options {
- options.recursive.unwrap_or(false)
- } else {
- false
- };
+ let recursive = op
+ .options
+ .as_ref()
+ .and_then(|options| options.recursive)
+ .unwrap_or(false);
+
if recursive {
fs::remove_dir_all(&path)
} else {
@@ -3512,11 +3575,9 @@ pub fn apply_document_resource_op(op: &lsp::ResourceOp) -> std::io::Result<()> {
ResourceOp::Rename(op) => {
let from = op.old_uri.to_file_path().unwrap();
let to = op.new_uri.to_file_path().unwrap();
- let ignore_if_exists = if let Some(options) = &op.options {
+ let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
!options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
- } else {
- false
- };
+ });
if ignore_if_exists && to.exists() {
Ok(())
} else {
@@ -5388,8 +5449,8 @@ fn hover(cx: &mut Context) {
// skip if contents empty
let contents = ui::Markdown::new(contents, editor.syn_loader.clone());
- let popup = Popup::new("documentation", contents);
- if let Some(doc_popup) = compositor.find_id("documentation") {
+ let popup = Popup::new("hover", contents);
+ if let Some(doc_popup) = compositor.find_id("hover") {
*doc_popup = popup;
} else {
compositor.push(Box::new(popup));
@@ -5490,7 +5551,7 @@ fn expand_selection(cx: &mut Context) {
// save current selection so it can be restored using shrink_selection
view.object_selections.push(current_selection.clone());
- let selection = object::expand_selection(syntax, text, current_selection);
+ let selection = object::expand_selection(syntax, text, current_selection.clone());
doc.set_selection(view.id, selection);
}
};
@@ -5516,7 +5577,26 @@ fn shrink_selection(cx: &mut Context) {
// if not previous selection, shrink to first child
if let Some(syntax) = doc.syntax() {
let text = doc.text().slice(..);
- let selection = object::shrink_selection(syntax, text, current_selection);
+ let selection = object::shrink_selection(syntax, text, current_selection.clone());
+ doc.set_selection(view.id, selection);
+ }
+ };
+ motion(cx.editor);
+ cx.editor.last_motion = Some(Motion(Box::new(motion)));
+}
+
+fn select_sibling_impl<F>(cx: &mut Context, sibling_fn: &'static F)
+where
+ F: Fn(Node) -> Option<Node>,
+{
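+ // Shared implementation for select_next_sibling / select_prev_sibling; `sibling_fn` picks the direction.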
+ let motion = |editor: &mut Editor| {
+ let (view, doc) = current!(editor);
+
+ if let Some(syntax) = doc.syntax() {
+ let text = doc.text().slice(..);
+ let current_selection = doc.selection(view.id);
+ let selection =
+ object::select_sibling(syntax, text, current_selection.clone(), sibling_fn);
doc.set_selection(view.id, selection);
}
};
@@ -5524,6 +5604,14 @@ fn shrink_selection(cx: &mut Context) {
cx.editor.last_motion = Some(Motion(Box::new(motion)));
}
+fn select_next_sibling(cx: &mut Context) {
+ select_sibling_impl(cx, &|node| Node::next_sibling(&node))
+}
+
+fn select_prev_sibling(cx: &mut Context) {
+ select_sibling_impl(cx, &|node| Node::prev_sibling(&node))
+}
+
fn match_brackets(cx: &mut Context) {
let (view, doc) = current!(cx.editor);
diff --git a/helix-term/src/job.rs b/helix-term/src/job.rs
index 4fa38174..f5a0a425 100644
--- a/helix-term/src/job.rs
+++ b/helix-term/src/job.rs
@@ -22,8 +22,8 @@ pub struct Jobs {
}
impl Job {
- pub fn new<F: Future<Output = anyhow::Result<()>> + Send + 'static>(f: F) -> Job {
- Job {
+ pub fn new<F: Future<Output = anyhow::Result<()>> + Send + 'static>(f: F) -> Self {
+ Self {
future: f.map(|r| r.map(|()| None)).boxed(),
wait: false,
}
@@ -31,22 +31,22 @@ impl Job {
pub fn with_callback<F: Future<Output = anyhow::Result<Callback>> + Send + 'static>(
f: F,
- ) -> Job {
- Job {
+ ) -> Self {
+ Self {
future: f.map(|r| r.map(Some)).boxed(),
wait: false,
}
}
- pub fn wait_before_exiting(mut self) -> Job {
+ pub fn wait_before_exiting(mut self) -> Self {
self.wait = true;
self
}
}
impl Jobs {
- pub fn new() -> Jobs {
- Jobs::default()
+ pub fn new() -> Self {
+ Self::default()
}
pub fn spawn<F: Future<Output = anyhow::Result<()>> + Send + 'static>(&mut self, f: F) {
diff --git a/helix-term/src/keymap.rs b/helix-term/src/keymap.rs
index 49f8469a..e5990d72 100644
--- a/helix-term/src/keymap.rs
+++ b/helix-term/src/keymap.rs
@@ -344,7 +344,7 @@ pub struct Keymap {
impl Keymap {
pub fn new(root: KeyTrie) -> Self {
- Keymap {
+ Self {
root,
state: Vec::new(),
sticky: None,
@@ -368,7 +368,7 @@ impl Keymap {
/// key cancels pending keystrokes. If there are no pending keystrokes but a
/// sticky node is in use, it will be cleared.
pub fn get(&mut self, key: KeyEvent) -> KeymapResult {
- if let key!(Esc) = key {
+ if key!(Esc) == key {
if !self.state.is_empty() {
return KeymapResult::new(
// Note that Esc is not included here
@@ -477,7 +477,7 @@ impl DerefMut for Keymaps {
}
impl Default for Keymaps {
- fn default() -> Keymaps {
+ fn default() -> Self {
let normal = keymap!({ "Normal mode"
"h" | "left" => move_char_left,
"j" | "down" => move_line_down,
@@ -552,6 +552,11 @@ impl Default for Keymaps {
"S" => split_selection,
";" => collapse_selection,
"A-;" => flip_selections,
+ "A-k" => expand_selection,
+ "A-j" => shrink_selection,
+ "A-h" => select_prev_sibling,
+ "A-l" => select_next_sibling,
+
"%" => select_all,
"x" => extend_line,
"X" => extend_to_line_bounds,
@@ -569,13 +574,11 @@ impl Default for Keymaps {
"d" => goto_prev_diag,
"D" => goto_first_diag,
"space" => add_newline_above,
- "o" => shrink_selection,
},
"]" => { "Right bracket"
"d" => goto_next_diag,
"D" => goto_last_diag,
"space" => add_newline_below,
- "o" => expand_selection,
},
"/" => search,
@@ -751,8 +754,10 @@ impl Default for Keymaps {
"del" => delete_char_forward,
"C-d" => delete_char_forward,
"ret" => insert_newline,
+ "C-j" => insert_newline,
"tab" => insert_tab,
"C-w" => delete_word_backward,
+ "A-backspace" => delete_word_backward,
"A-d" => delete_word_forward,
"left" => move_char_left,
@@ -767,6 +772,8 @@ impl Default for Keymaps {
"A-left" => move_prev_word_end,
"A-f" => move_next_word_start,
"A-right" => move_next_word_start,
+ "A-<" => goto_file_start,
+ "A->" => goto_file_end,
"pageup" => page_up,
"pagedown" => page_down,
"home" => goto_line_start,
@@ -780,7 +787,7 @@ impl Default for Keymaps {
"C-x" => completion,
"C-r" => insert_register,
});
- Keymaps(hashmap!(
+ Self(hashmap!(
Mode::Normal => Keymap::new(normal),
Mode::Select => Keymap::new(select),
Mode::Insert => Keymap::new(insert),
diff --git a/helix-term/src/ui/completion.rs b/helix-term/src/ui/completion.rs
index 274330c0..c9ed3b4a 100644
--- a/helix-term/src/ui/completion.rs
+++ b/helix-term/src/ui/completion.rs
@@ -158,7 +158,7 @@ impl Completion {
let resolved_additional_text_edits = if item.additional_text_edits.is_some() {
None
} else {
- Completion::resolve_completion_item(doc, item.clone())
+ Self::resolve_completion_item(doc, item.clone())
.and_then(|item| item.additional_text_edits)
};
diff --git a/helix-term/src/ui/editor.rs b/helix-term/src/ui/editor.rs
index 5b7e9075..040e746d 100644
--- a/helix-term/src/ui/editor.rs
+++ b/helix-term/src/ui/editor.rs
@@ -8,7 +8,9 @@ use crate::{
use helix_core::{
coords_at_pos, encoding,
- graphemes::{ensure_grapheme_boundary_next, next_grapheme_boundary, prev_grapheme_boundary},
+ graphemes::{
+ ensure_grapheme_boundary_next_byte, next_grapheme_boundary, prev_grapheme_boundary,
+ },
movement::Direction,
syntax::{self, HighlightEvent},
unicode::segmentation::UnicodeSegmentation,
@@ -69,13 +71,12 @@ impl EditorView {
surface: &mut Surface,
theme: &Theme,
is_focused: bool,
- loader: &syntax::Loader,
config: &helix_view::editor::Config,
) {
let inner = view.inner_area();
let area = view.area;
- let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, loader);
+ let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme);
let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme));
let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused {
Box::new(syntax::merge(
@@ -98,8 +99,7 @@ impl EditorView {
let x = area.right();
let border_style = theme.get("ui.window");
for y in area.top()..area.bottom() {
- surface
- .get_mut(x, y)
+ surface[(x, y)]
.set_symbol(tui::symbols::line::VERTICAL)
//.set_symbol(" ")
.set_style(border_style);
@@ -123,8 +123,7 @@ impl EditorView {
doc: &'doc Document,
offset: Position,
height: u16,
- theme: &Theme,
- loader: &syntax::Loader,
+ _theme: &Theme,
) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> {
let text = doc.text().slice(..);
let last_line = std::cmp::min(
@@ -144,25 +143,8 @@ impl EditorView {
// TODO: range doesn't actually restrict source, just highlight range
let highlights = match doc.syntax() {
Some(syntax) => {
- let scopes = theme.scopes();
syntax
- .highlight_iter(text.slice(..), Some(range), None, |language| {
- loader.language_configuration_for_injection_string(language)
- .and_then(|language_config| {
- let config = language_config.highlight_config(scopes)?;
- let config_ref = config.as_ref();
- // SAFETY: the referenced `HighlightConfiguration` behind
- // the `Arc` is guaranteed to remain valid throughout the
- // duration of the highlight.
- let config_ref = unsafe {
- std::mem::transmute::<
- _,
- &'static syntax::HighlightConfiguration,
- >(config_ref)
- };
- Some(config_ref)
- })
- })
+ .highlight_iter(text.slice(..), Some(range), None)
.map(|event| event.unwrap())
.collect() // TODO: we collect here to avoid holding the lock, fix later
}
@@ -175,8 +157,8 @@ impl EditorView {
.map(move |event| match event {
// convert byte offsets to char offset
HighlightEvent::Source { start, end } => {
- let start = ensure_grapheme_boundary_next(text, text.byte_to_char(start));
- let end = ensure_grapheme_boundary_next(text, text.byte_to_char(end));
+ let start = text.byte_to_char(ensure_grapheme_boundary_next_byte(text, start));
+ let end = text.byte_to_char(ensure_grapheme_boundary_next_byte(text, end));
HighlightEvent::Source { start, end }
}
event => event,
@@ -306,6 +288,10 @@ impl EditorView {
let text_style = theme.get("ui.text");
+ // It's slightly more efficient to produce a full RopeSlice from the Rope once and then slice that a bunch
+ // of times, rather than always calling Rope::slice/get_slice (it will internally always hit RSEnum::Light).
+ let text = text.slice(..);
+
'outer: for event in highlights {
match event {
HighlightEvent::HighlightStart(span) => {
@@ -411,8 +397,7 @@ impl EditorView {
.add_modifier(Modifier::DIM)
});
- surface
- .get_mut(viewport.x + pos.col as u16, viewport.y + pos.row as u16)
+ surface[(viewport.x + pos.col as u16, viewport.y + pos.row as u16)]
.set_style(style);
}
}
@@ -1090,7 +1075,6 @@ impl Component for EditorView {
for (view, is_focused) in cx.editor.tree.views() {
let doc = cx.editor.document(view.doc).unwrap();
- let loader = &cx.editor.syn_loader;
self.render_view(
doc,
view,
@@ -1098,7 +1082,6 @@ impl Component for EditorView {
surface,
&cx.editor.theme,
is_focused,
- loader,
&cx.editor.config,
);
}
diff --git a/helix-term/src/ui/markdown.rs b/helix-term/src/ui/markdown.rs
index 46657fb9..00da2c11 100644
--- a/helix-term/src/ui/markdown.rs
+++ b/helix-term/src/ui/markdown.rs
@@ -38,7 +38,7 @@ impl Markdown {
fn parse<'a>(
contents: &'a str,
theme: Option<&Theme>,
- loader: &syntax::Loader,
+ loader: Arc<syntax::Loader>,
) -> tui::text::Text<'a> {
// // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}}
// let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect<B: FromIterator<Self::Item>>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result<Collection<T>, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec<i32> = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec<i32>` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque<T>`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```";
@@ -77,7 +77,9 @@ fn parse<'a>(
Event::End(tag) => {
tags.pop();
match tag {
- Tag::Heading(_) | Tag::Paragraph | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => {
+ Tag::Heading(_, _, _)
+ | Tag::Paragraph
+ | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => {
// whenever code block or paragraph closes, new line
let spans = std::mem::take(&mut spans);
if !spans.is_empty() {
@@ -96,14 +98,13 @@ fn parse<'a>(
let syntax = loader
.language_configuration_for_injection_string(language)
.and_then(|config| config.highlight_config(theme.scopes()))
- .map(|config| Syntax::new(&rope, config));
+ .map(|config| Syntax::new(&rope, config, loader.clone()));
if let Some(syntax) = syntax {
// if we have a syntax available, highlight_iter and generate spans
let mut highlights = Vec::new();
- for event in syntax.highlight_iter(rope.slice(..), None, None, |_| None)
- {
+ for event in syntax.highlight_iter(rope.slice(..), None, None) {
match event.unwrap() {
HighlightEvent::HighlightStart(span) => {
highlights.push(span);
@@ -158,7 +159,7 @@ fn parse<'a>(
lines.push(Spans::from(span));
}
}
- } else if let Some(Tag::Heading(_)) = tags.last() {
+ } else if let Some(Tag::Heading(_, _, _)) = tags.last() {
let mut span = to_span(text);
span.style = heading_style;
spans.push(span);
@@ -209,7 +210,11 @@ impl Component for Markdown {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
use tui::widgets::{Paragraph, Widget, Wrap};
- let text = parse(&self.contents, Some(&cx.editor.theme), &self.config_loader);
+ let text = parse(
+ &self.contents,
+ Some(&cx.editor.theme),
+ self.config_loader.clone(),
+ );
let par = Paragraph::new(text)
.wrap(Wrap { trim: false })
@@ -227,7 +232,7 @@ impl Component for Markdown {
if padding >= viewport.1 || padding >= viewport.0 {
return None;
}
- let contents = parse(&self.contents, None, &self.config_loader);
+ let contents = parse(&self.contents, None, self.config_loader.clone());
// TODO: account for tab width
let max_text_width = (viewport.0 - padding).min(120);
let mut text_width = 0;
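The `Tag::Heading` arms above now destructure three fields because pulldown-cmark 0.9 extended the heading tag to carry the level plus an optional id and classes. A minimal sketch of matching on it, assuming that crate version; the `count_h1` helper is purely illustrative:

```rust
use pulldown_cmark::{Event, HeadingLevel, Parser, Tag};

// Count `#` headings by matching the three-field Heading variant (level, id, classes).
fn count_h1(src: &str) -> usize {
    Parser::new(src)
        .filter(|event| matches!(event, Event::Start(Tag::Heading(HeadingLevel::H1, _, _))))
        .count()
}
```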
diff --git a/helix-term/src/ui/menu.rs b/helix-term/src/ui/menu.rs
index 69053db3..9758732c 100644
--- a/helix-term/src/ui/menu.rs
+++ b/helix-term/src/ui/menu.rs
@@ -301,7 +301,7 @@ impl<T: Item + 'static> Component for Menu<T> {
let is_marked = i >= scroll_line && i < scroll_line + scroll_height;
if is_marked {
- let cell = surface.get_mut(area.x + area.width - 2, area.y + i as u16);
+ let cell = &mut surface[(area.x + area.width - 2, area.y + i as u16)];
cell.set_symbol("▐ ");
// cell.set_style(selected);
// cell.set_style(if is_marked { selected } else { style });
diff --git a/helix-term/src/ui/picker.rs b/helix-term/src/ui/picker.rs
index 1ef94df0..e9692809 100644
--- a/helix-term/src/ui/picker.rs
+++ b/helix-term/src/ui/picker.rs
@@ -139,7 +139,7 @@ impl<T> FilePicker<T> {
(size, _) if size > MAX_FILE_SIZE_FOR_PREVIEW => CachedPreview::LargeFile,
_ => {
// TODO: enable syntax highlighting; blocked by async rendering
- Document::open(path, None, Some(&editor.theme), None)
+ Document::open(path, None, None)
.map(|doc| CachedPreview::Document(Box::new(doc)))
.unwrap_or(CachedPreview::NotFound)
}
@@ -159,6 +159,7 @@ impl<T: 'static> Component for FilePicker<T> {
// |picker | | |
// | | | |
// +---------+ +---------+
+
let render_preview = area.width > MIN_SCREEN_WIDTH_FOR_PREVIEW;
let area = inner_rect(area);
// -- Render the frame:
@@ -220,13 +221,8 @@ impl<T: 'static> Component for FilePicker<T> {
let offset = Position::new(first_line, 0);
- let highlights = EditorView::doc_syntax_highlights(
- doc,
- offset,
- area.height,
- &cx.editor.theme,
- &cx.editor.syn_loader,
- );
+ let highlights =
+ EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme);
EditorView::render_text_highlights(
doc,
offset,
@@ -492,10 +488,9 @@ impl<T: 'static> Component for Picker<T> {
let sep_style = Style::default().fg(Color::Rgb(90, 89, 119));
let borders = BorderType::line_symbols(BorderType::Plain);
for x in inner.left()..inner.right() {
- surface
- .get_mut(x, inner.y + 1)
- .set_symbol(borders.horizontal)
- .set_style(sep_style);
+ if let Some(cell) = surface.get_mut(x, inner.y + 1) {
+ cell.set_symbol(borders.horizontal).set_style(sep_style);
+ }
}
// -- Render the contents:
@@ -505,7 +500,7 @@ impl<T: 'static> Component for Picker<T> {
let selected = cx.editor.theme.get("ui.text.focus");
let rows = inner.height;
- let offset = self.cursor / (rows as usize) * (rows as usize);
+ let offset = self.cursor / std::cmp::max(1, rows as usize) * (rows as usize);
let files = self.matches.iter().skip(offset).map(|(index, _score)| {
(index, self.options.get(*index).unwrap()) // get_unchecked
@@ -513,7 +508,7 @@ impl<T: 'static> Component for Picker<T> {
for (i, (_index, option)) in files.take(rows as usize).enumerate() {
if i == (self.cursor - offset) {
- surface.set_string(inner.x - 2, inner.y + i as u16, ">", selected);
+ surface.set_string(inner.x.saturating_sub(2), inner.y + i as u16, ">", selected);
}
surface.set_string_truncated(
diff --git a/helix-term/src/ui/prompt.rs b/helix-term/src/ui/prompt.rs
index 0202de23..4c4fef26 100644
--- a/helix-term/src/ui/prompt.rs
+++ b/helix-term/src/ui/prompt.rs
@@ -330,7 +330,7 @@ impl Prompt {
.max(BASE_WIDTH);
let cols = std::cmp::max(1, area.width / max_len);
- let col_width = (area.width - (cols)) / cols;
+ let col_width = (area.width.saturating_sub(cols)) / cols;
let height = ((self.completion.len() as u16 + cols - 1) / cols)
.min(10) // at most 10 rows (or less)
diff --git a/helix-tui/src/backend/test.rs b/helix-tui/src/backend/test.rs
index 3f56b49c..52474148 100644
--- a/helix-tui/src/backend/test.rs
+++ b/helix-tui/src/backend/test.rs
@@ -111,8 +111,7 @@ impl Backend for TestBackend {
I: Iterator<Item = (u16, u16, &'a Cell)>,
{
for (x, y, c) in content {
- let cell = self.buffer.get_mut(x, y);
- *cell = c.clone();
+ self.buffer[(x, y)] = c.clone();
}
Ok(())
}
diff --git a/helix-tui/src/buffer.rs b/helix-tui/src/buffer.rs
index c49a0200..f8673e43 100644
--- a/helix-tui/src/buffer.rs
+++ b/helix-tui/src/buffer.rs
@@ -90,17 +90,17 @@ impl Default for Cell {
/// use helix_view::graphics::{Rect, Color, Style, Modifier};
///
/// let mut buf = Buffer::empty(Rect{x: 0, y: 0, width: 10, height: 5});
-/// buf.get_mut(0, 2).set_symbol("x");
-/// assert_eq!(buf.get(0, 2).symbol, "x");
+/// buf[(0, 2)].set_symbol("x");
+/// assert_eq!(buf[(0, 2)].symbol, "x");
/// buf.set_string(3, 0, "string", Style::default().fg(Color::Red).bg(Color::White));
-/// assert_eq!(buf.get(5, 0), &Cell{
+/// assert_eq!(buf[(5, 0)], Cell{
/// symbol: String::from("r"),
/// fg: Color::Red,
/// bg: Color::White,
/// modifier: Modifier::empty()
/// });
-/// buf.get_mut(5, 0).set_char('x');
-/// assert_eq!(buf.get(5, 0).symbol, "x");
+/// buf[(5, 0)].set_char('x');
+/// assert_eq!(buf[(5, 0)].symbol, "x");
/// ```
#[derive(Debug, Default, Clone, PartialEq)]
pub struct Buffer {
@@ -162,15 +162,38 @@ impl Buffer {
}
/// Returns a reference to Cell at the given coordinates
- pub fn get(&self, x: u16, y: u16) -> &Cell {
- let i = self.index_of(x, y);
- &self.content[i]
+ pub fn get(&self, x: u16, y: u16) -> Option<&Cell> {
+ self.index_of_opt(x, y).map(|i| &self.content[i])
}
/// Returns a mutable reference to Cell at the given coordinates
- pub fn get_mut(&mut self, x: u16, y: u16) -> &mut Cell {
- let i = self.index_of(x, y);
- &mut self.content[i]
+ pub fn get_mut(&mut self, x: u16, y: u16) -> Option<&mut Cell> {
+ self.index_of_opt(x, y).map(|i| &mut self.content[i])
+ }
+
+ /// Tells whether the global (x, y) coordinates are inside the Buffer's area.
+ ///
+ /// Global coordinates are offset by the Buffer's area offset (`x`/`y`).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # use helix_tui::buffer::Buffer;
+ /// # use helix_view::graphics::Rect;
+ /// let rect = Rect::new(200, 100, 10, 10);
+ /// let buffer = Buffer::empty(rect);
+ /// // Global coordinates inside the Buffer's area
+ /// assert!(buffer.in_bounds(209, 100));
+ /// // Global coordinates outside the Buffer's area
+ /// assert!(!buffer.in_bounds(210, 100));
+ /// ```
+ pub fn in_bounds(&self, x: u16, y: u16) -> bool {
+ x >= self.area.left()
+ && x < self.area.right()
+ && y >= self.area.top()
+ && y < self.area.bottom()
}
/// Returns the index in the Vec<Cell> for the given global (x, y) coordinates.
@@ -184,7 +207,7 @@ impl Buffer {
/// # use helix_view::graphics::Rect;
/// let rect = Rect::new(200, 100, 10, 10);
/// let buffer = Buffer::empty(rect);
- /// // Global coordinates to the top corner of this buffer's area
+ /// // Global coordinates to the top corner of this Buffer's area
/// assert_eq!(buffer.index_of(200, 100), 0);
/// ```
///
@@ -193,10 +216,7 @@ impl Buffer {
/// Panics when given a coordinate that is outside of this Buffer's area.
pub fn index_of(&self, x: u16, y: u16) -> usize {
debug_assert!(
- x >= self.area.left()
- && x < self.area.right()
- && y >= self.area.top()
- && y < self.area.bottom(),
+ self.in_bounds(x, y),
"Trying to access position outside the buffer: x={}, y={}, area={:?}",
x,
y,
@@ -205,6 +225,16 @@ impl Buffer {
((y - self.area.y) * self.area.width + (x - self.area.x)) as usize
}
+ /// Returns the index in the Vec<Cell> for the given global (x, y) coordinates,
+ /// or `None` if the coordinates are outside the buffer's area.
+ fn index_of_opt(&self, x: u16, y: u16) -> Option<usize> {
+ if self.in_bounds(x, y) {
+ Some(self.index_of(x, y))
+ } else {
+ None
+ }
+ }
+
/// Returns the (global) coordinates of a cell given its index
///
/// Global coordinates are offset by the Buffer's area offset (`x`/`y`).
@@ -278,6 +308,11 @@ impl Buffer {
where
S: AsRef<str>,
{
+ // prevent panic if out of range
+ if !self.in_bounds(x, y) || width == 0 {
+ return (x, y);
+ }
+
let mut index = self.index_of(x, y);
let mut x_offset = x as usize;
let width = if ellipsis { width - 1 } else { width };
@@ -372,7 +407,7 @@ impl Buffer {
pub fn set_background(&mut self, area: Rect, color: Color) {
for y in area.top()..area.bottom() {
for x in area.left()..area.right() {
- self.get_mut(x, y).set_bg(color);
+ self[(x, y)].set_bg(color);
}
}
}
@@ -380,7 +415,7 @@ impl Buffer {
pub fn set_style(&mut self, area: Rect, style: Style) {
for y in area.top()..area.bottom() {
for x in area.left()..area.right() {
- self.get_mut(x, y).set_style(style);
+ self[(x, y)].set_style(style);
}
}
}
@@ -408,7 +443,7 @@ impl Buffer {
pub fn clear(&mut self, area: Rect) {
for x in area.left()..area.right() {
for y in area.top()..area.bottom() {
- self.get_mut(x, y).reset();
+ self[(x, y)].reset();
}
}
}
@@ -417,7 +452,7 @@ impl Buffer {
pub fn clear_with(&mut self, area: Rect, style: Style) {
for x in area.left()..area.right() {
for y in area.top()..area.bottom() {
- let cell = self.get_mut(x, y);
+ let cell = &mut self[(x, y)];
cell.reset();
cell.set_style(style);
}
@@ -500,15 +535,32 @@ impl Buffer {
updates.push((x, y, &next_buffer[i]));
}
- to_skip = current.symbol.width().saturating_sub(1);
+ let current_width = current.symbol.width();
+ to_skip = current_width.saturating_sub(1);
- let affected_width = std::cmp::max(current.symbol.width(), previous.symbol.width());
+ let affected_width = std::cmp::max(current_width, previous.symbol.width());
invalidated = std::cmp::max(affected_width, invalidated).saturating_sub(1);
}
updates
}
}
+impl std::ops::Index<(u16, u16)> for Buffer {
+ type Output = Cell;
+
+ fn index(&self, (x, y): (u16, u16)) -> &Self::Output {
+ let i = self.index_of(x, y);
+ &self.content[i]
+ }
+}
+
+impl std::ops::IndexMut<(u16, u16)> for Buffer {
+ fn index_mut(&mut self, (x, y): (u16, u16)) -> &mut Self::Output {
+ let i = self.index_of(x, y);
+ &mut self.content[i]
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
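Taken together, the buffer changes above split cell access into a panicking and a fallible path. A short usage sketch based only on the calls shown in these hunks (the function name is illustrative):

```rust
use helix_tui::buffer::Buffer;
use helix_view::graphics::Rect;

fn buffer_access_demo() {
    let mut buf = Buffer::empty(Rect { x: 0, y: 0, width: 10, height: 5 });

    // Index/IndexMut keep the old panicking behaviour for known-good coordinates.
    buf[(0, 2)].set_symbol("x");
    assert_eq!(buf[(0, 2)].symbol, "x");

    // get_mut now returns Option, so render code can guard instead of panicking.
    if let Some(cell) = buf.get_mut(9, 4) {
        cell.set_symbol("y");
    }
    assert!(buf.get_mut(10, 4).is_none());

    // in_bounds makes the check explicit; indexing asserts the same condition.
    assert!(buf.in_bounds(9, 4));
    assert!(!buf.in_bounds(0, 5));
}
```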
diff --git a/helix-tui/src/widgets/block.rs b/helix-tui/src/widgets/block.rs
index 648c2d7e..26223c3e 100644
--- a/helix-tui/src/widgets/block.rs
+++ b/helix-tui/src/widgets/block.rs
@@ -15,12 +15,12 @@ pub enum BorderType {
}
impl BorderType {
- pub fn line_symbols(border_type: BorderType) -> line::Set {
+ pub fn line_symbols(border_type: Self) -> line::Set {
match border_type {
- BorderType::Plain => line::NORMAL,
- BorderType::Rounded => line::ROUNDED,
- BorderType::Double => line::DOUBLE,
- BorderType::Thick => line::THICK,
+ Self::Plain => line::NORMAL,
+ Self::Rounded => line::ROUNDED,
+ Self::Double => line::DOUBLE,
+ Self::Thick => line::THICK,
}
}
}
@@ -140,14 +140,14 @@ impl<'a> Widget for Block<'a> {
// Sides
if self.borders.intersects(Borders::LEFT) {
for y in area.top()..area.bottom() {
- buf.get_mut(area.left(), y)
+ buf[(area.left(), y)]
.set_symbol(symbols.vertical)
.set_style(self.border_style);
}
}
if self.borders.intersects(Borders::TOP) {
for x in area.left()..area.right() {
- buf.get_mut(x, area.top())
+ buf[(x, area.top())]
.set_symbol(symbols.horizontal)
.set_style(self.border_style);
}
@@ -155,7 +155,7 @@ impl<'a> Widget for Block<'a> {
if self.borders.intersects(Borders::RIGHT) {
let x = area.right() - 1;
for y in area.top()..area.bottom() {
- buf.get_mut(x, y)
+ buf[(x, y)]
.set_symbol(symbols.vertical)
.set_style(self.border_style);
}
@@ -163,7 +163,7 @@ impl<'a> Widget for Block<'a> {
if self.borders.intersects(Borders::BOTTOM) {
let y = area.bottom() - 1;
for x in area.left()..area.right() {
- buf.get_mut(x, y)
+ buf[(x, y)]
.set_symbol(symbols.horizontal)
.set_style(self.border_style);
}
@@ -171,22 +171,22 @@ impl<'a> Widget for Block<'a> {
// Corners
if self.borders.contains(Borders::RIGHT | Borders::BOTTOM) {
- buf.get_mut(area.right() - 1, area.bottom() - 1)
+ buf[(area.right() - 1, area.bottom() - 1)]
.set_symbol(symbols.bottom_right)
.set_style(self.border_style);
}
if self.borders.contains(Borders::RIGHT | Borders::TOP) {
- buf.get_mut(area.right() - 1, area.top())
+ buf[(area.right() - 1, area.top())]
.set_symbol(symbols.top_right)
.set_style(self.border_style);
}
if self.borders.contains(Borders::LEFT | Borders::BOTTOM) {
- buf.get_mut(area.left(), area.bottom() - 1)
+ buf[(area.left(), area.bottom() - 1)]
.set_symbol(symbols.bottom_left)
.set_style(self.border_style);
}
if self.borders.contains(Borders::LEFT | Borders::TOP) {
- buf.get_mut(area.left(), area.top())
+ buf[(area.left(), area.top())]
.set_symbol(symbols.top_left)
.set_style(self.border_style);
}
diff --git a/helix-tui/src/widgets/paragraph.rs b/helix-tui/src/widgets/paragraph.rs
index fee35d25..4e839162 100644
--- a/helix-tui/src/widgets/paragraph.rs
+++ b/helix-tui/src/widgets/paragraph.rs
@@ -166,7 +166,7 @@ impl<'a> Widget for Paragraph<'a> {
Box::new(WordWrapper::new(&mut styled, text_area.width, trim))
} else {
let mut line_composer = Box::new(LineTruncator::new(&mut styled, text_area.width));
- if let Alignment::Left = self.alignment {
+ if self.alignment == Alignment::Left {
line_composer.set_horizontal_offset(self.scroll.1);
}
line_composer
@@ -176,7 +176,7 @@ impl<'a> Widget for Paragraph<'a> {
if y >= self.scroll.0 {
let mut x = get_line_offset(current_line_width, text_area.width, self.alignment);
for StyledGrapheme { symbol, style } in current_line {
- buf.get_mut(text_area.left() + x, text_area.top() + y - self.scroll.0)
+ buf[(text_area.left() + x, text_area.top() + y - self.scroll.0)]
.set_symbol(if symbol.is_empty() {
// If the symbol is empty, the character rendered there last time would
// remain on the line, so overwrite it instead. It's a quick fix.
diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml
index 3c8866fc..b2a9d42e 100644
--- a/helix-view/Cargo.toml
+++ b/helix-view/Cargo.toml
@@ -38,7 +38,7 @@ log = "~0.4"
which = "4.2"
[target.'cfg(windows)'.dependencies]
-clipboard-win = { version = "4.2", features = ["std"] }
+clipboard-win = { version = "4.3", features = ["std"] }
[dev-dependencies]
helix-tui = { path = "../helix-tui" }
diff --git a/helix-view/src/document.rs b/helix-view/src/document.rs
index 9185e483..2c4fbbfb 100644
--- a/helix-view/src/document.rs
+++ b/helix-view/src/document.rs
@@ -20,7 +20,7 @@ use helix_core::{
};
use helix_lsp::util::LspFormatting;
-use crate::{DocumentId, Theme, ViewId};
+use crate::{DocumentId, ViewId};
/// 8kB of buffer space for encoding and decoding `Rope`s.
const BUF_SIZE: usize = 8192;
@@ -368,8 +368,7 @@ impl Document {
pub fn open(
path: &Path,
encoding: Option<&'static encoding::Encoding>,
- theme: Option<&Theme>,
- config_loader: Option<&syntax::Loader>,
+ config_loader: Option<Arc<syntax::Loader>>,
) -> Result<Self, Error> {
// Open the file if it exists, otherwise assume it is a new file (and thus empty).
let (rope, encoding) = if path.exists() {
@@ -386,7 +385,7 @@ impl Document {
// set the path and try detecting the language
doc.set_path(Some(path))?;
if let Some(loader) = config_loader {
- doc.detect_language(theme, loader);
+ doc.detect_language(loader);
}
doc.detect_indent_and_line_ending();
@@ -508,12 +507,12 @@ impl Document {
}
/// Detect the programming language based on the file type.
- pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: &syntax::Loader) {
+ pub fn detect_language(&mut self, config_loader: Arc<syntax::Loader>) {
if let Some(path) = &self.path {
let language_config = config_loader
.language_config_for_file_name(path)
.or_else(|| config_loader.language_config_for_shebang(self.text()));
- self.set_language(theme, language_config);
+ self.set_language(language_config, Some(config_loader));
}
}
@@ -587,15 +586,13 @@ impl Document {
/// if it exists.
pub fn set_language(
&mut self,
- theme: Option<&Theme>,
language_config: Option<Arc<helix_core::syntax::LanguageConfiguration>>,
+ loader: Option<Arc<helix_core::syntax::Loader>>,
) {
- if let Some(language_config) = language_config {
- let scopes = theme.map(|theme| theme.scopes()).unwrap_or(&[]);
- if let Some(highlight_config) = language_config.highlight_config(scopes) {
- let syntax = Syntax::new(&self.text, highlight_config);
+ if let (Some(language_config), Some(loader)) = (language_config, loader) {
+ if let Some(highlight_config) = language_config.highlight_config(&loader.scopes()) {
+ let syntax = Syntax::new(&self.text, highlight_config, loader);
self.syntax = Some(syntax);
- // TODO: config.configure(scopes) is now delayed, is that ok?
}
self.language = Some(language_config);
@@ -607,15 +604,10 @@ impl Document {
/// Set the programming language for the file if you know the name (scope) but don't have the
/// [`syntax::LanguageConfiguration`] for it.
- pub fn set_language2(
- &mut self,
- scope: &str,
- theme: Option<&Theme>,
- config_loader: Arc<syntax::Loader>,
- ) {
+ pub fn set_language2(&mut self, scope: &str, config_loader: Arc<syntax::Loader>) {
let language_config = config_loader.language_config_for_scope(scope);
- self.set_language(theme, language_config);
+ self.set_language(language_config, Some(config_loader));
}
/// Set the LSP.
@@ -854,6 +846,24 @@ impl Document {
.map(|language| language.scope.as_str())
}
+ /// Language ID for the document. Either the `language-id` from the
+ /// `language-server` configuration, or the document language if no
+ /// `language-id` has been specified.
+ pub fn language_id(&self) -> Option<&str> {
+ self.language
+ .as_ref()
+ .and_then(|config| config.language_server.as_ref())
+ .and_then(|lsp_config| lsp_config.language_id.as_ref())
+ .map_or_else(
+ || {
+ self.language()
+ .and_then(|s| s.rsplit_once('.'))
+ .map(|(_, language_id)| language_id)
+ },
+ |language_id| Some(language_id.as_str()),
+ )
+ }
+
/// Corresponding [`LanguageConfiguration`].
pub fn language_config(&self) -> Option<&LanguageConfiguration> {
self.language.as_deref()
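A minimal sketch of the new Document call shape, assuming only the signatures visible in this hunk: `open` takes an `Option<Arc<syntax::Loader>>` instead of a theme plus loader reference, and `detect_language` takes the `Arc` directly. The helper name is illustrative:

```rust
use std::{path::Path, sync::Arc};

use anyhow::Result;
use helix_core::syntax;
use helix_view::Document;

fn open_with_loader(path: &Path, loader: Arc<syntax::Loader>) -> Result<Document> {
    // Highlight scopes now live on the shared loader, so no theme is threaded through.
    let mut doc = Document::open(path, None, Some(loader.clone()))?;
    // Re-running detection (as refresh_language_server does below) only needs the loader.
    doc.detect_language(loader);
    Ok(doc)
}
```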
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index eef7520e..82ef0cdc 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -324,13 +324,7 @@ impl Editor {
}
let scopes = theme.scopes();
- for config in self
- .syn_loader
- .language_configs_iter()
- .filter(|cfg| cfg.is_highlight_initialized())
- {
- config.reconfigure(scopes);
- }
+ self.syn_loader.set_scopes(scopes.to_vec());
self.theme = theme;
self._refresh();
@@ -339,7 +333,7 @@ impl Editor {
/// Refreshes the language server for a given document
pub fn refresh_language_server(&mut self, doc_id: DocumentId) -> Option<()> {
let doc = self.documents.get_mut(&doc_id)?;
- doc.detect_language(Some(&self.theme), &self.syn_loader);
+ doc.detect_language(self.syn_loader.clone());
Self::launch_language_server(&mut self.language_servers, doc)
}
@@ -363,11 +357,8 @@ impl Editor {
if let Some(language_server) = doc.language_server() {
tokio::spawn(language_server.text_document_did_close(doc.identifier()));
}
- let language_id = doc
- .language()
- .and_then(|s| s.split('.').last()) // source.rust
- .map(ToOwned::to_owned)
- .unwrap_or_default();
+
+ let language_id = doc.language_id().map(ToOwned::to_owned).unwrap_or_default();
// TODO: this now races with on_init code if the init happens too quickly
tokio::spawn(language_server.text_document_did_open(
@@ -521,7 +512,7 @@ impl Editor {
let id = if let Some(id) = id {
id
} else {
- let mut doc = Document::open(&path, None, Some(&self.theme), Some(&self.syn_loader))?;
+ let mut doc = Document::open(&path, None, Some(self.syn_loader.clone()))?;
let _ = Self::launch_language_server(&mut self.language_servers, &mut doc);
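And the editor-side counterpart in sketch form: on a theme change the scope names are pushed into the shared loader once, replacing the per-language reconfigure loop removed above. `EditorLike` is a hypothetical stand-in for the Editor fields involved:

```rust
use std::sync::Arc;

use helix_core::syntax::Loader;
use helix_view::Theme;

// Hypothetical stand-in for the Editor fields touched in this hunk.
struct EditorLike {
    syn_loader: Arc<Loader>,
    theme: Theme,
}

impl EditorLike {
    fn set_theme(&mut self, theme: Theme) {
        // Highlight configs read their scopes from the loader lazily, so a single
        // set_scopes call replaces reconfiguring each initialized language config.
        self.syn_loader.set_scopes(theme.scopes().to_vec());
        self.theme = theme;
    }
}
```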
diff --git a/helix-view/src/graphics.rs b/helix-view/src/graphics.rs
index d75cde82..6d0a9292 100644
--- a/helix-view/src/graphics.rs
+++ b/helix-view/src/graphics.rs
@@ -331,7 +331,7 @@ impl FromStr for Modifier {
/// ];
/// let mut buffer = Buffer::empty(Rect::new(0, 0, 1, 1));
/// for style in &styles {
-/// buffer.get_mut(0, 0).set_style(*style);
+/// buffer[(0, 0)].set_style(*style);
/// }
/// assert_eq!(
/// Style {
@@ -340,7 +340,7 @@ impl FromStr for Modifier {
/// add_modifier: Modifier::BOLD,
/// sub_modifier: Modifier::empty(),
/// },
-/// buffer.get(0, 0).style(),
+/// buffer[(0, 0)].style(),
/// );
/// ```
///
@@ -356,7 +356,7 @@ impl FromStr for Modifier {
/// ];
/// let mut buffer = Buffer::empty(Rect::new(0, 0, 1, 1));
/// for style in &styles {
-/// buffer.get_mut(0, 0).set_style(*style);
+/// buffer[(0, 0)].set_style(*style);
/// }
/// assert_eq!(
/// Style {
@@ -365,7 +365,7 @@ impl FromStr for Modifier {
/// add_modifier: Modifier::empty(),
/// sub_modifier: Modifier::empty(),
/// },
-/// buffer.get(0, 0).style(),
+/// buffer[(0, 0)].style(),
/// );
/// ```
#[derive(Debug, Clone, Copy, PartialEq)]
diff --git a/helix-view/src/info.rs b/helix-view/src/info.rs
index b5a002fa..73856154 100644
--- a/helix-view/src/info.rs
+++ b/helix-view/src/info.rs
@@ -16,7 +16,7 @@ pub struct Info {
}
impl Info {
- pub fn new(title: &str, body: Vec<(&str, BTreeSet<KeyEvent>)>) -> Info {
+ pub fn new(title: &str, body: Vec<(&str, BTreeSet<KeyEvent>)>) -> Self {
let body = body
.into_iter()
.map(|(desc, events)| {
@@ -38,7 +38,7 @@ impl Info {
);
}
- Info {
+ Self {
title: title.to_string(),
width: text.lines().map(|l| l.width()).max().unwrap() as u16,
height: body.len() as u16,
diff --git a/languages.toml b/languages.toml
index afee20aa..3c328610 100644
--- a/languages.toml
+++ b/languages.toml
@@ -128,7 +128,7 @@ roots = []
comment-token = "//"
# TODO: highlights-jsx, highlights-params
-language-server = { command = "typescript-language-server", args = ["--stdio"] }
+language-server = { command = "typescript-language-server", args = ["--stdio"], language-id = "javascript" }
indent = { tab-width = 2, unit = " " }
[[language]]
@@ -140,7 +140,7 @@ shebangs = []
roots = []
# TODO: highlights-jsx, highlights-params
-language-server = { command = "typescript-language-server", args = ["--stdio"] }
+language-server = { command = "typescript-language-server", args = ["--stdio"], language-id = "typescript"}
indent = { tab-width = 2, unit = " " }
[[language]]
@@ -151,14 +151,14 @@ file-types = ["tsx"]
roots = []
# TODO: highlights-jsx, highlights-params
-language-server = { command = "typescript-language-server", args = ["--stdio"] }
+language-server = { command = "typescript-language-server", args = ["--stdio"], language-id = "typescriptreact" }
indent = { tab-width = 2, unit = " " }
[[language]]
name = "css"
scope = "source.css"
injection-regex = "css"
-file-types = ["css"]
+file-types = ["css", "scss"]
roots = []
indent = { tab-width = 2, unit = " " }
@@ -242,6 +242,17 @@ comment-token = "%"
indent = { tab-width = 4, unit = "\t" }
[[language]]
+name = "lean"
+scope = "source.lean"
+injection-regex = "lean"
+file-types = ["lean"]
+roots = [ "lakefile.lean" ]
+comment-token = "--"
+language-server = { command = "lean", args = [ "--server" ] }
+
+indent = { tab-width = 2, unit = " " }
+
+[[language]]
name = "julia"
scope = "source.julia"
injection-regex = "julia"
@@ -270,7 +281,7 @@ name = "java"
scope = "source.java"
injection-regex = "java"
file-types = ["java"]
-roots = []
+roots = ["pom.xml"]
indent = { tab-width = 4, unit = " " }
[[language]]
@@ -337,15 +348,15 @@ comment-token = "#"
indent = { tab-width = 2, unit = " " }
injection-regex = "yml|yaml"
-# [[language]]
-# name = "haskell"
-# scope = "source.haskell"
-# injection-regex = "haskell"
-# file-types = ["hs"]
-# roots = []
-# comment-token = "--"
-#
-# indent = { tab-width = 2, unit = " " }
+[[language]]
+name = "haskell"
+scope = "source.haskell"
+injection-regex = "haskell"
+file-types = ["hs"]
+roots = []
+comment-token = "--"
+language-server = { command = "haskell-language-server-wrapper", args = ["--lsp"] }
+indent = { tab-width = 2, unit = " " }
[[language]]
name = "zig"
@@ -378,6 +389,7 @@ scope = "source.tsq"
file-types = ["scm"]
roots = []
comment-token = ";"
+injection-regex = "tsq"
indent = { tab-width = 2, unit = " " }
[[language]]
@@ -497,7 +509,7 @@ indent = { tab-width = 2, unit = " " }
[[language]]
name = "scala"
scope = "source.scala"
-roots = ["build.sbt"]
+roots = ["build.sbt", "pom.xml"]
file-types = ["scala", "sbt"]
comment-token = "//"
indent = { tab-width = 2, unit = " " }
@@ -545,3 +557,32 @@ scope = "source.regex"
injection-regex = "regex"
file-types = ["regex"]
roots = []
+
+[[language]]
+name = "git-config"
+scope = "source.gitconfig"
+roots = []
+# TODO: allow specifying file-types as a regex so we can read directory names (e.g. `.git/config`)
+file-types = [".gitmodules", ".gitconfig"]
+injection-regex = "git-config"
+comment-token = "#"
+indent = { tab-width = 4, unit = "\t" }
+
+[[language]]
+name = "graphql"
+scope = "source.graphql"
+injection-regex = "graphql"
+file-types = ["gql", "graphql"]
+roots = []
+indent = { tab-width = 2, unit = " " }
+
+[[language]]
+name = "elm"
+scope = "source.elm"
+injection-regex = "elm"
+file-types = ["elm"]
+roots = ["elm.json"]
+auto-format = true
+comment-token = "--"
+language-server = { command = "elm-language-server" }
+indent = { tab-width = 4, unit = " " }
diff --git a/runtime/queries/bash/injections.scm b/runtime/queries/bash/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/bash/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/c-sharp/injections.scm b/runtime/queries/c-sharp/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/c-sharp/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/css/injections.scm b/runtime/queries/css/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/css/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/dart/injections.scm b/runtime/queries/dart/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/dart/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/elm/highlights.scm b/runtime/queries/elm/highlights.scm
new file mode 100644
index 00000000..3c8fd12d
--- /dev/null
+++ b/runtime/queries/elm/highlights.scm
@@ -0,0 +1,83 @@
+; Keywords
+[
+ "if"
+ "then"
+ "else"
+ "let"
+ "in"
+ ] @keyword.control
+(case) @keyword.control
+(of) @keyword.control
+
+(colon) @keyword.operator
+(backslash) @keyword
+(as) @keyword
+(port) @keyword
+(exposing) @keyword
+(alias) @keyword
+(infix) @keyword
+
+(arrow) @keyword.operator
+(dot) @keyword.operator
+
+(type_annotation(lower_case_identifier) @function)
+(port_annotation(lower_case_identifier) @function)
+(file (value_declaration (function_declaration_left(lower_case_identifier) @function)))
+
+(field name: (lower_case_identifier) @attribute)
+(field_access_expr(lower_case_identifier) @attribute)
+
+(operator_identifier) @keyword.operator
+(eq) @keyword.operator.assignment
+
+[
+ "("
+ ")"
+ "["
+ "]"
+ "{"
+ "}"
+] @punctuation.bracket
+
+"|" @keyword
+"," @punctuation.delimiter
+
+[
+ "|>"
+] @keyword
+
+(import) @keyword.control.import
+(module) @keyword.other
+
+(number_constant_expr) @constant.numeric
+
+(type) @type
+
+(type_declaration(upper_case_identifier) @type)
+(type_ref) @type
+(type_alias_declaration name: (upper_case_identifier) @type)
+
+(union_pattern constructor: (upper_case_qid (upper_case_identifier) @label (dot) (upper_case_identifier) @variable.other.member))
+(union_pattern constructor: (upper_case_qid (upper_case_identifier) @variable.other.member))
+
+(union_variant(upper_case_identifier) @variable.other.member)
+(value_expr name: (value_qid (upper_case_identifier) @label))
+(value_expr (upper_case_qid (upper_case_identifier) @label (dot) (upper_case_identifier) @variable.other.member))
+(value_expr(upper_case_qid(upper_case_identifier)) @variable.other.member)
+
+; comments
+(line_comment) @comment
+(block_comment) @comment
+
+; strings
+(string_escape) @constant.character.escape
+
+(open_quote) @string
+(close_quote) @string
+(regular_string_part) @string
+
+(open_char) @constant.character
+(close_char) @constant.character
diff --git a/runtime/queries/elm/injections.scm b/runtime/queries/elm/injections.scm
new file mode 100644
index 00000000..83f8245c
--- /dev/null
+++ b/runtime/queries/elm/injections.scm
@@ -0,0 +1,4 @@
+; Parse glsl where defined
+
+((glsl_content) @injection.content
+ (#set! injection.language "glsl"))
diff --git a/runtime/queries/elm/locals.scm b/runtime/queries/elm/locals.scm
new file mode 100644
index 00000000..ab103115
--- /dev/null
+++ b/runtime/queries/elm/locals.scm
@@ -0,0 +1,14 @@
+(value_declaration) @local.scope
+(type_alias_declaration) @local.scope
+(type_declaration) @local.scope
+(type_annotation) @local.scope
+(port_annotation) @local.scope
+(infix_declaration) @local.scope
+(let_in_expr) @local.scope
+
+(function_declaration_left (lower_pattern (lower_case_identifier)) @local.definition)
+(function_declaration_left (lower_case_identifier) @local.definition)
+
+(value_expr(value_qid(upper_case_identifier)) @local.reference)
+(value_expr(value_qid(lower_case_identifier)) @local.reference)
+(type_ref (upper_case_qid) @local.reference)
diff --git a/runtime/queries/elm/tags.scm b/runtime/queries/elm/tags.scm
new file mode 100644
index 00000000..03999fb1
--- /dev/null
+++ b/runtime/queries/elm/tags.scm
@@ -0,0 +1,19 @@
+(value_declaration (function_declaration_left (lower_case_identifier) @name)) @definition.function
+
+(function_call_expr (value_expr (value_qid) @name)) @reference.function
+(exposed_value (lower_case_identifier) @name) @reference.function
+(type_annotation ((lower_case_identifier) @name) (colon)) @reference.function
+
+(type_declaration ((upper_case_identifier) @name) ) @definition.type
+
+(type_ref (upper_case_qid (upper_case_identifier) @name)) @reference.type
+(exposed_type (upper_case_identifier) @name) @reference.type
+
+(type_declaration (union_variant (upper_case_identifier) @name)) @definition.union
+
+(value_expr (upper_case_qid (upper_case_identifier) @name)) @reference.union
+
+
+(module_declaration
+ (upper_case_qid (upper_case_identifier)) @name
+) @definition.module
diff --git a/runtime/queries/git-config/highlights.scm b/runtime/queries/git-config/highlights.scm
new file mode 100644
index 00000000..84767edc
--- /dev/null
+++ b/runtime/queries/git-config/highlights.scm
@@ -0,0 +1,27 @@
+((section_name) @keyword.directive
+ (#eq? @keyword.directive "include"))
+
+((section_header
+ (section_name) @keyword.directive
+ (subsection_name))
+ (#eq? @keyword.directive "includeIf"))
+
+(section_name) @markup.heading
+(variable (name) @variable.other.member)
+[(true) (false)] @constant.builtin.boolean
+(integer) @constant.numeric.integer
+
+((string) @string.special.path
+ (#match? @string.special.path "^(~|./|/)"))
+
+[(string) (subsection_name)] @string
+
+[
+ "["
+ "]"
+ "\""
+] @punctuation.bracket
+
+"=" @punctuation.delimiter
+
+(comment) @comment
diff --git a/runtime/queries/go/highlights.scm b/runtime/queries/go/highlights.scm
index 56384d4d..4ff8675b 100644
--- a/runtime/queries/go/highlights.scm
+++ b/runtime/queries/go/highlights.scm
@@ -69,6 +69,7 @@
"|"
"|="
"||"
+ "~"
] @operator
; Keywords
@@ -143,6 +144,9 @@
(false)
] @constant.builtin.boolean
-(nil) @constant.builtin
+[
+ (nil)
+ (iota)
+] @constant.builtin
(comment) @comment
diff --git a/runtime/queries/go/injections.scm b/runtime/queries/go/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/go/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/graphql/highlights.scm b/runtime/queries/graphql/highlights.scm
new file mode 100644
index 00000000..9fab4051
--- /dev/null
+++ b/runtime/queries/graphql/highlights.scm
@@ -0,0 +1,163 @@
+; Types
+;------
+
+(scalar_type_definition
+ (name) @type)
+
+(object_type_definition
+ (name) @type)
+
+(interface_type_definition
+ (name) @type)
+
+(union_type_definition
+ (name) @type)
+
+(enum_type_definition
+ (name) @type)
+
+(input_object_type_definition
+ (name) @type)
+
+(directive_definition
+ (name) @type)
+
+(directive_definition
+ "@" @type)
+
+(scalar_type_extension
+ (name) @type)
+
+(object_type_extension
+ (name) @type)
+
+(interface_type_extension
+ (name) @type)
+
+(union_type_extension
+ (name) @type)
+
+(enum_type_extension
+ (name) @type)
+
+(input_object_type_extension
+ (name) @type)
+
+(named_type
+ (name) @type)
+
+(directive) @type
+
+; Properties
+;-----------
+
+(field
+ (name) @variable.other.member)
+
+(field
+ (alias
+ (name) @variable.other.member))
+
+(field_definition
+ (name) @variable.other.member)
+
+(object_value
+ (object_field
+ (name) @variable.other.member))
+
+(enum_value
+ (name) @variable.other.member)
+
+; Variable Definitions and Arguments
+;-----------------------------------
+
+(operation_definition
+ (name) @variable)
+
+(fragment_name
+ (name) @variable)
+
+(input_fields_definition
+ (input_value_definition
+ (name) @variable.parameter))
+
+(argument
+ (name) @variable.parameter)
+
+(arguments_definition
+ (input_value_definition
+ (name) @variable.parameter))
+
+(variable_definition
+ (variable) @variable.parameter)
+
+(argument
+ (value
+ (variable) @variable))
+
+; Constants
+;----------
+
+(string_value) @string
+
+(int_value) @constant.numeric.integer
+
+(float_value) @constant.numeric.float
+
+(boolean_value) @constant.builtin.boolean
+
+; Literals
+;---------
+
+(description) @comment
+
+(comment) @comment
+
+(directive_location
+ (executable_directive_location) @type.builtin)
+
+(directive_location
+ (type_system_directive_location) @type.builtin)
+
+; Keywords
+;----------
+
+[
+ "query"
+ "mutation"
+ "subscription"
+ "fragment"
+ "scalar"
+ "type"
+ "interface"
+ "union"
+ "enum"
+ "input"
+ "extend"
+ "directive"
+ "schema"
+ "on"
+ "repeatable"
+ "implements"
+] @keyword
+
+; Punctuation
+;------------
+
+[
+ "("
+ ")"
+ "["
+ "]"
+ "{"
+ "}"
+] @punctuation.bracket
+
+"=" @operator
+
+"|" @punctuation.delimiter
+"&" @punctuation.delimiter
+":" @punctuation.delimiter
+
+"..." @punctuation.special
+"!" @punctuation.special
diff --git a/runtime/queries/haskell/injections.scm b/runtime/queries/haskell/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/haskell/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/html/injections.scm b/runtime/queries/html/injections.scm
index 71e7c3ae..ef58f415 100644
--- a/runtime/queries/html/injections.scm
+++ b/runtime/queries/html/injections.scm
@@ -1,3 +1,6 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
+
((script_element
(raw_text) @injection.content)
(#set! injection.language "javascript"))
diff --git a/runtime/queries/java/injections.scm b/runtime/queries/java/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/java/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/javascript/injections.scm b/runtime/queries/javascript/injections.scm
index 5539241a..e8429111 100644
--- a/runtime/queries/javascript/injections.scm
+++ b/runtime/queries/javascript/injections.scm
@@ -9,6 +9,14 @@
]
arguments: (template_string) @injection.content)
+; Parse the contents of gql template literals
+
+((call_expression
+ function: (identifier) @_template_function_name
+ arguments: (template_string) @injection.content)
+ (#eq? @_template_function_name "gql")
+ (#set! injection.language "graphql"))
+
; Parse regex syntax within regex literals
((regex_pattern) @injection.content
diff --git a/runtime/queries/julia/injections.scm b/runtime/queries/julia/injections.scm
index 3cf7339f..1c1e804e 100644
--- a/runtime/queries/julia/injections.scm
+++ b/runtime/queries/julia/injections.scm
@@ -1,7 +1,5 @@
-; TODO: re-add when markdown is added.
-; ((triple_string) @injection.content
-; (#offset! @injection.content 0 3 0 -3)
-; (#set! injection.language "markdown"))
+((triple_string) @injection.content
+ (#set! injection.language "markdown"))
((comment) @injection.content
(#set! injection.language "comment"))
diff --git a/runtime/queries/latex/injections.scm b/runtime/queries/latex/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/latex/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/lean/folds.scm b/runtime/queries/lean/folds.scm
new file mode 100644
index 00000000..2c2bbb33
--- /dev/null
+++ b/runtime/queries/lean/folds.scm
@@ -0,0 +1,15 @@
+[
+ (namespace)
+ (section)
+
+ (instance)
+ (def)
+ (theorem)
+ (example)
+
+ (product)
+ (array)
+ (list)
+
+ (string)
+] @fold
diff --git a/runtime/queries/lean/highlights.scm b/runtime/queries/lean/highlights.scm
new file mode 100644
index 00000000..a64feb1d
--- /dev/null
+++ b/runtime/queries/lean/highlights.scm
@@ -0,0 +1,217 @@
+(open
+ namespace: (identifier) @namespace)
+(namespace
+ name: (identifier) @namespace)
+(section
+ name: (identifier) @namespace)
+
+;; Identifier naming conventions
+((identifier) @type
+ (#match? @type "^[A-Z]"))
+
+(arrow) @type
+(product) @type
+
+;; Declarations
+
+[
+ "abbrev"
+ "def"
+ "theorem"
+ "constant"
+ "instance"
+ "axiom"
+ "example"
+ "inductive"
+ "structure"
+ "class"
+
+ "deriving"
+
+ "section"
+ "namespace"
+] @keyword
+
+(attributes
+ (identifier) @function)
+
+(abbrev
+ name: (identifier) @type)
+(def
+ name: (identifier) @function)
+(theorem
+ name: (identifier) @function)
+(constant
+ name: (identifier) @type)
+(instance
+ name: (identifier) @function)
+(instance
+ type: (identifier) @type)
+(axiom
+ name: (identifier) @function)
+(structure
+ name: (identifier) @type)
+(structure
+ extends: (identifier) @type)
+
+(where_decl
+ type: (identifier) @type)
+
+(proj
+ name: (identifier) @field)
+
+(binders
+ type: (identifier) @type)
+
+["if" "then" "else"] @keyword.control.conditional
+
+["for" "in" "do"] @keyword.control.repeat
+
+(import) @include
+
+; Tokens
+
+[
+ "!"
+ "$"
+ "%"
+ "&&"
+ "*"
+ "*>"
+ "+"
+ "++"
+ "-"
+ "/"
+ "::"
+ ":="
+ "<"
+ "<$>"
+ "<*"
+ "<*>"
+ "<="
+ "<|"
+ "<|>"
+ "="
+ "=="
+ "=>"
+ ">"
+ ">"
+ ">="
+ ">>"
+ ">>="
+ "@"
+ "^"
+ "|>"
+ "|>."
+ "||"
+ "←"
+ "→"
+ "↔"
+ "∘"
+ "∧"
+ "∨"
+ "≠"
+ "≤"
+ "≥"
+] @operator
+
+[
+ "@&"
+] @operator
+
+[
+ "attribute"
+ "by"
+ "end"
+ "export"
+ "extends"
+ "fun"
+ "let"
+ "have"
+ "match"
+ "open"
+ "return"
+ "universe"
+ "variable"
+ "where"
+ "with"
+ "λ"
+ (hash_command)
+ (prelude)
+ (sorry)
+] @keyword
+
+[
+ "prefix"
+ "infix"
+ "infixl"
+ "infixr"
+ "postfix"
+ "notation"
+ "macro_rules"
+ "syntax"
+ "elab"
+ "builtin_initialize"
+] @keyword
+
+[
+ "noncomputable"
+ "partial"
+ "private"
+ "protected"
+ "unsafe"
+] @keyword
+
+[
+ "apply"
+ "exact"
+ "rewrite"
+ "rw"
+ "simp"
+ (trivial)
+] @keyword
+
+[
+ "catch"
+ "finally"
+ "try"
+] @exception
+
+((apply
+ name: (identifier) @exception)
+ (#match? @exception "throw"))
+
+[
+ "unless"
+ "mut"
+] @keyword
+
+[(true) (false)] @boolean
+
+(number) @constant.numeric.integer
+(float) @constant.numeric.float
+
+(comment) @comment
+(char) @character
+(string) @string
+(interpolated_string) @string
+; (escape_sequence) @string.escape
+
+; Reset highlighting in string interpolation
+(interpolation) @none
+
+(interpolation
+ "{" @punctuation.special
+ "}" @punctuation.special)
+
+["(" ")" "[" "]" "{" "}" "⟨" "⟩"] @punctuation.bracket
+
+["|" "," "." ":" ";"] @punctuation.delimiter
+
+(sorry) @error
+
+;; Error
+(ERROR) @error
+
+; Variables
+(identifier) @variable
diff --git a/runtime/queries/lean/injections.scm b/runtime/queries/lean/injections.scm
new file mode 100644
index 00000000..030714f1
--- /dev/null
+++ b/runtime/queries/lean/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "markdown"))
diff --git a/runtime/queries/lean/locals.scm b/runtime/queries/lean/locals.scm
new file mode 100644
index 00000000..dd6c2036
--- /dev/null
+++ b/runtime/queries/lean/locals.scm
@@ -0,0 +1,5 @@
+[
+ (module)
+ (namespace)
+ (section)
+] @local.scope
diff --git a/runtime/queries/lua/injections.scm b/runtime/queries/lua/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/lua/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/make/injections.scm b/runtime/queries/make/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/make/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/ocaml-interface/injections.scm b/runtime/queries/ocaml-interface/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/ocaml-interface/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/ocaml/injections.scm b/runtime/queries/ocaml/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/ocaml/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/perl/injections.scm b/runtime/queries/perl/injections.scm
new file mode 100644
index 00000000..cab5f53d
--- /dev/null
+++ b/runtime/queries/perl/injections.scm
@@ -0,0 +1,2 @@
+((comments) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/php/highlights.scm b/runtime/queries/php/highlights.scm
index 46b5d26c..5379fa1e 100644
--- a/runtime/queries/php/highlights.scm
+++ b/runtime/queries/php/highlights.scm
@@ -5,7 +5,8 @@
(primitive_type) @type.builtin
(cast_type) @type.builtin
-(type_name (name) @type)
+(named_type (name) @type) @type
+(named_type (qualified_name) @type) @type
; Functions
@@ -85,10 +86,12 @@
"endif" @keyword
"endswitch" @keyword
"endwhile" @keyword
+"enum" @keyword
"extends" @keyword
"final" @keyword
"finally" @keyword
"foreach" @keyword
+"fn" @keyword
"function" @keyword
"global" @keyword
"if" @keyword
@@ -97,6 +100,7 @@
"include" @keyword
"insteadof" @keyword
"interface" @keyword
+"match" @keyword
"namespace" @keyword
"new" @keyword
"private" @keyword
diff --git a/runtime/queries/php/injections.scm b/runtime/queries/php/injections.scm
index 16d5736b..614a3850 100644
--- a/runtime/queries/php/injections.scm
+++ b/runtime/queries/php/injections.scm
@@ -1,3 +1,6 @@
((text) @injection.content
(#set! injection.language "html")
(#set! injection.combined))
+
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/protobuf/injections.scm b/runtime/queries/protobuf/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/protobuf/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/ruby/injections.scm b/runtime/queries/ruby/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/ruby/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/scala/injections.scm b/runtime/queries/scala/injections.scm
index 4bb7d675..321c90ad 100644
--- a/runtime/queries/scala/injections.scm
+++ b/runtime/queries/scala/injections.scm
@@ -1 +1,2 @@
-(comment) @comment
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/toml/injections.scm b/runtime/queries/toml/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/toml/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/tsx/injections.scm b/runtime/queries/tsx/injections.scm
new file mode 100644
index 00000000..1b61e36d
--- /dev/null
+++ b/runtime/queries/tsx/injections.scm
@@ -0,0 +1 @@
+; inherits: typescript
diff --git a/runtime/queries/typescript/injections.scm b/runtime/queries/typescript/injections.scm
new file mode 100644
index 00000000..ff0ddfac
--- /dev/null
+++ b/runtime/queries/typescript/injections.scm
@@ -0,0 +1 @@
+; inherits: javascript
diff --git a/runtime/queries/vue/injections.scm b/runtime/queries/vue/injections.scm
index 8ee34ffb..73df868b 100644
--- a/runtime/queries/vue/injections.scm
+++ b/runtime/queries/vue/injections.scm
@@ -15,3 +15,6 @@
((style_element
(raw_text) @injection.content)
(#set! injection.language "css"))
+
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/wgsl/injections.scm b/runtime/queries/wgsl/injections.scm
new file mode 100644
index 00000000..321c90ad
--- /dev/null
+++ b/runtime/queries/wgsl/injections.scm
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/queries/yaml/injections.scm b/runtime/queries/yaml/injections.scm
index 39bda293..321c90ad 100644
--- a/runtime/queries/yaml/injections.scm
+++ b/runtime/queries/yaml/injections.scm
@@ -1,2 +1,2 @@
-((comment) @injection.content
+((comment) @injection.content
(#set! injection.language "comment"))
diff --git a/runtime/queries/zig/highlights.scm b/runtime/queries/zig/highlights.scm
index 34dbeacd..62c99acc 100644
--- a/runtime/queries/zig/highlights.scm
+++ b/runtime/queries/zig/highlights.scm
@@ -144,7 +144,6 @@ field_constant: (IDENTIFIER) @constant
; VarDecl
[
- "comptime"
"threadlocal"
"fn"
] @keyword.function
@@ -178,6 +177,7 @@ field_constant: (IDENTIFIER) @constant
; PrecProc
[
+ "comptime"
"inline"
"noinline"
"asm"
@@ -195,15 +195,14 @@ field_constant: (IDENTIFIER) @constant
(BitwiseOp)
(BitShiftOp)
(AdditionOp)
+ (AssignOp)
(MultiplyOp)
(PrefixOp)
"*"
"**"
"->"
- "=>"
".?"
".*"
- "="
"?"
] @operator
diff --git a/runtime/queries/zig/indents.toml b/runtime/queries/zig/indents.toml
index 88f88e16..36ba8e55 100644
--- a/runtime/queries/zig/indents.toml
+++ b/runtime/queries/zig/indents.toml
@@ -3,6 +3,9 @@ indent = [
"BlockExpr",
"ContainerDecl",
"SwitchExpr",
+ "AssignExpr",
+ "ErrorUnionExpr",
+ "Statement",
"InitList"
]
diff --git a/runtime/queries/zig/injections.scm b/runtime/queries/zig/injections.scm
new file mode 100644
index 00000000..3df95897
--- /dev/null
+++ b/runtime/queries/zig/injections.scm
@@ -0,0 +1,2 @@
+([(line_comment) (doc_comment)] @injection.content
+ (#set! injection.language "comment"))
diff --git a/runtime/themes/base16_default_dark.toml b/runtime/themes/base16_default_dark.toml
index 33ff87fa..d19863e0 100644
--- a/runtime/themes/base16_default_dark.toml
+++ b/runtime/themes/base16_default_dark.toml
@@ -33,6 +33,15 @@
"namespace" = "base0E"
"ui.help" = { fg = "base06", bg = "base01" }
+"markup.heading" = "base0D"
+"markup.list" = "base08"
+"markup.bold" = { fg = "base0A", modifiers = ["bold"] }
+"markup.italic" = { fg = "base0E", modifiers = ["italic"] }
+"markup.link.url" = { fg = "base09", modifiers = ["underlined"] }
+"markup.link.text" = "base08"
+"markup.quote" = "base0C"
+"markup.raw" = "base0B"
+
"diff.plus" = "base0B"
"diff.delta" = "base09"
"diff.minus" = "base08"
diff --git a/runtime/themes/base16_default_light.toml b/runtime/themes/base16_default_light.toml
index 3f4de265..483e87cc 100644
--- a/runtime/themes/base16_default_light.toml
+++ b/runtime/themes/base16_default_light.toml
@@ -33,6 +33,15 @@
"namespace" = "base0E"
"ui.help" = { fg = "base06", bg = "base01" }
+"markup.heading" = "base0D"
+"markup.list" = "base08"
+"markup.bold" = { fg = "base0A", modifiers = ["bold"] }
+"markup.italic" = { fg = "base0E", modifiers = ["italic"] }
+"markup.link.url" = { fg = "base09", modifiers = ["underlined"] }
+"markup.link.text" = "base08"
+"markup.quote" = "base0C"
+"markup.raw" = "base0B"
+
"diff.plus" = "base0B"
"diff.delta" = "base09"
"diff.minus" = "base08"
diff --git a/runtime/themes/base16_terminal.toml b/runtime/themes/base16_terminal.toml
index cbbfbf24..23240d8d 100644
--- a/runtime/themes/base16_terminal.toml
+++ b/runtime/themes/base16_terminal.toml
@@ -12,7 +12,7 @@
"ui.statusline.inactive" = { fg = "gray", bg = "black" }
"ui.help" = { fg = "white", bg = "black" }
"ui.cursor" = { fg = "light-gray", modifiers = ["reversed"] }
-"ui.cursor.primary" = { modifiers = ["reversed"] }
+"ui.cursor.primary" = { fg = "light-white", modifiers = ["reversed"] }
"variable" = "light-red"
"constant.numeric" = "yellow"
"constant" = "yellow"
@@ -30,6 +30,15 @@
"namespace" = "light-magenta"
"ui.help" = { fg = "white", bg = "black" }
+"markup.heading" = "light-blue"
+"markup.list" = "light-red"
+"markup.bold" = { fg = "light-yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "light-magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "light-red"
+"markup.quote" = "light-cyan"
+"markup.raw" = "light-green"
+
"diff.plus" = "light-green"
"diff.delta" = "yellow"
"diff.minus" = "light-red"
diff --git a/runtime/themes/bogster.toml b/runtime/themes/bogster.toml
index 493e5ace..32b58d0a 100644
--- a/runtime/themes/bogster.toml
+++ b/runtime/themes/bogster.toml
@@ -28,6 +28,16 @@
"module" = "#d32c5d"
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = "#59dcb7"
"diff.delta" = "#dc7759"
"diff.minus" = "#dc597f"
diff --git a/runtime/themes/dark_plus.toml b/runtime/themes/dark_plus.toml
index 78437649..ab7c16ec 100644
--- a/runtime/themes/dark_plus.toml
+++ b/runtime/themes/dark_plus.toml
@@ -39,6 +39,16 @@
"constant.numeric" = { fg = "pale_green" }
"constant.character.escape" = { fg = "gold" }
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = { fg = "pale_green" }
"diff.delta" = { fg = "gold" }
"diff.minus" = { fg = "red" }
diff --git a/runtime/themes/dracula.toml b/runtime/themes/dracula.toml
index f01c7323..1db25d8f 100644
--- a/runtime/themes/dracula.toml
+++ b/runtime/themes/dracula.toml
@@ -28,7 +28,7 @@
"ui.linenr.selected" = { fg = "foreground" }
"ui.menu" = { fg = "foreground", bg = "background_dark" }
"ui.menu.selected" = { fg = "cyan", bg = "background_dark" }
-"ui.popup" = { fg = "foreground" }
+"ui.popup" = { fg = "foreground", bg = "background_dark" }
"ui.selection" = { fg = "background", bg = "purple", modifiers = ["dim"] }
"ui.selection.primary" = { fg = "background", bg = "pink" }
"ui.statusline" = { fg = "foreground", bg = "background_dark" }
@@ -40,6 +40,15 @@
"error" = { fg = "red" }
"warning" = { fg = "cyan" }
+"markup.heading" = { fg = "purple", modifiers = ["bold"] }
+"markup.list" = "cyan"
+"markup.bold" = { fg = "orange", modifiers = ["bold"] }
+"markup.italic" = { fg = "yellow", modifiers = ["italic"] }
+"markup.link.url" = "cyan"
+"markup.link.text" = "pink"
+"markup.quote" = { fg = "yellow", modifiers = ["italic"] }
+"markup.raw" = { fg = "foreground" }
+
[palette]
background = "#282a36"
background_dark = "#21222c"
diff --git a/runtime/themes/everforest_dark.toml b/runtime/themes/everforest_dark.toml
index f5a0088c..a6389da2 100644
--- a/runtime/themes/everforest_dark.toml
+++ b/runtime/themes/everforest_dark.toml
@@ -34,6 +34,16 @@
"module" = "blue"
"special" = "orange"
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = "green"
"diff.delta" = "orange"
"diff.minus" = "red"
diff --git a/runtime/themes/everforest_light.toml b/runtime/themes/everforest_light.toml
index 3038ef9c..5490adb3 100644
--- a/runtime/themes/everforest_light.toml
+++ b/runtime/themes/everforest_light.toml
@@ -34,6 +34,16 @@
"module" = "blue"
"special" = "orange"
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = "green"
"diff.delta" = "orange"
"diff.minus" = "red"
diff --git a/runtime/themes/gruvbox.toml b/runtime/themes/gruvbox.toml
index 9eae0915..a976a9bd 100644
--- a/runtime/themes/gruvbox.toml
+++ b/runtime/themes/gruvbox.toml
@@ -19,6 +19,7 @@
"function" = { fg = "green1", modifiers = ["bold"] }
"function.macro" = "aqua1"
"function.builtin" = "yellow1"
+"tag" = "red1"
"comment" = { fg = "gray1", modifiers = ["italic"] }
"constant" = { fg = "purple1" }
"constant.builtin" = { fg = "purple1", modifiers = ["bold"] }
@@ -55,6 +56,13 @@
"diagnostic" = { modifiers = ["underlined"] }
+"markup.heading" = "aqua1"
+"markup.bold" = { modifiers = ["bold"] }
+"markup.italic" = { modifiers = ["italic"] }
+"markup.link.url" = { fg = "green1", modifiers = ["underlined"] }
+"markup.link.text" = "red1"
+"markup.raw" = "red1"
+
[palette]
bg0 = "#282828" # main background
bg1 = "#3c3836"
diff --git a/runtime/themes/gruvbox_light.toml b/runtime/themes/gruvbox_light.toml
new file mode 100644
index 00000000..81ea7fd1
--- /dev/null
+++ b/runtime/themes/gruvbox_light.toml
@@ -0,0 +1,96 @@
+# Author : Rohan Jain <crodjer@pm.me>
+# Author : Jakub Bartodziej <kubabartodziej@gmail.com>
+# The theme uses the gruvbox light palette with standard contrast: github.com/morhetz/gruvbox
+
+"attribute" = "aqua1"
+"keyword" = { fg = "red1" }
+"keyword.directive" = "red0"
+"namespace" = "aqua1"
+"punctuation" = "orange1"
+"punctuation.delimiter" = "orange1"
+"operator" = "purple1"
+"special" = "purple0"
+"variable.other.member" = "blue1"
+"variable" = "fg1"
+"variable.builtin" = "orange1"
+"variable.parameter" = "fg2"
+"type" = "yellow1"
+"type.builtin" = "yellow1"
+"constructor" = { fg = "purple1", modifiers = ["bold"] }
+"function" = { fg = "green1", modifiers = ["bold"] }
+"function.macro" = "aqua1"
+"function.builtin" = "yellow1"
+"tag" = "red1"
+"comment" = { fg = "gray1", modifiers = ["italic"] }
+"constant" = { fg = "purple1" }
+"constant.builtin" = { fg = "purple1", modifiers = ["bold"] }
+"string" = "green1"
+"constant.numeric" = "purple1"
+"constant.character.escape" = { fg = "fg2", modifiers = ["bold"] }
+"label" = "aqua1"
+"module" = "aqua1"
+
+"diff.plus" = "green1"
+"diff.delta" = "orange1"
+"diff.minus" = "red1"
+
+"warning" = { fg = "orange1", bg = "bg1" }
+"error" = { fg = "red1", bg = "bg1" }
+"info" = { fg = "aqua1", bg = "bg1" }
+"hint" = { fg = "blue1", bg = "bg1" }
+
+"ui.background" = { bg = "bg0" }
+"ui.linenr" = { fg = "bg4" }
+"ui.linenr.selected" = { fg = "yellow1" }
+"ui.statusline" = { fg = "fg1", bg = "bg2" }
+"ui.statusline.inactive" = { fg = "fg4", bg = "bg1" }
+"ui.popup" = { bg = "bg1" }
+"ui.window" = { bg = "bg1" }
+"ui.help" = { bg = "bg1", fg = "fg1" }
+"ui.text" = { fg = "fg1" }
+"ui.text.focus" = { fg = "fg1" }
+"ui.selection" = { bg = "bg3", modifiers = ["reversed"] }
+"ui.cursor.primary" = { modifiers = ["reversed"] }
+"ui.cursor.match" = { modifiers = ["reversed"] }
+"ui.menu" = { fg = "fg1", bg = "bg2" }
+"ui.menu.selected" = { fg = "bg2", bg = "blue1", modifiers = ["bold"] }
+
+"diagnostic" = { modifiers = ["underlined"] }
+
+"markup.heading" = "aqua1"
+"markup.bold" = { modifiers = ["bold"] }
+"markup.italic" = { modifiers = ["italic"] }
+"markup.link.url" = { fg = "green1", modifiers = ["underlined"] }
+"markup.link.text" = "red1"
+"markup.raw" = "red1"
+
+[palette]
+bg0 = "#fbf1c7" # main background
+bg1 = "#ebdbb2"
+bg2 = "#d5c4a1"
+bg3 = "#bdae93"
+bg4 = "#a89984"
+
+fg0 = "#282828" # main foreground
+fg1 = "#3c3836"
+fg2 = "#504945"
+fg3 = "#665c54"
+fg4 = "#7c6f64" # gray0
+
+gray0 = "#7c6f64"
+gray1 = "#928374"
+
+red0 = "#cc241d" # neutral
+red1 = "#9d0006" # bright
+green0 = "#98971a"
+green1 = "#79740e"
+yellow0 = "#d79921"
+yellow1 = "#b57614"
+blue0 = "#458588"
+blue1 = "#076678"
+purple0 = "#b16286"
+purple1 = "#8f3f71"
+aqua0 = "#689d6a"
+aqua1 = "#427b58"
+orange0 = "#d65d0e"
+orange1 = "#af3a03"
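Not part of the patch, for reference only: a theme file placed under runtime/themes/ is selected by its file name without the .toml extension. A minimal sketch of a user config enabling the new theme, assuming the standard Helix config location (~/.config/helix/config.toml):

# ~/.config/helix/config.toml -- hypothetical user config, not included in this diff
theme = "gruvbox_light"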
diff --git a/runtime/themes/ingrid.toml b/runtime/themes/ingrid.toml
index df8a922f..a7c33e2d 100644
--- a/runtime/themes/ingrid.toml
+++ b/runtime/themes/ingrid.toml
@@ -28,6 +28,16 @@
"module" = "#839A53"
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = "#839A53"
"diff.delta" = "#D4A520"
"diff.minus" = "#D74E50"
diff --git a/runtime/themes/monokai.toml b/runtime/themes/monokai.toml
index c8bf8ebd..e6ff0a5e 100644
--- a/runtime/themes/monokai.toml
+++ b/runtime/themes/monokai.toml
@@ -39,6 +39,16 @@
"constant.numeric" = { fg = "#ae81ff" }
"constant.character.escape" = { fg = "#ae81ff" }
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = { fg = "#a6e22e" }
"diff.delta" = { fg = "#fd971f" }
"diff.minus" = { fg = "#f92672" }
diff --git a/runtime/themes/monokai_pro.toml b/runtime/themes/monokai_pro.toml
index 5daeaf6c..8de9994c 100644
--- a/runtime/themes/monokai_pro.toml
+++ b/runtime/themes/monokai_pro.toml
@@ -85,6 +85,14 @@
# make diagnostic underlined, to distinguish with selection text.
diagnostic = { modifiers = ["underlined"] }
+# markup highlight; `markup.raw` and `markup.list` are not needed, so leave them at the default
+"markup.heading" = "green"
+"markup.bold" = { fg = "orange", modifiers = ["bold"] }
+"markup.italic" = { fg = "orange", modifiers = ["italic"] }
+"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
+"markup.link.text" = "yellow"
+"markup.quote" = "green"
+
[palette]
# primary colors
"red" = "#ff6188"
diff --git a/runtime/themes/monokai_pro_machine.toml b/runtime/themes/monokai_pro_machine.toml
index 0763a5fb..c5890042 100644
--- a/runtime/themes/monokai_pro_machine.toml
+++ b/runtime/themes/monokai_pro_machine.toml
@@ -85,6 +85,14 @@
# make diagnostic underlined, to distinguish with selection text.
diagnostic = { modifiers = ["underlined"] }
+# markup highlight; `markup.raw` and `markup.list` are not needed, so leave them at the default
+"markup.heading" = "green"
+"markup.bold" = { fg = "orange", modifiers = ["bold"] }
+"markup.italic" = { fg = "orange", modifiers = ["italic"] }
+"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
+"markup.link.text" = "yellow"
+"markup.quote" = "green"
+
[palette]
# primary colors
"red" = "#ff6d7e"
diff --git a/runtime/themes/monokai_pro_octagon.toml b/runtime/themes/monokai_pro_octagon.toml
index 6a74a8d0..d9badf3c 100644
--- a/runtime/themes/monokai_pro_octagon.toml
+++ b/runtime/themes/monokai_pro_octagon.toml
@@ -85,6 +85,14 @@
# make diagnostic underlined, to distinguish with selection text.
diagnostic = { modifiers = ["underlined"] }
+# markup highlight; `markup.raw` and `markup.list` are not needed, so leave them at the default
+"markup.heading" = "green"
+"markup.bold" = { fg = "orange", modifiers = ["bold"] }
+"markup.italic" = { fg = "orange", modifiers = ["italic"] }
+"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
+"markup.link.text" = "yellow"
+"markup.quote" = "green"
+
[palette]
# primary colors
"red" = "#ff657a"
diff --git a/runtime/themes/monokai_pro_ristretto.toml b/runtime/themes/monokai_pro_ristretto.toml
index 1a1a32ff..ed7ebeae 100644
--- a/runtime/themes/monokai_pro_ristretto.toml
+++ b/runtime/themes/monokai_pro_ristretto.toml
@@ -85,6 +85,14 @@
# make diagnostic underlined, to distinguish with selection text.
diagnostic = { modifiers = ["underlined"] }
+# markup highlight; `markup.raw` and `markup.list` are not needed, so leave them at the default
+"markup.heading" = "green"
+"markup.bold" = { fg = "orange", modifiers = ["bold"] }
+"markup.italic" = { fg = "orange", modifiers = ["italic"] }
+"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
+"markup.link.text" = "yellow"
+"markup.quote" = "green"
+
[palette]
# primary colors
"red" = "#fd6883"
diff --git a/runtime/themes/monokai_pro_spectrum.toml b/runtime/themes/monokai_pro_spectrum.toml
index 36630493..da06e597 100644
--- a/runtime/themes/monokai_pro_spectrum.toml
+++ b/runtime/themes/monokai_pro_spectrum.toml
@@ -85,6 +85,14 @@
# make diagnostic underlined, to distinguish with selection text.
diagnostic = { modifiers = ["underlined"] }
+# markup highlight; `markup.raw` and `markup.list` are not needed, so leave them at the default
+"markup.heading" = "green"
+"markup.bold" = { fg = "orange", modifiers = ["bold"] }
+"markup.italic" = { fg = "orange", modifiers = ["italic"] }
+"markup.link.url" = { fg = "orange", modifiers = ["underlined"] }
+"markup.link.text" = "yellow"
+"markup.quote" = "green"
+
[palette]
# primary colors
"red" = "#fc618d"
diff --git a/runtime/themes/nord.toml b/runtime/themes/nord.toml
index ae1ea29b..deb90452 100644
--- a/runtime/themes/nord.toml
+++ b/runtime/themes/nord.toml
@@ -84,6 +84,16 @@
# nord15 - integer, floating point
"constant.numeric" = "nord15"
+# TODO markup
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
# vcs
"diff.plus" = "nord14"
"diff.delta" = "nord12"
diff --git a/runtime/themes/onedark.toml b/runtime/themes/onedark.toml
index dfeadbb2..acdaf99c 100644
--- a/runtime/themes/onedark.toml
+++ b/runtime/themes/onedark.toml
@@ -34,7 +34,7 @@
"markup.list" = { fg = "red" }
"markup.quote" = { fg = "yellow" }
"markup.link.url" = { fg = "cyan", modifiers = ["underlined"]}
-"markup.link.label" = { fg = "purple" }
+"markup.link.text" = { fg = "purple" }
"diff.plus" = "green"
"diff.delta" = "gold"
diff --git a/runtime/themes/rose_pine.toml b/runtime/themes/rose_pine.toml
index bf3a040b..027cc5ed 100644
--- a/runtime/themes/rose_pine.toml
+++ b/runtime/themes/rose_pine.toml
@@ -55,7 +55,7 @@
"markup.list" = { fg = "love" }
"markup.quote" = { fg = "rose" }
"markup.link.url" = { fg = "pine", modifiers = ["underlined"]}
-"markup.link.label" = { fg = "foam" }
+"markup.link.text" = { fg = "foam" }
[palette]
base = "#191724"
diff --git a/runtime/themes/rose_pine_dawn.toml b/runtime/themes/rose_pine_dawn.toml
index cc6d287e..73b01844 100644
--- a/runtime/themes/rose_pine_dawn.toml
+++ b/runtime/themes/rose_pine_dawn.toml
@@ -55,7 +55,7 @@
"markup.list" = { fg = "love" }
"markup.quote" = { fg = "rose" }
"markup.link.url" = { fg = "pine", modifiers = ["underlined"]}
-"markup.link.label" = { fg = "foam" }
+"markup.link.test" = { fg = "foam" }
[palette]
base = "#faf4ed"
diff --git a/runtime/themes/solarized_dark.toml b/runtime/themes/solarized_dark.toml
index ab345c8d..dfaa104a 100644
--- a/runtime/themes/solarized_dark.toml
+++ b/runtime/themes/solarized_dark.toml
@@ -22,6 +22,16 @@
"module" = { fg = "violet" }
"tag" = { fg = "magenta" }
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = { fg = "green" }
"diff.delta" = { fg = "orange" }
"diff.minus" = { fg = "red" }
diff --git a/runtime/themes/solarized_light.toml b/runtime/themes/solarized_light.toml
index 6b0137fb..c8a3dee1 100644
--- a/runtime/themes/solarized_light.toml
+++ b/runtime/themes/solarized_light.toml
@@ -22,6 +22,16 @@
"module" = { fg = "violet" }
"tag" = { fg = "magenta" }
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = { fg = "green" }
"diff.delta" = { fg = "orange" }
"diff.minus" = { fg = "red" }
diff --git a/runtime/themes/spacebones_light.toml b/runtime/themes/spacebones_light.toml
index fcbe7b37..5318dc2d 100644
--- a/runtime/themes/spacebones_light.toml
+++ b/runtime/themes/spacebones_light.toml
@@ -30,6 +30,16 @@
"label" = "#b1951d"
"module" = "#b1951d"
+# TODO
+"markup.heading" = "blue"
+"markup.list" = "red"
+"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
+"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
+"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
+"markup.link.text" = "red"
+"markup.quote" = "cyan"
+"markup.raw" = "green"
+
"diff.plus" = "#2d9574"
"diff.delta" = "#715ab1"
"diff.minus" = "#ba2f59"