Merge remote-tracking branch 'origin/master' into debug

Blaž Hrastnik 2022-02-13 18:31:51 +09:00
commit bd549d8a20
268 changed files with 10671 additions and 2673 deletions

2 .cargo/config Normal file

@ -0,0 +1,2 @@
[alias]
xtask = "run --package xtask --"


@ -17,6 +17,7 @@ ### Reproduction steps
### Environment
- Platform: <!-- macOS / Windows / Linux -->
- Terminal emulator:
- Helix version: <!-- 'hx -V' if using a release, 'git describe' if building from master -->
<details><summary>~/.cache/helix/helix.log</summary>


@ -136,4 +136,52 @@ jobs:
uses: actions-rs/cargo@v1
with:
command: clippy
- args: -- -D warnings
+ args: --all-targets -- -D warnings
docs:
name: Docs
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
submodules: true
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Cache cargo registry
uses: actions/cache@v2.1.6
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-v2-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v2.1.6
with:
path: ~/.cargo/git
key: ${{ runner.os }}-v2-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo target dir
uses: actions/cache@v2.1.6
with:
path: target
key: ${{ runner.os }}-v2-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
- name: Generate docs
uses: actions-rs/cargo@v1
with:
command: xtask
args: docgen
- name: Check uncommitted documentation changes
run: |
git diff
git diff-files --quiet \
|| (echo "Run 'cargo xtask docgen', commit the changes and push again" \
&& exit 1)


@ -102,7 +102,7 @@ jobs:
fi
cp -r runtime dist
- - uses: actions/upload-artifact@v2.2.4
+ - uses: actions/upload-artifact@v2.3.1
with:
name: bins-${{ matrix.build }}
path: dist

78 .gitmodules vendored

@ -142,11 +142,87 @@
path = helix-syntax/languages/tree-sitter-perl
url = https://github.com/ganezdragon/tree-sitter-perl
shallow = true
[submodule "helix-syntax/languages/tree-sitter-comment"]
path = helix-syntax/languages/tree-sitter-comment
url = https://github.com/stsewd/tree-sitter-comment
shallow = true
[submodule "helix-syntax/languages/tree-sitter-wgsl"]
path = helix-syntax/languages/tree-sitter-wgsl
url = https://github.com/szebniok/tree-sitter-wgsl
shallow = true
- [submodule "helix-syntax/tree-sitter-llvm"]
+ [submodule "helix-syntax/languages/tree-sitter-llvm"]
path = helix-syntax/languages/tree-sitter-llvm
url = https://github.com/benwilliamgraham/tree-sitter-llvm
shallow = true
[submodule "helix-syntax/languages/tree-sitter-markdown"]
path = helix-syntax/languages/tree-sitter-markdown
url = https://github.com/MDeiml/tree-sitter-markdown
shallow = true
[submodule "helix-syntax/languages/tree-sitter-dart"]
path = helix-syntax/languages/tree-sitter-dart
url = https://github.com/UserNobody14/tree-sitter-dart.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-dockerfile"]
path = helix-syntax/languages/tree-sitter-dockerfile
url = https://github.com/camdencheek/tree-sitter-dockerfile.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-fish"]
path = helix-syntax/languages/tree-sitter-fish
url = https://github.com/ram02z/tree-sitter-fish
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-commit"]
path = helix-syntax/languages/tree-sitter-git-commit
url = https://github.com/the-mikedavis/tree-sitter-git-commit.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-llvm-mir"]
path = helix-syntax/languages/tree-sitter-llvm-mir
url = https://github.com/Flakebi/tree-sitter-llvm-mir.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-diff"]
path = helix-syntax/languages/tree-sitter-git-diff
url = https://github.com/the-mikedavis/tree-sitter-git-diff.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-tablegen"]
path = helix-syntax/languages/tree-sitter-tablegen
url = https://github.com/Flakebi/tree-sitter-tablegen
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-rebase"]
path = helix-syntax/languages/tree-sitter-git-rebase
url = https://github.com/the-mikedavis/tree-sitter-git-rebase.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-lean"]
path = helix-syntax/languages/tree-sitter-lean
url = https://github.com/Julian/tree-sitter-lean
shallow = true
[submodule "helix-syntax/languages/tree-sitter-regex"]
path = helix-syntax/languages/tree-sitter-regex
url = https://github.com/tree-sitter/tree-sitter-regex.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-make"]
path = helix-syntax/languages/tree-sitter-make
url = https://github.com/alemuller/tree-sitter-make
shallow = true
[submodule "helix-syntax/languages/tree-sitter-git-config"]
path = helix-syntax/languages/tree-sitter-git-config
url = https://github.com/the-mikedavis/tree-sitter-git-config.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-graphql"]
path = helix-syntax/languages/tree-sitter-graphql
url = https://github.com/bkegley/tree-sitter-graphql
shallow = true
[submodule "helix-syntax/languages/tree-sitter-elm"]
path = helix-syntax/languages/tree-sitter-elm
url = https://github.com/elm-tooling/tree-sitter-elm
shallow = true
[submodule "helix-syntax/languages/tree-sitter-iex"]
path = helix-syntax/languages/tree-sitter-iex
url = https://github.com/elixir-lang/tree-sitter-iex
shallow = true
[submodule "helix-syntax/languages/tree-sitter-twig"]
path = helix-syntax/languages/tree-sitter-twig
url = https://github.com/eirabben/tree-sitter-twig.git
shallow = true
[submodule "helix-syntax/languages/tree-sitter-rescript"]
path = helix-syntax/languages/tree-sitter-rescript
url = https://github.com/jaredramirez/tree-sitter-rescript
shallow = true


@ -1,4 +1,123 @@
# 0.6.0 (2022-01-04)
Happy new year and a big shout out to all the contributors! We had 55 contributors in this release.
Helix has popped up in DPorts and Fedora Linux via COPR ([#1270](https://github.com/helix-editor/helix/pull/1270))
As usual the following is a brief summary, refer to the git history for a full log:
Breaking changes:
- fix: Normalize backtab into shift-tab
Features:
- Macros ([#1234](https://github.com/helix-editor/helix/pull/1234))
- Add reverse search functionality ([#958](https://github.com/helix-editor/helix/pull/958))
- Allow keys to be mapped to sequences of commands ([#589](https://github.com/helix-editor/helix/pull/589))
- Make it possible to keybind TypableCommands ([#1169](https://github.com/helix-editor/helix/pull/1169))
- Detect workspace root using language markers ([#1370](https://github.com/helix-editor/helix/pull/1370))
- Add WORD textobject ([#991](https://github.com/helix-editor/helix/pull/991))
- Add LSP rename_symbol (space-r) ([#1011](https://github.com/helix-editor/helix/pull/1011))
- Added workspace_symbol_picker ([#1041](https://github.com/helix-editor/helix/pull/1041))
- Detect filetype from shebang line ([#1001](https://github.com/helix-editor/helix/pull/1001))
- Allow piping from stdin into a buffer on startup ([#996](https://github.com/helix-editor/helix/pull/996))
- Add auto pairs for same-char pairs ([#1219](https://github.com/helix-editor/helix/pull/1219))
- Update settings at runtime ([#798](https://github.com/helix-editor/helix/pull/798))
- Enable thin LTO (cccc194)
Commands:
- :wonly -- window only ([#1057](https://github.com/helix-editor/helix/pull/1057))
- buffer-close (:bc, :bclose) ([#1035](https://github.com/helix-editor/helix/pull/1035))
- Add :<line> and :goto <line> commands ([#1128](https://github.com/helix-editor/helix/pull/1128))
- :sort command ([#1288](https://github.com/helix-editor/helix/pull/1288))
- Add m textobject for pair under cursor ([#961](https://github.com/helix-editor/helix/pull/961))
- Implement "Goto next buffer / Goto previous buffer" commands ([#950](https://github.com/helix-editor/helix/pull/950))
- Implement "Goto last modification" command ([#1067](https://github.com/helix-editor/helix/pull/1067))
- Add trim_selections command ([#1092](https://github.com/helix-editor/helix/pull/1092))
- Add movement shortcut for history ([#1088](https://github.com/helix-editor/helix/pull/1088))
- Add command to inc/dec number under cursor ([#1027](https://github.com/helix-editor/helix/pull/1027))
- Add support for dates for increment/decrement
- Align selections (&) ([#1101](https://github.com/helix-editor/helix/pull/1101))
- Implement no-yank delete/change ([#1099](https://github.com/helix-editor/helix/pull/1099))
- Implement black hole register ([#1165](https://github.com/helix-editor/helix/pull/1165))
- gf as goto_file (gf) ([#1102](https://github.com/helix-editor/helix/pull/1102))
- Add last modified file (gm) ([#1093](https://github.com/helix-editor/helix/pull/1093))
- ensure_selections_forward ([#1393](https://github.com/helix-editor/helix/pull/1393))
- Readline style insert mode ([#1039](https://github.com/helix-editor/helix/pull/1039))
Usability improvements and fixes:
- Detect filetype on :write ([#1141](https://github.com/helix-editor/helix/pull/1141))
- Add single and double quotes to matching pairs ([#995](https://github.com/helix-editor/helix/pull/995))
- Launch with defaults upon invalid config/theme (rather than panicking) ([#982](https://github.com/helix-editor/helix/pull/982))
- If switching away from an empty scratch buffer, remove it ([#935](https://github.com/helix-editor/helix/pull/935))
- Truncate the starts of file paths instead of the ends in picker ([#951](https://github.com/helix-editor/helix/pull/951))
- Truncate the start of file paths in the StatusLine ([#1351](https://github.com/helix-editor/helix/pull/1351))
- Prevent picker from previewing binaries or large file ([#939](https://github.com/helix-editor/helix/pull/939))
- Inform when reaching undo/redo bounds ([#981](https://github.com/helix-editor/helix/pull/981))
- search_impl will only align cursor center when it isn't in view ([#959](https://github.com/helix-editor/helix/pull/959))
- Add <C-h>, <C-u>, <C-d>, Delete in prompt mode ([#1034](https://github.com/helix-editor/helix/pull/1034))
- Restore screen position when aborting search ([#1047](https://github.com/helix-editor/helix/pull/1047))
- Buffer picker: show is_modifier flag ([#1020](https://github.com/helix-editor/helix/pull/1020))
- Add commit hash to version info, if present ([#957](https://github.com/helix-editor/helix/pull/957))
- Implement indent-aware delete ([#1120](https://github.com/helix-editor/helix/pull/1120))
- Jump to end char of surrounding pair from any cursor pos ([#1121](https://github.com/helix-editor/helix/pull/1121))
- File picker configuration ([#988](https://github.com/helix-editor/helix/pull/988))
- Fix surround cursor position calculation ([#1183](https://github.com/helix-editor/helix/pull/1183))
- Accept count for goto_window ([#1033](https://github.com/helix-editor/helix/pull/1033))
- Make kill_to_line_end behave like emacs ([#1235](https://github.com/helix-editor/helix/pull/1235))
- Only use a single documentation popup ([#1241](https://github.com/helix-editor/helix/pull/1241))
- ui: popup: Don't allow scrolling past the end of content (3307f44c)
- Open files with spaces in filename, allow opening multiple files ([#1231](https://github.com/helix-editor/helix/pull/1231))
- Allow paste commands to take a count ([#1261](https://github.com/helix-editor/helix/pull/1261))
- Auto pairs selection ([#1254](https://github.com/helix-editor/helix/pull/1254))
- Use a fuzzy matcher for commands ([#1386](https://github.com/helix-editor/helix/pull/1386))
- Add c-s to pick word under doc cursor to prompt line & search completion ([#831](https://github.com/helix-editor/helix/pull/831))
- Fix :earlier/:later missing changeset update ([#1069](https://github.com/helix-editor/helix/pull/1069))
- Support extend for multiple goto ([#909](https://github.com/helix-editor/helix/pull/909))
- Add arrow-key bindings for window switching ([#933](https://github.com/helix-editor/helix/pull/933))
- Implement key ordering for info box ([#952](https://github.com/helix-editor/helix/pull/952))
LSP:
- Implement MarkedString rendering (e128a8702)
- Don't panic if init fails (d31bef7)
- Configurable diagnostic severity ([#1325](https://github.com/helix-editor/helix/pull/1325))
- Resolve completion item ([#1315](https://github.com/helix-editor/helix/pull/1315))
- Code action command support ([#1304](https://github.com/helix-editor/helix/pull/1304))
Grammars:
- Adds mint language server ([#974](https://github.com/helix-editor/helix/pull/974))
- Perl ([#978](https://github.com/helix-editor/helix/pull/978)) ([#1280](https://github.com/helix-editor/helix/pull/1280))
- GLSL ([#993](https://github.com/helix-editor/helix/pull/993))
- Racket ([#1143](https://github.com/helix-editor/helix/pull/1143))
- WGSL ([#1166](https://github.com/helix-editor/helix/pull/1166))
- LLVM ([#1167](https://github.com/helix-editor/helix/pull/1167)) ([#1388](https://github.com/helix-editor/helix/pull/1388)) ([#1409](https://github.com/helix-editor/helix/pull/1409)) ([#1398](https://github.com/helix-editor/helix/pull/1398))
- Markdown (49e06787)
- Scala ([#1278](https://github.com/helix-editor/helix/pull/1278))
- Dart ([#1250](https://github.com/helix-editor/helix/pull/1250))
- Fish ([#1308](https://github.com/helix-editor/helix/pull/1308))
- Dockerfile ([#1303](https://github.com/helix-editor/helix/pull/1303))
- Git (commit, rebase, diff) ([#1338](https://github.com/helix-editor/helix/pull/1338)) ([#1402](https://github.com/helix-editor/helix/pull/1402)) ([#1373](https://github.com/helix-editor/helix/pull/1373))
- tree-sitter-comment ([#1300](https://github.com/helix-editor/helix/pull/1300))
- Highlight comments in c, cpp, cmake and llvm ([#1309](https://github.com/helix-editor/helix/pull/1309))
- Improve yaml syntax highlighting ([#1294](https://github.com/helix-editor/helix/pull/1294))
- Improve rust syntax highlighting ([#1295](https://github.com/helix-editor/helix/pull/1295))
- Add textobjects and indents to cmake ([#1307](https://github.com/helix-editor/helix/pull/1307))
- Add textobjects and indents to c and cpp ([#1293](https://github.com/helix-editor/helix/pull/1293))
New themes:
- Solarized dark ([#999](https://github.com/helix-editor/helix/pull/999))
- Solarized light ([#1010](https://github.com/helix-editor/helix/pull/1010))
- Spacebones light ([#1131](https://github.com/helix-editor/helix/pull/1131))
- Monokai Pro ([#1206](https://github.com/helix-editor/helix/pull/1206))
- Base16 Light and Terminal ([#1078](https://github.com/helix-editor/helix/pull/1078))
- and a default 16 color theme, truecolor detection
- Dracula ([#1258](https://github.com/helix-editor/helix/pull/1258))
# 0.5.0 (2021-10-28)
A big shout out to all the contributors! We had 46 contributors in this release.

320 Cargo.lock generated

@ -13,9 +13,9 @@ dependencies = [
[[package]] [[package]]
name = "anyhow" name = "anyhow"
version = "1.0.51" version = "1.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b26702f315f53b6071259e15dd9d64528213b44d61de1ec926eca7715d62203" checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0"
[[package]] [[package]]
name = "arc-swap" name = "arc-swap"
@ -78,9 +78,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "chardetng" name = "chardetng"
version = "0.1.15" version = "0.1.17"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83ee29c16b81c32fbc882ecc568305793338a8353952573db837f4f4a6cd5c2e" checksum = "14b8f0b65b7b08ae3c8187e8d77174de20cb6777864c6b832d8ad365999cf1ea"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"encoding_rs", "encoding_rs",
@ -101,9 +101,9 @@ dependencies = [
[[package]] [[package]]
name = "clipboard-win" name = "clipboard-win"
version = "4.2.2" version = "4.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3db8340083d28acb43451166543b98c838299b7e0863621be53a338adceea0ed" checksum = "2f3e1238132dc01f081e1cbb9dace14e5ef4c3a51ee244bd982275fb514605db"
dependencies = [ dependencies = [
"error-code", "error-code",
"str-buf", "str-buf",
@ -121,9 +121,9 @@ dependencies = [
[[package]] [[package]]
name = "crossbeam-utils" name = "crossbeam-utils"
version = "0.8.5" version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"lazy_static", "lazy_static",
@ -131,16 +131,16 @@ dependencies = [
[[package]] [[package]]
name = "crossterm" name = "crossterm"
version = "0.22.1" version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c85525306c4291d1b73ce93c8acf9c339f9b213aef6c1d85c3830cbf1c16325c" checksum = "77b75a27dc8d220f1f8521ea69cd55a34d720a200ebb3a624d9aa19193d3b432"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"crossterm_winapi", "crossterm_winapi",
"futures-core", "futures-core",
"libc", "libc",
"mio", "mio",
"parking_lot", "parking_lot 0.12.0",
"signal-hook", "signal-hook",
"signal-hook-mio", "signal-hook-mio",
"winapi", "winapi",
@ -184,9 +184,9 @@ checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
[[package]] [[package]]
name = "encoding_rs" name = "encoding_rs"
version = "0.8.29" version = "0.8.30"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a74ea89a0a1b98f6332de42c95baff457ada66d1cb4030f9ff151b2041a1c746" checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
] ]
@ -202,9 +202,9 @@ dependencies = [
[[package]] [[package]]
name = "error-code" name = "error-code"
version = "2.3.0" version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5115567ac25674e0043e472be13d14e537f37ea8aa4bdc4aef0c89add1db1ff" checksum = "64f18991e7bf11e7ffee451b5318b5c1a73c52d0d0ada6e5a3017c8c1ced6a21"
dependencies = [ dependencies = [
"libc", "libc",
"str-buf", "str-buf",
@ -246,27 +246,17 @@ dependencies = [
"percent-encoding", "percent-encoding",
] ]
[[package]]
name = "futf"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b"
dependencies = [
"mac",
"new_debug_unreachable",
]
[[package]] [[package]]
name = "futures-core" name = "futures-core"
version = "0.3.18" version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "629316e42fe7c2a0b9a65b47d159ceaa5453ab14e8f0a3c5eedbb8cd55b4a445" checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3"
[[package]] [[package]]
name = "futures-executor" name = "futures-executor"
version = "0.3.18" version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b808bf53348a36cab739d7e04755909b9fcaaa69b7d7e588b37b6ec62704c97" checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-task", "futures-task",
@ -275,15 +265,15 @@ dependencies = [
[[package]] [[package]]
name = "futures-task" name = "futures-task"
version = "0.3.18" version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dabf1872aaab32c886832f2276d2f5399887e2bd613698a02359e4ea83f8de12" checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a"
[[package]] [[package]]
name = "futures-util" name = "futures-util"
version = "0.3.18" version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d22213122356472061ac0f1ab2cee28d2bac8491410fd68c2af53d1cedb83e" checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"futures-task", "futures-task",
@ -303,9 +293,9 @@ dependencies = [
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.2.3" version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
@ -366,9 +356,11 @@ dependencies = [
[[package]] [[package]]
name = "helix-core" name = "helix-core"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"arc-swap", "arc-swap",
"chrono",
"encoding_rs",
"etcetera", "etcetera",
"helix-syntax", "helix-syntax",
"log", "log",
@ -379,8 +371,9 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"similar", "similar",
"slotmap",
"smallvec", "smallvec",
"tendril", "smartstring",
"toml", "toml",
"tree-sitter", "tree-sitter",
"unicode-general-category", "unicode-general-category",
@ -390,7 +383,7 @@ dependencies = [
[[package]] [[package]]
name = "helix-dap" name = "helix-dap"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"fern", "fern",
@ -404,7 +397,7 @@ dependencies = [
[[package]] [[package]]
name = "helix-lsp" name = "helix-lsp"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-executor", "futures-executor",
@ -422,7 +415,7 @@ dependencies = [
[[package]] [[package]]
name = "helix-syntax" name = "helix-syntax"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cc", "cc",
@ -433,7 +426,7 @@ dependencies = [
[[package]] [[package]]
name = "helix-term" name = "helix-term"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"chrono", "chrono",
@ -465,7 +458,7 @@ dependencies = [
[[package]] [[package]]
name = "helix-tui" name = "helix-tui"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"cassowary", "cassowary",
@ -478,14 +471,13 @@ dependencies = [
[[package]] [[package]]
name = "helix-view" name = "helix-view"
version = "0.5.0" version = "0.6.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bitflags", "bitflags",
"chardetng", "chardetng",
"clipboard-win", "clipboard-win",
"crossterm", "crossterm",
"encoding_rs",
"futures-util", "futures-util",
"helix-core", "helix-core",
"helix-dap", "helix-dap",
@ -551,9 +543,9 @@ dependencies = [
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "0.4.8" version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
[[package]] [[package]]
name = "jsonrpc-core" name = "jsonrpc-core"
@ -576,15 +568,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.104" version = "0.2.117"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2f96d100e1cf1929e7719b7edb3b90ab5298072638fccd77be9ce942ecdfce" checksum = "e74d72e0f9b65b5b4ca49a346af3976df0f9c61d550727f349ecd559f251a26c"
[[package]] [[package]]
name = "libloading" name = "libloading"
version = "0.7.2" version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afe203d669ec979b7128619bae5a63b7b42e9203c1b29146079ee05e2f604b52" checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"winapi", "winapi",
@ -592,9 +584,9 @@ dependencies = [
[[package]] [[package]]
name = "lock_api" name = "lock_api"
version = "0.4.5" version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b"
dependencies = [ dependencies = [
"scopeguard", "scopeguard",
] ]
@ -610,9 +602,9 @@ dependencies = [
[[package]] [[package]]
name = "lsp-types" name = "lsp-types"
version = "0.91.1" version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2368312c59425dd133cb9a327afee65be0a633a8ce471d248e2202a48f8f68ae" checksum = "e8a69d4142d51b208c9fc3cea68b1a7fcef30354e7aa6ccad07250fd8430fc76"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"serde", "serde",
@ -621,12 +613,6 @@ dependencies = [
"url", "url",
] ]
[[package]]
name = "mac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]] [[package]]
name = "matches" name = "matches"
version = "0.1.9" version = "0.1.9"
@ -670,12 +656,6 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "new_debug_unreachable"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]] [[package]]
name = "ntapi" name = "ntapi"
version = "0.3.6" version = "0.3.6"
@ -706,9 +686,9 @@ dependencies = [
[[package]] [[package]]
name = "num_cpus" name = "num_cpus"
version = "1.13.0" version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
dependencies = [ dependencies = [
"hermit-abi", "hermit-abi",
"libc", "libc",
@ -716,9 +696,9 @@ dependencies = [
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.8.0" version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5"
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
@ -728,7 +708,17 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [ dependencies = [
"instant", "instant",
"lock_api", "lock_api",
"parking_lot_core", "parking_lot_core 0.8.5",
]
[[package]]
name = "parking_lot"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
dependencies = [
"lock_api",
"parking_lot_core 0.9.1",
] ]
[[package]] [[package]]
@ -745,6 +735,19 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "parking_lot_core"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28141e0cc4143da2443301914478dc976a61ffdb3f043058310c70df2fed8954"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-sys",
]
[[package]] [[package]]
name = "percent-encoding" name = "percent-encoding"
version = "2.1.0" version = "2.1.0"
@ -753,9 +756,9 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
[[package]] [[package]]
name = "pin-project-lite" name = "pin-project-lite"
version = "0.2.7" version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c"
[[package]] [[package]]
name = "pin-utils" name = "pin-utils"
@ -765,18 +768,18 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.30" version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70" checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
dependencies = [ dependencies = [
"unicode-xid", "unicode-xid",
] ]
[[package]] [[package]]
name = "pulldown-cmark" name = "pulldown-cmark"
version = "0.8.0" version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8" checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"memchr", "memchr",
@ -794,9 +797,9 @@ dependencies = [
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.10" version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
] ]
@ -863,18 +866,18 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]] [[package]]
name = "ropey" name = "ropey"
version = "1.3.1" version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9150aff6deb25b20ed110889f070a678bcd1033e46e5e9d6fb1abeab17947f28" checksum = "e6b9aa65bcd9f308d37c7158b4a1afaaa32b8450213e20c9b98e7d5b3cc2fec3"
dependencies = [ dependencies = [
"smallvec", "smallvec",
] ]
[[package]] [[package]]
name = "ryu" name = "ryu"
version = "1.0.5" version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
[[package]] [[package]]
name = "same-file" name = "same-file"
@ -893,18 +896,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.130" version = "1.0.136"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.130" version = "1.0.136"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -913,9 +916,9 @@ dependencies = [
[[package]] [[package]]
name = "serde_json" name = "serde_json"
version = "1.0.72" version = "1.0.78"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0ffa0837f2dfa6fb90868c2b5468cad482e175f7dad97e7421951e663f2b527" checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085"
dependencies = [ dependencies = [
"itoa", "itoa",
"ryu", "ryu",
@ -935,9 +938,9 @@ dependencies = [
[[package]] [[package]]
name = "signal-hook" name = "signal-hook"
version = "0.3.10" version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c98891d737e271a2954825ef19e46bd16bdb98e2746f2eec4f7a4ef7946efd1" checksum = "647c97df271007dcea485bb74ffdb57f2e683f1306c854f468a0c244badabf2d"
dependencies = [ dependencies = [
"libc", "libc",
"signal-hook-registry", "signal-hook-registry",
@ -965,9 +968,9 @@ dependencies = [
[[package]] [[package]]
name = "signal-hook-tokio" name = "signal-hook-tokio"
version = "0.3.0" version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6c5d32165ff8b94e68e7b3bdecb1b082e958c22434b363482cfb89dcd6f3ff8" checksum = "213241f76fb1e37e27de3b6aa1b068a2c333233b59cca6634f634b80a27ecf1e"
dependencies = [ dependencies = [
"futures-core", "futures-core",
"libc", "libc",
@ -998,9 +1001,24 @@ dependencies = [
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "1.7.0" version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
[[package]]
name = "smartstring"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31aa6a31c0c2b21327ce875f7e8952322acfcfd0c27569a6e18a647281352c9b"
dependencies = [
"static_assertions",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]] [[package]]
name = "str-buf" name = "str-buf"
@ -1010,26 +1028,15 @@ checksum = "d44a3643b4ff9caf57abcee9c2c621d6c03d9135e0d8b589bd9afb5992cb176a"
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.80" version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"unicode-xid", "unicode-xid",
] ]
[[package]]
name = "tendril"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9ef557cb397a4f0a5a3a628f06515f78563f2209e64d47055d9dc6052bf5e33"
dependencies = [
"futf",
"mac",
"utf-8",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "1.0.30" version = "1.0.30"
@ -1052,9 +1059,9 @@ dependencies = [
[[package]] [[package]]
name = "thread_local" name = "thread_local"
version = "1.1.3" version = "1.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
dependencies = [ dependencies = [
"once_cell", "once_cell",
] ]
@ -1070,9 +1077,9 @@ dependencies = [
[[package]] [[package]]
name = "tinyvec" name = "tinyvec"
version = "1.5.0" version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83b2a3d4d9091d0abd7eba4dc2710b1718583bd4d8992e2190720ea38f391f7" checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2"
dependencies = [ dependencies = [
"tinyvec_macros", "tinyvec_macros",
] ]
@ -1085,18 +1092,17 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.14.0" version = "1.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70e992e41e0d2fb9f755b37446f20900f64446ef54874f40a60c78f021ac6144" checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a"
dependencies = [ dependencies = [
"autocfg",
"bytes", "bytes",
"libc", "libc",
"memchr", "memchr",
"mio", "mio",
"num_cpus", "num_cpus",
"once_cell", "once_cell",
"parking_lot", "parking_lot 0.11.2",
"pin-project-lite", "pin-project-lite",
"signal-hook-registry", "signal-hook-registry",
"tokio-macros", "tokio-macros",
@ -1105,9 +1111,9 @@ dependencies = [
[[package]] [[package]]
name = "tokio-macros" name = "tokio-macros"
version = "1.6.0" version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9efc1aba077437943f7515666aa2b882dfabfbfdf89c819ea75a8d6e9eaba5e" checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -1136,9 +1142,9 @@ dependencies = [
[[package]] [[package]]
name = "tree-sitter" name = "tree-sitter"
version = "0.20.1" version = "0.20.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9394e9dbfe967b5f3d6ab79e302e78b5fb7b530c368d634ff3b8d67ede138bf1" checksum = "4e34327f8eac545e3f037382471b2b19367725a242bba7bc45edb9efb49fe39a"
dependencies = [ dependencies = [
"cc", "cc",
"regex", "regex",
@ -1161,9 +1167,9 @@ checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f"
[[package]] [[package]]
name = "unicode-general-category" name = "unicode-general-category"
version = "0.4.0" version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07547e3ee45e28326cc23faac56d44f58f16ab23e413db526debce3b0bfd2742" checksum = "1218098468b8085b19a2824104c70d976491d247ce194bbd9dc77181150cdfd6"
[[package]] [[package]]
name = "unicode-normalization" name = "unicode-normalization"
@ -1176,9 +1182,9 @@ dependencies = [
[[package]] [[package]]
name = "unicode-segmentation" name = "unicode-segmentation"
version = "1.8.0" version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
[[package]] [[package]]
name = "unicode-width" name = "unicode-width"
@ -1205,17 +1211,11 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]] [[package]]
name = "version_check" name = "version_check"
version = "0.9.3" version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]] [[package]]
name = "walkdir" name = "walkdir"
@ -1236,9 +1236,9 @@ checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
[[package]] [[package]]
name = "which" name = "which"
version = "4.2.2" version = "4.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea187a8ef279bc014ec368c27a920da2024d2a711109bfbe3440585d5cf27ad9" checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2"
dependencies = [ dependencies = [
"either", "either",
"lazy_static", "lazy_static",
@ -1275,3 +1275,55 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0" version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3df6e476185f92a12c072be4a189a0210dcdcf512a1891d6dff9edb874deadc6"
dependencies = [
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_msvc"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8e92753b1c443191654ec532f14c199742964a061be25d77d7a96f09db20bf5"
[[package]]
name = "windows_i686_gnu"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a711c68811799e017b6038e0922cb27a5e2f43a2ddb609fe0b6f3eeda9de615"
[[package]]
name = "windows_i686_msvc"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "146c11bb1a02615db74680b32a68e2d61f553cc24c4eb5b4ca10311740e44172"
[[package]]
name = "windows_x86_64_gnu"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c912b12f7454c6620635bbff3450962753834be2a594819bd5e945af18ec64bc"
[[package]]
name = "windows_x86_64_msvc"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "504a2476202769977a040c6364301a3f65d0cc9e3fb08600b2bda150a0488316"
[[package]]
name = "xtask"
version = "0.6.0"
dependencies = [
"helix-core",
"helix-term",
"toml",
]


@ -7,6 +7,7 @@ members = [
"helix-syntax",
"helix-lsp",
"helix-dap",
"xtask",
]
# Build helix-syntax in release mode to make the code path faster in development.
@ -18,3 +19,4 @@ split-debuginfo = "unpacked"
[profile.release]
lto = "thin"
# debug = true


@ -44,8 +44,8 @@ # Installation
This will install the `hx` binary to `$HOME/.cargo/bin`.
Helix also needs its runtime files so make sure to copy/symlink the `runtime/` directory into the
- config directory (for example `~/.config/helix/runtime` on Linux/macOS). This location can be overriden
- via the `HELIX_RUNTIME` environment variable.
+ config directory (for example `~/.config/helix/runtime` on Linux/macOS, or `%AppData%/helix/runtime` on Windows).
+ This location can be overridden via the `HELIX_RUNTIME` environment variable.
Packages already solve this for you by wrapping the `hx` binary with a wrapper
that sets the variable to the install dir.
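For example, from a source checkout on Linux/macOS you could symlink the directory with `ln -s "$PWD/runtime" ~/.config/helix/runtime`, or point the variable at it with `export HELIX_RUNTIME="$PWD/runtime"`; this is only an illustrative setup, any path to the runtime files works.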
@ -65,21 +65,7 @@ ## MacOS
# Contributing
Contributors are very welcome! **No contribution is too small and all contributions are valued.** Contributing guidelines can be found [here](./docs/CONTRIBUTING.md).
Some suggestions to get started:
- You can look at the [good first issue](https://github.com/helix-editor/helix/issues?q=is%3Aopen+label%3AE-easy+label%3AE-good-first-issue) label on the issue tracker.
- Help with packaging on various distributions needed!
- To use print debugging to the [Helix log file](https://github.com/helix-editor/helix/wiki/FAQ#access-the-log-file), you must:
* Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`)
* Pass the appropriate verbosity level option for the desired log level. (`hx -v <file>` for info, more `v`s for higher severity inclusive)
- If your preferred language is missing, integrating a tree-sitter grammar for
it and defining syntax highlight queries for it is straight forward and
doesn't require much knowledge of the internals.
We provide an [architecture.md](./docs/architecture.md) that should give you
a good overview of the internals.
# Getting help

13 TODO.md

@ -1,25 +1,12 @@
- tree sitter:
- markdown
- regex
- kotlin
- clojure
- erlang
- [ ] completion isIncomplete support
1
- [ ] respect view fullscreen flag
- [ ] Implement marks (superset of Selection/Range)
- [ ] = for auto indent line/selection
- [ ] :x for closing buffers
- [ ] lsp: signature help
2
- [ ] macro recording
- [ ] extend selection (treesitter select parent node) (replaces viw, vi(, va( etc )
- [ ] selection align
- [ ] store some state between restarts: file positions, prompt history
- [ ] highlight matched characters in picker

51 base16_theme.toml Normal file

@ -0,0 +1,51 @@
# Author: NNB <nnbnh@protonmail.com>
"ui.menu" = "black"
"ui.menu.selected" = { modifiers = ["reversed"] }
"ui.linenr" = { fg = "gray", bg = "black" }
"ui.popup" = { modifiers = ["reversed"] }
"ui.linenr.selected" = { fg = "white", bg = "black", modifiers = ["bold"] }
"ui.selection" = { fg = "black", bg = "blue" }
"ui.selection.primary" = { fg = "white", bg = "blue" }
"comment" = { fg = "gray" }
"ui.statusline" = { fg = "black", bg = "white" }
"ui.statusline.inactive" = { fg = "gray", bg = "white" }
"ui.help" = { modifiers = ["reversed"] }
"ui.cursor" = { fg = "white", modifiers = ["reversed"] }
"variable" = "red"
"constant.numeric" = "yellow"
"constant" = "yellow"
"attributes" = "yellow"
"type" = "yellow"
"ui.cursor.match" = { fg = "yellow", modifiers = ["underlined"] }
"string" = "green"
"variable.other.member" = "green"
"constant.character.escape" = "cyan"
"function" = "blue"
"constructor" = "blue"
"special" = "blue"
"keyword" = "magenta"
"label" = "magenta"
"namespace" = "magenta"
"ui.help" = { fg = "white", bg = "black" }
"markup.heading" = "blue"
"markup.list" = "red"
"markup.bold" = { fg = "yellow", modifiers = ["bold"] }
"markup.italic" = { fg = "magenta", modifiers = ["italic"] }
"markup.link.url" = { fg = "yellow", modifiers = ["underlined"] }
"markup.link.text" = "red"
"markup.quote" = "cyan"
"markup.raw" = "green"
"diff.plus" = "green"
"diff.delta" = "yellow"
"diff.minus" = "red"
"diagnostic" = { modifiers = ["underlined"] }
"ui.gutter" = { bg = "black" }
"info" = "blue"
"hint" = "gray"
"debug" = "gray"
"warning" = "yellow"
"error" = "red"


@ -2,10 +2,12 @@ # Summary
- [Installation](./install.md)
- [Usage](./usage.md)
- [Keymap](./keymap.md)
- [Commands](./commands.md)
- [Language Support](./lang-support.md)
- [Migrating from Vim](./from-vim.md)
- [Configuration](./configuration.md)
- [Themes](./themes.md)
- [Keymap](./keymap.md)
- [Key Remapping](./remapping.md)
- [Hooks](./hooks.md)
- [Languages](./languages.md)

5 book/src/commands.md Normal file

@ -0,0 +1,5 @@
# Commands
Command mode can be activated by pressing `:`, similar to vim. Built-in commands:
{{#include ./generated/typable-cmd.md}}


@ -5,9 +5,27 @@ # Configuration
* Linux and Mac: `~/.config/helix/config.toml`
* Windows: `%AppData%\helix\config.toml`
Example config:
```toml
theme = "onedark"
[editor]
line-number = "relative"
mouse = false
[editor.cursor-shape]
insert = "bar"
normal = "block"
select = "underline"
[editor.file-picker]
hidden = false
```
## Editor
- `[editor]` section of the config.
+ ### `[editor]` Section
| Key | Description | Default |
|--|--|---------|
@ -16,15 +34,37 @@ ## Editor
| `middle-click-paste` | Middle click paste support. | `true` |
| `scroll-lines` | Number of lines to scroll per scroll wheel step. | `3` |
| `shell` | Shell to use when running external commands. | Unix: `["sh", "-c"]`<br/>Windows: `["cmd", "/C"]` |
- | `line-number` | Line number display (`absolute`, `relative`) | `absolute` |
+ | `line-number` | Line number display: `absolute` simply shows each line's number, while `relative` shows the distance from the current line. When unfocused or in insert mode, `relative` will still show absolute line numbers. | `absolute` |
| `smart-case` | Enable smart case regex searching (case insensitive unless pattern contains upper case characters) | `true` |
| `auto-pairs` | Enable automatic insertion of pairs to parentheses, brackets, etc. | `true` |
| `auto-completion` | Enable automatic pop up of auto-completion. | `true` |
| `idle-timeout` | Time in milliseconds since last keypress before idle timers trigger. Used for autocompletion, set to 0 for instant. | `400` |
| `completion-trigger-len` | The min-length of word under cursor to trigger autocompletion | `2` |
| `auto-info` | Whether to display infoboxes | `true` |
| `true-color` | Set to `true` to override automatic detection of terminal truecolor support in the event of a false negative. | `false` |
- `[editor.filepicker]` section of the config. Sets options for file picker and global search. All but the last key listed in the default file-picker configuration below are IgnoreOptions: whether hidden files and files listed within ignore files are ignored by (not visible in) the helix file picker and global search. There is also one other key, `max-depth` available, which is not defined by default.
+ ### `[editor.cursor-shape]` Section
Defines the shape of the cursor in each mode. Note that due to limitations
of the terminal environment, only the primary cursor can change shape.
| Key | Description | Default |
| --- | ----------- | ------- |
| `normal` | Cursor shape in [normal mode][normal mode] | `block` |
| `insert` | Cursor shape in [insert mode][insert mode] | `block` |
| `select` | Cursor shape in [select mode][select mode] | `block` |
[normal mode]: ./keymap.md#normal-mode
[insert mode]: ./keymap.md#insert-mode
[select mode]: ./keymap.md#select--extend-mode
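For example, mirroring the example config near the top of this page, the shapes could be set like this:

```toml
[editor.cursor-shape]
# Only the primary cursor changes shape (terminal limitation noted above).
insert = "bar"
normal = "block"
select = "underline"
```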
### `[editor.file-picker]` Section
Sets options for file picker and global search. All but the last key listed in
the default file-picker configuration below are IgnoreOptions: whether hidden
files and files listed within ignore files are ignored by (not visible in) the
helix file picker and global search. There is also one other key, `max-depth`
available, which is not defined by default.
| Key | Description | Default |
|--|--|---------|
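As a sketch, using the `hidden` key from the example config above together with the optional `max-depth` key mentioned here (the chosen values are purely illustrative):

```toml
[editor.file-picker]
# Whether hidden files are ignored by (not visible in) the picker and global search.
hidden = false
# Not defined by default; limits how deep the picker and global search recurse into directories.
max-depth = 4
```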


@ -0,0 +1,63 @@
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default LSP |
| --- | --- | --- | --- | --- |
| bash | ✓ | | | `bash-language-server` |
| c | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | | | |
| cmake | ✓ | ✓ | ✓ | `cmake-language-server` |
| comment | ✓ | | | |
| cpp | ✓ | ✓ | ✓ | `clangd` |
| css | ✓ | | | |
| dart | ✓ | | ✓ | `dart` |
| dockerfile | ✓ | | | `docker-langserver` |
| elixir | ✓ | | | `elixir-ls` |
| elm | ✓ | | | `elm-language-server` |
| fish | ✓ | ✓ | ✓ | |
| git-commit | ✓ | | | |
| git-config | ✓ | | | |
| git-diff | ✓ | | | |
| git-rebase | ✓ | | | |
| glsl | ✓ | | ✓ | |
| go | ✓ | ✓ | ✓ | `gopls` |
| graphql | ✓ | | | |
| haskell | ✓ | | | `haskell-language-server-wrapper` |
| html | ✓ | | | |
| iex | ✓ | | | |
| java | ✓ | | | |
| javascript | ✓ | | ✓ | `typescript-language-server` |
| json | ✓ | | ✓ | |
| julia | ✓ | | | `julia` |
| latex | ✓ | | | |
| lean | ✓ | | | `lean` |
| ledger | ✓ | | | |
| llvm | ✓ | ✓ | ✓ | |
| llvm-mir | ✓ | ✓ | ✓ | |
| llvm-mir-yaml | ✓ | | ✓ | |
| lua | ✓ | | ✓ | |
| make | ✓ | | | |
| markdown | ✓ | | | |
| mint | | | | `mint` |
| nix | ✓ | | ✓ | `rnix-lsp` |
| ocaml | ✓ | | ✓ | |
| ocaml-interface | ✓ | | | |
| perl | ✓ | ✓ | ✓ | |
| php | ✓ | ✓ | ✓ | |
| prolog | | | | `swipl` |
| protobuf | ✓ | | ✓ | |
| python | ✓ | ✓ | ✓ | `pylsp` |
| racket | | | | `racket` |
| regex | ✓ | | | |
| rescript | ✓ | ✓ | | `rescript-language-server` |
| ruby | ✓ | | ✓ | `solargraph` |
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
| scala | ✓ | | ✓ | `metals` |
| svelte | ✓ | | ✓ | `svelteserver` |
| tablegen | ✓ | ✓ | ✓ | |
| toml | ✓ | | | |
| tsq | ✓ | | | |
| tsx | ✓ | | | `typescript-language-server` |
| twig | ✓ | | | |
| typescript | ✓ | | ✓ | `typescript-language-server` |
| vue | ✓ | | | |
| wgsl | ✓ | | | |
| yaml | ✓ | | ✓ | |
| zig | ✓ | | ✓ | `zls` |


@ -0,0 +1,48 @@
| Name | Description |
| --- | --- |
| `:quit`, `:q` | Close the current view. |
| `:quit!`, `:q!` | Close the current view forcefully (ignoring unsaved changes). |
| `:open`, `:o` | Open a file from disk into the current view. |
| `:buffer-close`, `:bc`, `:bclose` | Close the current buffer. |
| `:buffer-close!`, `:bc!`, `:bclose!` | Close the current buffer forcefully (ignoring unsaved changes). |
| `:write`, `:w` | Write changes to disk. Accepts an optional path (:write some/path.txt) |
| `:new`, `:n` | Create a new scratch buffer. |
| `:format`, `:fmt` | Format the file using the LSP formatter. |
| `:indent-style` | Set the indentation style for editing. ('t' for tabs or 1-8 for number of spaces.) |
| `:line-ending` | Set the document's default line ending. Options: crlf, lf, cr, ff, nel. |
| `:earlier`, `:ear` | Jump back to an earlier point in edit history. Accepts a number of steps or a time span. |
| `:later`, `:lat` | Jump to a later point in edit history. Accepts a number of steps or a time span. |
| `:write-quit`, `:wq`, `:x` | Write changes to disk and close the current view. Accepts an optional path (:wq some/path.txt) |
| `:write-quit!`, `:wq!`, `:x!` | Write changes to disk and close the current view forcefully. Accepts an optional path (:wq! some/path.txt) |
| `:write-all`, `:wa` | Write changes from all views to disk. |
| `:write-quit-all`, `:wqa`, `:xa` | Write changes from all views to disk and close all views. |
| `:write-quit-all!`, `:wqa!`, `:xa!` | Write changes from all views to disk and close all views forcefully (ignoring unsaved changes). |
| `:quit-all`, `:qa` | Close all views. |
| `:quit-all!`, `:qa!` | Close all views forcefully (ignoring unsaved changes). |
| `:cquit`, `:cq` | Quit with exit code (default 1). Accepts an optional integer exit code (:cq 2). |
| `:cquit!`, `:cq!` | Quit with exit code (default 1) forcefully (ignoring unsaved changes). Accepts an optional integer exit code (:cq! 2). |
| `:theme` | Change the editor theme. |
| `:clipboard-yank` | Yank main selection into system clipboard. |
| `:clipboard-yank-join` | Yank joined selections into system clipboard. A separator can be provided as first argument. Default value is newline. |
| `:primary-clipboard-yank` | Yank main selection into system primary clipboard. |
| `:primary-clipboard-yank-join` | Yank joined selections into system primary clipboard. A separator can be provided as first argument. Default value is newline. |
| `:clipboard-paste-after` | Paste system clipboard after selections. |
| `:clipboard-paste-before` | Paste system clipboard before selections. |
| `:clipboard-paste-replace` | Replace selections with content of system clipboard. |
| `:primary-clipboard-paste-after` | Paste primary clipboard after selections. |
| `:primary-clipboard-paste-before` | Paste primary clipboard before selections. |
| `:primary-clipboard-paste-replace` | Replace selections with content of system primary clipboard. |
| `:show-clipboard-provider` | Show clipboard provider name in status bar. |
| `:change-current-directory`, `:cd` | Change the current working directory. |
| `:show-directory`, `:pwd` | Show the current working directory. |
| `:encoding` | Set encoding based on `https://encoding.spec.whatwg.org` |
| `:reload` | Discard changes and reload from the source file. |
| `:tree-sitter-scopes` | Display tree sitter scopes, primarily for theming and development. |
| `:vsplit`, `:vs` | Open the file in a vertical split. |
| `:hsplit`, `:hs`, `:sp` | Open the file in a horizontal split. |
| `:tutor` | Open the tutorial. |
| `:goto`, `:g` | Go to line number. |
| `:set-option`, `:set` | Set a config option at runtime |
| `:sort` | Sort ranges in selection. |
| `:rsort` | Sort ranges in selection in reverse order. |
| `:tree-sitter-subtree`, `:ts-subtree` | Display tree sitter subtree under cursor, primarily for debugging queries. |

View File

@ -2,7 +2,7 @@ # Adding languages
## Submodules
To add a new language, you should first add a tree-sitter submodule. To do this,
you can run the command
```sh
git submodule add -f <repository> helix-syntax/languages/tree-sitter-<name>
@ -27,22 +27,32 @@ ## languages.toml
These are the available keys and descriptions for the file.
| Key | Description |
| ---- | ----------- |
| name | The name of the language |
| scope | A string like `source.js` that identifies the language. Currently, we strive to match the scope names used by popular TextMate grammars and by the Linguist library. Usually `source.<name>` or `text.<name>` in case of markup languages |
| injection-regex | regex pattern that will be tested against a language name in order to determine whether this language should be used for a potential [language injection][treesitter-language-injection] site. |
| file-types | The filetypes of the language, for example `["yml", "yaml"]` |
| shebangs | The interpreters from the shebang line, for example `["sh", "bash"]` |
| roots | A set of marker files to look for when trying to find the workspace root. For example `Cargo.lock`, `yarn.lock` |
| auto-format | Whether to autoformat this language when saving |
| diagnostic-severity | Minimal severity of diagnostic for it to be displayed. (Allowed values: `Error`, `Warning`, `Info`, `Hint`) |
| comment-token | The token to use as a comment-token |
| indent | The indent to use. Has sub keys `tab-width` and `unit` |
| config | Language server configuration |
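For illustration, a minimal entry using these keys might look like the hedged sketch below; `mylang` and all values are invented for the example and are not taken from the shipped `languages.toml`.
```toml
[[language]]
name = "mylang"                   # hypothetical language
scope = "source.mylang"
injection-regex = "mylang"
file-types = ["mylang"]
shebangs = ["mylang"]
roots = ["mylang.toml"]
auto-format = false
diagnostic-severity = "Warning"   # hide Hint/Info diagnostics
comment-token = "#"
indent = { tab-width = 4, unit = "    " }
```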
## Queries
For a language to have syntax-highlighting and indentation among
other things, you have to add queries. Add a directory for your
language with the path `runtime/queries/<name>/`. The tree-sitter
[website](https://tree-sitter.github.io/tree-sitter/syntax-highlighting#queries)
gives more info on how to write queries.
> NOTE: When evaluating queries, the first matching query takes
precedence, which is different from other editors like Neovim, where
the last matching query supersedes the ones before it. See
[this issue][neovim-query-precedence] for an example.
## Common Issues
@ -58,3 +68,4 @@ ## Common Issues
[treesitter-language-injection]: https://tree-sitter.github.io/tree-sitter/syntax-highlighting#language-injection
[languages.toml]: https://github.com/helix-editor/helix/blob/master/languages.toml
[neovim-query-precedence]: https://github.com/helix-editor/helix/pull/1170#issuecomment-997294090

View File

@ -27,6 +27,15 @@ ### Arch Linux
A [helix-git](https://aur.archlinux.org/packages/helix-git/) package is also available on the AUR, which builds the master branch.
### Fedora Linux
You can install the COPR package for Helix via
```
sudo dnf copr enable varlad/helix
sudo dnf install helix
```
## Build from source
```

View File

@ -25,7 +25,9 @@ ### Movement
| `f` | Find next char | `find_next_char` |
| `T` | Find 'till previous char | `till_prev_char` |
| `F` | Find previous char | `find_prev_char` |
| `G` | Go to line number `<n>` | `goto_line` |
| `Alt-.` | Repeat last motion (`f`, `t` or `m`) | `repeat_last_motion` |
| `Alt-:` | Ensures the selection is in forward direction | `ensure_selections_forward` |
| `Home` | Move to the start of the line | `goto_line_start` |
| `End` | Move to the end of the line | `goto_line_end` |
| `PageUp` | Move page up | `page_up` |
@ -34,6 +36,7 @@ ### Movement
| `Ctrl-d` | Move half page down | `half_page_down` |
| `Ctrl-i` | Jump forward on the jumplist | `jump_forward` |
| `Ctrl-o` | Jump backward on the jumplist | `jump_backward` |
| `Ctrl-s` | Save the current selection to the jumplist | `save_selection` |
| `v` | Enter [select (extend) mode](#select--extend-mode) | `select_mode` |
| `g` | Enter [goto mode](#goto-mode) | N/A |
| `m` | Enter [match mode](#match-mode) | N/A |
@ -45,37 +48,39 @@ ### Movement
### Changes
| Key | Description | Command |
| ----- | ----------- | ------- |
| `r` | Replace with a character | `replace` |
| `R` | Replace with yanked text | `replace_with_yanked` |
| `~` | Switch case of the selected text | `switch_case` |
| `` ` `` | Set the selected text to lower case | `switch_to_lowercase` |
| `` Alt-` `` | Set the selected text to upper case | `switch_to_uppercase` |
| `i` | Insert before selection | `insert_mode` |
| `a` | Insert after selection (append) | `append_mode` |
| `I` | Insert at the start of the line | `prepend_to_line` |
| `A` | Insert at the end of the line | `append_to_line` |
| `o` | Open new line below selection | `open_below` |
| `O` | Open new line above selection | `open_above` |
| `.` | Repeat last change | N/A |
| `u` | Undo change | `undo` |
| `U` | Redo change | `redo` |
| `Alt-u` | Move backward in history | `earlier` |
| `Alt-U` | Move forward in history | `later` |
| `y` | Yank selection | `yank` |
| `p` | Paste after selection | `paste_after` |
| `P` | Paste before selection | `paste_before` |
| `"` `<reg>` | Select a register to yank to or paste from | `select_register` |
| `>` | Indent selection | `indent` |
| `<` | Unindent selection | `unindent` |
| `=` | Format selection (currently nonfunctional/disabled) (**LSP**) | `format_selections` |
| `d` | Delete selection | `delete_selection` |
| `Alt-d` | Delete selection, without yanking | `delete_selection_noyank` |
| `c` | Change selection (delete and enter insert mode) | `change_selection` |
| `Alt-c` | Change selection (delete and enter insert mode, without yanking) | `change_selection_noyank` |
| `Ctrl-a` | Increment object (number) under cursor | `increment` |
| `Ctrl-x` | Decrement object (number) under cursor | `decrement` |
| `Q` | Start/stop macro recording to the selected register (experimental) | `record_macro` |
| `q` | Play back a recorded macro from the selected register (experimental) | `replay_macro` |
#### Shell
@ -85,6 +90,7 @@ #### Shell
| <code>Alt-&#124;</code> | Pipe each selection into shell command, ignoring output | `shell_pipe_to` |
| `!` | Run shell command, inserting output before each selection | `shell_insert_output` |
| `Alt-!` | Run shell command, appending output after each selection | `shell_append_output` |
| `$` | Pipe each selection into shell command, keep selections where command returned 0 | `shell_keep_pipe` |
### Selection manipulation
@ -109,12 +115,14 @@ ### Selection manipulation
| `%` | Select entire file | `select_all` |
| `x` | Select current line, if already selected, extend to next line | `extend_line` |
| `X` | Extend selection to line bounds (line-wise selection) | `extend_to_line_bounds` |
| `J` | Join lines inside selection | `join_selections` |
| `K` | Keep selections matching the regex | `keep_selections` |
| `Alt-K` | Remove selections matching the regex | `remove_selections` |
| `Ctrl-c` | Comment/uncomment the selections | `toggle_comments` |
| `Alt-k` | Expand selection to parent syntax node (**TS**) | `expand_selection` |
| `Alt-j` | Shrink syntax tree object selection (**TS**) | `shrink_selection` |
| `Alt-h` | Select previous sibling node in syntax tree (**TS**) | `select_prev_sibling` |
| `Alt-l` | Select next sibling node in syntax tree (**TS**) | `select_next_sibling` |
### Search
@ -147,10 +155,10 @@ #### View mode
| `m` | Align the line to the middle of the screen (horizontally) | `align_view_middle` |
| `j` , `down` | Scroll the view downwards | `scroll_down` |
| `k` , `up` | Scroll the view upwards | `scroll_up` |
| `Ctrl-f` | Move page down | `page_down` |
| `Ctrl-b` | Move page up | `page_up` |
| `Ctrl-d` | Move half page down | `half_page_down` |
| `Ctrl-u` | Move half page up | `half_page_up` |
#### Goto mode
@ -158,20 +166,21 @@ #### Goto mode
| Key | Description | Command |
| ----- | ----------- | ------- |
| `g` | Go to line number `<n>` else start of file | `goto_file_start` |
| `e` | Go to the end of the file | `goto_last_line` |
| `f` | Go to files in the selection | `goto_file` |
| `h` | Go to the start of the line | `goto_line_start` |
| `l` | Go to the end of the line | `goto_line_end` |
| `s` | Go to first non-whitespace character of the line | `goto_first_nonwhitespace` |
| `t` | Go to the top of the screen | `goto_window_top` |
| `c` | Go to the middle of the screen | `goto_window_center` |
| `b` | Go to the bottom of the screen | `goto_window_bottom` |
| `d` | Go to definition (**LSP**) | `goto_definition` |
| `y` | Go to type definition (**LSP**) | `goto_type_definition` |
| `r` | Go to references (**LSP**) | `goto_reference` |
| `i` | Go to implementation (**LSP**) | `goto_implementation` |
| `a` | Go to the last accessed/alternate file | `goto_last_accessed_file` |
| `m` | Go to the last modified/alternate file | `goto_last_modified_file` |
| `n` | Go to next buffer | `goto_next_buffer` |
| `p` | Go to previous buffer | `goto_previous_buffer` |
| `.` | Go to last modification in current file | `goto_last_modification` |

10
book/src/lang-support.md Normal file
View File

@ -0,0 +1,10 @@
# Language Support
For more information, like arguments passed to the default LSP server,
extensions associated with a filetype, custom LSP settings, filetype-specific
indent settings, etc., see the default
[`languages.toml`][languages.toml] file.
{{#include ./generated/lang-support.md}}
[languages.toml]: https://github.com/helix-editor/helix/blob/master/languages.toml

View File

@ -11,4 +11,3 @@ # in <config_dir>/helix/languages.toml
name = "rust" name = "rust"
auto-format = false auto-format = false
``` ```

View File

@ -11,6 +11,8 @@ # Key Remapping
```toml
# At most one section each of 'keys.normal', 'keys.insert' and 'keys.select'
[keys.normal]
C-s = ":w" # Maps Control-s to the typable command :w, an alias for :write (save file)
C-o = ":open ~/.config/helix/config.toml" # Maps Control-o to open the Helix config file
a = "move_char_left" # Maps the 'a' key to the move_char_left command
w = "move_line_up" # Maps the 'w' key to move_line_up
"C-S-esc" = "extend_line" # Maps Control-Shift-Escape to extend_line
@ -21,6 +23,7 @@ # At most one section each of 'keys.normal', 'keys.insert' and 'keys.select'
"A-x" = "normal_mode" # Maps Alt-X to enter normal mode
j = { k = "normal_mode" } # Maps `jk` to exit insert mode
```
> NOTE: Typable commands can also be remapped; remember to keep the `:` prefix to indicate it's a typable command.
Control, Shift and Alt modifiers are encoded respectively with the prefixes
`C-`, `S-` and `A-`. Special keys are encoded as follows:
@ -42,10 +45,9 @@ # At most one section each of 'keys.normal', 'keys.insert' and 'keys.select'
| Down | `"down"` |
| Home | `"home"` |
| End | `"end"` |
| Page Up | `"pageup"` |
| Page Down | `"pagedown"` |
| Tab | `"tab"` |
| Delete | `"del"` |
| Insert | `"ins"` |
| Null | `"null"` |
@ -54,4 +56,4 @@ # At most one section each of 'keys.normal', 'keys.insert' and 'keys.select'
Keys can be disabled by binding them to the `no_op` command.
Commands can be found at [Keymap](https://docs.helix-editor.com/keymap.html) Commands.
> Commands can also be found in the source code at [`helix-term/src/commands.rs`](https://github.com/helix-editor/helix/blob/master/helix-term/src/commands.rs) at the invocation of `static_commands!` macro and the `TypableCommandList`.
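Putting the two notes above together, a user configuration that remaps a key to a typable command and disables another key might look like the hedged sketch below; the chosen keys are arbitrary examples.
```toml
[keys.normal]
"C-q" = ":quit-all" # a typable command: keep the leading ':'
"ins" = "no_op"     # disable the Insert key entirely
```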

View File

@ -1,14 +1,14 @@
# Themes
To use a theme add `theme = "<name>"` to your [`config.toml`](./configuration.md) at the very top of the file before the first section or select it during runtime using `:theme <name>`.
## Creating a theme
Create a file with the name of your theme as file name (i.e `mytheme.toml`) and place it in your `themes` directory (i.e `~/.config/helix/themes`). The directory might have to be created beforehand.
The names "default" and "base16_default" are reserved for the builtin themes and cannot be overridden by user defined themes.
The default theme.toml can be found [here](https://github.com/helix-editor/helix/blob/master/theme.toml), and user submitted themes [here](https://github.com/helix-editor/helix/blob/master/runtime/themes).
Each line in the theme file is specified as below:
@ -105,6 +105,7 @@ #### Syntax highlighting
- `type` - Types
  - `builtin` - Primitive types provided by the language (`int`, `usize`)
- `constructor`
- `constant` (TODO: constant.other.placeholder for %v)
  - `builtin` Special constants provided by the language (`true`, `false`, `nil` etc)
@ -146,6 +147,7 @@ #### Syntax highlighting
    - `repeat` - `for`, `while`, `loop`
    - `import` - `import`, `export`
    - `return`
    - `exception`
  - `operator` - `or`, `in`
  - `directive` - Preprocessor directives (`#if` in C)
  - `function` - `fn`, `func`
@ -162,10 +164,44 @@ #### Syntax highlighting
- `namespace`
- `markup`
  - `heading`
  - `list`
    - `unnumbered`
    - `numbered`
  - `bold`
  - `italic`
  - `link`
    - `url` - urls pointed to by links
    - `label` - non-url link references
    - `text` - url and image descriptions in links
  - `quote`
  - `raw`
    - `inline`
    - `block`
- `diff` - version control changes
  - `plus` - additions
  - `minus` - deletions
  - `delta` - modifications
    - `moved` - renamed or moved files/changes
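As a concrete illustration, a user theme might set a few of these scopes as in the hedged sketch below; the colors are arbitrary and only the scope names come from the list above.
```toml
# Hypothetical excerpt from a user mytheme.toml
"markup.heading" = { fg = "blue", modifiers = ["bold"] }
"markup.raw.inline" = "green"
"diff.plus" = "green"
"diff.minus" = "red"
"diff.delta" = "yellow"
```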
#### Interface
These scopes are used for theming the editor interface.
- `markup`
  - `normal`
    - `completion` - for completion doc popup ui
    - `hover` - for hover popup ui
  - `heading`
    - `completion` - for completion doc popup ui
    - `hover` - for hover popup ui
  - `raw`
    - `inline`
      - `completion` - for completion doc popup ui
      - `hover` - for hover popup ui
| Key | Notes |
| --- | --- |

View File

@ -42,7 +42,7 @@ ## Surround
`ms` acts on a selection, so select the text first and use `ms<char>`. `mr` and `md` work
on the closest pairs found and selections are not required; use counts to act in outer pairs.
It can also act on multiple selections (yay!). For example, to change every occurrence of `(use)` to `[use]`:
- `%` to select the whole file
- `s` to split the selections on a search term

37
docs/CONTRIBUTING.md Normal file
View File

@ -0,0 +1,37 @@
# Contributing
Contributors are very welcome! **No contribution is too small and all contributions are valued.**
Some suggestions to get started:
- You can look at the [good first issue][good-first-issue] label on the issue tracker.
- Help with packaging on various distributions needed!
- To use print debugging to the [Helix log file][log-file], you must:
* Print using `log::info!`, `warn!`, or `error!`. (`log::info!("helix!")`)
* Pass the appropriate verbosity level option for the desired log level. (`hx -v <file>` for info, more `v`s for higher verbosity)
- If your preferred language is missing, integrating a tree-sitter grammar for
it and defining syntax highlight queries for it is straightforward and
doesn't require much knowledge of the internals.
We provide an [architecture.md][architecture.md] that should give you
a good overview of the internals.
# Auto generated documentation
Some parts of [the book][docs] are autogenerated from the code itself,
like the list of `:commands` and supported languages. To generate these
files, run
```shell
cargo xtask docgen
```
inside the project. We use [xtask][xtask] as an ad-hoc task runner and
thus do not require any dependencies other than `cargo` (You don't have
to `cargo install` anything either).
[good-first-issue]: https://github.com/helix-editor/helix/labels/E-easy
[log-file]: https://github.com/helix-editor/helix/wiki/FAQ#access-the-log-file
[architecture.md]: ./architecture.md
[docs]: https://docs.helix-editor.com/
[xtask]: https://github.com/matklad/cargo-xtask

View File

@ -2,11 +2,11 @@
"nodes": { "nodes": {
"devshell": { "devshell": {
"locked": { "locked": {
"lastModified": 1632436039, "lastModified": 1641980203,
"narHash": "sha256-OtITeVWcKXn1SpVEnImpTGH91FycCskGBPqmlxiykv4=", "narHash": "sha256-RiWJ3+6V267Ji+P54K1Xrj1Nsah9BfG/aLfIhqgVyBY=",
"owner": "numtide", "owner": "numtide",
"repo": "devshell", "repo": "devshell",
"rev": "7a7a7aa0adebe5488e5abaec688fd9ae0f8ea9c6", "rev": "d897c1ddb4eab66cc2b783c7868d78555b9880ad",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -17,11 +17,11 @@
}, },
"flake-utils": { "flake-utils": {
"locked": { "locked": {
"lastModified": 1623875721, "lastModified": 1637014545,
"narHash": "sha256-A8BU7bjS5GirpAUv4QA+QnJ4CceLHkcXdRp4xITDB0s=", "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "f7e004a55b120c02ecb6219596820fcd32ca8772", "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -41,11 +41,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1634796585, "lastModified": 1642054253,
"narHash": "sha256-CW4yx6omk5qCXUIwXHp/sztA7u0SpyLq9NEACPnkiz8=", "narHash": "sha256-kHh9VmaB7gbS6pheheC4x0uT84LEmhfbsbWEQJgU2E4=",
"owner": "yusdacra", "owner": "yusdacra",
"repo": "nix-cargo-integration", "repo": "nix-cargo-integration",
"rev": "a84a2137a396f303978f1d48341e0390b0e16a8b", "rev": "f8fa9af990195a3f63fe2dde84aa187e193da793",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -56,11 +56,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1634782485, "lastModified": 1641887635,
"narHash": "sha256-psfh4OQSokGXG0lpq3zKFbhOo3QfoeudRcaUnwMRkQo=", "narHash": "sha256-kDGpufwzVaiGe5e1sBUBPo9f1YN+nYHJlYqCaVpZTQQ=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "34ad3ffe08adfca17fcb4e4a47bb5f3b113687be", "rev": "b2737d4980a17cc2b7d600d7d0b32fd7333aca88",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -72,15 +72,16 @@
}, },
"nixpkgs_2": { "nixpkgs_2": {
"locked": { "locked": {
"lastModified": 1628186154, "lastModified": 1637453606,
"narHash": "sha256-r2d0wvywFnL9z4iptztdFMhaUIAaGzrSs7kSok0PgmE=", "narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "06552b72346632b6943c8032e57e702ea12413bf", "rev": "8afc4e543663ca0a6a4f496262cd05233737e732",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }
@ -98,11 +99,11 @@
"nixpkgs": "nixpkgs_2" "nixpkgs": "nixpkgs_2"
}, },
"locked": { "locked": {
"lastModified": 1634869268, "lastModified": 1642128126,
"narHash": "sha256-RVAcEFlFU3877Mm4q/nbXGEYTDg/wQNhzmXGMTV6wBs=", "narHash": "sha256-av8JUACdrTfQYl/ftZJvKpZEmZfa0avCq7tt5Usdoq0=",
"owner": "oxalica", "owner": "oxalica",
"repo": "rust-overlay", "repo": "rust-overlay",
"rev": "c02c2d86354327317546501af001886fbb53d374", "rev": "ce4ef6f2d74f2b68f7547df1de22d1b0037ce4ad",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@ -20,50 +20,63 @@
# Set default package to helix-term release build # Set default package to helix-term release build
defaultOutputs = { app = "hx"; package = "helix"; }; defaultOutputs = { app = "hx"; package = "helix"; };
overrides = { overrides = {
crateOverrides = common: _: { crateOverrides = common: _: rec {
helix-term = prev: {
# link languages and theme toml files since helix-term expects them (for tests)
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} ..";
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
};
# link languages and theme toml files since helix-view expects them # link languages and theme toml files since helix-view expects them
helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml} .."; }; helix-view = _: { preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} .."; };
helix-syntax = _prev: { helix-syntax = prev: {
src =
let
pkgs = common.pkgs;
helix = pkgs.fetchgit {
url = "https://github.com/helix-editor/helix.git";
rev = "a8fd33ac012a79069ef1409503a2edcf3a585153";
fetchSubmodules = true;
sha256 = "sha256-5AtOC55ttWT+7RYMboaFxpGZML51ix93wAkYJTt+8JI=";
};
in
pkgs.runCommand prev.src.name { } ''
mkdir -p $out
ln -s ${prev.src}/* $out
ln -sf ${helix}/helix-syntax/languages $out
'';
preConfigure = "mkdir -p ../runtime/grammars"; preConfigure = "mkdir -p ../runtime/grammars";
postInstall = "cp -r ../runtime $out/runtime"; postInstall = "cp -r ../runtime $out/runtime";
}; };
}; helix-term = prev:
mainBuild = common: prev: let
let inherit (common) pkgs lib;
inherit (common) pkgs lib; helixSyntax = lib.buildCrate {
helixSyntax = lib.buildCrate { root = self;
root = self; memberName = "helix-syntax";
memberName = "helix-syntax"; defaultCrateOverrides = {
defaultCrateOverrides = { helix-syntax = helix-syntax;
helix-syntax = common.crateOverrides.helix-syntax; };
release = false;
}; };
release = false; runtimeDir = pkgs.runCommand "helix-runtime" { } ''
mkdir -p $out
ln -s ${common.root}/runtime/* $out
ln -sf ${helixSyntax}/runtime/grammars $out
'';
in
{
# link languages and theme toml files since helix-term expects them (for tests)
preConfigure = "ln -s ${common.root}/{languages.toml,theme.toml,base16_theme.toml} ..";
buildInputs = (prev.buildInputs or [ ]) ++ [ common.cCompiler.cc.lib ];
nativeBuildInputs = [ pkgs.makeWrapper ];
postFixup = ''
if [ -f "$out/bin/hx" ]; then
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
fi
'';
}; };
runtimeDir = pkgs.runCommand "helix-runtime" { } '' };
mkdir -p $out
ln -s ${common.root}/runtime/* $out
ln -sf ${helixSyntax}/runtime/grammars $out
'';
in
lib.optionalAttrs (common.memberName == "helix-term") {
nativeBuildInputs = [ pkgs.makeWrapper ];
postFixup = ''
if [ -f "$out/bin/hx" ]; then
wrapProgram "$out/bin/hx" --set HELIX_RUNTIME "${runtimeDir}"
fi
'';
};
shell = common: prev: { shell = common: prev: {
packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin ]); packages = prev.packages ++ (with common.pkgs; [ lld_13 lldb cargo-tarpaulin cargo-flamegraph ]);
env = prev.env ++ [ env = prev.env ++ [
{ name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; } { name = "HELIX_RUNTIME"; eval = "$PWD/runtime"; }
{ name = "RUST_BACKTRACE"; value = "1"; } { name = "RUST_BACKTRACE"; value = "1"; }
{ name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native"; } { name = "RUSTFLAGS"; value = "-C link-arg=-fuse-ld=lld -C target-cpu=native -Clink-arg=-Wl,--no-rosegment"; }
]; ];
}; };
}; };

View File

@ -1,6 +1,6 @@
[package]
name = "helix-core"
version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
@ -13,17 +13,18 @@ include = ["src/**/*", "README.md"]
[features]
[dependencies]
helix-syntax = { version = "0.6", path = "../helix-syntax" }
ropey = "1.3"
smallvec = "1.8"
smartstring = "0.2.9"
unicode-segmentation = "1.9"
unicode-width = "0.1"
unicode-general-category = "0.5"
# slab = "0.4.2"
slotmap = "1.0"
tree-sitter = "0.20"
once_cell = "1.9"
arc-swap = "1"
regex = "1"
@ -35,6 +36,9 @@ toml = "0.5"
similar = "2.1"
etcetera = "0.3"
encoding_rs = "0.8"
chrono = { version = "0.4", default-features = false, features = ["alloc", "std"] }
[dev-dependencies]
quickcheck = { version = "1", default-features = false }

View File

@ -1,7 +1,10 @@
//! When typing the opening character of one of the possible pairs defined below,
//! this module provides the functionality to insert the paired closing character.
use crate::{
    graphemes, movement::Direction, Range, Rope, RopeGraphemes, Selection, Tendril, Transaction,
};
use log::debug;
use smallvec::SmallVec;
// Heavily based on https://github.com/codemirror/closebrackets/
@ -15,7 +18,9 @@
    ('`', '`'),
];
// [TODO] build this dynamically in language config. see #992
const OPEN_BEFORE: &str = "([{'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
const CLOSE_BEFORE: &str = ")]}'\":;,> \n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}"; // includes space and newlines
// insert hook:
// Fn(doc, selection, char) => Option<Transaction>
@ -25,14 +30,19 @@
//
// to simplify, maybe return Option<Transaction> and just reimplement the default
// [TODO]
// * delete implementation where it erases the whole bracket (|) -> |
// * change to multi character pairs to handle cases like placing the cursor in the
//   middle of triple quotes, and more exotic pairs like Jinja's {% %}
#[must_use]
pub fn hook(doc: &Rope, selection: &Selection, ch: char) -> Option<Transaction> {
    debug!("autopairs hook selection: {:#?}", selection);
    for &(open, close) in PAIRS {
        if open == ch {
            if open == close {
                return Some(handle_same(doc, selection, open, CLOSE_BEFORE, OPEN_BEFORE));
            } else {
                return Some(handle_open(doc, selection, open, close, CLOSE_BEFORE));
            }
@ -47,18 +57,145 @@ pub fn hook(doc: &Rope, selection: &Selection, ch: char) -> Option<Transaction>
    None
}
fn prev_char(doc: &Rope, pos: usize) -> Option<char> {
    if pos == 0 {
        return None;
    }
    doc.get_char(pos - 1)
}
fn is_single_grapheme(doc: &Rope, range: &Range) -> bool {
let mut graphemes = RopeGraphemes::new(doc.slice(range.from()..range.to()));
let first = graphemes.next();
let second = graphemes.next();
debug!("first: {:#?}, second: {:#?}", first, second);
first.is_some() && second.is_none()
}
/// calculate what the resulting range should be for an auto pair insertion
fn get_next_range(
doc: &Rope,
start_range: &Range,
offset: usize,
typed_char: char,
len_inserted: usize,
) -> Range {
// When the character under the cursor changes due to complete pair
// insertion, we must look backward a grapheme and then add the length
// of the insertion to put the resulting cursor in the right place, e.g.
//
// foo[\r\n] - anchor: 3, head: 5
// foo([)]\r\n - anchor: 4, head: 5
//
// foo[\r\n] - anchor: 3, head: 5
// foo'[\r\n] - anchor: 4, head: 6
//
// foo([)]\r\n - anchor: 4, head: 5
// foo()[\r\n] - anchor: 5, head: 7
//
// [foo]\r\n - anchor: 0, head: 3
// [foo(])\r\n - anchor: 0, head: 5
// inserting at the very end of the document after the last newline
if start_range.head == doc.len_chars() && start_range.anchor == doc.len_chars() {
return Range::new(
start_range.anchor + offset + typed_char.len_utf8(),
start_range.head + offset + typed_char.len_utf8(),
);
}
let single_grapheme = is_single_grapheme(doc, start_range);
let doc_slice = doc.slice(..);
// just skip over graphemes
if len_inserted == 0 {
let end_anchor = if single_grapheme {
graphemes::next_grapheme_boundary(doc_slice, start_range.anchor) + offset
// even for backward inserts with multiple grapheme selections,
// we want the anchor to stay where it is so that the relative
// selection does not change, e.g.:
//
// foo([) wor]d -> insert ) -> foo()[ wor]d
} else {
start_range.anchor + offset
};
return Range::new(
end_anchor,
graphemes::next_grapheme_boundary(doc_slice, start_range.head) + offset,
);
}
// trivial case: only inserted a single-char opener, just move the selection
if len_inserted == 1 {
let end_anchor = if single_grapheme || start_range.direction() == Direction::Backward {
start_range.anchor + offset + typed_char.len_utf8()
} else {
start_range.anchor + offset
};
return Range::new(
end_anchor,
start_range.head + offset + typed_char.len_utf8(),
);
}
// If the head = 0, then we must be in insert mode with a backward
// cursor, which implies the head will just move
let end_head = if start_range.head == 0 || start_range.direction() == Direction::Backward {
start_range.head + offset + typed_char.len_utf8()
} else {
// We must have a forward cursor, which means we must move to the
// other end of the grapheme to get to where the new characters
// are inserted, then move the head to where it should be
let prev_bound = graphemes::prev_grapheme_boundary(doc_slice, start_range.head);
debug!(
"prev_bound: {}, offset: {}, len_inserted: {}",
prev_bound, offset, len_inserted
);
prev_bound + offset + len_inserted
};
let end_anchor = match (start_range.len(), start_range.direction()) {
// if we have a zero width cursor, it shifts to the same number
(0, _) => end_head,
// If we are inserting for a regular one-width cursor, the anchor
// moves with the head. This is the fast path for ASCII.
(1, Direction::Forward) => end_head - 1,
(1, Direction::Backward) => end_head + 1,
(_, Direction::Forward) => {
if single_grapheme {
graphemes::prev_grapheme_boundary(doc.slice(..), start_range.head)
+ typed_char.len_utf8()
// if we are appending, the anchor stays where it is; only offset
// for multiple range insertions
} else {
start_range.anchor + offset
}
}
(_, Direction::Backward) => {
if single_grapheme {
// if we're backward, then the head is at the first char
// of the typed char, so we need to add the length of
// the closing char
graphemes::prev_grapheme_boundary(doc.slice(..), start_range.anchor) + len_inserted
} else {
// when we are inserting in front of a selection, we need to move
// the anchor over by however many characters were inserted overall
start_range.anchor + offset + len_inserted
}
}
};
Range::new(end_anchor, end_head)
}
fn handle_open( fn handle_open(
doc: &Rope, doc: &Rope,
selection: &Selection, selection: &Selection,
@ -66,98 +203,584 @@ fn handle_open(
close: char, close: char,
close_before: &str, close_before: &str,
) -> Transaction { ) -> Transaction {
let mut ranges = SmallVec::with_capacity(selection.len()); let mut end_ranges = SmallVec::with_capacity(selection.len());
let mut offs = 0; let mut offs = 0;
let transaction = Transaction::change_by_selection(doc, selection, |range| { let transaction = Transaction::change_by_selection(doc, selection, |start_range| {
let pos = range.head; let cursor = start_range.cursor(doc.slice(..));
let next = next_char(doc, pos); let next_char = doc.get_char(cursor);
let len_inserted;
let head = pos + offs + open.len_utf8(); let change = match next_char {
// if selection, retain anchor, if cursor, move over
ranges.push(Range::new(
if range.is_empty() {
head
} else {
range.anchor + offs
},
head,
));
match next {
Some(ch) if !close_before.contains(ch) => { Some(ch) if !close_before.contains(ch) => {
offs += 1; len_inserted = open.len_utf8();
// TODO: else return (use default handler that inserts open) let mut tendril = Tendril::new();
(pos, pos, Some(Tendril::from_char(open))) tendril.push(open);
(cursor, cursor, Some(tendril))
} }
// None | Some(ch) if close_before.contains(ch) => {} // None | Some(ch) if close_before.contains(ch) => {}
_ => { _ => {
// insert open & close // insert open & close
let mut pair = Tendril::with_capacity(2); let pair = Tendril::from_iter([open, close]);
pair.push_char(open); len_inserted = open.len_utf8() + close.len_utf8();
pair.push_char(close); (cursor, cursor, Some(pair))
offs += 2;
(pos, pos, Some(pair))
} }
} };
let next_range = get_next_range(doc, start_range, offs, open, len_inserted);
end_ranges.push(next_range);
offs += len_inserted;
change
}); });
transaction.with_selection(Selection::new(ranges, selection.primary_index())) let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index()));
debug!("auto pair transaction: {:#?}", t);
t
} }
fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) -> Transaction { fn handle_close(doc: &Rope, selection: &Selection, _open: char, close: char) -> Transaction {
let mut ranges = SmallVec::with_capacity(selection.len()); let mut end_ranges = SmallVec::with_capacity(selection.len());
let mut offs = 0; let mut offs = 0;
let transaction = Transaction::change_by_selection(doc, selection, |range| { let transaction = Transaction::change_by_selection(doc, selection, |start_range| {
let pos = range.head; let cursor = start_range.cursor(doc.slice(..));
let next = next_char(doc, pos); let next_char = doc.get_char(cursor);
let mut len_inserted = 0;
let head = pos + offs + close.len_utf8(); let change = if next_char == Some(close) {
// if selection, retain anchor, if cursor, move over // return transaction that moves past close
ranges.push(Range::new( (cursor, cursor, None) // no-op
if range.is_empty() {
head
} else {
range.anchor + offs
},
head,
));
if next == Some(close) {
// return transaction that moves past close
(pos, pos, None) // no-op
} else { } else {
offs += close.len_utf8(); len_inserted += close.len_utf8();
let mut tendril = Tendril::new();
tendril.push(close);
(cursor, cursor, Some(tendril))
};
// TODO: else return (use default handler that inserts close) let next_range = get_next_range(doc, start_range, offs, close, len_inserted);
(pos, pos, Some(Tendril::from_char(close))) end_ranges.push(next_range);
} offs += len_inserted;
change
}); });
transaction.with_selection(Selection::new(ranges, selection.primary_index())) let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index()));
debug!("auto pair transaction: {:#?}", t);
t
} }
// handle cases where open and close is the same, or in triples ("""docstring""") /// handle cases where open and close is the same, or in triples ("""docstring""")
fn handle_same(_doc: &Rope, _selection: &Selection, _token: char) -> Option<Transaction> { fn handle_same(
// if not cursor but selection, wrap doc: &Rope,
// let next = next char selection: &Selection,
token: char,
close_before: &str,
open_before: &str,
) -> Transaction {
let mut end_ranges = SmallVec::with_capacity(selection.len());
// if next == bracket { let mut offs = 0;
// // if start of syntax node, insert token twice (new pair because node is complete)
// // elseif colsedBracketAt let transaction = Transaction::change_by_selection(doc, selection, |start_range| {
// // is_triple == allow triple && next 3 is equal let cursor = start_range.cursor(doc.slice(..));
// // cursor jump over let mut len_inserted = 0;
// }
//} else if allow_triple && followed by triple { let next_char = doc.get_char(cursor);
//} let prev_char = prev_char(doc, cursor);
//} else if next != word char && prev != bracket && prev != word char {
// // condition checks for cases like I' where you don't want I'' (or I'm) let change = if next_char == Some(token) {
// insert pair ("") // return transaction that moves past close
//} (cursor, cursor, None) // no-op
None } else {
let mut pair = Tendril::new();
pair.push(token);
// for equal pairs, don't insert both open and close if either
// side has a non-pair char
if (next_char.is_none() || close_before.contains(next_char.unwrap()))
&& (prev_char.is_none() || open_before.contains(prev_char.unwrap()))
{
pair.push(token);
}
len_inserted += pair.len();
(cursor, cursor, Some(pair))
};
let next_range = get_next_range(doc, start_range, offs, token, len_inserted);
end_ranges.push(next_range);
offs += len_inserted;
change
});
let t = transaction.with_selection(Selection::new(end_ranges, selection.primary_index()));
debug!("auto pair transaction: {:#?}", t);
t
}
#[cfg(test)]
mod test {
use super::*;
use smallvec::smallvec;
const LINE_END: &str = crate::DEFAULT_LINE_ENDING.as_str();
fn differing_pairs() -> impl Iterator<Item = &'static (char, char)> {
PAIRS.iter().filter(|(open, close)| open != close)
}
fn matching_pairs() -> impl Iterator<Item = &'static (char, char)> {
PAIRS.iter().filter(|(open, close)| open == close)
}
fn test_hooks(
in_doc: &Rope,
in_sel: &Selection,
ch: char,
expected_doc: &Rope,
expected_sel: &Selection,
) {
let trans = hook(in_doc, in_sel, ch).unwrap();
let mut actual_doc = in_doc.clone();
assert!(trans.apply(&mut actual_doc));
assert_eq!(expected_doc, &actual_doc);
assert_eq!(expected_sel, trans.selection().unwrap());
}
fn test_hooks_with_pairs<I, F, R>(
in_doc: &Rope,
in_sel: &Selection,
pairs: I,
get_expected_doc: F,
actual_sel: &Selection,
) where
I: IntoIterator<Item = &'static (char, char)>,
F: Fn(char, char) -> R,
R: Into<Rope>,
Rope: From<R>,
{
pairs.into_iter().for_each(|(open, close)| {
test_hooks(
in_doc,
in_sel,
*open,
&Rope::from(get_expected_doc(*open, *close)),
actual_sel,
)
});
}
// [] indicates range
/// [] -> insert ( -> ([])
#[test]
fn test_insert_blank() {
test_hooks_with_pairs(
&Rope::from(LINE_END),
&Selection::single(1, 0),
PAIRS,
|open, close| format!("{}{}{}", open, close, LINE_END),
&Selection::single(2, 1),
);
let empty_doc = Rope::from(format!("{line_end}{line_end}", line_end = LINE_END));
test_hooks_with_pairs(
&empty_doc,
&Selection::single(empty_doc.len_chars(), LINE_END.len()),
PAIRS,
|open, close| {
format!(
"{line_end}{open}{close}{line_end}",
open = open,
close = close,
line_end = LINE_END
)
},
&Selection::single(LINE_END.len() + 2, LINE_END.len() + 1),
);
}
#[test]
fn test_insert_before_multi_code_point_graphemes() {
test_hooks_with_pairs(
&Rope::from(format!("hello 👨‍👩‍👧‍👦 goodbye{}", LINE_END)),
&Selection::single(13, 6),
PAIRS,
|open, _| format!("hello {}👨‍👩‍👧‍👦 goodbye{}", open, LINE_END),
&Selection::single(14, 7),
);
}
#[test]
fn test_insert_at_end_of_document() {
test_hooks_with_pairs(
&Rope::from(LINE_END),
&Selection::single(LINE_END.len(), LINE_END.len()),
PAIRS,
|open, close| format!("{}{}{}", LINE_END, open, close),
&Selection::single(LINE_END.len() + 1, LINE_END.len() + 1),
);
test_hooks_with_pairs(
&Rope::from(format!("foo{}", LINE_END)),
&Selection::single(3 + LINE_END.len(), 3 + LINE_END.len()),
PAIRS,
|open, close| format!("foo{}{}{}", LINE_END, open, close),
&Selection::single(LINE_END.len() + 4, LINE_END.len() + 4),
);
}
/// [] -> append ( -> ([])
#[test]
fn test_append_blank() {
test_hooks_with_pairs(
// this is what happens when you have a totally blank document and then append
&Rope::from(format!("{line_end}{line_end}", line_end = LINE_END)),
// before inserting the pair, the cursor covers all of both empty lines
&Selection::single(0, LINE_END.len() * 2),
PAIRS,
|open, close| {
format!(
"{line_end}{open}{close}{line_end}",
line_end = LINE_END,
open = open,
close = close
)
},
// after inserting pair, the cursor covers the first new line and the open char
&Selection::single(0, LINE_END.len() + 2),
);
}
/// [] ([])
/// [] -> insert -> ([])
/// [] ([])
#[test]
fn test_insert_blank_multi_cursor() {
test_hooks_with_pairs(
&Rope::from("\n\n\n"),
&Selection::new(
smallvec!(Range::new(1, 0), Range::new(2, 1), Range::new(3, 2),),
0,
),
PAIRS,
|open, close| {
format!(
"{open}{close}\n{open}{close}\n{open}{close}\n",
open = open,
close = close
)
},
&Selection::new(
smallvec!(Range::new(2, 1), Range::new(5, 4), Range::new(8, 7),),
0,
),
);
}
/// fo[o] -> append ( -> fo[o(])
#[test]
fn test_append() {
test_hooks_with_pairs(
&Rope::from("foo\n"),
&Selection::single(2, 4),
differing_pairs(),
|open, close| format!("foo{}{}\n", open, close),
&Selection::single(2, 5),
);
}
/// foo[] -> append to end of line ( -> foo([])
#[test]
fn test_append_single_cursor() {
test_hooks_with_pairs(
&Rope::from(format!("foo{}", LINE_END)),
&Selection::single(3, 3 + LINE_END.len()),
differing_pairs(),
|open, close| format!("foo{}{}{}", open, close, LINE_END),
&Selection::single(4, 5),
);
}
/// fo[o] fo[o(])
/// fo[o] -> append ( -> fo[o(])
/// fo[o] fo[o(])
#[test]
fn test_append_multi() {
test_hooks_with_pairs(
&Rope::from("foo\nfoo\nfoo\n"),
&Selection::new(
smallvec!(Range::new(2, 4), Range::new(6, 8), Range::new(10, 12)),
0,
),
differing_pairs(),
|open, close| {
format!(
"foo{open}{close}\nfoo{open}{close}\nfoo{open}{close}\n",
open = open,
close = close
)
},
&Selection::new(
smallvec!(Range::new(2, 5), Range::new(8, 11), Range::new(14, 17)),
0,
),
);
}
/// ([)] -> insert ) -> ()[]
#[test]
fn test_insert_close_inside_pair() {
for (open, close) in PAIRS {
let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
test_hooks(
&doc,
&Selection::single(2, 1),
*close,
&doc,
&Selection::single(2 + LINE_END.len(), 2),
);
}
}
/// [(]) -> append ) -> [()]
#[test]
fn test_append_close_inside_pair() {
for (open, close) in PAIRS {
let doc = Rope::from(format!("{}{}{}", open, close, LINE_END));
test_hooks(
&doc,
&Selection::single(0, 2),
*close,
&doc,
&Selection::single(0, 2 + LINE_END.len()),
);
}
}
/// ([]) ()[]
/// ([]) -> insert ) -> ()[]
/// ([]) ()[]
#[test]
fn test_insert_close_inside_pair_multi_cursor() {
let sel = Selection::new(
smallvec!(Range::new(2, 1), Range::new(5, 4), Range::new(8, 7),),
0,
);
let expected_sel = Selection::new(
smallvec!(Range::new(3, 2), Range::new(6, 5), Range::new(9, 8),),
0,
);
for (open, close) in PAIRS {
let doc = Rope::from(format!(
"{open}{close}\n{open}{close}\n{open}{close}\n",
open = open,
close = close
));
test_hooks(&doc, &sel, *close, &doc, &expected_sel);
}
}
/// [(]) [()]
/// [(]) -> append ) -> [()]
/// [(]) [()]
#[test]
fn test_append_close_inside_pair_multi_cursor() {
let sel = Selection::new(
smallvec!(Range::new(0, 2), Range::new(3, 5), Range::new(6, 8),),
0,
);
let expected_sel = Selection::new(
smallvec!(Range::new(0, 3), Range::new(3, 6), Range::new(6, 9),),
0,
);
for (open, close) in PAIRS {
let doc = Rope::from(format!(
"{open}{close}\n{open}{close}\n{open}{close}\n",
open = open,
close = close
));
test_hooks(&doc, &sel, *close, &doc, &expected_sel);
}
}
/// ([]) -> insert ( -> (([]))
#[test]
fn test_insert_open_inside_pair() {
let sel = Selection::single(2, 1);
let expected_sel = Selection::single(3, 2);
for (open, close) in differing_pairs() {
let doc = Rope::from(format!("{}{}", open, close));
let expected_doc = Rope::from(format!(
"{open}{open}{close}{close}",
open = open,
close = close
));
test_hooks(&doc, &sel, *open, &expected_doc, &expected_sel);
}
}
/// [word(]) -> append ( -> [word((]))
#[test]
fn test_append_open_inside_pair() {
let sel = Selection::single(0, 6);
let expected_sel = Selection::single(0, 7);
for (open, close) in differing_pairs() {
let doc = Rope::from(format!("word{}{}", open, close));
let expected_doc = Rope::from(format!(
"word{open}{open}{close}{close}",
open = open,
close = close
));
test_hooks(&doc, &sel, *open, &expected_doc, &expected_sel);
}
}
/// ([]) -> insert " -> ("[]")
#[test]
fn test_insert_nested_open_inside_pair() {
let sel = Selection::single(2, 1);
let expected_sel = Selection::single(3, 2);
for (outer_open, outer_close) in differing_pairs() {
let doc = Rope::from(format!("{}{}", outer_open, outer_close,));
for (inner_open, inner_close) in matching_pairs() {
let expected_doc = Rope::from(format!(
"{}{}{}{}",
outer_open, inner_open, inner_close, outer_close
));
test_hooks(&doc, &sel, *inner_open, &expected_doc, &expected_sel);
}
}
}
/// [(]) -> append " -> [("]")
#[test]
fn test_append_nested_open_inside_pair() {
let sel = Selection::single(0, 2);
let expected_sel = Selection::single(0, 3);
for (outer_open, outer_close) in differing_pairs() {
let doc = Rope::from(format!("{}{}", outer_open, outer_close,));
for (inner_open, inner_close) in matching_pairs() {
let expected_doc = Rope::from(format!(
"{}{}{}{}",
outer_open, inner_open, inner_close, outer_close
));
test_hooks(&doc, &sel, *inner_open, &expected_doc, &expected_sel);
}
}
}
/// []word -> insert ( -> ([]word
#[test]
fn test_insert_open_before_non_pair() {
test_hooks_with_pairs(
&Rope::from("word"),
&Selection::single(1, 0),
PAIRS,
|open, _| format!("{}word", open),
&Selection::single(2, 1),
)
}
/// [wor]d -> insert ( -> ([wor]d
#[test]
fn test_insert_open_with_selection() {
test_hooks_with_pairs(
&Rope::from("word"),
&Selection::single(3, 0),
PAIRS,
|open, _| format!("{}word", open),
&Selection::single(4, 1),
)
}
/// [wor]d -> append ) -> [wor)]d
#[test]
fn test_append_close_inside_non_pair_with_selection() {
let sel = Selection::single(0, 4);
let expected_sel = Selection::single(0, 5);
for (_, close) in PAIRS {
let doc = Rope::from("word");
let expected_doc = Rope::from(format!("wor{}d", close));
test_hooks(&doc, &sel, *close, &expected_doc, &expected_sel);
}
}
/// foo[ wor]d -> insert ( -> foo([) wor]d
#[test]
fn test_insert_open_trailing_word_with_selection() {
test_hooks_with_pairs(
&Rope::from("foo word"),
&Selection::single(7, 3),
differing_pairs(),
|open, close| format!("foo{}{} word", open, close),
&Selection::single(9, 4),
)
}
/// foo([) wor]d -> insert ) -> foo()[ wor]d
#[test]
fn test_insert_close_inside_pair_trailing_word_with_selection() {
for (open, close) in differing_pairs() {
test_hooks(
&Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
&Selection::single(9, 4),
*close,
&Rope::from(format!("foo{}{} word{}", open, close, LINE_END)),
&Selection::single(9, 5),
)
}
}
/// Pairs whose open and close characters differ should still be inserted after
/// a non-pair char (useful for e.g. function calls), but pairs whose open and
/// close are the same char should not be, to handle cases like "I'm".
///
/// word[] -> insert ( -> word([])
/// word[] -> insert ' -> word'[]
#[test]
fn test_insert_open_after_non_pair() {
let doc = Rope::from(format!("word{}", LINE_END));
let sel = Selection::single(5, 4);
let expected_sel = Selection::single(6, 5);
test_hooks_with_pairs(
&doc,
&sel,
differing_pairs(),
|open, close| format!("word{}{}{}", open, close, LINE_END),
&expected_sel,
);
test_hooks_with_pairs(
&doc,
&sel,
matching_pairs(),
|open, _| format!("word{}{}", open, LINE_END),
&expected_sel,
);
}
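// Illustrative sketch only (not part of this diff, and not a helix-core API):
// the rule described above test_insert_open_after_non_pair can be summarized as
// a small predicate over the character preceding the cursor.
fn should_auto_close(prev: Option<char>, open: char, close: char) -> bool {
    match prev {
        // after a word character, only auto-close pairs whose open and close
        // characters differ, so `word` + `(` becomes `word()`
        Some(ch) if ch.is_alphanumeric() => open != close,
        // at the start of input or after whitespace/punctuation, always close
        _ => true,
    }
}
// should_auto_close(Some('d'), '(', ')')   == true   // word(  -> word()
// should_auto_close(Some('I'), '\'', '\'') == false  // I'     stays I'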
}

View File

@@ -91,12 +91,11 @@ mod test {
#[test]
fn test_categorize() {
- const EOL_TEST_CASE: &'static str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
+ const EOL_TEST_CASE: &str = "\n\r\u{000B}\u{000C}\u{0085}\u{2028}\u{2029}";
- const WORD_TEST_CASE: &'static str =
-     "_hello_world_あいうえおー1234567890";
+ const WORD_TEST_CASE: &str = "_hello_world_あいうえおー1234567890";
- const PUNCTUATION_TEST_CASE: &'static str =
+ const PUNCTUATION_TEST_CASE: &str =
      "!\"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~!”#$%&’()*+、。:;<=>?@「」^`{|}~";
- const WHITESPACE_TEST_CASE: &'static str = "  ";
+ const WHITESPACE_TEST_CASE: &str = "  ";
for ch in EOL_TEST_CASE.chars() {
assert_eq!(CharCategory::Eol, categorize_char(ch));

View File

@@ -1,12 +1,19 @@
//! LSP diagnostic utility types.
+ use serde::{Deserialize, Serialize};
/// Describes the severity level of a [`Diagnostic`].
- #[derive(Debug, Clone, Copy, Eq, PartialEq)]
+ #[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Deserialize, Serialize)]
pub enum Severity {
- Error,
- Warning,
- Info,
Hint,
+ Info,
+ Warning,
+ Error,
}
+ impl Default for Severity {
+     fn default() -> Self {
+         Self::Hint
+     }
+ }
/// A range of `char`s within the text.
View File

@@ -11,10 +11,6 @@ pub fn compare_ropes(old: &Rope, new: &Rope) -> Transaction {
// A timeout is set so after 1 seconds, the algorithm will start
// approximating. This is especially important for big `Rope`s or
// `Rope`s that are extremely dissimilar to each other.
- //
- // Note: Ignore the clippy warning, as the trait bounds of
- // `Transaction::change()` require an iterator implementing
- // `ExactIterator`.
let mut config = similar::TextDiff::configure();
config.timeout(std::time::Duration::from_secs(1));
@@ -62,7 +58,7 @@ fn test_compare_ropes(a: String, b: String) -> bool {
let mut old = Rope::from(a);
let new = Rope::from(b);
compare_ropes(&old, &new).apply(&mut old);
- old.to_string() == new.to_string()
+ old == new
}
}
}

View File

@@ -120,6 +120,43 @@ pub fn nth_next_grapheme_boundary(slice: RopeSlice, char_idx: usize, n: usize) -> usize {
chunk_char_idx + tmp
}
#[must_use]
pub fn nth_next_grapheme_boundary_byte(slice: RopeSlice, mut byte_idx: usize, n: usize) -> usize {
// Bounds check
debug_assert!(byte_idx <= slice.len_bytes());
// Get the chunk with our byte index in it.
let (mut chunk, mut chunk_byte_idx, mut _chunk_char_idx, _) = slice.chunk_at_byte(byte_idx);
// Set up the grapheme cursor.
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
// Find the nth next grapheme cluster boundary.
for _ in 0..n {
loop {
match gc.next_boundary(chunk, chunk_byte_idx) {
Ok(None) => return slice.len_bytes(),
Ok(Some(n)) => {
byte_idx = n;
break;
}
Err(GraphemeIncomplete::NextChunk) => {
chunk_byte_idx += chunk.len();
let (a, _, _c, _) = slice.chunk_at_byte(chunk_byte_idx);
chunk = a;
// chunk_char_idx = c;
}
Err(GraphemeIncomplete::PreContext(n)) => {
let ctx_chunk = slice.chunk_at_byte(n - 1).0;
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
}
_ => unreachable!(),
}
}
}
byte_idx
}
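// Minimal usage sketch for the byte-based helper above (illustrative only,
// assuming `ropey::Rope` and this module are in scope as in helix-core):
//
//     let text = ropey::Rope::from("é!");   // 'é' is 2 bytes in UTF-8
//     let slice = text.slice(..);
//     assert_eq!(nth_next_grapheme_boundary_byte(slice, 0, 1), 2);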
/// Finds the next grapheme boundary after the given char position.
#[must_use]
#[inline(always)]
@@ -127,6 +164,13 @@ pub fn next_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> usize {
nth_next_grapheme_boundary(slice, char_idx, 1)
}
/// Finds the next grapheme boundary after the given byte position.
#[must_use]
#[inline(always)]
pub fn next_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> usize {
nth_next_grapheme_boundary_byte(slice, byte_idx, 1)
}
/// Returns the passed char index if it's already a grapheme boundary,
/// or the next grapheme boundary char index if not.
#[must_use]
@@ -151,6 +195,23 @@ pub fn ensure_grapheme_boundary_prev(slice: RopeSlice, char_idx: usize) -> usize {
}
}
/// Returns the passed byte index if it's already a grapheme boundary,
/// or the next grapheme boundary byte index if not.
#[must_use]
#[inline]
pub fn ensure_grapheme_boundary_next_byte(slice: RopeSlice, byte_idx: usize) -> usize {
if byte_idx == 0 {
byte_idx
} else {
// TODO: optimize so we're not constructing grapheme cursor twice
if is_grapheme_boundary_byte(slice, byte_idx) {
byte_idx
} else {
next_grapheme_boundary_byte(slice, byte_idx)
}
}
}
/// Returns whether the given char position is a grapheme boundary.
#[must_use]
pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
@@ -179,6 +240,31 @@ pub fn is_grapheme_boundary(slice: RopeSlice, char_idx: usize) -> bool {
}
}
/// Returns whether the given byte position is a grapheme boundary.
#[must_use]
pub fn is_grapheme_boundary_byte(slice: RopeSlice, byte_idx: usize) -> bool {
// Bounds check
debug_assert!(byte_idx <= slice.len_bytes());
// Get the chunk with our byte index in it.
let (chunk, chunk_byte_idx, _, _) = slice.chunk_at_byte(byte_idx);
// Set up the grapheme cursor.
let mut gc = GraphemeCursor::new(byte_idx, slice.len_bytes(), true);
// Determine if the given position is a grapheme cluster boundary.
loop {
match gc.is_boundary(chunk, chunk_byte_idx) {
Ok(n) => return n,
Err(GraphemeIncomplete::PreContext(n)) => {
let (ctx_chunk, ctx_byte_start, _, _) = slice.chunk_at_byte(n - 1);
gc.provide_context(ctx_chunk, ctx_byte_start);
}
Err(_) => unreachable!(),
}
}
}
/// An iterator over the graphemes of a `RopeSlice`.
#[derive(Clone)]
pub struct RopeGraphemes<'a> {

View File

@@ -448,8 +448,8 @@ fn commit_change(
change: crate::transaction::Change,
instant: Instant,
) {
- let txn = Transaction::change(&state.doc, vec![change.clone()].into_iter());
- history.commit_revision_at_timestamp(&txn, &state, instant);
+ let txn = Transaction::change(&state.doc, vec![change].into_iter());
+ history.commit_revision_at_timestamp(&txn, state, instant);
txn.apply(&mut state.doc);
}

View File

@ -0,0 +1,490 @@
use chrono::{Datelike, Duration, NaiveDate, NaiveDateTime, NaiveTime, Timelike};
use once_cell::sync::Lazy;
use regex::Regex;
use ropey::RopeSlice;
use std::borrow::Cow;
use std::cmp;
use super::Increment;
use crate::{Range, Tendril};
#[derive(Debug, PartialEq, Eq)]
pub struct DateTimeIncrementor {
date_time: NaiveDateTime,
range: Range,
fmt: &'static str,
field: DateField,
}
impl DateTimeIncrementor {
pub fn from_range(text: RopeSlice, range: Range) -> Option<DateTimeIncrementor> {
let range = if range.is_empty() {
if range.anchor < text.len_chars() {
// Treat empty range as a cursor range.
range.put_cursor(text, range.anchor + 1, true)
} else {
// The range is empty and at the end of the text.
return None;
}
} else {
range
};
FORMATS.iter().find_map(|format| {
let from = range.from().saturating_sub(format.max_len);
let to = (range.from() + format.max_len).min(text.len_chars());
let (from_in_text, to_in_text) = (range.from() - from, range.to() - from);
let text: Cow<str> = text.slice(from..to).into();
let captures = format.regex.captures(&text)?;
if captures.len() - 1 != format.fields.len() {
return None;
}
let date_time = captures.get(0)?;
let offset = range.from() - from_in_text;
let range = Range::new(date_time.start() + offset, date_time.end() + offset);
let field = captures
.iter()
.skip(1)
.enumerate()
.find_map(|(i, capture)| {
let capture = capture?;
let capture_range = capture.range();
if capture_range.contains(&from_in_text)
&& capture_range.contains(&(to_in_text - 1))
{
Some(format.fields[i])
} else {
None
}
})?;
let has_date = format.fields.iter().any(|f| f.unit.is_date());
let has_time = format.fields.iter().any(|f| f.unit.is_time());
let date_time = &text[date_time.start()..date_time.end()];
let date_time = match (has_date, has_time) {
(true, true) => NaiveDateTime::parse_from_str(date_time, format.fmt).ok()?,
(true, false) => {
let date = NaiveDate::parse_from_str(date_time, format.fmt).ok()?;
date.and_hms(0, 0, 0)
}
(false, true) => {
let time = NaiveTime::parse_from_str(date_time, format.fmt).ok()?;
NaiveDate::from_ymd(0, 1, 1).and_time(time)
}
(false, false) => return None,
};
Some(DateTimeIncrementor {
date_time,
range,
fmt: format.fmt,
field,
})
})
}
}
impl Increment for DateTimeIncrementor {
fn increment(&self, amount: i64) -> (Range, Tendril) {
let date_time = match self.field.unit {
DateUnit::Years => add_years(self.date_time, amount),
DateUnit::Months => add_months(self.date_time, amount),
DateUnit::Days => add_duration(self.date_time, Duration::days(amount)),
DateUnit::Hours => add_duration(self.date_time, Duration::hours(amount)),
DateUnit::Minutes => add_duration(self.date_time, Duration::minutes(amount)),
DateUnit::Seconds => add_duration(self.date_time, Duration::seconds(amount)),
DateUnit::AmPm => toggle_am_pm(self.date_time),
}
.unwrap_or(self.date_time);
(self.range, date_time.format(self.fmt).to_string().into())
}
}
static FORMATS: Lazy<Vec<Format>> = Lazy::new(|| {
vec![
Format::new("%Y-%m-%d %H:%M:%S"), // 2021-11-24 07:12:23
Format::new("%Y/%m/%d %H:%M:%S"), // 2021/11/24 07:12:23
Format::new("%Y-%m-%d %H:%M"), // 2021-11-24 07:12
Format::new("%Y/%m/%d %H:%M"), // 2021/11/24 07:12
Format::new("%Y-%m-%d"), // 2021-11-24
Format::new("%Y/%m/%d"), // 2021/11/24
Format::new("%a %b %d %Y"), // Wed Nov 24 2021
Format::new("%d-%b-%Y"), // 24-Nov-2021
Format::new("%Y %b %d"), // 2021 Nov 24
Format::new("%b %d, %Y"), // Nov 24, 2021
Format::new("%-I:%M:%S %P"), // 7:21:53 am
Format::new("%-I:%M %P"), // 7:21 am
Format::new("%-I:%M:%S %p"), // 7:21:53 AM
Format::new("%-I:%M %p"), // 7:21 AM
Format::new("%H:%M:%S"), // 23:24:23
Format::new("%H:%M"), // 23:24
]
});
#[derive(Debug)]
struct Format {
fmt: &'static str,
fields: Vec<DateField>,
regex: Regex,
max_len: usize,
}
impl Format {
fn new(fmt: &'static str) -> Self {
let mut remaining = fmt;
let mut fields = Vec::new();
let mut regex = String::new();
let mut max_len = 0;
while let Some(i) = remaining.find('%') {
let after = &remaining[i + 1..];
let mut chars = after.chars();
let c = chars.next().unwrap();
let spec_len = if c == '-' {
1 + chars.next().unwrap().len_utf8()
} else {
c.len_utf8()
};
let specifier = &after[..spec_len];
let field = DateField::from_specifier(specifier).unwrap();
fields.push(field);
max_len += field.max_len + remaining[..i].len();
regex += &remaining[..i];
regex += &format!("({})", field.regex);
remaining = &after[spec_len..];
}
let regex = Regex::new(&regex).unwrap();
Self {
fmt,
fields,
regex,
max_len,
}
}
}
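// Worked example (illustrative, not part of the diff): Format::new("%Y-%m-%d")
// should produce
//   fields  = [Years, Months, Days]
//   regex   = (\d{4})-([0-1]\d)-([0-3]\d)
//   max_len = 11   // 5 + 2 + 2 for the fields, plus the two literal '-' separators
// which lets FORMATS above both locate a date under the cursor and bound how far
// around the cursor to search.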
impl PartialEq for Format {
fn eq(&self, other: &Self) -> bool {
self.fmt == other.fmt && self.fields == other.fields && self.max_len == other.max_len
}
}
impl Eq for Format {}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct DateField {
regex: &'static str,
unit: DateUnit,
max_len: usize,
}
impl DateField {
fn from_specifier(specifier: &str) -> Option<Self> {
match specifier {
"Y" => Some(Self {
regex: r"\d{4}",
unit: DateUnit::Years,
max_len: 5,
}),
"y" => Some(Self {
regex: r"\d\d",
unit: DateUnit::Years,
max_len: 2,
}),
"m" => Some(Self {
regex: r"[0-1]\d",
unit: DateUnit::Months,
max_len: 2,
}),
"d" => Some(Self {
regex: r"[0-3]\d",
unit: DateUnit::Days,
max_len: 2,
}),
"-d" => Some(Self {
regex: r"[1-3]?\d",
unit: DateUnit::Days,
max_len: 2,
}),
"a" => Some(Self {
regex: r"Sun|Mon|Tue|Wed|Thu|Fri|Sat",
unit: DateUnit::Days,
max_len: 3,
}),
"A" => Some(Self {
regex: r"Sunday|Monday|Tuesday|Wednesday|Thursday|Friday|Saturday",
unit: DateUnit::Days,
max_len: 9,
}),
"b" | "h" => Some(Self {
regex: r"Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec",
unit: DateUnit::Months,
max_len: 3,
}),
"B" => Some(Self {
regex: r"January|February|March|April|May|June|July|August|September|October|November|December",
unit: DateUnit::Months,
max_len: 9,
}),
"H" => Some(Self {
regex: r"[0-2]\d",
unit: DateUnit::Hours,
max_len: 2,
}),
"M" => Some(Self {
regex: r"[0-5]\d",
unit: DateUnit::Minutes,
max_len: 2,
}),
"S" => Some(Self {
regex: r"[0-5]\d",
unit: DateUnit::Seconds,
max_len: 2,
}),
"I" => Some(Self {
regex: r"[0-1]\d",
unit: DateUnit::Hours,
max_len: 2,
}),
"-I" => Some(Self {
regex: r"1?\d",
unit: DateUnit::Hours,
max_len: 2,
}),
"P" => Some(Self {
regex: r"am|pm",
unit: DateUnit::AmPm,
max_len: 2,
}),
"p" => Some(Self {
regex: r"AM|PM",
unit: DateUnit::AmPm,
max_len: 2,
}),
_ => None,
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum DateUnit {
Years,
Months,
Days,
Hours,
Minutes,
Seconds,
AmPm,
}
impl DateUnit {
fn is_date(self) -> bool {
matches!(self, DateUnit::Years | DateUnit::Months | DateUnit::Days)
}
fn is_time(self) -> bool {
matches!(
self,
DateUnit::Hours | DateUnit::Minutes | DateUnit::Seconds
)
}
}
fn ndays_in_month(year: i32, month: u32) -> u32 {
// The first day of the next month...
let (y, m) = if month == 12 {
(year + 1, 1)
} else {
(year, month + 1)
};
let d = NaiveDate::from_ymd(y, m, 1);
// ...is preceded by the last day of the original month.
d.pred().day()
}
fn add_months(date_time: NaiveDateTime, amount: i64) -> Option<NaiveDateTime> {
let month = (date_time.month0() as i64).checked_add(amount)?;
let year = date_time.year() + i32::try_from(month / 12).ok()?;
let year = if month.is_negative() { year - 1 } else { year };
// Normalize month
let month = month % 12;
let month = if month.is_negative() {
month + 12
} else {
month
} as u32
+ 1;
let day = cmp::min(date_time.day(), ndays_in_month(year, month));
Some(NaiveDate::from_ymd(year, month, day).and_time(date_time.time()))
}
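// Worked example for add_months (illustrative): starting from 2021-01-31 with
// amount = 1, month0 = 0 + 1 = 1, the year stays 2021, the normalized month is 2,
// and the day is clamped to ndays_in_month(2021, 2) = 28, giving 2021-02-28. The
// leap-year variant is exercised by ("2020-01-31", 5, 1, "2020-02-29") in the
// tests below.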
fn add_years(date_time: NaiveDateTime, amount: i64) -> Option<NaiveDateTime> {
let year = i32::try_from((date_time.year() as i64).checked_add(amount)?).ok()?;
let ndays = ndays_in_month(year, date_time.month());
if date_time.day() > ndays {
let d = NaiveDate::from_ymd(year, date_time.month(), ndays);
Some(d.succ().and_time(date_time.time()))
} else {
date_time.with_year(year)
}
}
fn add_duration(date_time: NaiveDateTime, duration: Duration) -> Option<NaiveDateTime> {
date_time.checked_add_signed(duration)
}
fn toggle_am_pm(date_time: NaiveDateTime) -> Option<NaiveDateTime> {
if date_time.hour() < 12 {
add_duration(date_time, Duration::hours(12))
} else {
add_duration(date_time, Duration::hours(-12))
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::Rope;
#[test]
fn test_increment_date_times() {
let tests = [
// (original, cursor, amount, expected)
("2020-02-28", 0, 1, "2021-02-28"),
("2020-02-29", 0, 1, "2021-03-01"),
("2020-01-31", 5, 1, "2020-02-29"),
("2020-01-20", 5, 1, "2020-02-20"),
("2021-01-01", 5, -1, "2020-12-01"),
("2021-01-31", 5, -2, "2020-11-30"),
("2020-02-28", 8, 1, "2020-02-29"),
("2021-02-28", 8, 1, "2021-03-01"),
("2021-02-28", 0, -1, "2020-02-28"),
("2021-03-01", 0, -1, "2020-03-01"),
("2020-02-29", 5, -1, "2020-01-29"),
("2020-02-20", 5, -1, "2020-01-20"),
("2020-02-29", 8, -1, "2020-02-28"),
("2021-03-01", 8, -1, "2021-02-28"),
("1980/12/21", 8, 100, "1981/03/31"),
("1980/12/21", 8, -100, "1980/09/12"),
("1980/12/21", 8, 1000, "1983/09/17"),
("1980/12/21", 8, -1000, "1978/03/27"),
("2021-11-24 07:12:23", 0, 1, "2022-11-24 07:12:23"),
("2021-11-24 07:12:23", 5, 1, "2021-12-24 07:12:23"),
("2021-11-24 07:12:23", 8, 1, "2021-11-25 07:12:23"),
("2021-11-24 07:12:23", 11, 1, "2021-11-24 08:12:23"),
("2021-11-24 07:12:23", 14, 1, "2021-11-24 07:13:23"),
("2021-11-24 07:12:23", 17, 1, "2021-11-24 07:12:24"),
("2021/11/24 07:12:23", 0, 1, "2022/11/24 07:12:23"),
("2021/11/24 07:12:23", 5, 1, "2021/12/24 07:12:23"),
("2021/11/24 07:12:23", 8, 1, "2021/11/25 07:12:23"),
("2021/11/24 07:12:23", 11, 1, "2021/11/24 08:12:23"),
("2021/11/24 07:12:23", 14, 1, "2021/11/24 07:13:23"),
("2021/11/24 07:12:23", 17, 1, "2021/11/24 07:12:24"),
("2021-11-24 07:12", 0, 1, "2022-11-24 07:12"),
("2021-11-24 07:12", 5, 1, "2021-12-24 07:12"),
("2021-11-24 07:12", 8, 1, "2021-11-25 07:12"),
("2021-11-24 07:12", 11, 1, "2021-11-24 08:12"),
("2021-11-24 07:12", 14, 1, "2021-11-24 07:13"),
("2021/11/24 07:12", 0, 1, "2022/11/24 07:12"),
("2021/11/24 07:12", 5, 1, "2021/12/24 07:12"),
("2021/11/24 07:12", 8, 1, "2021/11/25 07:12"),
("2021/11/24 07:12", 11, 1, "2021/11/24 08:12"),
("2021/11/24 07:12", 14, 1, "2021/11/24 07:13"),
("Wed Nov 24 2021", 0, 1, "Thu Nov 25 2021"),
("Wed Nov 24 2021", 4, 1, "Fri Dec 24 2021"),
("Wed Nov 24 2021", 8, 1, "Thu Nov 25 2021"),
("Wed Nov 24 2021", 11, 1, "Thu Nov 24 2022"),
("24-Nov-2021", 0, 1, "25-Nov-2021"),
("24-Nov-2021", 3, 1, "24-Dec-2021"),
("24-Nov-2021", 7, 1, "24-Nov-2022"),
("2021 Nov 24", 0, 1, "2022 Nov 24"),
("2021 Nov 24", 5, 1, "2021 Dec 24"),
("2021 Nov 24", 9, 1, "2021 Nov 25"),
("Nov 24, 2021", 0, 1, "Dec 24, 2021"),
("Nov 24, 2021", 4, 1, "Nov 25, 2021"),
("Nov 24, 2021", 8, 1, "Nov 24, 2022"),
("7:21:53 am", 0, 1, "8:21:53 am"),
("7:21:53 am", 3, 1, "7:22:53 am"),
("7:21:53 am", 5, 1, "7:21:54 am"),
("7:21:53 am", 8, 1, "7:21:53 pm"),
("7:21:53 AM", 0, 1, "8:21:53 AM"),
("7:21:53 AM", 3, 1, "7:22:53 AM"),
("7:21:53 AM", 5, 1, "7:21:54 AM"),
("7:21:53 AM", 8, 1, "7:21:53 PM"),
("7:21 am", 0, 1, "8:21 am"),
("7:21 am", 3, 1, "7:22 am"),
("7:21 am", 5, 1, "7:21 pm"),
("7:21 AM", 0, 1, "8:21 AM"),
("7:21 AM", 3, 1, "7:22 AM"),
("7:21 AM", 5, 1, "7:21 PM"),
("23:24:23", 1, 1, "00:24:23"),
("23:24:23", 3, 1, "23:25:23"),
("23:24:23", 6, 1, "23:24:24"),
("23:24", 1, 1, "00:24"),
("23:24", 3, 1, "23:25"),
];
for (original, cursor, amount, expected) in tests {
let rope = Rope::from_str(original);
let range = Range::new(cursor, cursor + 1);
assert_eq!(
DateTimeIncrementor::from_range(rope.slice(..), range)
.unwrap()
.increment(amount)
.1,
Tendril::from(expected)
);
}
}
#[test]
fn test_invalid_date_times() {
let tests = [
"0000-00-00",
"1980-2-21",
"1980-12-1",
"12345",
"2020-02-30",
"1999-12-32",
"19-12-32",
"1-2-3",
"0000/00/00",
"1980/2/21",
"1980/12/1",
"12345",
"2020/02/30",
"1999/12/32",
"19/12/32",
"1/2/3",
"123:456:789",
"11:61",
"2021-55-12 08:12:54",
];
for invalid in tests {
let rope = Rope::from_str(invalid);
let range = Range::new(0, 1);
assert_eq!(DateTimeIncrementor::from_range(rope.slice(..), range), None)
}
}
}

View File

@ -0,0 +1,8 @@
pub mod date_time;
pub mod number;
use crate::{Range, Tendril};
pub trait Increment {
fn increment(&self, amount: i64) -> (Range, Tendril);
}

View File

@@ -2,6 +2,8 @@
use ropey::RopeSlice;
+ use super::Increment;
use crate::{
textobject::{textobject_word, TextObject},
Range, Tendril,
@@ -9,9 +11,9 @@
#[derive(Debug, PartialEq, Eq)]
pub struct NumberIncrementor<'a> {
- pub range: Range,
- pub value: i64,
- pub radix: u32,
+ value: i64,
+ radix: u32,
+ range: Range,
text: RopeSlice<'a>,
}
@@ -71,9 +73,10 @@ pub fn from_range(text: RopeSlice, range: Range) -> Option<NumberIncrementor> {
text,
})
}
+ }
- /// Add `amount` to the number and return the formatted text.
- pub fn incremented_text(&self, amount: i64) -> Tendril {
+ impl<'a> Increment for NumberIncrementor<'a> {
+     fn increment(&self, amount: i64) -> (Range, Tendril) {
let old_text: Cow<str> = self.text.slice(self.range.from()..self.range.to()).into();
let old_length = old_text.len();
let new_value = self.value.wrapping_add(amount);
@@ -144,7 +147,7 @@ pub fn incremented_text(&self, amount: i64) -> Tendril {
}
}
- new_text.into()
+ (self.range, new_text.into())
}
}
@ -366,8 +369,9 @@ fn test_increment_basic_decimal_numbers() {
assert_eq!( assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range) NumberIncrementor::from_range(rope.slice(..), range)
.unwrap() .unwrap()
.incremented_text(amount), .increment(amount)
expected.into() .1,
Tendril::from(expected)
); );
} }
} }
@ -392,8 +396,9 @@ fn test_increment_basic_hexadedimal_numbers() {
assert_eq!( assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range) NumberIncrementor::from_range(rope.slice(..), range)
.unwrap() .unwrap()
.incremented_text(amount), .increment(amount)
expected.into() .1,
Tendril::from(expected)
); );
} }
} }
@ -419,8 +424,9 @@ fn test_increment_basic_octal_numbers() {
assert_eq!( assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range) NumberIncrementor::from_range(rope.slice(..), range)
.unwrap() .unwrap()
.incremented_text(amount), .increment(amount)
expected.into() .1,
Tendril::from(expected)
); );
} }
} }
@ -464,8 +470,9 @@ fn test_increment_basic_binary_numbers() {
assert_eq!( assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range) NumberIncrementor::from_range(rope.slice(..), range)
.unwrap() .unwrap()
.incremented_text(amount), .increment(amount)
expected.into() .1,
Tendril::from(expected)
); );
} }
} }
@ -491,8 +498,9 @@ fn test_increment_with_separators() {
assert_eq!( assert_eq!(
NumberIncrementor::from_range(rope.slice(..), range) NumberIncrementor::from_range(rope.slice(..), range)
.unwrap() .unwrap()
.incremented_text(amount), .increment(amount)
expected.into() .1,
Tendril::from(expected)
); );
} }
} }

View File

@ -1,6 +1,5 @@
use crate::{ use crate::{
chars::{char_is_line_ending, char_is_whitespace}, chars::{char_is_line_ending, char_is_whitespace},
find_first_non_whitespace_char,
syntax::{IndentQuery, LanguageConfiguration, Syntax}, syntax::{IndentQuery, LanguageConfiguration, Syntax},
tree_sitter::Node, tree_sitter::Node,
Rope, RopeSlice, Rope, RopeSlice,
@ -174,8 +173,7 @@ pub fn auto_detect_indent_style(document_text: &Rope) -> Option<IndentStyle> {
/// To determine indentation of a newly inserted line, figure out the indentation at the last col /// To determine indentation of a newly inserted line, figure out the indentation at the last col
/// of the previous line. /// of the previous line.
#[allow(dead_code)] pub fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize {
fn indent_level_for_line(line: RopeSlice, tab_width: usize) -> usize {
let mut len = 0; let mut len = 0;
for ch in line.chars() { for ch in line.chars() {
match ch { match ch {
@ -207,10 +205,15 @@ fn get_highest_syntax_node_at_bytepos(syntax: &Syntax, pos: usize) -> Option<Nod
Some(node) Some(node)
} }
fn calculate_indentation(query: &IndentQuery, node: Option<Node>, newline: bool) -> usize { /// Calculate the indentation at a given treesitter node.
// NOTE: can't use contains() on query because of comparing Vec<String> and &str /// If newline is false, then any "indent" nodes on the line are ignored ("outdent" still applies).
// https://doc.rust-lang.org/std/vec/struct.Vec.html#method.contains /// This is because the indentation is only increased starting at the second line of the node.
fn calculate_indentation(
query: &IndentQuery,
node: Option<Node>,
line: usize,
newline: bool,
) -> usize {
let mut increment: isize = 0; let mut increment: isize = 0;
let mut node = match node { let mut node = match node {
@ -218,70 +221,45 @@ fn calculate_indentation(query: &IndentQuery, node: Option<Node>, newline: bool)
None => return 0, None => return 0,
}; };
let mut prev_start = node.start_position().row; let mut current_line = line;
let mut consider_indent = newline;
let mut increment_from_line: isize = 0;
// if we're calculating indentation for a brand new line then the current node will become the loop {
// parent node. We need to take it's indentation level into account too. let node_kind = node.kind();
let node_kind = node.kind(); let start = node.start_position().row;
if newline && query.indent.contains(node_kind) { if current_line != start {
increment += 1; // Indent/dedent by at most one per line:
} // .map(|a| { <-- ({ is two scopes
// let len = 1; <-- indents one level
while let Some(parent) = node.parent() { // }) <-- }) is two scopes
let parent_kind = parent.kind(); if consider_indent || increment_from_line < 0 {
let start = parent.start_position().row; increment += increment_from_line.signum();
}
// detect deeply nested indents in the same line increment_from_line = 0;
// .map(|a| { <-- ({ is two scopes current_line = start;
// let len = 1; <-- indents one level consider_indent = true;
// }) <-- }) is two scopes
let starts_same_line = start == prev_start;
if query.outdent.contains(node.kind()) && !starts_same_line {
// we outdent by skipping the rules for the current level and jumping up
// node = parent;
increment -= 1;
// continue;
} }
if query.indent.contains(parent_kind) // && not_first_or_last_sibling if query.outdent.contains(node_kind) {
&& !starts_same_line increment_from_line -= 1;
{ }
// println!("is_scope {}", parent_kind); if query.indent.contains(node_kind) {
prev_start = start; increment_from_line += 1;
increment += 1
} }
// if last_scope && increment > 0 && ...{ ignore } if let Some(parent) = node.parent() {
node = parent;
node = parent; } else {
break;
}
}
if consider_indent || increment_from_line < 0 {
increment += increment_from_line.signum();
} }
increment.max(0) as usize increment.max(0) as usize
} }
#[allow(dead_code)]
fn suggested_indent_for_line(
language_config: &LanguageConfiguration,
syntax: Option<&Syntax>,
text: RopeSlice,
line_num: usize,
_tab_width: usize,
) -> usize {
if let Some(start) = find_first_non_whitespace_char(text.line(line_num)) {
return suggested_indent_for_pos(
Some(language_config),
syntax,
text,
start + text.line_to_char(line_num),
false,
);
};
// if the line is blank, indent should be zero
0
}
// TODO: two usecases: if we are triggering this for a new, blank line: // TODO: two usecases: if we are triggering this for a new, blank line:
// - it should return 0 when mass indenting stuff // - it should return 0 when mass indenting stuff
// - it should look up the wrapper node and count it too when we press o/O // - it should look up the wrapper node and count it too when we press o/O
@ -290,23 +268,20 @@ pub fn suggested_indent_for_pos(
syntax: Option<&Syntax>, syntax: Option<&Syntax>,
text: RopeSlice, text: RopeSlice,
pos: usize, pos: usize,
line: usize,
new_line: bool, new_line: bool,
) -> usize { ) -> Option<usize> {
if let (Some(query), Some(syntax)) = ( if let (Some(query), Some(syntax)) = (
language_config.and_then(|config| config.indent_query()), language_config.and_then(|config| config.indent_query()),
syntax, syntax,
) { ) {
let byte_start = text.char_to_byte(pos); let byte_start = text.char_to_byte(pos);
let node = get_highest_syntax_node_at_bytepos(syntax, byte_start); let node = get_highest_syntax_node_at_bytepos(syntax, byte_start);
// let config = load indentation query config from Syntax(should contain language_config)
// TODO: special case for comments // TODO: special case for comments
// TODO: if preserve_leading_whitespace // TODO: if preserve_leading_whitespace
calculate_indentation(query, node, new_line) Some(calculate_indentation(query, node, line, new_line))
} else { } else {
// TODO: heuristics for non-tree sitter grammars None
0
} }
} }
@ -438,7 +413,8 @@ pub fn change<I>(document: &Document, changes: I) -> Self
", ",
); );
let doc = Rope::from(doc); let doc = doc;
use crate::diagnostic::Severity;
use crate::syntax::{ use crate::syntax::{
Configuration, IndentationConfiguration, LanguageConfiguration, Loader, Configuration, IndentationConfiguration, LanguageConfiguration, Loader,
}; };
@ -456,6 +432,8 @@ pub fn change<I>(document: &Document, changes: I) -> Self
roots: vec![], roots: vec![],
comment_token: None, comment_token: None,
auto_format: false, auto_format: false,
diagnostic_severity: Severity::Warning,
tree_sitter_library: None,
language_server: None, language_server: None,
indent: Some(IndentationConfiguration { indent: Some(IndentationConfiguration {
tab_width: 4, tab_width: 4,
@ -474,20 +452,29 @@ pub fn change<I>(document: &Document, changes: I) -> Self
let language_config = loader.language_config_for_scope("source.rust").unwrap(); let language_config = loader.language_config_for_scope("source.rust").unwrap();
let highlight_config = language_config.highlight_config(&[]).unwrap(); let highlight_config = language_config.highlight_config(&[]).unwrap();
let syntax = Syntax::new(&doc, highlight_config.clone()); let syntax = Syntax::new(&doc, highlight_config, std::sync::Arc::new(loader));
let text = doc.slice(..); let text = doc.slice(..);
let tab_width = 4; let tab_width = 4;
for i in 0..doc.len_lines() { for i in 0..doc.len_lines() {
let line = text.line(i); let line = text.line(i);
let indent = indent_level_for_line(line, tab_width); if let Some(pos) = crate::find_first_non_whitespace_char(line) {
assert_eq!( let indent = indent_level_for_line(line, tab_width);
suggested_indent_for_line(&language_config, Some(&syntax), text, i, tab_width), assert_eq!(
indent, suggested_indent_for_pos(
"line {}: {}", Some(&language_config),
i, Some(&syntax),
line text,
); text.line_to_char(i) + pos,
i,
false
),
Some(indent),
"line {}: \"{}\"",
i,
line
);
}
} }
} }
} }

View File

@@ -1,3 +1,5 @@
+ pub use encoding_rs as encoding;
pub mod auto_pairs;
pub mod chars;
pub mod comment;
@@ -5,18 +7,19 @@
pub mod diff;
pub mod graphemes;
pub mod history;
+ pub mod increment;
pub mod indent;
pub mod line_ending;
pub mod macros;
pub mod match_brackets;
pub mod movement;
- pub mod numbers;
pub mod object;
pub mod path;
mod position;
pub mod register;
pub mod search;
pub mod selection;
+ pub mod shellwords;
mod state;
pub mod surround;
pub mod syntax;
@@ -36,8 +39,14 @@ pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
line.chars().position(|ch| !ch.is_whitespace())
}
- /// Find `.git` root.
- pub fn find_root(root: Option<&str>) -> Option<std::path::PathBuf> {
+ /// Find project root.
+ ///
+ /// Order of detection:
+ /// * Top-most folder containing a root marker in the current git repository
+ /// * Git repository root if no marker is detected
+ /// * Top-most folder containing a root marker if no git repository is detected
+ /// * Current working directory as fallback
+ pub fn find_root(root: Option<&str>, root_markers: &[String]) -> Option<std::path::PathBuf> {
let current_dir = std::env::current_dir().expect("unable to determine current directory");
let root = match root {
@@ -49,16 +58,30 @@ pub fn find_root(root: Option<&str>) -> Option<std::path::PathBuf> {
current_dir.join(root)
}
}
- None => current_dir,
+ None => current_dir.clone(),
};
+ let mut top_marker = None;
for ancestor in root.ancestors() {
- // TODO: also use defined roots if git isn't found
+     for marker in root_markers {
+         if ancestor.join(marker).exists() {
+             top_marker = Some(ancestor);
+             break;
+         }
+     }
+     // don't go higher than repo
if ancestor.join(".git").is_dir() {
-         return Some(ancestor.to_path_buf());
+         // Use workspace if detected from marker
+         return Some(top_marker.unwrap_or(ancestor).to_path_buf());
}
}
- None
+ // In absence of git repo, use workspace if detected
+ if top_marker.is_some() {
+     top_marker.map(|a| a.to_path_buf())
+ } else {
+     Some(current_dir)
+ }
}
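// Hedged usage sketch (illustrative, not part of the diff): with a marker list
// like ["Cargo.toml"], a call from a nested directory of a git repository walks
// the ancestors, remembers the top-most folder containing Cargo.toml, and stops
// at the repository root:
//
//     let root = find_root(None, &["Cargo.toml".to_string()]);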
pub fn runtime_dir() -> std::path::PathBuf { pub fn runtime_dir() -> std::path::PathBuf {
@@ -158,7 +181,7 @@ fn language_tomls() {
";
let base: Value = toml::from_slice(include_bytes!("../../languages.toml"))
- .expect("Couldn't parse built-in langauges config");
+ .expect("Couldn't parse built-in languages config");
let user: Value = toml::from_str(USER).unwrap();
let merged = merge_toml_values(base, user);
@@ -189,7 +212,10 @@ fn language_tomls() {
pub use ropey::{Rope, RopeBuilder, RopeSlice};
- pub use tendril::StrTendril as Tendril;
+ // pub use tendril::StrTendril as Tendril;
+ pub use smartstring::SmartString;
+ pub type Tendril = SmartString<smartstring::LazyCompact>;
#[doc(inline)]
pub use {regex, tree_sitter};

View File

@@ -250,7 +250,7 @@ fn get_line_ending_str() {
assert_eq!(get_line_ending_of_str(&text[..6]), Some(LineEnding::CR));
assert_eq!(get_line_ending_of_str(&text[..12]), Some(LineEnding::LF));
assert_eq!(get_line_ending_of_str(&text[..17]), Some(LineEnding::Crlf));
- assert_eq!(get_line_ending_of_str(&text[..]), None);
+ assert_eq!(get_line_ending_of_str(text), None);
}
#[test]

View File

@@ -11,7 +11,7 @@
('\"', '\"'),
];
- // limit matching pairs to only ( ) { } [ ] < >
+ // limit matching pairs to only ( ) { } [ ] < > ' ' " "
// Returns the position of the matching bracket under cursor.
//

View File

@ -307,8 +307,6 @@ fn reached_target(target: WordMotionTarget, prev_ch: char, next_ch: char) -> boo
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::array::{self, IntoIter};
use ropey::Rope; use ropey::Rope;
use super::*; use super::*;
@ -360,7 +358,7 @@ fn horizontal_moves_through_single_line_text() {
((Direction::Backward, 999usize), (0, 0)), // |This is a simple alphabetic line ((Direction::Backward, 999usize), (0, 0)), // |This is a simple alphabetic line
]; ];
for ((direction, amount), coordinates) in IntoIter::new(moves_and_expected_coordinates) { for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_horizontally(slice, range, direction, amount, Movement::Move); range = move_horizontally(slice, range, direction, amount, Movement::Move);
assert_eq!(coords_at_pos(slice, range.head), coordinates.into()) assert_eq!(coords_at_pos(slice, range.head), coordinates.into())
} }
@ -374,7 +372,7 @@ fn horizontal_moves_through_multiline_text() {
let mut range = Range::point(position); let mut range = Range::point(position);
let moves_and_expected_coordinates = IntoIter::new([ let moves_and_expected_coordinates = [
((Direction::Forward, 11usize), (1, 1)), // Multiline\nt|ext sample\n... ((Direction::Forward, 11usize), (1, 1)), // Multiline\nt|ext sample\n...
((Direction::Backward, 1usize), (1, 0)), // Multiline\n|text sample\n... ((Direction::Backward, 1usize), (1, 0)), // Multiline\n|text sample\n...
((Direction::Backward, 5usize), (0, 5)), // Multi|line\ntext sample\n... ((Direction::Backward, 5usize), (0, 5)), // Multi|line\ntext sample\n...
@ -384,7 +382,7 @@ fn horizontal_moves_through_multiline_text() {
((Direction::Backward, 0usize), (0, 3)), // Mul|tiline\ntext sample\n... ((Direction::Backward, 0usize), (0, 3)), // Mul|tiline\ntext sample\n...
((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n| ((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n|
((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n| ((Direction::Forward, 999usize), (5, 0)), // ...and whitespaced\n|
]); ];
for ((direction, amount), coordinates) in moves_and_expected_coordinates { for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_horizontally(slice, range, direction, amount, Movement::Move); range = move_horizontally(slice, range, direction, amount, Movement::Move);
@ -402,11 +400,11 @@ fn selection_extending_moves_in_single_line_text() {
let mut range = Range::point(position); let mut range = Range::point(position);
let original_anchor = range.anchor; let original_anchor = range.anchor;
let moves = IntoIter::new([ let moves = [
(Direction::Forward, 1usize), (Direction::Forward, 1usize),
(Direction::Forward, 5usize), (Direction::Forward, 5usize),
(Direction::Backward, 3usize), (Direction::Backward, 3usize),
]); ];
for (direction, amount) in moves { for (direction, amount) in moves {
range = move_horizontally(slice, range, direction, amount, Movement::Extend); range = move_horizontally(slice, range, direction, amount, Movement::Extend);
@ -420,7 +418,7 @@ fn vertical_moves_in_single_column() {
let slice = text.slice(..); let slice = text.slice(..);
let position = pos_at_coords(slice, (0, 0).into(), true); let position = pos_at_coords(slice, (0, 0).into(), true);
let mut range = Range::point(position); let mut range = Range::point(position);
let moves_and_expected_coordinates = IntoIter::new([ let moves_and_expected_coordinates = [
((Direction::Forward, 1usize), (1, 0)), ((Direction::Forward, 1usize), (1, 0)),
((Direction::Forward, 2usize), (3, 0)), ((Direction::Forward, 2usize), (3, 0)),
((Direction::Forward, 1usize), (4, 0)), ((Direction::Forward, 1usize), (4, 0)),
@ -430,7 +428,7 @@ fn vertical_moves_in_single_column() {
((Direction::Backward, 0usize), (4, 0)), ((Direction::Backward, 0usize), (4, 0)),
((Direction::Forward, 5), (5, 0)), ((Direction::Forward, 5), (5, 0)),
((Direction::Forward, 999usize), (5, 0)), ((Direction::Forward, 999usize), (5, 0)),
]); ];
for ((direction, amount), coordinates) in moves_and_expected_coordinates { for ((direction, amount), coordinates) in moves_and_expected_coordinates {
range = move_vertically(slice, range, direction, amount, Movement::Move); range = move_vertically(slice, range, direction, amount, Movement::Move);
@ -450,7 +448,7 @@ enum Axis {
H, H,
V, V,
} }
let moves_and_expected_coordinates = IntoIter::new([ let moves_and_expected_coordinates = [
// Places cursor at the end of line // Places cursor at the end of line
((Axis::H, Direction::Forward, 8usize), (0, 8)), ((Axis::H, Direction::Forward, 8usize), (0, 8)),
// First descent preserves column as the target line is wider // First descent preserves column as the target line is wider
@ -463,7 +461,7 @@ enum Axis {
((Axis::V, Direction::Backward, 999usize), (0, 8)), ((Axis::V, Direction::Backward, 999usize), (0, 8)),
((Axis::V, Direction::Forward, 4usize), (4, 8)), ((Axis::V, Direction::Forward, 4usize), (4, 8)),
((Axis::V, Direction::Forward, 999usize), (5, 0)), ((Axis::V, Direction::Forward, 999usize), (5, 0)),
]); ];
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
range = match axis { range = match axis {
@ -489,7 +487,7 @@ enum Axis {
H, H,
V, V,
} }
let moves_and_expected_coordinates = IntoIter::new([ let moves_and_expected_coordinates = [
// Places cursor at the fourth kana. // Places cursor at the fourth kana.
((Axis::H, Direction::Forward, 4), (0, 4)), ((Axis::H, Direction::Forward, 4), (0, 4)),
// Descent places cursor at the 4th character. // Descent places cursor at the 4th character.
@ -498,7 +496,7 @@ enum Axis {
((Axis::H, Direction::Backward, 1usize), (1, 3)), ((Axis::H, Direction::Backward, 1usize), (1, 3)),
// Jumping back up 1 line. // Jumping back up 1 line.
((Axis::V, Direction::Backward, 1usize), (0, 3)), ((Axis::V, Direction::Backward, 1usize), (0, 3)),
]); ];
for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates { for ((axis, direction, amount), coordinates) in moves_and_expected_coordinates {
range = match axis { range = match axis {
@ -530,7 +528,7 @@ fn nonsensical_ranges_panic_on_backwards_movement_attempt_in_debug_mode() {
#[test] #[test]
fn test_behaviour_when_moving_to_start_of_next_words() { fn test_behaviour_when_moving_to_start_of_next_words() {
let tests = array::IntoIter::new([ let tests = [
("Basic forward motion stops at the first space", ("Basic forward motion stops at the first space",
vec![(1, Range::new(0, 0), Range::new(0, 6))]), vec![(1, Range::new(0, 0), Range::new(0, 6))]),
(" Starting from a boundary advances the anchor", (" Starting from a boundary advances the anchor",
@ -604,7 +602,7 @@ fn test_behaviour_when_moving_to_start_of_next_words() {
vec![ vec![
(1, Range::new(0, 0), Range::new(0, 6)), (1, Range::new(0, 0), Range::new(0, 6)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {
@ -616,7 +614,7 @@ fn test_behaviour_when_moving_to_start_of_next_words() {
#[test] #[test]
fn test_behaviour_when_moving_to_start_of_next_long_words() { fn test_behaviour_when_moving_to_start_of_next_long_words() {
let tests = array::IntoIter::new([ let tests = [
("Basic forward motion stops at the first space", ("Basic forward motion stops at the first space",
vec![(1, Range::new(0, 0), Range::new(0, 6))]), vec![(1, Range::new(0, 0), Range::new(0, 6))]),
(" Starting from a boundary advances the anchor", (" Starting from a boundary advances the anchor",
@ -688,7 +686,7 @@ fn test_behaviour_when_moving_to_start_of_next_long_words() {
vec![ vec![
(1, Range::new(0, 0), Range::new(0, 8)), (1, Range::new(0, 0), Range::new(0, 8)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {
@ -700,7 +698,7 @@ fn test_behaviour_when_moving_to_start_of_next_long_words() {
#[test] #[test]
fn test_behaviour_when_moving_to_start_of_previous_words() { fn test_behaviour_when_moving_to_start_of_previous_words() {
let tests = array::IntoIter::new([ let tests = [
("Basic backward motion from the middle of a word", ("Basic backward motion from the middle of a word",
vec![(1, Range::new(3, 3), Range::new(4, 0))]), vec![(1, Range::new(3, 3), Range::new(4, 0))]),
@ -773,7 +771,7 @@ fn test_behaviour_when_moving_to_start_of_previous_words() {
vec![ vec![
(1, Range::new(0, 6), Range::new(6, 0)), (1, Range::new(0, 6), Range::new(6, 0)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {
@ -785,7 +783,7 @@ fn test_behaviour_when_moving_to_start_of_previous_words() {
#[test] #[test]
fn test_behaviour_when_moving_to_start_of_previous_long_words() { fn test_behaviour_when_moving_to_start_of_previous_long_words() {
let tests = array::IntoIter::new([ let tests = [
( (
"Basic backward motion from the middle of a word", "Basic backward motion from the middle of a word",
vec![(1, Range::new(3, 3), Range::new(4, 0))], vec![(1, Range::new(3, 3), Range::new(4, 0))],
@ -870,7 +868,7 @@ fn test_behaviour_when_moving_to_start_of_previous_long_words() {
vec![ vec![
(1, Range::new(0, 8), Range::new(8, 0)), (1, Range::new(0, 8), Range::new(8, 0)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {
@ -882,7 +880,7 @@ fn test_behaviour_when_moving_to_start_of_previous_long_words() {
#[test] #[test]
fn test_behaviour_when_moving_to_end_of_next_words() { fn test_behaviour_when_moving_to_end_of_next_words() {
let tests = array::IntoIter::new([ let tests = [
("Basic forward motion from the start of a word to the end of it", ("Basic forward motion from the start of a word to the end of it",
vec![(1, Range::new(0, 0), Range::new(0, 5))]), vec![(1, Range::new(0, 0), Range::new(0, 5))]),
("Basic forward motion from the end of a word to the end of the next", ("Basic forward motion from the end of a word to the end of the next",
@ -954,7 +952,7 @@ fn test_behaviour_when_moving_to_end_of_next_words() {
vec![ vec![
(1, Range::new(0, 0), Range::new(0, 5)), (1, Range::new(0, 0), Range::new(0, 5)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {
@ -966,7 +964,7 @@ fn test_behaviour_when_moving_to_end_of_next_words() {
#[test] #[test]
fn test_behaviour_when_moving_to_end_of_previous_words() { fn test_behaviour_when_moving_to_end_of_previous_words() {
let tests = array::IntoIter::new([ let tests = [
("Basic backward motion from the middle of a word", ("Basic backward motion from the middle of a word",
vec![(1, Range::new(9, 9), Range::new(10, 5))]), vec![(1, Range::new(9, 9), Range::new(10, 5))]),
("Starting from after boundary retreats the anchor", ("Starting from after boundary retreats the anchor",
@ -1036,7 +1034,7 @@ fn test_behaviour_when_moving_to_end_of_previous_words() {
vec![ vec![
(1, Range::new(0, 10), Range::new(10, 4)), (1, Range::new(0, 10), Range::new(10, 4)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {
@ -1048,7 +1046,7 @@ fn test_behaviour_when_moving_to_end_of_previous_words() {
#[test] #[test]
fn test_behaviour_when_moving_to_end_of_next_long_words() { fn test_behaviour_when_moving_to_end_of_next_long_words() {
let tests = array::IntoIter::new([ let tests = [
("Basic forward motion from the start of a word to the end of it", ("Basic forward motion from the start of a word to the end of it",
vec![(1, Range::new(0, 0), Range::new(0, 5))]), vec![(1, Range::new(0, 0), Range::new(0, 5))]),
("Basic forward motion from the end of a word to the end of the next", ("Basic forward motion from the end of a word to the end of the next",
@ -1118,7 +1116,7 @@ fn test_behaviour_when_moving_to_end_of_next_long_words() {
vec![ vec![
(1, Range::new(0, 0), Range::new(0, 7)), (1, Range::new(0, 0), Range::new(0, 7)),
]), ]),
]); ];
for (sample, scenario) in tests { for (sample, scenario) in tests {
for (count, begin, expected_end) in scenario.into_iter() { for (count, begin, expected_end) in scenario.into_iter() {

View File

@ -1,31 +1,72 @@
use crate::{Range, RopeSlice, Selection, Syntax}; use crate::{Range, RopeSlice, Selection, Syntax};
use tree_sitter::Node;
// TODO: to contract_selection we'd need to store the previous ranges before expand. pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
// Maybe just contract to the first child node? select_node_impl(syntax, text, selection, |descendant, from, to| {
pub fn expand_selection(syntax: &Syntax, text: RopeSlice, selection: &Selection) -> Selection { if descendant.start_byte() == from && descendant.end_byte() == to {
descendant.parent()
} else {
Some(descendant)
}
})
}
pub fn shrink_selection(syntax: &Syntax, text: RopeSlice, selection: Selection) -> Selection {
select_node_impl(syntax, text, selection, |descendant, _from, _to| {
descendant.child(0).or(Some(descendant))
})
}
pub fn select_sibling<F>(
syntax: &Syntax,
text: RopeSlice,
selection: Selection,
sibling_fn: &F,
) -> Selection
where
F: Fn(Node) -> Option<Node>,
{
select_node_impl(syntax, text, selection, |descendant, _from, _to| {
find_sibling_recursive(descendant, sibling_fn)
})
}
fn find_sibling_recursive<F>(node: Node, sibling_fn: F) -> Option<Node>
where
F: Fn(Node) -> Option<Node>,
{
sibling_fn(node).or_else(|| {
node.parent()
.and_then(|node| find_sibling_recursive(node, sibling_fn))
})
}
fn select_node_impl<F>(
syntax: &Syntax,
text: RopeSlice,
selection: Selection,
select_fn: F,
) -> Selection
where
F: Fn(Node, usize, usize) -> Option<Node>,
{
let tree = syntax.tree(); let tree = syntax.tree();
selection.clone().transform(|range| { selection.transform(|range| {
let from = text.char_to_byte(range.from()); let from = text.char_to_byte(range.from());
let to = text.char_to_byte(range.to()); let to = text.char_to_byte(range.to());
// find parent of a descendant that matches the range let node = match tree
let parent = match tree
.root_node() .root_node()
.descendant_for_byte_range(from, to) .descendant_for_byte_range(from, to)
.and_then(|node| { .and_then(|node| select_fn(node, from, to))
if node.child_count() == 0 || (node.start_byte() == from && node.end_byte() == to) { {
node.parent() Some(node) => node,
} else {
Some(node)
}
}) {
Some(parent) => parent,
None => return range, None => return range,
}; };
let from = text.byte_to_char(parent.start_byte()); let from = text.byte_to_char(node.start_byte());
let to = text.byte_to_char(parent.end_byte()); let to = text.byte_to_char(node.end_byte());
if range.head < range.anchor { if range.head < range.anchor {
Range::new(to, from) Range::new(to, from)

View File

@@ -109,7 +109,10 @@ pub fn visual_coords_at_pos(text: RopeSlice, pos: usize, tab_width: usize) -> Position {
/// TODO: this should be changed to work in terms of visual row/column, not
/// graphemes.
pub fn pos_at_coords(text: RopeSlice, coords: Position, limit_before_line_ending: bool) -> usize {
- let Position { row, col } = coords;
+ let Position { mut row, col } = coords;
+ if limit_before_line_ending {
+     row = row.min(text.len_lines() - 1);
+ };
let line_start = text.line_to_char(row);
let line_end = if limit_before_line_ending {
line_end_char_index(&text, row)
@@ -290,5 +293,12 @@ fn test_pos_at_coords() {
assert_eq!(pos_at_coords(slice, (0, 0).into(), false), 0);
assert_eq!(pos_at_coords(slice, (0, 1).into(), false), 1);
assert_eq!(pos_at_coords(slice, (0, 2).into(), false), 2);
+ // Test out of bounds.
+ let text = Rope::new();
+ let slice = text.slice(..);
+ assert_eq!(pos_at_coords(slice, (10, 0).into(), true), 0);
+ assert_eq!(pos_at_coords(slice, (0, 10).into(), true), 0);
+ assert_eq!(pos_at_coords(slice, (10, 10).into(), true), 0);
}
}

View File

@@ -68,4 +68,8 @@ pub fn write(&mut self, name: char, values: Vec<String>) {
pub fn read(&self, name: char) -> Option<&[String]> {
self.get(name).map(|reg| reg.read())
}
+ pub fn inner(&self) -> &HashMap<char, Register> {
+     &self.inner
+ }
}

View File

@@ -7,6 +7,7 @@
ensure_grapheme_boundary_next, ensure_grapheme_boundary_prev, next_grapheme_boundary,
prev_grapheme_boundary,
},
+ movement::Direction,
Assoc, ChangeSet, RopeSlice,
};
use smallvec::{smallvec, SmallVec};
@@ -82,6 +83,13 @@ pub fn to(&self) -> usize {
std::cmp::max(self.anchor, self.head)
}
/// Total length of the range.
#[inline]
#[must_use]
pub fn len(&self) -> usize {
self.to() - self.from()
}
/// The (inclusive) range of lines that the range overlaps.
#[inline]
#[must_use]
@@ -102,6 +110,27 @@ pub fn is_empty(&self) -> bool {
self.anchor == self.head
}
/// `Direction::Backward` when head < anchor.
/// `Direction::Forward` otherwise.
#[inline]
#[must_use]
pub fn direction(&self) -> Direction {
if self.head < self.anchor {
Direction::Backward
} else {
Direction::Forward
}
}
// flips the direction of the selection
pub fn flip(&self) -> Self {
Self {
anchor: self.head,
head: self.anchor,
horiz: self.horiz,
}
}
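// Illustrative only: flipping swaps anchor and head but keeps the span.
//
//     let r = Range::new(4, 1);                        // head < anchor
//     assert_eq!(r.direction(), Direction::Backward);
//     assert_eq!(r.flip().direction(), Direction::Forward);
//     assert_eq!(r.flip().len(), r.len());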
/// Check two ranges for overlap.
#[must_use]
pub fn overlaps(&self, other: &Self) -> bool {
@@ -111,6 +140,11 @@ pub fn overlaps(&self, other: &Self) -> bool {
self.from() == other.from() || (self.to() > other.from() && other.to() > self.from())
}
#[inline]
pub fn contains_range(&self, other: &Self) -> bool {
self.from() <= other.from() && self.to() >= other.to()
}
pub fn contains(&self, pos: usize) -> bool {
self.from() <= pos && pos < self.to()
}
@@ -515,6 +549,39 @@ pub fn iter(&self) -> std::slice::Iter<'_, Range> {
pub fn len(&self) -> usize {
self.ranges.len()
}
// returns true if self ⊇ other
pub fn contains(&self, other: &Selection) -> bool {
// can't contain other if it is larger
if other.len() > self.len() {
return false;
}
let (mut iter_self, mut iter_other) = (self.iter(), other.iter());
let (mut ele_self, mut ele_other) = (iter_self.next(), iter_other.next());
loop {
match (ele_self, ele_other) {
(Some(ra), Some(rb)) => {
if !ra.contains_range(rb) {
// `self` doesn't contain next element from `other`, advance `self`, we need to match all from `other`
ele_self = iter_self.next();
} else {
// matched element from `other`, advance `other`
ele_other = iter_other.next();
};
}
(None, Some(_)) => {
// exhausted `self`, so we can't match the remainder of `other`
return false;
}
(_, None) => {
// no elements from `other` left to match, `self` contains `other`
return true;
}
}
}
}
}
}
impl<'a> IntoIterator for &'a Selection {
@@ -699,16 +766,16 @@ fn test_create_merges_adjacent_points() {
fn test_contains() {
let range = Range::new(10, 12);
- assert_eq!(range.contains(9), false);
- assert_eq!(range.contains(10), true);
- assert_eq!(range.contains(11), true);
- assert_eq!(range.contains(12), false);
- assert_eq!(range.contains(13), false);
+ assert!(!range.contains(9));
+ assert!(range.contains(10));
+ assert!(range.contains(11));
+ assert!(!range.contains(12));
+ assert!(!range.contains(13));
let range = Range::new(9, 6);
- assert_eq!(range.contains(9), false);
- assert_eq!(range.contains(7), true);
- assert_eq!(range.contains(6), true);
+ assert!(!range.contains(9));
+ assert!(range.contains(7));
+ assert!(range.contains(6));
}
#[test]
@@ -953,4 +1020,30 @@ fn test_split_on_matches() {
&["", "abcd", "efg", "rs", "xyz"]
);
}
#[test]
fn test_selection_contains() {
fn contains(a: Vec<(usize, usize)>, b: Vec<(usize, usize)>) -> bool {
let sela = Selection::new(a.iter().map(|a| Range::new(a.0, a.1)).collect(), 0);
let selb = Selection::new(b.iter().map(|b| Range::new(b.0, b.1)).collect(), 0);
sela.contains(&selb)
}
// exact match
assert!(contains(vec!((1, 1)), vec!((1, 1))));
// larger set contains smaller
assert!(contains(vec!((1, 1), (2, 2), (3, 3)), vec!((2, 2))));
// multiple matches
assert!(contains(vec!((1, 1), (2, 2)), vec!((1, 1), (2, 2))));
// smaller set can't contain bigger
assert!(!contains(vec!((1, 1)), vec!((1, 1), (2, 2))));
assert!(contains(
vec!((1, 1), (2, 4), (5, 6), (7, 9), (10, 13)),
vec!((3, 4), (7, 9))
));
assert!(!contains(vec!((1, 1), (5, 6)), vec!((1, 6))));
}
} }

View File

@ -0,0 +1,164 @@
use std::borrow::Cow;
/// Get the vec of escaped / quoted / double-quoted filenames from the input str.
pub fn shellwords(input: &str) -> Vec<Cow<'_, str>> {
enum State {
Normal,
NormalEscaped,
Quoted,
QuoteEscaped,
Dquoted,
DquoteEscaped,
}
use State::*;
let mut state = Normal;
let mut args: Vec<Cow<str>> = Vec::new();
let mut escaped = String::with_capacity(input.len());
let mut start = 0;
let mut end = 0;
for (i, c) in input.char_indices() {
state = match state {
Normal => match c {
'\\' => {
escaped.push_str(&input[start..i]);
start = i + 1;
NormalEscaped
}
'"' => {
end = i;
Dquoted
}
'\'' => {
end = i;
Quoted
}
c if c.is_ascii_whitespace() => {
end = i;
Normal
}
_ => Normal,
},
NormalEscaped => Normal,
Quoted => match c {
'\\' => {
escaped.push_str(&input[start..i]);
start = i + 1;
QuoteEscaped
}
'\'' => {
end = i;
Normal
}
_ => Quoted,
},
QuoteEscaped => Quoted,
Dquoted => match c {
'\\' => {
escaped.push_str(&input[start..i]);
start = i + 1;
DquoteEscaped
}
'"' => {
end = i;
Normal
}
_ => Dquoted,
},
DquoteEscaped => Dquoted,
};
if i >= input.len() - 1 && end == 0 {
end = i + 1;
}
if end > 0 {
let esc_trim = escaped.trim();
let inp = &input[start..end];
if !(esc_trim.is_empty() && inp.trim().is_empty()) {
if esc_trim.is_empty() {
args.push(inp.into());
} else {
args.push([escaped, inp.into()].concat().into());
escaped = "".to_string();
}
}
start = i + 1;
end = 0;
}
}
args
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_normal() {
let input = r#":o single_word twó wörds \three\ \"with\ escaping\\"#;
let result = shellwords(input);
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
Cow::from("twó"),
Cow::from("wörds"),
Cow::from(r#"three "with escaping\"#),
];
// TODO test is_owned and is_borrowed, once they get stabilized.
assert_eq!(expected, result);
}
#[test]
fn test_quoted() {
let quoted =
r#":o 'single_word' 'twó wörds' '' ' ''\three\' \"with\ escaping\\' 'quote incomplete"#;
let result = shellwords(quoted);
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
Cow::from("twó wörds"),
Cow::from(r#"three' "with escaping\"#),
Cow::from("quote incomplete"),
];
assert_eq!(expected, result);
}
#[test]
fn test_dquoted() {
let dquoted = r#":o "single_word" "twó wörds" "" " ""\three\' \"with\ escaping\\" "dquote incomplete"#;
let result = shellwords(dquoted);
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
Cow::from("twó wörds"),
Cow::from(r#"three' "with escaping\"#),
Cow::from("dquote incomplete"),
];
assert_eq!(expected, result);
}
#[test]
fn test_mixed() {
let dquoted = r#":o single_word 'twó wörds' "\three\' \"with\ escaping\\""no space before"'and after' $#%^@ "%^&(%^" ')(*&^%''a\\\\\b' '"#;
let result = shellwords(dquoted);
let expected = vec![
Cow::from(":o"),
Cow::from("single_word"),
Cow::from("twó wörds"),
Cow::from("three' \"with escaping\\"),
Cow::from("no space before"),
Cow::from("and after"),
Cow::from("$#%^@"),
Cow::from("%^&(%^"),
Cow::from(")(*&^%"),
Cow::from(r#"a\\b"#),
// last ' just switches to the quoted state, but since we don't have anything after it, it should be ignored
];
assert_eq!(expected, result);
}
}

View File

@ -172,6 +172,7 @@ mod test {
use ropey::Rope; use ropey::Rope;
use smallvec::SmallVec; use smallvec::SmallVec;
#[allow(clippy::type_complexity)]
fn check_find_nth_pair_pos( fn check_find_nth_pair_pos(
text: &str, text: &str,
cases: Vec<(usize, char, usize, Option<(usize, usize)>)>, cases: Vec<(usize, char, usize, Option<(usize, usize)>)>,

File diff suppressed because it is too large

View File

@ -22,7 +22,7 @@ pub enum Assoc {
} }
// ChangeSpec = Change | ChangeSet | Vec<Change> // ChangeSpec = Change | ChangeSet | Vec<Change>
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct ChangeSet { pub struct ChangeSet {
pub(crate) changes: Vec<Operation>, pub(crate) changes: Vec<Operation>,
/// The required document length. Will refuse to apply changes unless it matches. /// The required document length. Will refuse to apply changes unless it matches.
@ -30,16 +30,6 @@ pub struct ChangeSet {
len_after: usize, len_after: usize,
} }
impl Default for ChangeSet {
fn default() -> Self {
Self {
changes: Vec::new(),
len: 0,
len_after: 0,
}
}
}
impl ChangeSet { impl ChangeSet {
pub fn with_capacity(capacity: usize) -> Self { pub fn with_capacity(capacity: usize) -> Self {
Self { Self {
@ -95,7 +85,7 @@ fn insert(&mut self, fragment: Tendril) {
let new_last = match self.changes.as_mut_slice() { let new_last = match self.changes.as_mut_slice() {
[.., Insert(prev)] | [.., Insert(prev), Delete(_)] => { [.., Insert(prev)] | [.., Insert(prev), Delete(_)] => {
prev.push_tendril(&fragment); prev.push_str(&fragment);
return; return;
} }
[.., last @ Delete(_)] => std::mem::replace(last, Insert(fragment)), [.., last @ Delete(_)] => std::mem::replace(last, Insert(fragment)),
@ -199,7 +189,7 @@ pub fn compose(self, other: Self) -> Self {
// TODO: cover this with a test // TODO: cover this with a test
// figure out the byte index of the truncated string end // figure out the byte index of the truncated string end
let (pos, _) = s.char_indices().nth(j).unwrap(); let (pos, _) = s.char_indices().nth(j).unwrap();
s.pop_front(pos as u32); s.replace_range(0..pos, "");
head_a = Some(Insert(s)); head_a = Some(Insert(s));
head_b = changes_b.next(); head_b = changes_b.next();
} }
@ -221,9 +211,11 @@ pub fn compose(self, other: Self) -> Self {
Ordering::Greater => { Ordering::Greater => {
// figure out the byte index of the truncated string end // figure out the byte index of the truncated string end
let (pos, _) = s.char_indices().nth(j).unwrap(); let (pos, _) = s.char_indices().nth(j).unwrap();
let pos = pos as u32; let mut before = s;
changes.insert(s.subtendril(0, pos)); let after = before.split_off(pos);
head_a = Some(Insert(s.subtendril(pos, s.len() as u32 - pos)));
changes.insert(before);
head_a = Some(Insert(after));
head_b = changes_b.next(); head_b = changes_b.next();
} }
} }
@ -287,7 +279,7 @@ pub fn invert(&self, original_doc: &Rope) -> Self {
} }
Delete(n) => { Delete(n) => {
let text = Cow::from(original_doc.slice(pos..pos + *n)); let text = Cow::from(original_doc.slice(pos..pos + *n));
changes.insert(Tendril::from_slice(&text)); changes.insert(Tendril::from(text.as_ref()));
pos += n; pos += n;
} }
Insert(s) => { Insert(s) => {
@ -330,7 +322,7 @@ pub fn apply(&self, text: &mut Rope) -> bool {
/// `true` when the set is empty. /// `true` when the set is empty.
#[inline] #[inline]
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.changes.is_empty() self.changes.is_empty() || self.changes == [Operation::Retain(self.len)]
} }
/// Map a position through the changes. /// Map a position through the changes.
@ -419,7 +411,7 @@ pub fn changes_iter(&self) -> ChangeIterator {
/// Transaction represents a single undoable unit of changes. Several changes can be grouped into /// Transaction represents a single undoable unit of changes. Several changes can be grouped into
/// a single transaction. /// a single transaction.
#[derive(Debug, Default, Clone)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct Transaction { pub struct Transaction {
changes: ChangeSet, changes: ChangeSet,
selection: Option<Selection>, selection: Option<Selection>,
@ -720,19 +712,19 @@ fn changes_iter() {
#[test] #[test]
fn optimized_composition() { fn optimized_composition() {
let mut state = State::new("".into()); let mut state = State::new("".into());
let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('h')); let t1 = Transaction::insert(&state.doc, &state.selection, Tendril::from("h"));
t1.apply(&mut state.doc); t1.apply(&mut state.doc);
state.selection = state.selection.clone().map(t1.changes()); state.selection = state.selection.clone().map(t1.changes());
let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('e')); let t2 = Transaction::insert(&state.doc, &state.selection, Tendril::from("e"));
t2.apply(&mut state.doc); t2.apply(&mut state.doc);
state.selection = state.selection.clone().map(t2.changes()); state.selection = state.selection.clone().map(t2.changes());
let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l')); let t3 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l"));
t3.apply(&mut state.doc); t3.apply(&mut state.doc);
state.selection = state.selection.clone().map(t3.changes()); state.selection = state.selection.clone().map(t3.changes());
let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('l')); let t4 = Transaction::insert(&state.doc, &state.selection, Tendril::from("l"));
t4.apply(&mut state.doc); t4.apply(&mut state.doc);
state.selection = state.selection.clone().map(t4.changes()); state.selection = state.selection.clone().map(t4.changes());
let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from_char('o')); let t5 = Transaction::insert(&state.doc, &state.selection, Tendril::from("o"));
t5.apply(&mut state.doc); t5.apply(&mut state.doc);
state.selection = state.selection.clone().map(t5.changes()); state.selection = state.selection.clone().map(t5.changes());
@ -771,7 +763,7 @@ fn combine_with_empty() {
#[test] #[test]
fn combine_with_utf8() { fn combine_with_utf8() {
const TEST_CASE: &'static str = "Hello, これはヘリックスエディターです!"; const TEST_CASE: &str = "Hello, これはヘリックスエディターです!";
let empty = Rope::from(""); let empty = Rope::from("");
let a = ChangeSet::new(&empty); let a = ChangeSet::new(&empty);

View File

@ -1,6 +1,6 @@
[package] [package]
name = "helix-dap" name = "helix-dap"
version = "0.5.0" version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"] authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2018" edition = "2018"
license = "MPL-2.0" license = "MPL-2.0"
@ -12,7 +12,7 @@ homepage = "https://helix-editor.com"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
helix-core = { version = "0.5", path = "../helix-core" } helix-core = { version = "0.6", path = "../helix-core" }
anyhow = "1.0" anyhow = "1.0"
log = "0.4" log = "0.4"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }

View File

@ -36,7 +36,7 @@ pub struct Response {
#[serde(tag = "type", rename_all = "camelCase")] #[serde(tag = "type", rename_all = "camelCase")]
pub enum Payload { pub enum Payload {
// type = "event" // type = "event"
Event(Event), Event(Box<Event>),
// type = "response" // type = "response"
Response(Response), Response(Response),
// type = "request" // type = "request"
@ -45,6 +45,7 @@ pub enum Payload {
#[derive(Debug)] #[derive(Debug)]
pub struct Transport { pub struct Transport {
#[allow(unused)]
id: usize, id: usize,
pending_requests: Mutex<HashMap<u64, Sender<Result<Response>>>>, pending_requests: Mutex<HashMap<u64, Sender<Result<Response>>>>,
} }

View File

@ -1,6 +1,6 @@
[package] [package]
name = "helix-lsp" name = "helix-lsp"
version = "0.5.0" version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"] authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021" edition = "2021"
license = "MPL-2.0" license = "MPL-2.0"
@ -12,16 +12,16 @@ homepage = "https://helix-editor.com"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
helix-core = { version = "0.5", path = "../helix-core" } helix-core = { version = "0.6", path = "../helix-core" }
anyhow = "1.0" anyhow = "1.0"
futures-executor = "0.3" futures-executor = "0.3"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
jsonrpc-core = { version = "18.0", default-features = false } # don't pull in all of futures jsonrpc-core = { version = "18.0", default-features = false } # don't pull in all of futures
log = "0.4" log = "0.4"
lsp-types = { version = "0.91", features = ["proposed"] } lsp-types = { version = "0.92", features = ["proposed"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1.14", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] } tokio = { version = "1.16", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.8" tokio-stream = "0.1.8"

View File

@ -31,6 +31,7 @@ pub struct Client {
pub(crate) capabilities: OnceCell<lsp::ServerCapabilities>, pub(crate) capabilities: OnceCell<lsp::ServerCapabilities>,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
config: Option<Value>, config: Option<Value>,
root_markers: Vec<String>,
} }
impl Client { impl Client {
@ -39,6 +40,7 @@ pub fn start(
cmd: &str, cmd: &str,
args: &[String], args: &[String],
config: Option<Value>, config: Option<Value>,
root_markers: Vec<String>,
id: usize, id: usize,
) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> { ) -> Result<(Self, UnboundedReceiver<(usize, Call)>, Arc<Notify>)> {
let process = Command::new(cmd) let process = Command::new(cmd)
@ -68,6 +70,7 @@ pub fn start(
capabilities: OnceCell::new(), capabilities: OnceCell::new(),
offset_encoding: OffsetEncoding::Utf8, offset_encoding: OffsetEncoding::Utf8,
config, config,
root_markers,
}; };
Ok((client, server_rx, initialize_notify)) Ok((client, server_rx, initialize_notify))
@ -202,7 +205,7 @@ pub fn reply(
Ok(result) => Output::Success(Success { Ok(result) => Output::Success(Success {
jsonrpc: Some(Version::V2), jsonrpc: Some(Version::V2),
id, id,
result, result: serde_json::to_value(result)?,
}), }),
Err(error) => Output::Failure(Failure { Err(error) => Output::Failure(Failure {
jsonrpc: Some(Version::V2), jsonrpc: Some(Version::V2),
@ -225,7 +228,8 @@ pub fn reply(
pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> { pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> {
// TODO: delay any requests that are triggered prior to initialize // TODO: delay any requests that are triggered prior to initialize
let root = find_root(None).and_then(|root| lsp::Url::from_file_path(root).ok()); let root = find_root(None, &self.root_markers)
.and_then(|root| lsp::Url::from_file_path(root).ok());
if self.config.is_some() { if self.config.is_some() {
log::info!("Using custom LSP config: {}", self.config.as_ref().unwrap()); log::info!("Using custom LSP config: {}", self.config.as_ref().unwrap());
@ -434,7 +438,7 @@ fn traverse(pos: lsp::Position, text: RopeSlice) -> lsp::Position {
changes.push(lsp::TextDocumentContentChangeEvent { changes.push(lsp::TextDocumentContentChangeEvent {
range: Some(lsp::Range::new(start, end)), range: Some(lsp::Range::new(start, end)),
text: s.into(), text: s.to_string(),
range_length: None, range_length: None,
}); });
} }
@ -556,6 +560,14 @@ pub fn completion(
self.call::<lsp::request::Completion>(params) self.call::<lsp::request::Completion>(params)
} }
pub async fn resolve_completion_item(
&self,
completion_item: lsp::CompletionItem,
) -> Result<lsp::CompletionItem> {
self.request::<lsp::request::ResolveCompletionItem>(completion_item)
.await
}
pub fn text_document_signature_help( pub fn text_document_signature_help(
&self, &self,
text_document: lsp::TextDocumentIdentifier, text_document: lsp::TextDocumentIdentifier,
@ -800,4 +812,16 @@ pub async fn rename_symbol(
let response = self.request::<lsp::request::Rename>(params).await?; let response = self.request::<lsp::request::Rename>(params).await?;
Ok(response.unwrap_or_default()) Ok(response.unwrap_or_default())
} }
pub fn command(&self, command: lsp::Command) -> impl Future<Output = Result<Value>> {
let params = lsp::ExecuteCommandParams {
command: command.command,
arguments: command.arguments.unwrap_or_default(),
work_done_progress_params: lsp::WorkDoneProgressParams {
work_done_token: None,
},
};
self.call::<lsp::request::ExecuteCommand>(params)
}
} }

View File

@ -66,39 +66,26 @@ pub fn lsp_pos_to_pos(
pos: lsp::Position, pos: lsp::Position,
offset_encoding: OffsetEncoding, offset_encoding: OffsetEncoding,
) -> Option<usize> { ) -> Option<usize> {
let max_line = doc.lines().count().saturating_sub(1);
let pos_line = pos.line as usize; let pos_line = pos.line as usize;
let pos_line = if pos_line > max_line { if pos_line > doc.len_lines() - 1 {
return None; return None;
} else { }
pos_line
};
match offset_encoding { match offset_encoding {
OffsetEncoding::Utf8 => { OffsetEncoding::Utf8 => {
let max_char = doc
.line_to_char(max_line)
.checked_add(doc.line(max_line).len_chars())?;
let line = doc.line_to_char(pos_line); let line = doc.line_to_char(pos_line);
let pos = line.checked_add(pos.character as usize)?; let pos = line.checked_add(pos.character as usize)?;
if pos <= max_char { if pos <= doc.len_chars() {
Some(pos) Some(pos)
} else { } else {
None None
} }
} }
OffsetEncoding::Utf16 => { OffsetEncoding::Utf16 => {
let max_char = doc
.line_to_char(max_line)
.checked_add(doc.line(max_line).len_chars())?;
let max_cu = doc.char_to_utf16_cu(max_char);
let line = doc.line_to_char(pos_line); let line = doc.line_to_char(pos_line);
let line_start = doc.char_to_utf16_cu(line); let line_start = doc.char_to_utf16_cu(line);
let pos = line_start.checked_add(pos.character as usize)?; let pos = line_start.checked_add(pos.character as usize)?;
if pos <= max_cu { doc.try_utf16_cu_to_char(pos).ok()
Some(doc.utf16_cu_to_char(pos))
} else {
None
}
} }
} }
} }
@ -203,6 +190,7 @@ fn from(fmt: LspFormatting) -> Transaction {
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum MethodCall { pub enum MethodCall {
WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams), WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams),
ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
} }
impl MethodCall { impl MethodCall {
@ -215,6 +203,12 @@ pub fn parse(method: &str, params: jsonrpc::Params) -> Option<MethodCall> {
.expect("Failed to parse WorkDoneCreate params"); .expect("Failed to parse WorkDoneCreate params");
Self::WorkDoneProgressCreate(params) Self::WorkDoneProgressCreate(params)
} }
lsp::request::ApplyWorkspaceEdit::METHOD => {
let params: lsp::ApplyWorkspaceEditParams = params
.parse()
.expect("Failed to parse ApplyWorkspaceEdit params");
Self::ApplyWorkspaceEdit(params)
}
_ => { _ => {
log::warn!("unhandled lsp request: {}", method); log::warn!("unhandled lsp request: {}", method);
return None; return None;
@ -319,6 +313,7 @@ pub fn get(&mut self, language_config: &LanguageConfiguration) -> Result<Arc<Cli
&config.command, &config.command,
&config.args, &config.args,
language_config.config.clone(), language_config.config.clone(),
language_config.roots.clone(),
id, id,
)?; )?;
self.incoming.push(UnboundedReceiverStream::new(incoming)); self.incoming.push(UnboundedReceiverStream::new(incoming));
@ -337,7 +332,10 @@ pub fn get(&mut self, language_config: &LanguageConfiguration) -> Result<Arc<Cli
}) })
.await; .await;
value.expect("failed to initialize capabilities"); if let Err(e) = value {
log::error!("failed to initialize language server: {}", e);
return;
}
// next up, notify<initialized> // next up, notify<initialized>
_client _client

View File

@ -1,6 +1,6 @@
[package] [package]
name = "helix-syntax" name = "helix-syntax"
version = "0.5.0" version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"] authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021" edition = "2021"
license = "MPL-2.0" license = "MPL-2.0"

13
helix-syntax/README.md Normal file
View File

@ -0,0 +1,13 @@
helix-syntax
============
Syntax highlighting for helix; the (shallow) grammar submodules reside here.
Differences from nvim-treesitter
--------------------------------
As the syntaxes are commonly ported from
<https://github.com/nvim-treesitter/nvim-treesitter>, note that we do not
support the custom `#any-of` predicate which neovim supports, so one needs
to change it to `#match` with a regex.
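For example (illustrative only; the node type and capture name are made up),
an nvim-treesitter query such as
    ((identifier) @constant.builtin (#any-of? @constant.builtin "true" "false"))
would be rewritten for helix as
    ((identifier) @constant.builtin (#match? @constant.builtin "^(true|false)$"))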

View File

@ -175,7 +175,6 @@ fn build_dir(dir: &str, language: &str) {
fn main() { fn main() {
let ignore = vec![ let ignore = vec![
"tree-sitter-typescript".to_string(), "tree-sitter-typescript".to_string(),
"tree-sitter-haskell".to_string(), // aarch64 failures: https://github.com/tree-sitter/tree-sitter-haskell/issues/34
"tree-sitter-ocaml".to_string(), "tree-sitter-ocaml".to_string(),
]; ];
let dirs = collect_tree_sitter_dirs(&ignore).unwrap(); let dirs = collect_tree_sitter_dirs(&ignore).unwrap();

@ -0,0 +1 @@
Subproject commit 5dd3c62f1bbe378b220fe16b317b85247898639e

@ -0,0 +1 @@
Subproject commit 6a25376685d1d47968c2cef06d4db8d84a70025e

@ -0,0 +1 @@
Subproject commit 7af32bc04a66ab196f5b9f92ac471f29372ae2ce

@ -0,0 +1 @@
Subproject commit bd50ccf66b42c55252ac8efc1086af4ac6bab8cd

@ -0,0 +1 @@
Subproject commit 04e54ab6585dfd4fee6ddfe5849af56f101b6d4f

@ -0,0 +1 @@
Subproject commit 066e395e1107df17183cf3ae4230f1a1406cc972

@ -0,0 +1 @@
Subproject commit 0e4f0baf90b57e5aeb62dcdbf03062c6315d43ea

@ -0,0 +1 @@
Subproject commit c12e6ecb54485f764250556ffd7ccb18f8e2942b

@ -0,0 +1 @@
Subproject commit 332dc528f27044bc4427024dbb33e6941fc131f2

@ -1 +1 @@
Subproject commit 2a83dfdd759a632651f852aa4dc0af2525fae5cd Subproject commit 0fa917a7022d1cd2e9b779a6a8fc5dc7fad69c75

@ -0,0 +1 @@
Subproject commit 5e66e961eee421786bdda8495ed1db045e06b5fe

@ -1 +1 @@
Subproject commit 237f4eb4417c28f643a29d795ed227246afb66f9 Subproject commit b6ec26f181dd059eedd506fa5fbeae1b8e5556c8

@ -0,0 +1 @@
Subproject commit 3ec55082cf0be015d03148be8edfdfa8c56e77f9

@ -0,0 +1 @@
Subproject commit d98426109258b266e1e92358c5f11716d2e8f638

@ -0,0 +1 @@
Subproject commit 3b213925b9c4f42c1acfe2e10bfbb438d9c6834d

@ -0,0 +1 @@
Subproject commit 06fabca19454b2dc00c1b211a7cb7ad0bc2585f1

@ -0,0 +1 @@
Subproject commit a4b9187417d6be349ee5fd4b6e77b4172c6827dd

@ -0,0 +1 @@
Subproject commit ad8c32917a16dfbb387d1da567bf0c3fb6fffde2

@ -1 +1 @@
Subproject commit 0d63eaf94e8d6c0694551b016c802787e61b3fb2 Subproject commit 57f855461aeeca73bd4218754fb26b5ac143f98f

@ -0,0 +1 @@
Subproject commit e1cfca3c79896ff79842f057ea13e529b66af636

@ -0,0 +1 @@
Subproject commit 761eb9126b65e078b1b5770ac296b4af8870f933

@ -1 +1 @@
Subproject commit fb23ed9a99da012d86b7a5059b9d8928607cce29 Subproject commit 0a3dd53a7fc4b352a538397d054380aaa28be54c

@ -0,0 +1 @@
Subproject commit 568dd8a937347175fd58db83d4c4cdaeb6069bd2

@ -0,0 +1 @@
Subproject commit b7444181fb38e603e25ea8fcdac55f9492e49c27

@ -1 +1 @@
Subproject commit 1f27fd1dfe7f352408f01b4894c7825f3a1d6c47 Subproject commit 93331b8bd8b4ebee2b575490b2758f16ad4e9f30

View File

@ -1,6 +1,6 @@
[package] [package]
name = "helix-term" name = "helix-term"
version = "0.5.0" version = "0.6.0"
description = "A post-modern text editor." description = "A post-modern text editor."
authors = ["Blaž Hrastnik <blaz@mxxn.io>"] authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021" edition = "2021"
@ -9,6 +9,7 @@ categories = ["editor", "command-line-utilities"]
repository = "https://github.com/helix-editor/helix" repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com" homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"] include = ["src/**/*", "README.md"]
default-run = "hx"
[package.metadata.nix] [package.metadata.nix]
build = true build = true
@ -21,18 +22,18 @@ name = "hx"
path = "src/main.rs" path = "src/main.rs"
[dependencies] [dependencies]
helix-core = { version = "0.5", path = "../helix-core" } helix-core = { version = "0.6", path = "../helix-core" }
helix-view = { version = "0.5", path = "../helix-view" } helix-view = { version = "0.6", path = "../helix-view" }
helix-lsp = { version = "0.5", path = "../helix-lsp" } helix-lsp = { version = "0.6", path = "../helix-lsp" }
helix-dap = { version = "0.5", path = "../helix-dap" } helix-dap = { version = "0.6", path = "../helix-dap" }
anyhow = "1" anyhow = "1"
once_cell = "1.8" once_cell = "1.9"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot"] }
num_cpus = "1" num_cpus = "1"
tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] } tui = { path = "../helix-tui", package = "helix-tui", default-features = false, features = ["crossterm"] }
crossterm = { version = "0.22", features = ["event-stream"] } crossterm = { version = "0.23", features = ["event-stream"] }
signal-hook = "0.3" signal-hook = "0.3"
tokio-stream = "0.1" tokio-stream = "0.1"
futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false } futures-util = { version = "0.3", features = ["std", "async-await"], default-features = false }
@ -46,7 +47,7 @@ log = "0.4"
fuzzy-matcher = "0.3" fuzzy-matcher = "0.3"
ignore = "0.4" ignore = "0.4"
# markdown doc rendering # markdown doc rendering
pulldown-cmark = { version = "0.8", default-features = false } pulldown-cmark = { version = "0.9", default-features = false }
# file type detection # file type detection
content_inspector = "0.2.4" content_inspector = "0.2.4"

View File

@ -1,12 +1,17 @@
use std::borrow::Cow;
use std::process::Command; use std::process::Command;
fn main() { fn main() {
let git_hash = Command::new("git") let git_hash = Command::new("git")
.args(&["describe", "--dirty"]) .args(&["rev-parse", "HEAD"])
.output() .output()
.map(|x| String::from_utf8(x.stdout).ok())
.ok() .ok()
.flatten() .and_then(|x| String::from_utf8(x.stdout).ok());
.unwrap_or_else(|| String::from(env!("CARGO_PKG_VERSION")));
println!("cargo:rustc-env=VERSION_AND_GIT_HASH={}", git_hash); let version: Cow<_> = match git_hash {
Some(git_hash) => format!("{} ({})", env!("CARGO_PKG_VERSION"), &git_hash[..8]).into(),
None => env!("CARGO_PKG_VERSION").into(),
};
println!("cargo:rustc-env=VERSION_AND_GIT_HASH={}", version);
} }

View File

@ -1,10 +1,16 @@
use helix_core::{merge_toml_values, syntax}; use helix_core::{merge_toml_values, pos_at_coords, syntax, Selection};
use helix_dap::{self as dap, Payload, Request}; use helix_dap::{self as dap, Payload, Request};
use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap}; use helix_lsp::{lsp, util::lsp_pos_to_pos, LspProgressMap};
use helix_view::{editor::Breakpoint, theme, Editor}; use helix_view::{editor::Breakpoint, theme, Editor};
use serde_json::json;
use crate::{ use crate::{
args::Args, commands::fetch_stack_trace, compositor::Compositor, config::Config, job::Jobs, ui, args::Args,
commands::{align_view, apply_workspace_edit, fetch_stack_trace, Align},
compositor::Compositor,
config::Config,
job::Jobs,
ui,
}; };
use log::{error, warn}; use log::{error, warn};
@ -78,17 +84,27 @@ pub fn new(args: Args, mut config: Config) -> Result<Self, Error> {
None => Ok(def_lang_conf), None => Ok(def_lang_conf),
}; };
let theme = if let Some(theme) = &config.theme { let true_color = config.editor.true_color || crate::true_color();
match theme_loader.load(theme) { let theme = config
Ok(theme) => theme, .theme
Err(e) => { .as_ref()
log::warn!("failed to load theme `{}` - {}", theme, e); .and_then(|theme| {
theme_loader
.load(theme)
.map_err(|e| {
log::warn!("failed to load theme `{}` - {}", theme, e);
e
})
.ok()
.filter(|theme| (true_color || theme.is_16_color()))
})
.unwrap_or_else(|| {
if true_color {
theme_loader.default() theme_loader.default()
} else {
theme_loader.base16_default()
} }
} });
} else {
theme_loader.default()
};
let syn_loader_conf: helix_core::syntax::Configuration = lang_conf let syn_loader_conf: helix_core::syntax::Configuration = lang_conf
.and_then(|conf| conf.try_into()) .and_then(|conf| conf.try_into())
@ -118,7 +134,7 @@ pub fn new(args: Args, mut config: Config) -> Result<Self, Error> {
// Unset path to prevent accidentally saving to the original tutor file. // Unset path to prevent accidentally saving to the original tutor file.
doc_mut!(editor).set_path(None)?; doc_mut!(editor).set_path(None)?;
} else if !args.files.is_empty() { } else if !args.files.is_empty() {
let first = &args.files[0]; // we know it's not empty let first = &args.files[0].0; // we know it's not empty
if first.is_dir() { if first.is_dir() {
std::env::set_current_dir(&first)?; std::env::set_current_dir(&first)?;
editor.new_file(Action::VerticalSplit); editor.new_file(Action::VerticalSplit);
@ -126,16 +142,25 @@ pub fn new(args: Args, mut config: Config) -> Result<Self, Error> {
} else { } else {
let nr_of_files = args.files.len(); let nr_of_files = args.files.len();
editor.open(first.to_path_buf(), Action::VerticalSplit)?; editor.open(first.to_path_buf(), Action::VerticalSplit)?;
for file in args.files { for (file, pos) in args.files {
if file.is_dir() { if file.is_dir() {
return Err(anyhow::anyhow!( return Err(anyhow::anyhow!(
"expected a path to file, found a directory. (to open a directory pass it as first argument)" "expected a path to file, found a directory. (to open a directory pass it as first argument)"
)); ));
} else { } else {
editor.open(file.to_path_buf(), Action::Load)?; let doc_id = editor.open(file, Action::Load)?;
// with Action::Load all documents have the same view
let view_id = editor.tree.focus;
let doc = editor.document_mut(doc_id).unwrap();
let pos = Selection::point(pos_at_coords(doc.text().slice(..), pos, true));
doc.set_selection(view_id, pos);
} }
} }
editor.set_status(format!("Loaded {} files.", nr_of_files)); editor.set_status(format!("Loaded {} files.", nr_of_files));
// align the view to center after all files are loaded,
// does not affect views without pos since it is at the top
let (view, doc) = current!(editor);
align_view(doc, view, Align::Center);
} }
} else if stdin().is_tty() { } else if stdin().is_tty() {
editor.new_file(Action::VerticalSplit); editor.new_file(Action::VerticalSplit);
@ -197,7 +222,6 @@ pub async fn event_loop(&mut self) {
loop { loop {
if self.editor.should_close() { if self.editor.should_close() {
self.jobs.finish();
break; break;
} }
@ -328,7 +352,7 @@ pub async fn handle_debugger_message(&mut self, payload: helix_dap::Payload) {
None => return, None => return,
}; };
match payload { match payload {
Payload::Event(ev) => match ev { Payload::Event(ev) => match *ev {
Event::Stopped(events::Stopped { Event::Stopped(events::Stopped {
thread_id, thread_id,
description, description,
@ -529,12 +553,8 @@ pub async fn handle_language_server_message(
// trigger textDocument/didOpen for docs that are already open // trigger textDocument/didOpen for docs that are already open
for doc in docs { for doc in docs {
// TODO: extract and share with editor.open let language_id =
let language_id = doc doc.language_id().map(ToOwned::to_owned).unwrap_or_default();
.language()
.and_then(|s| s.split('.').last()) // source.rust
.map(ToOwned::to_owned)
.unwrap_or_default();
tokio::spawn(language_server.text_document_did_open( tokio::spawn(language_server.text_document_did_open(
doc.url().unwrap(), doc.url().unwrap(),
@ -549,6 +569,7 @@ pub async fn handle_language_server_message(
let doc = self.editor.document_by_path_mut(&path); let doc = self.editor.document_by_path_mut(&path);
if let Some(doc) = doc { if let Some(doc) = doc {
let lang_conf = doc.language_config();
let text = doc.text(); let text = doc.text();
let diagnostics = params let diagnostics = params
@ -586,19 +607,31 @@ pub async fn handle_language_server_message(
return None; return None;
}; };
let severity =
diagnostic.severity.map(|severity| match severity {
DiagnosticSeverity::ERROR => Error,
DiagnosticSeverity::WARNING => Warning,
DiagnosticSeverity::INFORMATION => Info,
DiagnosticSeverity::HINT => Hint,
severity => unreachable!(
"unrecognized diagnostic severity: {:?}",
severity
),
});
if let Some(lang_conf) = lang_conf {
if let Some(severity) = severity {
if severity < lang_conf.diagnostic_severity {
return None;
}
}
};
Some(Diagnostic { Some(Diagnostic {
range: Range { start, end }, range: Range { start, end },
line: diagnostic.range.start.line as usize, line: diagnostic.range.start.line as usize,
message: diagnostic.message, message: diagnostic.message,
severity: diagnostic.severity.map( severity,
|severity| match severity {
DiagnosticSeverity::ERROR => Error,
DiagnosticSeverity::WARNING => Warning,
DiagnosticSeverity::INFORMATION => Info,
DiagnosticSeverity::HINT => Hint,
severity => unimplemented!("{:?}", severity),
},
),
// code // code
// source // source
}) })
@ -705,14 +738,6 @@ pub async fn handle_language_server_message(
Call::MethodCall(helix_lsp::jsonrpc::MethodCall { Call::MethodCall(helix_lsp::jsonrpc::MethodCall {
method, params, id, .. method, params, id, ..
}) => { }) => {
let language_server = match self.editor.language_servers.get_by_id(server_id) {
Some(language_server) => language_server,
None => {
warn!("can't find language server with id `{}`", server_id);
return;
}
};
let call = match MethodCall::parse(&method, params) { let call = match MethodCall::parse(&method, params) {
Some(call) => call, Some(call) => call,
None => { None => {
@ -742,8 +767,42 @@ pub async fn handle_language_server_message(
if spinner.is_stopped() { if spinner.is_stopped() {
spinner.start(); spinner.start();
} }
let language_server =
match self.editor.language_servers.get_by_id(server_id) {
Some(language_server) => language_server,
None => {
warn!("can't find language server with id `{}`", server_id);
return;
}
};
tokio::spawn(language_server.reply(id, Ok(serde_json::Value::Null))); tokio::spawn(language_server.reply(id, Ok(serde_json::Value::Null)));
} }
MethodCall::ApplyWorkspaceEdit(params) => {
apply_workspace_edit(
&mut self.editor,
helix_lsp::OffsetEncoding::Utf8,
&params.edit,
);
let language_server =
match self.editor.language_servers.get_by_id(server_id) {
Some(language_server) => language_server,
None => {
warn!("can't find language server with id `{}`", server_id);
return;
}
};
tokio::spawn(language_server.reply(
id,
Ok(json!(lsp::ApplyWorkspaceEditResponse {
applied: true,
failure_reason: None,
failed_change: None,
})),
));
}
} }
} }
e => unreachable!("{:?}", e), e => unreachable!("{:?}", e),
@ -789,6 +848,8 @@ pub async fn run(&mut self) -> Result<i32, Error> {
self.event_loop().await; self.event_loop().await;
self.jobs.finish().await;
if self.editor.close_language_servers(None).await.is_err() { if self.editor.close_language_servers(None).await.is_err() {
log::error!("Timed out waiting for language servers to shutdown"); log::error!("Timed out waiting for language servers to shutdown");
}; };

View File

@ -1,5 +1,6 @@
use anyhow::{Error, Result}; use anyhow::{Error, Result};
use std::path::PathBuf; use helix_core::Position;
use std::path::{Path, PathBuf};
#[derive(Default)] #[derive(Default)]
pub struct Args { pub struct Args {
@ -7,7 +8,7 @@ pub struct Args {
pub display_version: bool, pub display_version: bool,
pub load_tutor: bool, pub load_tutor: bool,
pub verbosity: u64, pub verbosity: u64,
pub files: Vec<PathBuf>, pub files: Vec<(PathBuf, Position)>,
} }
impl Args { impl Args {
@ -41,15 +42,49 @@ pub fn parse_args() -> Result<Args> {
} }
} }
} }
arg => args.files.push(PathBuf::from(arg)), arg => args.files.push(parse_file(arg)),
} }
} }
// push the remaining args, if any to the files // push the remaining args, if any to the files
for filename in iter { for arg in iter {
args.files.push(PathBuf::from(filename)); args.files.push(parse_file(arg));
} }
Ok(args) Ok(args)
} }
} }
/// Parse arg into [`PathBuf`] and position.
pub(crate) fn parse_file(s: &str) -> (PathBuf, Position) {
let def = || (PathBuf::from(s), Position::default());
if Path::new(s).exists() {
return def();
}
split_path_row_col(s)
.or_else(|| split_path_row(s))
.unwrap_or_else(def)
}
/// Split file.rs:10:2 into [`PathBuf`], row and col.
///
/// Does not validate if file.rs is a file or directory.
fn split_path_row_col(s: &str) -> Option<(PathBuf, Position)> {
let mut s = s.rsplitn(3, ':');
let col: usize = s.next()?.parse().ok()?;
let row: usize = s.next()?.parse().ok()?;
let path = s.next()?.into();
let pos = Position::new(row.saturating_sub(1), col.saturating_sub(1));
Some((path, pos))
}
/// Split file.rs:10 into [`PathBuf`] and row.
///
/// Does not validate if file.rs is a file or directory.
fn split_path_row(s: &str) -> Option<(PathBuf, Position)> {
let (row, path) = s.rsplit_once(':')?;
let row: usize = row.parse().ok()?;
let path = path.into();
let pos = Position::new(row.saturating_sub(1), 0);
Some((path, pos))
}
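As an illustration (not part of this diff): `hx src/main.rs:10:2` opens src/main.rs with the cursor at line 10, column 2, while `hx src/main.rs:10` lands on line 10, column 1; rows and columns are 1-based on the command line and converted to a 0-based `Position` via `saturating_sub(1)`. A path that exists and happens to contain `:` is opened as-is, because `parse_file` checks `Path::new(s).exists()` before splitting.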

File diff suppressed because it is too large

View File

@ -194,7 +194,7 @@ pub fn dap_start_impl(
cx: &mut compositor::Context, cx: &mut compositor::Context,
name: Option<&str>, name: Option<&str>,
socket: Option<std::net::SocketAddr>, socket: Option<std::net::SocketAddr>,
params: Option<Vec<&str>>, params: Option<Vec<std::borrow::Cow<str>>>,
) -> Result<(), anyhow::Error> { ) -> Result<(), anyhow::Error> {
let doc = doc!(cx.editor); let doc = doc!(cx.editor);
@ -242,7 +242,7 @@ pub fn dap_start_impl(
let mut param = x.to_string(); let mut param = x.to_string();
if let Some(DebugConfigCompletion::Advanced(cfg)) = template.completion.get(i) { if let Some(DebugConfigCompletion::Advanced(cfg)) = template.completion.get(i) {
if matches!(cfg.completion.as_deref(), Some("filename" | "directory")) { if matches!(cfg.completion.as_deref(), Some("filename" | "directory")) {
param = std::fs::canonicalize(x) param = std::fs::canonicalize(x.as_ref())
.ok() .ok()
.and_then(|pb| pb.into_os_string().into_string().ok()) .and_then(|pb| pb.into_os_string().into_string().ok())
.unwrap_or_else(|| x.to_string()); .unwrap_or_else(|| x.to_string());
@ -408,7 +408,7 @@ fn debug_parameter_prompt(
cx, cx,
Some(&config_name), Some(&config_name),
None, None,
Some(params.iter().map(|x| x.as_str()).collect()), Some(params.iter().map(|x| x.into()).collect()),
) { ) {
cx.editor.set_error(e.to_string()); cx.editor.set_error(e.to_string());
} }
@ -651,7 +651,7 @@ pub fn dap_variables(cx: &mut Context) {
} }
let contents = Text::from(tui::text::Text::from(variables)); let contents = Text::from(tui::text::Text::from(variables));
let popup = Popup::new(contents); let popup = Popup::new("dap-variables", contents);
cx.push_layer(Box::new(popup)); cx.push_layer(Box::new(popup));
} }

View File

@ -7,7 +7,7 @@
use crossterm::event::Event; use crossterm::event::Event;
use tui::buffer::Buffer as Surface; use tui::buffer::Buffer as Surface;
pub type Callback = Box<dyn FnOnce(&mut Compositor)>; pub type Callback = Box<dyn FnOnce(&mut Compositor, &mut Context)>;
// --> EventResult should have a callback that takes a context with methods like .popup(), // --> EventResult should have a callback that takes a context with methods like .popup(),
// .prompt() etc. That way we can abstract it from the renderer. // .prompt() etc. That way we can abstract it from the renderer.
@ -55,15 +55,20 @@ fn cursor(&self, _area: Rect, _ctx: &Editor) -> (Option<Position>, CursorKind) {
/// May be used by the parent component to compute the child area. /// May be used by the parent component to compute the child area.
/// viewport is the maximum allowed area, and the child should stay within those bounds. /// viewport is the maximum allowed area, and the child should stay within those bounds.
///
/// The returned size might be larger than the viewport if the child is too big to fit.
/// In this case the parent can use the values to calculate scroll.
fn required_size(&mut self, _viewport: (u16, u16)) -> Option<(u16, u16)> { fn required_size(&mut self, _viewport: (u16, u16)) -> Option<(u16, u16)> {
// TODO: for scrolling, the scroll wrapper should place a size + offset on the Context
// that way render can use it
None None
} }
fn type_name(&self) -> &'static str { fn type_name(&self) -> &'static str {
std::any::type_name::<Self>() std::any::type_name::<Self>()
} }
fn id(&self) -> Option<&'static str> {
None
}
} }
use anyhow::Error; use anyhow::Error;
@ -121,17 +126,32 @@ pub fn push(&mut self, mut layer: Box<dyn Component>) {
self.layers.push(layer); self.layers.push(layer);
} }
/// Replace a component that has the given `id` with the new layer and if
/// no component is found, push the layer normally.
pub fn replace_or_push(&mut self, id: &'static str, layer: Box<dyn Component>) {
if let Some(component) = self.find_id(id) {
*component = layer;
} else {
self.push(layer)
}
}
pub fn pop(&mut self) -> Option<Box<dyn Component>> { pub fn pop(&mut self) -> Option<Box<dyn Component>> {
self.layers.pop() self.layers.pop()
} }
pub fn handle_event(&mut self, event: Event, cx: &mut Context) -> bool { pub fn handle_event(&mut self, event: Event, cx: &mut Context) -> bool {
// If it is a key event and a macro is being recorded, push the key event to the recording.
if let (Event::Key(key), Some((_, keys))) = (event, &mut cx.editor.macro_recording) {
keys.push(key.into());
}
// propagate events through the layers until we either find a layer that consumes it or we // propagate events through the layers until we either find a layer that consumes it or we
// run out of layers (event bubbling) // run out of layers (event bubbling)
for layer in self.layers.iter_mut().rev() { for layer in self.layers.iter_mut().rev() {
match layer.handle_event(event, cx) { match layer.handle_event(event, cx) {
EventResult::Consumed(Some(callback)) => { EventResult::Consumed(Some(callback)) => {
callback(self); callback(self, cx);
return true; return true;
} }
EventResult::Consumed(None) => return true, EventResult::Consumed(None) => return true,
@ -184,6 +204,14 @@ pub fn find<T: 'static>(&mut self) -> Option<&mut T> {
.find(|component| component.type_name() == type_name) .find(|component| component.type_name() == type_name)
.and_then(|component| component.as_any_mut().downcast_mut()) .and_then(|component| component.as_any_mut().downcast_mut())
} }
pub fn find_id<T: 'static>(&mut self, id: &'static str) -> Option<&mut T> {
let type_name = std::any::type_name::<T>();
self.layers
.iter_mut()
.find(|component| component.type_name() == type_name && component.id() == Some(id))
.and_then(|component| component.as_any_mut().downcast_mut())
}
} }
// View casting, taken straight from Cursive // View casting, taken straight from Cursive

View File

@ -20,14 +20,18 @@ pub struct LspConfig {
pub display_messages: bool, pub display_messages: bool,
} }
#[test] #[cfg(test)]
fn parsing_keymaps_config_file() { mod tests {
use crate::keymap; use super::*;
use crate::keymap::Keymap;
use helix_core::hashmap;
use helix_view::document::Mode;
let sample_keymaps = r#" #[test]
fn parsing_keymaps_config_file() {
use crate::keymap;
use crate::keymap::Keymap;
use helix_core::hashmap;
use helix_view::document::Mode;
let sample_keymaps = r#"
[keys.insert] [keys.insert]
y = "move_line_down" y = "move_line_down"
S-C-a = "delete_selection" S-C-a = "delete_selection"
@ -36,19 +40,20 @@ fn parsing_keymaps_config_file() {
A-F12 = "move_next_word_end" A-F12 = "move_next_word_end"
"#; "#;
assert_eq!( assert_eq!(
toml::from_str::<Config>(sample_keymaps).unwrap(), toml::from_str::<Config>(sample_keymaps).unwrap(),
Config { Config {
keys: Keymaps(hashmap! { keys: Keymaps(hashmap! {
Mode::Insert => Keymap::new(keymap!({ "Insert mode" Mode::Insert => Keymap::new(keymap!({ "Insert mode"
"y" => move_line_down, "y" => move_line_down,
"S-C-a" => delete_selection, "S-C-a" => delete_selection,
})), })),
Mode::Normal => Keymap::new(keymap!({ "Normal mode" Mode::Normal => Keymap::new(keymap!({ "Normal mode"
"A-F12" => move_next_word_end, "A-F12" => move_next_word_end,
})), })),
}), }),
..Default::default() ..Default::default()
} }
); );
}
} }

View File

@ -22,8 +22,8 @@ pub struct Jobs {
} }
impl Job { impl Job {
pub fn new<F: Future<Output = anyhow::Result<()>> + Send + 'static>(f: F) -> Job { pub fn new<F: Future<Output = anyhow::Result<()>> + Send + 'static>(f: F) -> Self {
Job { Self {
future: f.map(|r| r.map(|()| None)).boxed(), future: f.map(|r| r.map(|()| None)).boxed(),
wait: false, wait: false,
} }
@ -31,22 +31,22 @@ pub fn new<F: Future<Output = anyhow::Result<()>> + Send + 'static>(f: F) -> Job
pub fn with_callback<F: Future<Output = anyhow::Result<Callback>> + Send + 'static>( pub fn with_callback<F: Future<Output = anyhow::Result<Callback>> + Send + 'static>(
f: F, f: F,
) -> Job { ) -> Self {
Job { Self {
future: f.map(|r| r.map(Some)).boxed(), future: f.map(|r| r.map(Some)).boxed(),
wait: false, wait: false,
} }
} }
pub fn wait_before_exiting(mut self) -> Job { pub fn wait_before_exiting(mut self) -> Self {
self.wait = true; self.wait = true;
self self
} }
} }
impl Jobs { impl Jobs {
pub fn new() -> Jobs { pub fn new() -> Self {
Jobs::default() Self::default()
} }
pub fn spawn<F: Future<Output = anyhow::Result<()>> + Send + 'static>(&mut self, f: F) { pub fn spawn<F: Future<Output = anyhow::Result<()>> + Send + 'static>(&mut self, f: F) {
@ -93,8 +93,8 @@ pub fn add(&self, j: Job) {
} }
/// Blocks until all the jobs that need to be waited on are done. /// Blocks until all the jobs that need to be waited on are done.
pub fn finish(&mut self) { pub async fn finish(&mut self) {
let wait_futures = std::mem::take(&mut self.wait_futures); let wait_futures = std::mem::take(&mut self.wait_futures);
helix_lsp::block_on(wait_futures.for_each(|_| future::ready(()))); wait_futures.for_each(|_| future::ready(())).await
} }
} }

View File

@ -1,4 +1,4 @@
pub use crate::commands::Command; pub use crate::commands::MappableCommand;
use crate::config::Config; use crate::config::Config;
use helix_core::hashmap; use helix_core::hashmap;
use helix_view::{document::Mode, info::Info, input::KeyEvent}; use helix_view::{document::Mode, info::Info, input::KeyEvent};
@ -92,7 +92,7 @@ macro_rules! alt {
#[macro_export] #[macro_export]
macro_rules! keymap { macro_rules! keymap {
(@trie $cmd:ident) => { (@trie $cmd:ident) => {
$crate::keymap::KeyTrie::Leaf($crate::commands::Command::$cmd) $crate::keymap::KeyTrie::Leaf($crate::commands::MappableCommand::$cmd)
}; };
(@trie (@trie
@ -120,7 +120,7 @@ macro_rules! keymap {
_key, _key,
keymap!(@trie $value) keymap!(@trie $value)
); );
debug_assert!(_duplicate.is_none(), "Duplicate key found: {:?}", _duplicate.unwrap()); assert!(_duplicate.is_none(), "Duplicate key found: {:?}", _duplicate.unwrap());
_order.push(_key); _order.push(_key);
)+ )+
)* )*
@ -222,9 +222,8 @@ pub fn infobox(&self) -> Info {
.map(|(desc, keys)| (desc.strip_prefix(&prefix).unwrap(), keys)) .map(|(desc, keys)| (desc.strip_prefix(&prefix).unwrap(), keys))
.collect(); .collect();
} }
Info::new(self.name(), body) Info::from_keymap(self.name(), body)
} }
/// Get a reference to the key trie node's order. /// Get a reference to the key trie node's order.
pub fn order(&self) -> &[KeyEvent] { pub fn order(&self) -> &[KeyEvent] {
self.order.as_slice() self.order.as_slice()
@ -260,8 +259,8 @@ fn deref_mut(&mut self) -> &mut Self::Target {
#[derive(Debug, Clone, PartialEq, Deserialize)] #[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(untagged)] #[serde(untagged)]
pub enum KeyTrie { pub enum KeyTrie {
Leaf(Command), Leaf(MappableCommand),
Sequence(Vec<Command>), Sequence(Vec<MappableCommand>),
Node(KeyTrieNode), Node(KeyTrieNode),
} }
@ -304,9 +303,9 @@ pub fn search(&self, keys: &[KeyEvent]) -> Option<&KeyTrie> {
pub enum KeymapResultKind { pub enum KeymapResultKind {
/// Needs more keys to execute a command. Contains valid keys for next keystroke. /// Needs more keys to execute a command. Contains valid keys for next keystroke.
Pending(KeyTrieNode), Pending(KeyTrieNode),
Matched(Command), Matched(MappableCommand),
/// Matched a sequence of commands to execute. /// Matched a sequence of commands to execute.
MatchedSequence(Vec<Command>), MatchedSequence(Vec<MappableCommand>),
/// Key was not found in the root keymap /// Key was not found in the root keymap
NotFound, NotFound,
/// Key is invalid in combination with previous keys. Contains keys leading upto /// Key is invalid in combination with previous keys. Contains keys leading upto
@ -344,7 +343,7 @@ pub struct Keymap {
impl Keymap { impl Keymap {
pub fn new(root: KeyTrie) -> Self { pub fn new(root: KeyTrie) -> Self {
Keymap { Self {
root, root,
state: Vec::new(), state: Vec::new(),
sticky: None, sticky: None,
@ -368,7 +367,7 @@ pub fn pending(&self) -> &[KeyEvent] {
/// key cancels pending keystrokes. If there are no pending keystrokes but a /// key cancels pending keystrokes. If there are no pending keystrokes but a
/// sticky node is in use, it will be cleared. /// sticky node is in use, it will be cleared.
pub fn get(&mut self, key: KeyEvent) -> KeymapResult { pub fn get(&mut self, key: KeyEvent) -> KeymapResult {
if let key!(Esc) = key { if key!(Esc) == key {
if !self.state.is_empty() { if !self.state.is_empty() {
return KeymapResult::new( return KeymapResult::new(
// Note that Esc is not included here // Note that Esc is not included here
@ -386,10 +385,10 @@ pub fn get(&mut self, key: KeyEvent) -> KeymapResult {
}; };
let trie = match trie_node.search(&[*first]) { let trie = match trie_node.search(&[*first]) {
Some(&KeyTrie::Leaf(cmd)) => { Some(KeyTrie::Leaf(ref cmd)) => {
return KeymapResult::new(KeymapResultKind::Matched(cmd), self.sticky()) return KeymapResult::new(KeymapResultKind::Matched(cmd.clone()), self.sticky())
} }
Some(&KeyTrie::Sequence(ref cmds)) => { Some(KeyTrie::Sequence(ref cmds)) => {
return KeymapResult::new( return KeymapResult::new(
KeymapResultKind::MatchedSequence(cmds.clone()), KeymapResultKind::MatchedSequence(cmds.clone()),
self.sticky(), self.sticky(),
@ -408,9 +407,9 @@ pub fn get(&mut self, key: KeyEvent) -> KeymapResult {
} }
KeymapResult::new(KeymapResultKind::Pending(map.clone()), self.sticky()) KeymapResult::new(KeymapResultKind::Pending(map.clone()), self.sticky())
} }
Some(&KeyTrie::Leaf(cmd)) => { Some(&KeyTrie::Leaf(ref cmd)) => {
self.state.clear(); self.state.clear();
return KeymapResult::new(KeymapResultKind::Matched(cmd), self.sticky()); return KeymapResult::new(KeymapResultKind::Matched(cmd.clone()), self.sticky());
} }
Some(&KeyTrie::Sequence(ref cmds)) => { Some(&KeyTrie::Sequence(ref cmds)) => {
self.state.clear(); self.state.clear();
@ -477,7 +476,7 @@ fn deref_mut(&mut self) -> &mut Self::Target {
} }
impl Default for Keymaps { impl Default for Keymaps {
fn default() -> Keymaps { fn default() -> Self {
let normal = keymap!({ "Normal mode" let normal = keymap!({ "Normal mode"
"h" | "left" => move_char_left, "h" | "left" => move_char_left,
"j" | "down" => move_line_down, "j" | "down" => move_line_down,
@ -521,9 +520,10 @@ fn default() -> Keymaps {
"r" => goto_reference, "r" => goto_reference,
"i" => goto_implementation, "i" => goto_implementation,
"t" => goto_window_top, "t" => goto_window_top,
"m" => goto_window_middle, "c" => goto_window_center,
"b" => goto_window_bottom, "b" => goto_window_bottom,
"a" => goto_last_accessed_file, "a" => goto_last_accessed_file,
"m" => goto_last_modified_file,
"n" => goto_next_buffer, "n" => goto_next_buffer,
"p" => goto_previous_buffer, "p" => goto_previous_buffer,
"." => goto_last_modification, "." => goto_last_modification,
@ -551,6 +551,11 @@ fn default() -> Keymaps {
"S" => split_selection, "S" => split_selection,
";" => collapse_selection, ";" => collapse_selection,
"A-;" => flip_selections, "A-;" => flip_selections,
"A-k" => expand_selection,
"A-j" => shrink_selection,
"A-h" => select_prev_sibling,
"A-l" => select_next_sibling,
"%" => select_all, "%" => select_all,
"x" => extend_line, "x" => extend_line,
"X" => extend_to_line_bounds, "X" => extend_to_line_bounds,
@ -592,6 +597,9 @@ fn default() -> Keymaps {
// paste_all // paste_all
"P" => paste_before, "P" => paste_before,
"Q" => record_macro,
"q" => replay_macro,
">" => indent, ">" => indent,
"<" => unindent, "<" => unindent,
"=" => format_selections, "=" => format_selections,
@ -613,6 +621,8 @@ fn default() -> Keymaps {
"A-(" => rotate_selection_contents_backward, "A-(" => rotate_selection_contents_backward,
"A-)" => rotate_selection_contents_forward, "A-)" => rotate_selection_contents_forward,
"A-:" => ensure_selections_forward,
"esc" => normal_mode, "esc" => normal_mode,
"C-b" | "pageup" => page_up, "C-b" | "pageup" => page_up,
"C-f" | "pagedown" => page_down, "C-f" | "pagedown" => page_down,
@ -640,7 +650,7 @@ fn default() -> Keymaps {
"tab" => jump_forward, // tab == <C-i> "tab" => jump_forward, // tab == <C-i>
"C-o" => jump_backward, "C-o" => jump_backward,
// "C-s" => save_selection, "C-s" => save_selection,
"space" => { "Space" "space" => { "Space"
"f" => file_picker, "f" => file_picker,
@ -763,8 +773,10 @@ fn default() -> Keymaps {
"del" => delete_char_forward, "del" => delete_char_forward,
"C-d" => delete_char_forward, "C-d" => delete_char_forward,
"ret" => insert_newline, "ret" => insert_newline,
"C-j" => insert_newline,
"tab" => insert_tab, "tab" => insert_tab,
"C-w" => delete_word_backward, "C-w" => delete_word_backward,
"A-backspace" => delete_word_backward,
"A-d" => delete_word_forward, "A-d" => delete_word_forward,
"left" => move_char_left, "left" => move_char_left,
@ -779,6 +791,8 @@ fn default() -> Keymaps {
"A-left" => move_prev_word_end, "A-left" => move_prev_word_end,
"A-f" => move_next_word_start, "A-f" => move_next_word_start,
"A-right" => move_next_word_start, "A-right" => move_next_word_start,
"A-<" => goto_file_start,
"A->" => goto_file_end,
"pageup" => page_up, "pageup" => page_up,
"pagedown" => page_down, "pagedown" => page_down,
"home" => goto_line_start, "home" => goto_line_start,
@ -792,7 +806,7 @@ fn default() -> Keymaps {
"C-x" => completion, "C-x" => completion,
"C-r" => insert_register, "C-r" => insert_register,
}); });
Keymaps(hashmap!( Self(hashmap!(
Mode::Normal => Keymap::new(normal), Mode::Normal => Keymap::new(normal),
Mode::Select => Keymap::new(select), Mode::Select => Keymap::new(select),
Mode::Insert => Keymap::new(insert), Mode::Insert => Keymap::new(insert),
@ -852,36 +866,36 @@ fn merge_partial_keys() {
let keymap = merged_config.keys.0.get_mut(&Mode::Normal).unwrap(); let keymap = merged_config.keys.0.get_mut(&Mode::Normal).unwrap();
assert_eq!( assert_eq!(
keymap.get(key!('i')).kind, keymap.get(key!('i')).kind,
KeymapResultKind::Matched(Command::normal_mode), KeymapResultKind::Matched(MappableCommand::normal_mode),
"Leaf should replace leaf" "Leaf should replace leaf"
); );
assert_eq!( assert_eq!(
keymap.get(key!('无')).kind, keymap.get(key!('无')).kind,
KeymapResultKind::Matched(Command::insert_mode), KeymapResultKind::Matched(MappableCommand::insert_mode),
"New leaf should be present in merged keymap" "New leaf should be present in merged keymap"
); );
// Assumes that z is a node in the default keymap // Assumes that z is a node in the default keymap
assert_eq!( assert_eq!(
keymap.get(key!('z')).kind, keymap.get(key!('z')).kind,
KeymapResultKind::Matched(Command::jump_backward), KeymapResultKind::Matched(MappableCommand::jump_backward),
"Leaf should replace node" "Leaf should replace node"
); );
// Assumes that `g` is a node in default keymap // Assumes that `g` is a node in default keymap
assert_eq!( assert_eq!(
keymap.root().search(&[key!('g'), key!('$')]).unwrap(), keymap.root().search(&[key!('g'), key!('$')]).unwrap(),
&KeyTrie::Leaf(Command::goto_line_end), &KeyTrie::Leaf(MappableCommand::goto_line_end),
"Leaf should be present in merged subnode" "Leaf should be present in merged subnode"
); );
// Assumes that `gg` is in default keymap // Assumes that `gg` is in default keymap
assert_eq!( assert_eq!(
keymap.root().search(&[key!('g'), key!('g')]).unwrap(), keymap.root().search(&[key!('g'), key!('g')]).unwrap(),
&KeyTrie::Leaf(Command::delete_char_forward), &KeyTrie::Leaf(MappableCommand::delete_char_forward),
"Leaf should replace old leaf in merged subnode" "Leaf should replace old leaf in merged subnode"
); );
// Assumes that `ge` is in default keymap // Assumes that `ge` is in default keymap
assert_eq!( assert_eq!(
keymap.root().search(&[key!('g'), key!('e')]).unwrap(), keymap.root().search(&[key!('g'), key!('e')]).unwrap(),
&KeyTrie::Leaf(Command::goto_last_line), &KeyTrie::Leaf(MappableCommand::goto_last_line),
"Old leaves in subnode should be present in merged node" "Old leaves in subnode should be present in merged node"
); );
@ -915,7 +929,7 @@ fn order_should_be_set() {
.root() .root()
.search(&[key!(' '), key!('s'), key!('v')]) .search(&[key!(' '), key!('s'), key!('v')])
.unwrap(), .unwrap(),
&KeyTrie::Leaf(Command::vsplit), &KeyTrie::Leaf(MappableCommand::vsplit),
"Leaf should be present in merged subnode" "Leaf should be present in merged subnode"
); );
// Make sure an order was set during merge // Make sure an order was set during merge

View File

@ -9,3 +9,14 @@
pub mod job; pub mod job;
pub mod keymap; pub mod keymap;
pub mod ui; pub mod ui;
#[cfg(not(windows))]
fn true_color() -> bool {
std::env::var("COLORTERM")
.map(|v| matches!(v.as_str(), "truecolor" | "24bit"))
.unwrap_or(false)
}
#[cfg(windows)]
fn true_color() -> bool {
true
}
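
A minimal usage sketch of the helper above (hypothetical, not part of this diff): the detected color support could drive the choice between the truecolor default theme and the 16-color fallback ("base16_default" is the theme name added to the completer later in this diff).

// Hypothetical sketch: pick a default theme name based on true_color().
fn default_theme_name() -> &'static str {
    if true_color() {
        "default"
    } else {
        "base16_default"
    }
}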

View File

@ -56,7 +56,7 @@ async fn main_impl() -> Result<i32> {
hx [FLAGS] [files]... hx [FLAGS] [files]...
ARGS: ARGS:
<files>... Sets the input file to use <files>... Sets the input file to use, position can also be specified via file[:row[:col]]
FLAGS: FLAGS:
-h, --help Prints help information -h, --help Prints help information
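
A hedged sketch of how a "file[:row[:col]]" argument could be split (hypothetical helper, not the parser used by the commit); paths containing ':' and Windows drive letters are ignored here.

// Hypothetical helper: split "src/main.rs:10:4" into (path, row, col),
// defaulting row/col to 1 when they are absent or not numeric.
fn split_file_spec(spec: &str) -> (String, usize, usize) {
    let mut parts = spec.split(':');
    let file = parts.next().unwrap_or("").to_string();
    let row = parts.next().and_then(|s| s.parse().ok()).unwrap_or(1);
    let col = parts.next().and_then(|s| s.parse().ok()).unwrap_or(1);
    (file, row, col)
}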

View File

@ -154,8 +154,19 @@ fn item_to_transaction(
); );
doc.apply(&transaction, view.id); doc.apply(&transaction, view.id);
if let Some(additional_edits) = &item.additional_text_edits { // apply additional edits, mostly used to auto import unqualified types
// gopls uses this to add extra imports let resolved_additional_text_edits = if item.additional_text_edits.is_some() {
None
} else {
Self::resolve_completion_item(doc, item.clone())
.and_then(|item| item.additional_text_edits)
};
if let Some(additional_edits) = item
.additional_text_edits
.as_ref()
.or_else(|| resolved_additional_text_edits.as_ref())
{
if !additional_edits.is_empty() { if !additional_edits.is_empty() {
let transaction = util::generate_transaction_from_edits( let transaction = util::generate_transaction_from_edits(
doc.text(), doc.text(),
@ -168,7 +179,7 @@ fn item_to_transaction(
} }
}; };
}); });
let popup = Popup::new(menu); let popup = Popup::new("completion", menu);
let mut completion = Self { let mut completion = Self {
popup, popup,
start_offset, start_offset,
@ -181,6 +192,31 @@ fn item_to_transaction(
completion completion
} }
fn resolve_completion_item(
doc: &Document,
completion_item: lsp::CompletionItem,
) -> Option<CompletionItem> {
let language_server = doc.language_server()?;
let completion_resolve_provider = language_server
.capabilities()
.completion_provider
.as_ref()?
.resolve_provider;
if completion_resolve_provider != Some(true) {
return None;
}
let future = language_server.resolve_completion_item(completion_item);
let response = helix_lsp::block_on(future);
match response {
Ok(completion_item) => Some(completion_item),
Err(err) => {
log::error!("execute LSP command: {}", err);
None
}
}
}
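
The guard in the new resolve path can be summarized by a small predicate (a sketch using names from the diff, not code from the commit): a blocking completionItem/resolve round-trip is only attempted when the item arrived without additional_text_edits and the server advertises resolveProvider.

// Sketch of the condition under which resolve_completion_item is consulted.
fn should_resolve(item: &lsp::CompletionItem, resolve_provider: Option<bool>) -> bool {
    item.additional_text_edits.is_none() && resolve_provider == Some(true)
}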
pub fn recompute_filter(&mut self, editor: &Editor) { pub fn recompute_filter(&mut self, editor: &Editor) {
// recompute menu based on matches // recompute menu based on matches
let menu = self.popup.contents_mut(); let menu = self.popup.contents_mut();
@ -268,6 +304,9 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
let cursor_pos = doc.selection(view.id).primary().cursor(text); let cursor_pos = doc.selection(view.id).primary().cursor(text);
let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width()); let coords = helix_core::visual_coords_at_pos(text, cursor_pos, doc.tab_width());
let cursor_pos = (coords.row - view.offset.row) as u16; let cursor_pos = (coords.row - view.offset.row) as u16;
let markdown_ui =
|content, syn_loader| Markdown::new(content, syn_loader).style_group("completion");
let mut markdown_doc = match &option.documentation { let mut markdown_doc = match &option.documentation {
Some(lsp::Documentation::String(contents)) Some(lsp::Documentation::String(contents))
| Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { | Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
@ -275,7 +314,7 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
value: contents, value: contents,
})) => { })) => {
// TODO: convert to wrapped text // TODO: convert to wrapped text
Markdown::new( markdown_ui(
format!( format!(
"```{}\n{}\n```\n{}", "```{}\n{}\n```\n{}",
language, language,
@ -290,7 +329,7 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
value: contents, value: contents,
})) => { })) => {
// TODO: set language based on doc scope // TODO: set language based on doc scope
Markdown::new( markdown_ui(
format!( format!(
"```{}\n{}\n```\n{}", "```{}\n{}\n```\n{}",
language, language,
@ -304,7 +343,7 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
// TODO: copied from above // TODO: copied from above
// TODO: set language based on doc scope // TODO: set language based on doc scope
Markdown::new( markdown_ui(
format!( format!(
"```{}\n{}\n```", "```{}\n{}\n```",
language, language,
@ -328,8 +367,8 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
let y = popup_y; let y = popup_y;
if let Some((rel_width, rel_height)) = markdown_doc.required_size((width, height)) { if let Some((rel_width, rel_height)) = markdown_doc.required_size((width, height)) {
width = rel_width; width = rel_width.min(width);
height = rel_height; height = rel_height.min(height);
} }
Rect::new(x, y, width, height) Rect::new(x, y, width, height)
} else { } else {

View File

@ -7,8 +7,10 @@
}; };
use helix_core::{ use helix_core::{
coords_at_pos, coords_at_pos, encoding,
graphemes::{ensure_grapheme_boundary_next, next_grapheme_boundary, prev_grapheme_boundary}, graphemes::{
ensure_grapheme_boundary_next_byte, next_grapheme_boundary, prev_grapheme_boundary,
},
movement::Direction, movement::Direction,
syntax::{self, HighlightEvent}, syntax::{self, HighlightEvent},
unicode::segmentation::UnicodeSegmentation, unicode::segmentation::UnicodeSegmentation,
@ -17,8 +19,8 @@
}; };
use helix_view::{ use helix_view::{
document::{Mode, SCRATCH_BUFFER_NAME}, document::{Mode, SCRATCH_BUFFER_NAME},
editor::CursorShapeConfig,
graphics::{CursorKind, Modifier, Rect, Style}, graphics::{CursorKind, Modifier, Rect, Style},
info::Info,
input::KeyEvent, input::KeyEvent,
keyboard::{KeyCode, KeyModifiers}, keyboard::{KeyCode, KeyModifiers},
Document, Editor, Theme, View, Document, Editor, Theme, View,
@ -31,10 +33,9 @@
pub struct EditorView { pub struct EditorView {
keymaps: Keymaps, keymaps: Keymaps,
on_next_key: Option<Box<dyn FnOnce(&mut commands::Context, KeyEvent)>>, on_next_key: Option<Box<dyn FnOnce(&mut commands::Context, KeyEvent)>>,
last_insert: (commands::Command, Vec<KeyEvent>), last_insert: (commands::MappableCommand, Vec<KeyEvent>),
pub(crate) completion: Option<Completion>, pub(crate) completion: Option<Completion>,
spinners: ProgressSpinners, spinners: ProgressSpinners,
autoinfo: Option<Info>,
} }
impl Default for EditorView { impl Default for EditorView {
@ -48,10 +49,9 @@ pub fn new(keymaps: Keymaps) -> Self {
Self { Self {
keymaps, keymaps,
on_next_key: None, on_next_key: None,
last_insert: (commands::Command::normal_mode, Vec::new()), last_insert: (commands::MappableCommand::normal_mode, Vec::new()),
completion: None, completion: None,
spinners: ProgressSpinners::default(), spinners: ProgressSpinners::default(),
autoinfo: None,
} }
} }
@ -106,13 +106,12 @@ pub fn render_view(
} }
} }
let highlights = let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme);
Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, &editor.syn_loader);
let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme)); let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme));
let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused { let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused {
Box::new(syntax::merge( Box::new(syntax::merge(
highlights, highlights,
Self::doc_selection_highlights(doc, view, theme), Self::doc_selection_highlights(doc, view, theme, &editor.config.cursor_shape),
)) ))
} else { } else {
Box::new(highlights) Box::new(highlights)
@ -130,8 +129,7 @@ pub fn render_view(
let x = area.right(); let x = area.right();
let border_style = theme.get("ui.window"); let border_style = theme.get("ui.window");
for y in area.top()..area.bottom() { for y in area.top()..area.bottom() {
surface surface[(x, y)]
.get_mut(x, y)
.set_symbol(tui::symbols::line::VERTICAL) .set_symbol(tui::symbols::line::VERTICAL)
//.set_symbol(" ") //.set_symbol(" ")
.set_style(border_style); .set_style(border_style);
@ -154,8 +152,7 @@ pub fn doc_syntax_highlights<'doc>(
doc: &'doc Document, doc: &'doc Document,
offset: Position, offset: Position,
height: u16, height: u16,
theme: &Theme, _theme: &Theme,
loader: &syntax::Loader,
) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> { ) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> {
let text = doc.text().slice(..); let text = doc.text().slice(..);
let last_line = std::cmp::min( let last_line = std::cmp::min(
@ -172,48 +169,34 @@ pub fn doc_syntax_highlights<'doc>(
start..end start..end
}; };
// TODO: range doesn't actually restrict source, just highlight range match doc.syntax() {
let highlights = match doc.syntax() {
Some(syntax) => { Some(syntax) => {
let scopes = theme.scopes(); let iter = syntax
syntax // TODO: range doesn't actually restrict source, just highlight range
.highlight_iter(text.slice(..), Some(range), None, |language| { .highlight_iter(text.slice(..), Some(range), None)
loader.language_configuration_for_injection_string(language)
.and_then(|language_config| {
let config = language_config.highlight_config(scopes)?;
let config_ref = config.as_ref();
// SAFETY: the referenced `HighlightConfiguration` behind
// the `Arc` is guaranteed to remain valid throughout the
// duration of the highlight.
let config_ref = unsafe {
std::mem::transmute::<
_,
&'static syntax::HighlightConfiguration,
>(config_ref)
};
Some(config_ref)
})
})
.map(|event| event.unwrap()) .map(|event| event.unwrap())
.collect() // TODO: we collect here to avoid holding the lock, fix later .map(move |event| match event {
} // convert byte offsets to char offset
None => vec![HighlightEvent::Source { HighlightEvent::Source { start, end } => {
start: range.start, let start =
end: range.end, text.byte_to_char(ensure_grapheme_boundary_next_byte(text, start));
}], let end =
} text.byte_to_char(ensure_grapheme_boundary_next_byte(text, end));
.into_iter() HighlightEvent::Source { start, end }
.map(move |event| match event { }
// convert byte offsets to char offset event => event,
HighlightEvent::Source { start, end } => { });
let start = ensure_grapheme_boundary_next(text, text.byte_to_char(start));
let end = ensure_grapheme_boundary_next(text, text.byte_to_char(end));
HighlightEvent::Source { start, end }
}
event => event,
});
Box::new(highlights) Box::new(iter)
}
None => Box::new(
[HighlightEvent::Source {
start: text.byte_to_char(range.start),
end: text.byte_to_char(range.end),
}]
.into_iter(),
),
}
} }
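
The byte-to-char conversion done in the new highlight mapping boils down to ropey's Rope::byte_to_char; a minimal sketch follows (the grapheme-boundary snapping via ensure_grapheme_boundary_next_byte is omitted here).

use ropey::Rope;

// Map a byte range emitted by the highlighter onto char offsets.
fn byte_range_to_char_range(text: &Rope, start_byte: usize, end_byte: usize) -> (usize, usize) {
    (text.byte_to_char(start_byte), text.byte_to_char(end_byte))
}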
/// Get highlight spans for document diagnostics /// Get highlight spans for document diagnostics
@ -245,11 +228,16 @@ pub fn doc_selection_highlights(
doc: &Document, doc: &Document,
view: &View, view: &View,
theme: &Theme, theme: &Theme,
cursor_shape_config: &CursorShapeConfig,
) -> Vec<(usize, std::ops::Range<usize>)> { ) -> Vec<(usize, std::ops::Range<usize>)> {
let text = doc.text().slice(..); let text = doc.text().slice(..);
let selection = doc.selection(view.id); let selection = doc.selection(view.id);
let primary_idx = selection.primary_index(); let primary_idx = selection.primary_index();
let mode = doc.mode();
let cursorkind = cursor_shape_config.from_mode(mode);
let cursor_is_block = cursorkind == CursorKind::Block;
let selection_scope = theme let selection_scope = theme
.find_scope_index("ui.selection") .find_scope_index("ui.selection")
.expect("could not find `ui.selection` scope in the theme!"); .expect("could not find `ui.selection` scope in the theme!");
@ -257,7 +245,7 @@ pub fn doc_selection_highlights(
.find_scope_index("ui.cursor") .find_scope_index("ui.cursor")
.unwrap_or(selection_scope); .unwrap_or(selection_scope);
let cursor_scope = match doc.mode() { let cursor_scope = match mode {
Mode::Insert => theme.find_scope_index("ui.cursor.insert"), Mode::Insert => theme.find_scope_index("ui.cursor.insert"),
Mode::Select => theme.find_scope_index("ui.cursor.select"), Mode::Select => theme.find_scope_index("ui.cursor.select"),
Mode::Normal => Some(base_cursor_scope), Mode::Normal => Some(base_cursor_scope),
@ -273,7 +261,8 @@ pub fn doc_selection_highlights(
let mut spans: Vec<(usize, std::ops::Range<usize>)> = Vec::new(); let mut spans: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
for (i, range) in selection.iter().enumerate() { for (i, range) in selection.iter().enumerate() {
let (cursor_scope, selection_scope) = if i == primary_idx { let selection_is_primary = i == primary_idx;
let (cursor_scope, selection_scope) = if selection_is_primary {
(primary_cursor_scope, primary_selection_scope) (primary_cursor_scope, primary_selection_scope)
} else { } else {
(cursor_scope, selection_scope) (cursor_scope, selection_scope)
@ -281,7 +270,14 @@ pub fn doc_selection_highlights(
// Special-case: cursor at end of the rope. // Special-case: cursor at end of the rope.
if range.head == range.anchor && range.head == text.len_chars() { if range.head == range.anchor && range.head == text.len_chars() {
spans.push((cursor_scope, range.head..range.head + 1)); if !selection_is_primary || cursor_is_block {
// Bar and underline cursors are drawn by the terminal
// BUG: If the editor area loses focus while having a bar or
// underline cursor (eg. when a regex prompt has focus) then
// the primary cursor will be invisible. This doesn't happen
// with block cursors since we manually draw *all* cursors.
spans.push((cursor_scope, range.head..range.head + 1));
}
continue; continue;
} }
@ -290,11 +286,15 @@ pub fn doc_selection_highlights(
// Standard case. // Standard case.
let cursor_start = prev_grapheme_boundary(text, range.head); let cursor_start = prev_grapheme_boundary(text, range.head);
spans.push((selection_scope, range.anchor..cursor_start)); spans.push((selection_scope, range.anchor..cursor_start));
spans.push((cursor_scope, cursor_start..range.head)); if !selection_is_primary || cursor_is_block {
spans.push((cursor_scope, cursor_start..range.head));
}
} else { } else {
// Reverse case. // Reverse case.
let cursor_end = next_grapheme_boundary(text, range.head); let cursor_end = next_grapheme_boundary(text, range.head);
spans.push((cursor_scope, range.head..cursor_end)); if !selection_is_primary || cursor_is_block {
spans.push((cursor_scope, range.head..cursor_end));
}
spans.push((selection_scope, cursor_end..range.anchor)); spans.push((selection_scope, cursor_end..range.anchor));
} }
} }
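
The cursor-drawing rule introduced above condenses to a single predicate (a sketch, not code from the commit): a cursor cell is painted manually unless it belongs to the primary selection and the configured shape is a bar or underline, which the terminal draws itself.

// Sketch: should a cursor span be pushed for this range?
fn draw_cursor_manually(selection_is_primary: bool, cursor_is_block: bool) -> bool {
    !selection_is_primary || cursor_is_block
}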
@ -320,6 +320,10 @@ pub fn render_text_highlights<H: Iterator<Item = HighlightEvent>>(
let text_style = theme.get("ui.text"); let text_style = theme.get("ui.text");
// It's slightly more efficient to produce a full RopeSlice from the Rope, then slice that a bunch
// of times than it is to always call Rope::slice/get_slice (it will internally always hit RSEnum::Light).
let text = text.slice(..);
'outer: for event in highlights { 'outer: for event in highlights {
match event { match event {
HighlightEvent::HighlightStart(span) => { HighlightEvent::HighlightStart(span) => {
@ -336,17 +340,16 @@ pub fn render_text_highlights<H: Iterator<Item = HighlightEvent>>(
use helix_core::graphemes::{grapheme_width, RopeGraphemes}; use helix_core::graphemes::{grapheme_width, RopeGraphemes};
let style = spans.iter().fold(text_style, |acc, span| {
let style = theme.get(theme.scopes()[span.0].as_str());
acc.patch(style)
});
for grapheme in RopeGraphemes::new(text) { for grapheme in RopeGraphemes::new(text) {
let out_of_bounds = visual_x < offset.col as u16 let out_of_bounds = visual_x < offset.col as u16
|| visual_x >= viewport.width + offset.col as u16; || visual_x >= viewport.width + offset.col as u16;
if LineEnding::from_rope_slice(&grapheme).is_some() { if LineEnding::from_rope_slice(&grapheme).is_some() {
if !out_of_bounds { if !out_of_bounds {
let style = spans.iter().fold(text_style, |acc, span| {
acc.patch(theme.highlight(span.0))
});
// we still want to render an empty cell with the style // we still want to render an empty cell with the style
surface.set_string( surface.set_string(
viewport.x + visual_x - offset.col as u16, viewport.x + visual_x - offset.col as u16,
@ -377,6 +380,10 @@ pub fn render_text_highlights<H: Iterator<Item = HighlightEvent>>(
}; };
if !out_of_bounds { if !out_of_bounds {
let style = spans.iter().fold(text_style, |acc, span| {
acc.patch(theme.highlight(span.0))
});
// if we're offscreen just keep going until we hit a new line // if we're offscreen just keep going until we hit a new line
surface.set_string( surface.set_string(
viewport.x + visual_x - offset.col as u16, viewport.x + visual_x - offset.col as u16,
@ -422,8 +429,7 @@ pub fn render_focused_view_elements(
.add_modifier(Modifier::DIM) .add_modifier(Modifier::DIM)
}); });
surface surface[(viewport.x + pos.col as u16, viewport.y + pos.row as u16)]
.get_mut(viewport.x + pos.col as u16, viewport.y + pos.row as u16)
.set_style(style); .set_style(style);
} }
} }
@ -453,6 +459,8 @@ pub fn render_gutter(
let mut offset = 0; let mut offset = 0;
let gutter_style = theme.get("ui.gutter");
// avoid lots of small allocations by reusing a text buffer for each line // avoid lots of small allocations by reusing a text buffer for each line
let mut text = String::with_capacity(8); let mut text = String::with_capacity(8);
@ -468,7 +476,7 @@ pub fn render_gutter(
viewport.y + i as u16, viewport.y + i as u16,
&text, &text,
*width, *width,
style, gutter_style.patch(style),
); );
} }
text.clear(); text.clear();
@ -574,21 +582,6 @@ pub fn render_statusline(
} }
surface.set_string(viewport.x + 5, viewport.y, progress, base_style); surface.set_string(viewport.x + 5, viewport.y, progress, base_style);
let rel_path = doc.relative_path();
let path = rel_path
.as_ref()
.map(|p| p.to_string_lossy())
.unwrap_or_else(|| SCRATCH_BUFFER_NAME.into());
let title = format!("{}{}", path, if doc.is_modified() { "[+]" } else { "" });
surface.set_stringn(
viewport.x + 8,
viewport.y,
title,
viewport.width.saturating_sub(6) as usize,
base_style,
);
//------------------------------- //-------------------------------
// Right side of the status line. // Right side of the status line.
//------------------------------- //-------------------------------
@ -662,6 +655,13 @@ pub fn render_statusline(
base_style, base_style,
)); ));
let enc = doc.encoding();
if enc != encoding::UTF_8 {
right_side_text
.0
.push(Span::styled(format!(" {} ", enc.name()), base_style));
}
// Render to the statusline. // Render to the statusline.
surface.set_spans( surface.set_spans(
viewport.x viewport.x
@ -672,6 +672,31 @@ pub fn render_statusline(
&right_side_text, &right_side_text,
right_side_text.width() as u16, right_side_text.width() as u16,
); );
//-------------------------------
// Middle / File path / Title
//-------------------------------
let title = {
let rel_path = doc.relative_path();
let path = rel_path
.as_ref()
.map(|p| p.to_string_lossy())
.unwrap_or_else(|| SCRATCH_BUFFER_NAME.into());
format!("{}{}", path, if doc.is_modified() { "[+]" } else { "" })
};
surface.set_string_truncated(
viewport.x + 8, // 8: 1 space + 3 char mode string + 1 space + 1 spinner + 1 space
viewport.y,
title,
viewport
.width
.saturating_sub(6)
.saturating_sub(right_side_text.width() as u16 + 1) as usize, // "+ 1": a space between the title and the selection info
base_style,
true,
true,
);
} }
/// Handle events by looking them up in `self.keymaps`. Returns None /// Handle events by looking them up in `self.keymaps`. Returns None
@ -684,12 +709,13 @@ fn handle_keymap_event(
cxt: &mut commands::Context, cxt: &mut commands::Context,
event: KeyEvent, event: KeyEvent,
) -> Option<KeymapResult> { ) -> Option<KeymapResult> {
cxt.editor.autoinfo = None;
let key_result = self.keymaps.get_mut(&mode).unwrap().get(event); let key_result = self.keymaps.get_mut(&mode).unwrap().get(event);
self.autoinfo = key_result.sticky.map(|node| node.infobox()); cxt.editor.autoinfo = key_result.sticky.map(|node| node.infobox());
match &key_result.kind { match &key_result.kind {
KeymapResultKind::Matched(command) => command.execute(cxt), KeymapResultKind::Matched(command) => command.execute(cxt),
KeymapResultKind::Pending(node) => self.autoinfo = Some(node.infobox()), KeymapResultKind::Pending(node) => cxt.editor.autoinfo = Some(node.infobox()),
KeymapResultKind::MatchedSequence(commands) => { KeymapResultKind::MatchedSequence(commands) => {
for command in commands { for command in commands {
command.execute(cxt); command.execute(cxt);
@ -789,8 +815,9 @@ pub fn set_completion(
pub fn clear_completion(&mut self, editor: &mut Editor) { pub fn clear_completion(&mut self, editor: &mut Editor) {
self.completion = None; self.completion = None;
// Clear any savepoints // Clear any savepoints
let (_, doc) = current!(editor); let doc = doc_mut!(editor);
doc.savepoint = None; doc.savepoint = None;
editor.clear_idle_timer(); // don't retrigger editor.clear_idle_timer(); // don't retrigger
} }
@ -927,7 +954,7 @@ fn handle_mouse_event(
return EventResult::Ignored; return EventResult::Ignored;
} }
commands::Command::yank_main_selection_to_primary_clipboard.execute(cxt); commands::MappableCommand::yank_main_selection_to_primary_clipboard.execute(cxt);
EventResult::Consumed(None) EventResult::Consumed(None)
} }
@ -953,9 +980,9 @@ fn handle_mouse_event(
if let Ok(pos) = doc.text().try_line_to_char(line) { if let Ok(pos) = doc.text().try_line_to_char(line) {
doc.set_selection(view_id, Selection::point(pos)); doc.set_selection(view_id, Selection::point(pos));
if modifiers == crossterm::event::KeyModifiers::ALT { if modifiers == crossterm::event::KeyModifiers::ALT {
commands::Command::dap_edit_log.execute(cxt); commands::MappableCommand::dap_edit_log.execute(cxt);
} else { } else {
commands::Command::dap_edit_condition.execute(cxt); commands::MappableCommand::dap_edit_condition.execute(cxt);
} }
return EventResult::Consumed(None); return EventResult::Consumed(None);
@ -977,7 +1004,8 @@ fn handle_mouse_event(
} }
if modifiers == crossterm::event::KeyModifiers::ALT { if modifiers == crossterm::event::KeyModifiers::ALT {
commands::Command::replace_selections_with_primary_clipboard.execute(cxt); commands::MappableCommand::replace_selections_with_primary_clipboard
.execute(cxt);
return EventResult::Consumed(None); return EventResult::Consumed(None);
} }
@ -991,7 +1019,7 @@ fn handle_mouse_event(
let doc = editor.document_mut(editor.tree.get(view_id).doc).unwrap(); let doc = editor.document_mut(editor.tree.get(view_id).doc).unwrap();
doc.set_selection(view_id, Selection::point(pos)); doc.set_selection(view_id, Selection::point(pos));
editor.tree.focus = view_id; editor.tree.focus = view_id;
commands::Command::paste_primary_clipboard_before.execute(cxt); commands::MappableCommand::paste_primary_clipboard_before.execute(cxt);
return EventResult::Consumed(None); return EventResult::Consumed(None);
} }
@ -1004,14 +1032,18 @@ fn handle_mouse_event(
} }
impl Component for EditorView { impl Component for EditorView {
fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult { fn handle_event(
let mut cxt = commands::Context { &mut self,
editor: &mut cx.editor, event: Event,
context: &mut crate::compositor::Context,
) -> EventResult {
let mut cx = commands::Context {
editor: context.editor,
count: None, count: None,
register: None, register: None,
callback: None, callback: None,
on_next_key_callback: None, on_next_key_callback: None,
jobs: cx.jobs, jobs: context.jobs,
}; };
match event { match event {
@ -1021,18 +1053,19 @@ fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult {
EventResult::Consumed(None) EventResult::Consumed(None)
} }
Event::Key(key) => { Event::Key(key) => {
cxt.editor.reset_idle_timer(); cx.editor.reset_idle_timer();
let mut key = KeyEvent::from(key); let mut key = KeyEvent::from(key);
canonicalize_key(&mut key); canonicalize_key(&mut key);
// clear status
cxt.editor.status_msg = None;
let (_, doc) = current!(cxt.editor); // clear status
cx.editor.status_msg = None;
let doc = doc!(cx.editor);
let mode = doc.mode(); let mode = doc.mode();
if let Some(on_next_key) = self.on_next_key.take() { if let Some(on_next_key) = self.on_next_key.take() {
// if there's a command waiting input, do that first // if there's a command waiting input, do that first
on_next_key(&mut cxt, key); on_next_key(&mut cx, key);
} else { } else {
match mode { match mode {
Mode::Insert => { Mode::Insert => {
@ -1044,8 +1077,8 @@ fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult {
if let Some(completion) = &mut self.completion { if let Some(completion) = &mut self.completion {
// use a fake context here // use a fake context here
let mut cx = Context { let mut cx = Context {
editor: cxt.editor, editor: cx.editor,
jobs: cxt.jobs, jobs: cx.jobs,
scroll: None, scroll: None,
}; };
let res = completion.handle_event(event, &mut cx); let res = completion.handle_event(event, &mut cx);
@ -1055,40 +1088,46 @@ fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult {
if callback.is_some() { if callback.is_some() {
// assume close_fn // assume close_fn
self.clear_completion(cxt.editor); self.clear_completion(cx.editor);
} }
} }
} }
// if completion didn't take the event, we pass it onto commands // if completion didn't take the event, we pass it onto commands
if !consumed { if !consumed {
self.insert_mode(&mut cxt, key); self.insert_mode(&mut cx, key);
// lastly we recalculate completion // lastly we recalculate completion
if let Some(completion) = &mut self.completion { if let Some(completion) = &mut self.completion {
completion.update(&mut cxt); completion.update(&mut cx);
if completion.is_empty() { if completion.is_empty() {
self.clear_completion(cxt.editor); self.clear_completion(cx.editor);
} }
} }
} }
} }
mode => self.command_mode(mode, &mut cxt, key), mode => self.command_mode(mode, &mut cx, key),
} }
} }
self.on_next_key = cxt.on_next_key_callback.take(); self.on_next_key = cx.on_next_key_callback.take();
// appease borrowck // appease borrowck
let callback = cxt.callback.take(); let callback = cx.callback.take();
// if the command consumed the last view, skip the render. // if the command consumed the last view, skip the render.
// on the next loop cycle the Application will then terminate. // on the next loop cycle the Application will then terminate.
if cxt.editor.should_close() { if cx.editor.should_close() {
return EventResult::Ignored; return EventResult::Ignored;
} }
let (view, doc) = current!(cxt.editor); let (view, doc) = current!(cx.editor);
view.ensure_cursor_in_view(doc, cxt.editor.config.scrolloff); view.ensure_cursor_in_view(doc, cx.editor.config.scrolloff);
// Store a history state if not in insert mode. This also takes care of
// committing changes when leaving insert mode.
if doc.mode() != Mode::Insert {
doc.append_changes_to_history(view.id);
}
// mode transitions // mode transitions
match (mode, doc.mode()) { match (mode, doc.mode()) {
@ -1117,7 +1156,7 @@ fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult {
EventResult::Consumed(callback) EventResult::Consumed(callback)
} }
Event::Mouse(event) => self.handle_mouse_event(event, &mut cxt), Event::Mouse(event) => self.handle_mouse_event(event, &mut cx),
} }
} }
@ -1134,8 +1173,9 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
} }
if cx.editor.config.auto_info { if cx.editor.config.auto_info {
if let Some(ref mut info) = self.autoinfo { if let Some(mut info) = cx.editor.autoinfo.take() {
info.render(area, surface, cx); info.render(area, surface, cx);
cx.editor.autoinfo = Some(info)
} }
} }
@ -1173,13 +1213,31 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
disp.push_str(&s); disp.push_str(&s);
} }
} }
let style = cx.editor.theme.get("ui.text");
let macro_width = if cx.editor.macro_recording.is_some() {
3
} else {
0
};
surface.set_string( surface.set_string(
area.x + area.width.saturating_sub(key_width), area.x + area.width.saturating_sub(key_width + macro_width),
area.y + area.height.saturating_sub(1), area.y + area.height.saturating_sub(1),
disp.get(disp.len().saturating_sub(key_width as usize)..) disp.get(disp.len().saturating_sub(key_width as usize)..)
.unwrap_or(&disp), .unwrap_or(&disp),
cx.editor.theme.get("ui.text"), style,
); );
if let Some((reg, _)) = cx.editor.macro_recording {
let disp = format!("[{}]", reg);
let style = style
.fg(helix_view::graphics::Color::Yellow)
.add_modifier(Modifier::BOLD);
surface.set_string(
area.x + area.width.saturating_sub(3),
area.y + area.height.saturating_sub(1),
&disp,
style,
);
}
} }
if let Some(completion) = self.completion.as_mut() { if let Some(completion) = self.completion.as_mut() {
@ -1188,11 +1246,11 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
} }
fn cursor(&self, _area: Rect, editor: &Editor) -> (Option<Position>, CursorKind) { fn cursor(&self, _area: Rect, editor: &Editor) -> (Option<Position>, CursorKind) {
// match view.doc.mode() { match editor.cursor() {
// Mode::Insert => write!(stdout, "\x1B[6 q"), // All block cursors are drawn manually
// mode => write!(stdout, "\x1B[2 q"), (pos, CursorKind::Block) => (pos, CursorKind::Hidden),
// }; cursor => cursor,
editor.cursor() }
} }
} }

View File

@ -21,6 +21,9 @@ pub struct Markdown {
contents: String, contents: String,
config_loader: Arc<syntax::Loader>, config_loader: Arc<syntax::Loader>,
block_style: String,
heading_style: String,
} }
// TODO: pre-render and self reference via Pin // TODO: pre-render and self reference via Pin
@ -31,120 +34,137 @@ pub fn new(contents: String, config_loader: Arc<syntax::Loader>) -> Self {
Self { Self {
contents, contents,
config_loader, config_loader,
block_style: "markup.raw.inline".into(),
heading_style: "markup.heading".into(),
} }
} }
}
fn parse<'a>( pub fn style_group(mut self, suffix: &str) -> Self {
contents: &'a str, self.block_style = format!("markup.raw.inline.{}", suffix);
theme: Option<&Theme>, self.heading_style = format!("markup.heading.{}", suffix);
loader: &syntax::Loader, self
) -> tui::text::Text<'a> {
// // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}}
// let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect<B: FromIterator<Self::Item>>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result<Collection<T>, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec<i32> = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec<i32>` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque<T>`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```";
let mut options = Options::empty();
options.insert(Options::ENABLE_STRIKETHROUGH);
let parser = Parser::new_ext(contents, options);
// TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda
let mut tags = Vec::new();
let mut spans = Vec::new();
let mut lines = Vec::new();
fn to_span(text: pulldown_cmark::CowStr) -> Span {
use std::ops::Deref;
Span::raw::<std::borrow::Cow<_>>(match text {
CowStr::Borrowed(s) => s.into(),
CowStr::Boxed(s) => s.to_string().into(),
CowStr::Inlined(s) => s.deref().to_owned().into(),
})
} }
let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default(); fn parse(&self, theme: Option<&Theme>) -> tui::text::Text<'_> {
// // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}}
// let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect<B: FromIterator<Self::Item>>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result<Collection<T>, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec<i32> = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec<i32>` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque<T>`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```";
// TODO: use better scopes for these, `markup.raw.block`, `markup.heading` let mut options = Options::empty();
let code_style = theme options.insert(Options::ENABLE_STRIKETHROUGH);
.map(|theme| theme.get("ui.text.focus")) let parser = Parser::new_ext(&self.contents, options);
.unwrap_or_default(); // white
let heading_style = theme
.map(|theme| theme.get("ui.linenr.selected"))
.unwrap_or_default(); // lilac
for event in parser { // TODO: if possible, render links as terminal hyperlinks: https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda
match event { let mut tags = Vec::new();
Event::Start(tag) => tags.push(tag), let mut spans = Vec::new();
Event::End(tag) => { let mut lines = Vec::new();
tags.pop();
match tag { fn to_span(text: pulldown_cmark::CowStr) -> Span {
Tag::Heading(_) | Tag::Paragraph | Tag::CodeBlock(CodeBlockKind::Fenced(_)) => { use std::ops::Deref;
// whenever code block or paragraph closes, new line Span::raw::<std::borrow::Cow<_>>(match text {
let spans = std::mem::take(&mut spans); CowStr::Borrowed(s) => s.into(),
if !spans.is_empty() { CowStr::Boxed(s) => s.to_string().into(),
lines.push(Spans::from(spans)); CowStr::Inlined(s) => s.deref().to_owned().into(),
})
}
macro_rules! get_theme {
($s1: expr) => {
theme
.map(|theme| theme.try_get($s1.as_str()))
.flatten()
.unwrap_or_default()
};
}
let text_style = theme.map(|theme| theme.get("ui.text")).unwrap_or_default();
let code_style = get_theme!(self.block_style);
let heading_style = get_theme!(self.heading_style);
for event in parser {
match event {
Event::Start(tag) => tags.push(tag),
Event::End(tag) => {
tags.pop();
match tag {
Tag::Heading(_, _, _)
| Tag::Paragraph
| Tag::CodeBlock(CodeBlockKind::Fenced(_)) => {
// whenever code block or paragraph closes, new line
let spans = std::mem::take(&mut spans);
if !spans.is_empty() {
lines.push(Spans::from(spans));
}
lines.push(Spans::default());
} }
lines.push(Spans::default()); _ => (),
} }
_ => (),
} }
} Event::Text(text) => {
Event::Text(text) => { // TODO: temp workaround
// TODO: temp workaround if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() {
if let Some(Tag::CodeBlock(CodeBlockKind::Fenced(language))) = tags.last() { if let Some(theme) = theme {
if let Some(theme) = theme { let rope = Rope::from(text.as_ref());
let rope = Rope::from(text.as_ref()); let syntax = self
let syntax = loader .config_loader
.language_configuration_for_injection_string(language) .language_configuration_for_injection_string(language)
.and_then(|config| config.highlight_config(theme.scopes())) .and_then(|config| config.highlight_config(theme.scopes()))
.map(|config| Syntax::new(&rope, config)); .map(|config| {
Syntax::new(&rope, config, self.config_loader.clone())
});
if let Some(syntax) = syntax { if let Some(syntax) = syntax {
// if we have a syntax available, highlight_iter and generate spans // if we have a syntax available, highlight_iter and generate spans
let mut highlights = Vec::new(); let mut highlights = Vec::new();
for event in syntax.highlight_iter(rope.slice(..), None, None, |_| None) for event in syntax.highlight_iter(rope.slice(..), None, None) {
{ match event.unwrap() {
match event.unwrap() { HighlightEvent::HighlightStart(span) => {
HighlightEvent::HighlightStart(span) => { highlights.push(span);
highlights.push(span);
}
HighlightEvent::HighlightEnd => {
highlights.pop();
}
HighlightEvent::Source { start, end } => {
let style = match highlights.first() {
Some(span) => theme.get(&theme.scopes()[span.0]),
None => text_style,
};
// TODO: replace tabs with indentation
let mut slice = &text[start..end];
// TODO: do we need to handle all unicode line endings
// here, or is just '\n' okay?
while let Some(end) = slice.find('\n') {
// emit span up to newline
let text = &slice[..end];
let text = text.replace('\t', " "); // replace tabs
let span = Span::styled(text, style);
spans.push(span);
// truncate slice to after newline
slice = &slice[end + 1..];
// make a new line
let spans = std::mem::take(&mut spans);
lines.push(Spans::from(spans));
} }
HighlightEvent::HighlightEnd => {
highlights.pop();
}
HighlightEvent::Source { start, end } => {
let style = match highlights.first() {
Some(span) => theme.get(&theme.scopes()[span.0]),
None => text_style,
};
// if there's anything left, emit it too // TODO: replace tabs with indentation
if !slice.is_empty() {
let span = let mut slice = &text[start..end];
Span::styled(slice.replace('\t', " "), style); // TODO: do we need to handle all unicode line endings
spans.push(span); // here, or is just '\n' okay?
while let Some(end) = slice.find('\n') {
// emit span up to newline
let text = &slice[..end];
let text = text.replace('\t', " "); // replace tabs
let span = Span::styled(text, style);
spans.push(span);
// truncate slice to after newline
slice = &slice[end + 1..];
// make a new line
let spans = std::mem::take(&mut spans);
lines.push(Spans::from(spans));
}
// if there's anything left, emit it too
if !slice.is_empty() {
let span = Span::styled(
slice.replace('\t', " "),
style,
);
spans.push(span);
}
} }
} }
} }
} else {
for line in text.lines() {
let span = Span::styled(line.to_string(), code_style);
lines.push(Spans::from(span));
}
} }
} else { } else {
for line in text.lines() { for line in text.lines() {
@ -152,64 +172,60 @@ fn to_span(text: pulldown_cmark::CowStr) -> Span {
lines.push(Spans::from(span)); lines.push(Spans::from(span));
} }
} }
} else if let Some(Tag::Heading(_, _, _)) = tags.last() {
let mut span = to_span(text);
span.style = heading_style;
spans.push(span);
} else { } else {
for line in text.lines() { let mut span = to_span(text);
let span = Span::styled(line.to_string(), code_style); span.style = text_style;
lines.push(Spans::from(span)); spans.push(span);
}
} }
} else if let Some(Tag::Heading(_)) = tags.last() { }
Event::Code(text) | Event::Html(text) => {
let mut span = to_span(text); let mut span = to_span(text);
span.style = heading_style; span.style = code_style;
spans.push(span);
} else {
let mut span = to_span(text);
span.style = text_style;
spans.push(span); spans.push(span);
} }
Event::SoftBreak | Event::HardBreak => {
// let spans = std::mem::replace(&mut spans, Vec::new());
// lines.push(Spans::from(spans));
spans.push(Span::raw(" "));
}
Event::Rule => {
let mut span = Span::raw("---");
span.style = code_style;
lines.push(Spans::from(span));
lines.push(Spans::default());
}
// TaskListMarker(bool) true if checked
_ => {
log::warn!("unhandled markdown event {:?}", event);
}
} }
Event::Code(text) | Event::Html(text) => { // build up a vec of Paragraph tui widgets
let mut span = to_span(text); }
span.style = code_style;
spans.push(span); if !spans.is_empty() {
} lines.push(Spans::from(spans));
Event::SoftBreak | Event::HardBreak => { }
// let spans = std::mem::replace(&mut spans, Vec::new());
// lines.push(Spans::from(spans)); // if last line is empty, remove it
spans.push(Span::raw(" ")); if let Some(line) = lines.last() {
} if line.0.is_empty() {
Event::Rule => { lines.pop();
let mut span = Span::raw("---");
span.style = code_style;
lines.push(Spans::from(span));
lines.push(Spans::default());
}
// TaskListMarker(bool) true if checked
_ => {
log::warn!("unhandled markdown event {:?}", event);
} }
} }
// build up a vec of Paragraph tui widgets
}
if !spans.is_empty() { Text::from(lines)
lines.push(Spans::from(spans));
} }
// if last line is empty, remove it
if let Some(line) = lines.last() {
if line.0.is_empty() {
lines.pop();
}
}
Text::from(lines)
} }
impl Component for Markdown { impl Component for Markdown {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) { fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
use tui::widgets::{Paragraph, Widget, Wrap}; use tui::widgets::{Paragraph, Widget, Wrap};
let text = parse(&self.contents, Some(&cx.editor.theme), &self.config_loader); let text = self.parse(Some(&cx.editor.theme));
let par = Paragraph::new(text) let par = Paragraph::new(text)
.wrap(Wrap { trim: false }) .wrap(Wrap { trim: false })
@ -227,7 +243,8 @@ fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {
if padding >= viewport.1 || padding >= viewport.0 { if padding >= viewport.1 || padding >= viewport.0 {
return None; return None;
} }
let contents = parse(&self.contents, None, &self.config_loader); let contents = self.parse(None);
// TODO: account for tab width // TODO: account for tab width
let max_text_width = (viewport.0 - padding).min(120); let max_text_width = (viewport.0 - padding).min(120);
let mut text_width = 0; let mut text_width = 0;
@ -241,11 +258,6 @@ fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {
} else if content_width > text_width { } else if content_width > text_width {
text_width = content_width; text_width = content_width;
} }
if height >= viewport.1 {
height = viewport.1;
break;
}
} }
Some((text_width + padding, height)) Some((text_width + padding, height))

View File

@ -14,11 +14,18 @@
use tui::layout::Constraint; use tui::layout::Constraint;
pub trait Item { pub trait Item {
fn sort_text(&self) -> &str;
fn filter_text(&self) -> &str;
fn label(&self) -> &str; fn label(&self) -> &str;
fn row(&self) -> Row;
fn sort_text(&self) -> &str {
self.label()
}
fn filter_text(&self) -> &str {
self.label()
}
fn row(&self) -> Row {
Row::new(vec![Cell::from(self.label())])
}
} }
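
With the default methods added to the trait, an implementor only has to supply label; a hypothetical implementor sketch:

// Hypothetical menu entry: sort_text, filter_text and row all fall back to label().
struct PathItem {
    path: String,
}

impl Item for PathItem {
    fn label(&self) -> &str {
        &self.path
    }
}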
pub struct Menu<T: Item> { pub struct Menu<T: Item> {
@ -132,7 +139,17 @@ fn recalculate_size(&mut self, viewport: (u16, u16)) {
acc acc
}); });
let len = max_lens.iter().sum::<usize>() + n + 1; // +1: reserve some space for scrollbar
let height = self.matches.len().min(10).min(viewport.1 as usize);
// do all the matches fit on a single screen?
let fits = self.matches.len() <= height;
let mut len = max_lens.iter().sum::<usize>() + n;
if !fits {
len += 1; // +1: reserve some space for scrollbar
}
let width = len.min(viewport.0 as usize); let width = len.min(viewport.0 as usize);
self.widths = max_lens self.widths = max_lens
@ -140,8 +157,6 @@ fn recalculate_size(&mut self, viewport: (u16, u16)) {
.map(|len| Constraint::Length(len as u16)) .map(|len| Constraint::Length(len as u16))
.collect(); .collect();
let height = self.matches.len().min(10).min(viewport.1 as usize);
self.size = (width as u16, height as u16); self.size = (width as u16, height as u16);
// adjust scroll offsets if size changed // adjust scroll offsets if size changed
@ -190,7 +205,7 @@ fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult {
_ => return EventResult::Ignored, _ => return EventResult::Ignored,
}; };
let close_fn = EventResult::Consumed(Some(Box::new(|compositor: &mut Compositor| { let close_fn = EventResult::Consumed(Some(Box::new(|compositor: &mut Compositor, _| {
// remove the layer // remove the layer
compositor.pop(); compositor.pop();
}))); })));
@ -202,7 +217,7 @@ fn handle_event(&mut self, event: Event, cx: &mut Context) -> EventResult {
return close_fn; return close_fn;
} }
// arrow up/ctrl-p/shift-tab prev completion choice (including updating the doc) // arrow up/ctrl-p/shift-tab prev completion choice (including updating the doc)
shift!(BackTab) | key!(Up) | ctrl!('p') | ctrl!('k') => { shift!(Tab) | key!(Up) | ctrl!('p') | ctrl!('k') => {
self.move_up(); self.move_up();
(self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update); (self.callback_fn)(cx.editor, self.selection(), MenuEvent::Update);
return EventResult::Consumed(None); return EventResult::Consumed(None);
@ -297,12 +312,14 @@ const fn div_ceil(a: usize, b: usize) -> usize {
}, },
); );
let fits = len <= win_height;
for (i, _) in (scroll..(scroll + win_height).min(len)).enumerate() { for (i, _) in (scroll..(scroll + win_height).min(len)).enumerate() {
let is_marked = i >= scroll_line && i < scroll_line + scroll_height; let is_marked = i >= scroll_line && i < scroll_line + scroll_height;
if is_marked { if !fits && is_marked {
let cell = surface.get_mut(area.x + area.width - 2, area.y + i as u16); let cell = &mut surface[(area.x + area.width - 2, area.y + i as u16)];
cell.set_symbol("▐ "); cell.set_symbol("▐");
// cell.set_style(selected); // cell.set_style(selected);
// cell.set_style(if is_marked { selected } else { style }); // cell.set_style(if is_marked { selected } else { style });
} }

View File

@ -2,7 +2,7 @@
pub(crate) mod editor; pub(crate) mod editor;
mod info; mod info;
mod markdown; mod markdown;
mod menu; pub mod menu;
mod picker; mod picker;
mod popup; mod popup;
mod prompt; mod prompt;
@ -65,7 +65,7 @@ pub fn regex_prompt(
return; return;
} }
let case_insensitive = if cx.editor.config.smart_case { let case_insensitive = if cx.editor.config.search.smart_case {
!input.chars().any(char::is_uppercase) !input.chars().any(char::is_uppercase)
} else { } else {
false false
@ -174,7 +174,9 @@ pub mod completers {
use crate::ui::prompt::Completion; use crate::ui::prompt::Completion;
use fuzzy_matcher::skim::SkimMatcherV2 as Matcher; use fuzzy_matcher::skim::SkimMatcherV2 as Matcher;
use fuzzy_matcher::FuzzyMatcher; use fuzzy_matcher::FuzzyMatcher;
use helix_view::editor::Config;
use helix_view::theme; use helix_view::theme;
use once_cell::sync::Lazy;
use std::borrow::Cow; use std::borrow::Cow;
use std::cmp::Reverse; use std::cmp::Reverse;
@ -186,6 +188,7 @@ pub fn theme(input: &str) -> Vec<Completion> {
&helix_core::config_dir().join("themes"), &helix_core::config_dir().join("themes"),
)); ));
names.push("default".into()); names.push("default".into());
names.push("base16_default".into());
let mut names: Vec<_> = names let mut names: Vec<_> = names
.into_iter() .into_iter()
@ -207,6 +210,31 @@ pub fn theme(input: &str) -> Vec<Completion> {
names names
} }
pub fn setting(input: &str) -> Vec<Completion> {
static KEYS: Lazy<Vec<String>> = Lazy::new(|| {
serde_json::to_value(Config::default())
.unwrap()
.as_object()
.unwrap()
.keys()
.cloned()
.collect()
});
let matcher = Matcher::default();
let mut matches: Vec<_> = KEYS
.iter()
.filter_map(|name| matcher.fuzzy_match(name, input).map(|score| (name, score)))
.collect();
matches.sort_unstable_by_key(|(_file, score)| Reverse(*score));
matches
.into_iter()
.map(|(name, _)| ((0..), name.into()))
.collect()
}
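
A hedged usage sketch of the new setting completer: the candidates are the serialized top-level keys of Config::default(), so a prefix such as "scrol" should surface "scrolloff" (a key this diff also reads as editor.config.scrolloff).

// Hypothetical call site: list fuzzy matches for a partial config key.
for (_range, key) in setting("scrol") {
    println!("config key candidate: {}", key);
}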
pub fn filename(input: &str) -> Vec<Completion> { pub fn filename(input: &str) -> Vec<Completion> {
filename_impl(input, |entry| { filename_impl(input, |entry| {
let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir()); let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir());
@ -255,7 +283,7 @@ fn filename_impl<F>(input: &str, filter_fn: F) -> Vec<Completion>
let is_tilde = input.starts_with('~') && input.len() == 1; let is_tilde = input.starts_with('~') && input.len() == 1;
let path = helix_core::path::expand_tilde(Path::new(input)); let path = helix_core::path::expand_tilde(Path::new(input));
let (dir, file_name) = if input.ends_with('/') { let (dir, file_name) = if input.ends_with(std::path::MAIN_SEPARATOR) {
(path, None) (path, None)
} else { } else {
let file_name = path let file_name = path

Some files were not shown because too many files have changed in this diff.