mod client;
pub mod jsonrpc;
pub mod snippet;
mod transport;

pub use client::Client;
pub use futures_executor::block_on;
pub use jsonrpc::Call;
pub use lsp::{Position, Url};
pub use lsp_types as lsp;

use futures_util::stream::select_all::SelectAll;
use helix_core::{
    path,
    syntax::{LanguageConfiguration, LanguageServerConfiguration},
};
use tokio::sync::mpsc::UnboundedReceiver;

use std::{
    collections::HashMap,
    path::{Path, PathBuf},
    sync::Arc,
};

use thiserror::Error;
use tokio_stream::wrappers::UnboundedReceiverStream;

pub type Result<T> = core::result::Result<T, Error>;
type LanguageServerName = String;

#[derive(Error, Debug)]
pub enum Error {
    #[error("protocol error: {0}")]
    Rpc(#[from] jsonrpc::Error),
    #[error("failed to parse: {0}")]
    Parse(#[from] serde_json::Error),
    #[error("IO Error: {0}")]
    IO(#[from] std::io::Error),
    #[error("request {0} timed out")]
    Timeout(jsonrpc::Id),
    #[error("server closed the stream")]
    StreamClosed,
    #[error("Unhandled")]
    Unhandled,
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum OffsetEncoding {
    /// UTF-8 code units aka bytes
    Utf8,
    /// UTF-32 code units aka chars
    Utf32,
    /// UTF-16 code units
    #[default]
    Utf16,
}
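
// For example, the character '𐍈' (U+10348) is four UTF-8 code units, two UTF-16
// code units, and a single UTF-32 code unit, so the same LSP `character` value can
// map to different document offsets depending on which encoding was negotiated
// with the server.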

pub mod util {
    use super::*;
    use helix_core::line_ending::{line_end_byte_index, line_end_char_index};
    use helix_core::{chars, RopeSlice, SmallVec};
    use helix_core::{diagnostic::NumberOrString, Range, Rope, Selection, Tendril, Transaction};

    /// Converts a diagnostic in the document to [`lsp::Diagnostic`].
    ///
    /// Panics when [`pos_to_lsp_pos`] would panic for an invalid range on the diagnostic.
    pub fn diagnostic_to_lsp_diagnostic(
        doc: &Rope,
        diag: &helix_core::diagnostic::Diagnostic,
        offset_encoding: OffsetEncoding,
    ) -> lsp::Diagnostic {
        use helix_core::diagnostic::Severity::*;

        let range = Range::new(diag.range.start, diag.range.end);
        let severity = diag.severity.map(|s| match s {
            Hint => lsp::DiagnosticSeverity::HINT,
            Info => lsp::DiagnosticSeverity::INFORMATION,
            Warning => lsp::DiagnosticSeverity::WARNING,
            Error => lsp::DiagnosticSeverity::ERROR,
        });

        let code = match diag.code.clone() {
            Some(x) => match x {
                NumberOrString::Number(x) => Some(lsp::NumberOrString::Number(x)),
                NumberOrString::String(x) => Some(lsp::NumberOrString::String(x)),
            },
            None => None,
        };

        let new_tags: Vec<_> = diag
            .tags
            .iter()
            .map(|tag| match tag {
                helix_core::diagnostic::DiagnosticTag::Unnecessary => {
                    lsp::DiagnosticTag::UNNECESSARY
                }
                helix_core::diagnostic::DiagnosticTag::Deprecated => lsp::DiagnosticTag::DEPRECATED,
            })
            .collect();

        let tags = if !new_tags.is_empty() {
            Some(new_tags)
        } else {
            None
        };

        lsp::Diagnostic {
            range: range_to_lsp_range(doc, range, offset_encoding),
            severity,
            code,
            source: diag.source.clone(),
            message: diag.message.to_owned(),
            related_information: None,
            tags,
            data: diag.data.to_owned(),
            ..Default::default()
        }
    }

    /// Converts [`lsp::Position`] to a position in the document.
    ///
    /// Returns `None` if position.line is out of bounds or an overflow occurs
    pub fn lsp_pos_to_pos(
        doc: &Rope,
        pos: lsp::Position,
        offset_encoding: OffsetEncoding,
    ) -> Option<usize> {
        let pos_line = pos.line as usize;
        if pos_line > doc.len_lines() - 1 {
            // If it extends past the end, truncate it to the end. This is because the
            // way the LSP describes the range including the last newline is by
            // specifying a line number after what we would call the last line.
            log::warn!("LSP position {pos:?} out of range assuming EOF");
            return Some(doc.len_chars());
        }

        // We need to be careful here to fully comply with the LSP spec.
        // Two relevant quotes from the spec:
        //
        // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position
        // > If the character value is greater than the line length it defaults back
        // > to the line length.
        //
        // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocuments
        // > To ensure that both client and server split the string into the same
        // > line representation the protocol specifies the following end-of-line sequences:
        // > ‘\n’, ‘\r\n’ and ‘\r’. Positions are line end character agnostic.
        // > So you can not specify a position that denotes \r|\n or \n| where | represents the character offset.
        //
        // This means that while the line must be in bounds the `character`
        // must be capped to the end of the line.
        // Note that the end of the line here is **before** the line terminator
        // so we must use `line_end_char_index` instead of `doc.line_to_char(pos_line + 1)`
        //
        // FIXME: Helix does not fully comply with the LSP spec for line terminators.
        // The LSP standard requires that line terminators are ['\n', '\r\n', '\r'].
        // Without the unicode-linebreak feature, the `\r` terminator is not handled by helix.
        // With the unicode-linebreak feature, helix recognizes multiple extra line break chars,
        // which means that positions will be decoded/encoded incorrectly in their presence.

        let line = match offset_encoding {
            OffsetEncoding::Utf8 => {
                let line_start = doc.line_to_byte(pos_line);
                let line_end = line_end_byte_index(&doc.slice(..), pos_line);
                line_start..line_end
            }
            OffsetEncoding::Utf16 => {
                // TODO directly translate line index to char-idx
                // ropey can do this just as easily as utf-8 byte translation
                // but the functions are just missing.
                // Translate to char first and then utf-16 as a workaround
                let line_start = doc.line_to_char(pos_line);
                let line_end = line_end_char_index(&doc.slice(..), pos_line);
                doc.char_to_utf16_cu(line_start)..doc.char_to_utf16_cu(line_end)
            }
            OffsetEncoding::Utf32 => {
                let line_start = doc.line_to_char(pos_line);
                let line_end = line_end_char_index(&doc.slice(..), pos_line);
                line_start..line_end
            }
        };

        // The LSP spec demands that the offset is capped to the end of the line
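        // For example, with the document "ab\ncd" and OffsetEncoding::Utf8 a position
        // of { line: 0, character: 10 } is capped to the end of line 0 and therefore
        // resolves to char index 2 (illustrative values).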
        let pos = line
            .start
            .checked_add(pos.character as usize)
            .unwrap_or(line.end)
            .min(line.end);

        match offset_encoding {
            OffsetEncoding::Utf8 => doc.try_byte_to_char(pos).ok(),
            OffsetEncoding::Utf16 => doc.try_utf16_cu_to_char(pos).ok(),
            OffsetEncoding::Utf32 => Some(pos),
        }
    }

    /// Converts position in the document to [`lsp::Position`].
    ///
    /// Panics when `pos` is out of `doc` bounds or operation overflows.
    pub fn pos_to_lsp_pos(
        doc: &Rope,
        pos: usize,
        offset_encoding: OffsetEncoding,
    ) -> lsp::Position {
        match offset_encoding {
            OffsetEncoding::Utf8 => {
                let line = doc.char_to_line(pos);
                let line_start = doc.line_to_byte(line);
                let col = doc.char_to_byte(pos) - line_start;

                lsp::Position::new(line as u32, col as u32)
            }
            OffsetEncoding::Utf16 => {
                let line = doc.char_to_line(pos);
                let line_start = doc.char_to_utf16_cu(doc.line_to_char(line));
                let col = doc.char_to_utf16_cu(pos) - line_start;

                lsp::Position::new(line as u32, col as u32)
            }
            OffsetEncoding::Utf32 => {
                let line = doc.char_to_line(pos);
                let line_start = doc.line_to_char(line);
                let col = pos - line_start;

                lsp::Position::new(line as u32, col as u32)
            }
        }
    }

    /// Converts a range in the document to [`lsp::Range`].
    pub fn range_to_lsp_range(
        doc: &Rope,
        range: Range,
        offset_encoding: OffsetEncoding,
    ) -> lsp::Range {
        let start = pos_to_lsp_pos(doc, range.from(), offset_encoding);
        let end = pos_to_lsp_pos(doc, range.to(), offset_encoding);

        lsp::Range::new(start, end)
    }

    pub fn lsp_range_to_range(
        doc: &Rope,
        mut range: lsp::Range,
        offset_encoding: OffsetEncoding,
    ) -> Option<Range> {
        // This is sort of an edge case. It's not clear from the spec how to deal with
        // ranges where end < start. They don't make much sense but vscode simply caps start to end
        // and because it's not specified quite a few LS rely on this as a result (for example the TS server)
        if range.start > range.end {
            log::error!(
                "Invalid LSP range start {:?} > end {:?}, using an empty range at the end instead",
                range.start,
                range.end
            );
            range.start = range.end;
        }
        let start = lsp_pos_to_pos(doc, range.start, offset_encoding)?;
        let end = lsp_pos_to_pos(doc, range.end, offset_encoding)?;

        Some(Range::new(start, end))
    }

    /// If the LS did not provide a range for the completion or the range of the
    /// primary cursor can not be used for the secondary cursor, this function
    /// can be used to find the completion range for a cursor
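    ///
    /// For example (illustrative): in the text `"foo.bar"` with the cursor on `a`
    /// (char index 5), this returns `(4, 5)` when `replace_mode` is `false` and
    /// `(4, 6)` when it is `true`.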
    fn find_completion_range(text: RopeSlice, replace_mode: bool, cursor: usize) -> (usize, usize) {
        let start = cursor
            - text
                .chars_at(cursor)
                .reversed()
                .take_while(|ch| chars::char_is_word(*ch))
                .count();
        let mut end = cursor;
        if replace_mode {
            end += text
                .chars_at(cursor)
                .skip(1)
                .take_while(|ch| chars::char_is_word(*ch))
                .count();
        }
        (start, end)
    }
    fn completion_range(
        text: RopeSlice,
        edit_offset: Option<(i128, i128)>,
        replace_mode: bool,
        cursor: usize,
    ) -> Option<(usize, usize)> {
        let res = match edit_offset {
            Some((start_offset, end_offset)) => {
                let start_offset = cursor as i128 + start_offset;
                if start_offset < 0 {
                    return None;
                }
                let end_offset = cursor as i128 + end_offset;
                if end_offset > text.len_chars() as i128 {
                    return None;
                }
                (start_offset as usize, end_offset as usize)
            }
            None => find_completion_range(text, replace_mode, cursor),
        };
        Some(res)
    }

    /// Creates a [Transaction] from the [lsp::TextEdit] in a completion response.
    /// The transaction applies the edit to all cursors.
    pub fn generate_transaction_from_completion_edit(
        doc: &Rope,
        selection: &Selection,
        edit_offset: Option<(i128, i128)>,
        replace_mode: bool,
        new_text: String,
    ) -> Transaction {
        let replacement: Option<Tendril> = if new_text.is_empty() {
            None
        } else {
            Some(new_text.into())
        };

        let text = doc.slice(..);
        let (removed_start, removed_end) = completion_range(
            text,
            edit_offset,
            replace_mode,
            selection.primary().cursor(text),
        )
        .expect("transaction must be valid for primary selection");
        let removed_text = text.slice(removed_start..removed_end);

        let (transaction, mut selection) = Transaction::change_by_selection_ignore_overlapping(
            doc,
            selection,
            |range| {
                let cursor = range.cursor(text);
                completion_range(text, edit_offset, replace_mode, cursor)
                    .filter(|(start, end)| text.slice(start..end) == removed_text)
                    .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor))
            },
            |_, _| replacement.clone(),
        );
        if transaction.changes().is_empty() {
            return transaction;
        }
        selection = selection.map(transaction.changes());
        transaction.with_selection(selection)
    }

    /// Creates a [Transaction] from the [snippet::Snippet] in a completion response.
    /// The transaction applies the edit to all cursors.
    #[allow(clippy::too_many_arguments)]
    pub fn generate_transaction_from_snippet(
        doc: &Rope,
        selection: &Selection,
        edit_offset: Option<(i128, i128)>,
        replace_mode: bool,
        snippet: snippet::Snippet,
        line_ending: &str,
        include_placeholder: bool,
        tab_width: usize,
        indent_width: usize,
    ) -> Transaction {
        let text = doc.slice(..);

        let mut off = 0i128;
        let mut mapped_doc = doc.clone();
        let mut selection_tabstops: SmallVec<[_; 1]> = SmallVec::new();
        let (removed_start, removed_end) = completion_range(
            text,
            edit_offset,
            replace_mode,
            selection.primary().cursor(text),
        )
        .expect("transaction must be valid for primary selection");
        let removed_text = text.slice(removed_start..removed_end);

        let (transaction, selection) = Transaction::change_by_selection_ignore_overlapping(
            doc,
            selection,
            |range| {
                let cursor = range.cursor(text);
                completion_range(text, edit_offset, replace_mode, cursor)
                    .filter(|(start, end)| text.slice(start..end) == removed_text)
                    .unwrap_or_else(|| find_completion_range(text, replace_mode, cursor))
            },
            |replacement_start, replacement_end| {
                let mapped_replacement_start = (replacement_start as i128 + off) as usize;
                let mapped_replacement_end = (replacement_end as i128 + off) as usize;

                let line_idx = mapped_doc.char_to_line(mapped_replacement_start);
                let indent_level = helix_core::indent::indent_level_for_line(
                    mapped_doc.line(line_idx),
                    tab_width,
                    indent_width,
                ) * indent_width;

                let newline_with_offset = format!(
                    "{line_ending}{blank:indent_level$}",
                    line_ending = line_ending,
                    blank = ""
                );

                let (replacement, tabstops) =
                    snippet::render(&snippet, &newline_with_offset, include_placeholder);
                selection_tabstops.push((mapped_replacement_start, tabstops));
                mapped_doc.remove(mapped_replacement_start..mapped_replacement_end);
                mapped_doc.insert(mapped_replacement_start, &replacement);
                off +=
                    replacement_start as i128 - replacement_end as i128 + replacement.len() as i128;

                Some(replacement)
            },
        );

        let changes = transaction.changes();
        if changes.is_empty() {
            return transaction;
        }

        let mut mapped_selection = SmallVec::with_capacity(selection.len());
        let mut mapped_primary_idx = 0;
        let primary_range = selection.primary();
        for (range, (tabstop_anchor, tabstops)) in selection.into_iter().zip(selection_tabstops) {
            if range == primary_range {
                mapped_primary_idx = mapped_selection.len()
            }

            let range = range.map(changes);
            let tabstops = tabstops.first().filter(|tabstops| !tabstops.is_empty());
            let Some(tabstops) = tabstops else {
                // no tabstops, just do the normal mapping
                mapped_selection.push(range);
                continue;
            };

            // expand the selection to cover the tabstop to retain the helix selection semantic
            // the tabstop closest to the range simply replaces `head` while anchor remains in place
            // the remaining tabstops receive their own single-width cursor
            if range.head < range.anchor {
                let first_tabstop = tabstop_anchor + tabstops[0].1;

                // if selection is forward but was moved to the right it is
                // contained entirely in the replacement text, just do a point
                // selection (fallback below)
                if range.anchor >= first_tabstop {
                    let range = Range::new(range.anchor, first_tabstop);
                    mapped_selection.push(range);
                    let rem_tabstops = tabstops[1..]
                        .iter()
                        .map(|tabstop| Range::point(tabstop_anchor + tabstop.1));
                    mapped_selection.extend(rem_tabstops);
                    continue;
                }
            } else {
                let last_idx = tabstops.len() - 1;
                let last_tabstop = tabstop_anchor + tabstops[last_idx].1;

                // if selection is forward but was moved to the right it is
                // contained entirely in the replacement text, just do a point
                // selection (fallback below)
                if range.anchor <= last_tabstop {
                    // we can't properly compute the next grapheme
                    // here because the transaction hasn't been applied yet
                    // that is not a problem because the range gets grapheme aligned anyway
                    // though, so just adding one will always cause head to be grapheme
                    // aligned correctly when applied to the document
                    let range = Range::new(range.anchor, last_tabstop + 1);
                    mapped_selection.push(range);
                    let rem_tabstops = tabstops[..last_idx]
                        .iter()
                        .map(|tabstop| Range::point(tabstop_anchor + tabstop.0));
                    mapped_selection.extend(rem_tabstops);
                    continue;
                }
            };

            let tabstops = tabstops
                .iter()
                .map(|tabstop| Range::point(tabstop_anchor + tabstop.0));
            mapped_selection.extend(tabstops);
        }

        transaction.with_selection(Selection::new(mapped_selection, mapped_primary_idx))
    }

    pub fn generate_transaction_from_edits(
        doc: &Rope,
        mut edits: Vec<lsp::TextEdit>,
        offset_encoding: OffsetEncoding,
    ) -> Transaction {
        // Sort edits by start range, since some LSPs (Omnisharp) send them
        // in reverse order.
        edits.sort_unstable_by_key(|edit| edit.range.start);

        // Generate a diff if the edit is a full document replacement.
        #[allow(clippy::collapsible_if)]
        if edits.len() == 1 {
            let is_document_replacement = edits.first().and_then(|edit| {
                let start = lsp_pos_to_pos(doc, edit.range.start, offset_encoding)?;
                let end = lsp_pos_to_pos(doc, edit.range.end, offset_encoding)?;
                Some(start..end)
            }) == Some(0..doc.len_chars());
            if is_document_replacement {
                let new_text = Rope::from(edits.pop().unwrap().new_text);
                return helix_core::diff::compare_ropes(doc, &new_text);
            }
        }

        Transaction::change(
            doc,
            edits.into_iter().map(|edit| {
                // simplify "" into None for cleaner changesets
                let replacement = if !edit.new_text.is_empty() {
                    Some(edit.new_text.into())
                } else {
                    None
                };

                let start =
                    if let Some(start) = lsp_pos_to_pos(doc, edit.range.start, offset_encoding) {
                        start
                    } else {
                        return (0, 0, None);
                    };
                let end = if let Some(end) = lsp_pos_to_pos(doc, edit.range.end, offset_encoding) {
                    end
                } else {
                    return (0, 0, None);
                };
                (start, end, replacement)
            }),
        )
    }
}

#[derive(Debug, PartialEq, Clone)]
pub enum MethodCall {
    WorkDoneProgressCreate(lsp::WorkDoneProgressCreateParams),
    ApplyWorkspaceEdit(lsp::ApplyWorkspaceEditParams),
    WorkspaceFolders,
    WorkspaceConfiguration(lsp::ConfigurationParams),
    RegisterCapability(lsp::RegistrationParams),
}

impl MethodCall {
    pub fn parse(method: &str, params: jsonrpc::Params) -> Result<MethodCall> {
        use lsp::request::Request;
        let request = match method {
            lsp::request::WorkDoneProgressCreate::METHOD => {
                let params: lsp::WorkDoneProgressCreateParams = params.parse()?;
                Self::WorkDoneProgressCreate(params)
            }
            lsp::request::ApplyWorkspaceEdit::METHOD => {
                let params: lsp::ApplyWorkspaceEditParams = params.parse()?;
                Self::ApplyWorkspaceEdit(params)
            }
            lsp::request::WorkspaceFoldersRequest::METHOD => Self::WorkspaceFolders,
            lsp::request::WorkspaceConfiguration::METHOD => {
                let params: lsp::ConfigurationParams = params.parse()?;
                Self::WorkspaceConfiguration(params)
            }
            lsp::request::RegisterCapability::METHOD => {
                let params: lsp::RegistrationParams = params.parse()?;
                Self::RegisterCapability(params)
            }
            _ => {
                return Err(Error::Unhandled);
            }
        };
        Ok(request)
    }
}

#[derive(Debug, PartialEq, Clone)]
pub enum Notification {
    // we inject this notification to signal the LSP is ready
    Initialized,
    // and this notification to signal that the LSP exited
    Exit,
    PublishDiagnostics(lsp::PublishDiagnosticsParams),
    ShowMessage(lsp::ShowMessageParams),
    LogMessage(lsp::LogMessageParams),
    ProgressMessage(lsp::ProgressParams),
}

impl Notification {
    pub fn parse(method: &str, params: jsonrpc::Params) -> Result<Notification> {
        use lsp::notification::Notification as _;

        let notification = match method {
            lsp::notification::Initialized::METHOD => Self::Initialized,
            lsp::notification::Exit::METHOD => Self::Exit,
            lsp::notification::PublishDiagnostics::METHOD => {
                let params: lsp::PublishDiagnosticsParams = params.parse()?;
                Self::PublishDiagnostics(params)
            }

            lsp::notification::ShowMessage::METHOD => {
                let params: lsp::ShowMessageParams = params.parse()?;
                Self::ShowMessage(params)
            }
            lsp::notification::LogMessage::METHOD => {
                let params: lsp::LogMessageParams = params.parse()?;
                Self::LogMessage(params)
            }
            lsp::notification::Progress::METHOD => {
                let params: lsp::ProgressParams = params.parse()?;
                Self::ProgressMessage(params)
            }
            _ => {
                return Err(Error::Unhandled);
            }
        };

        Ok(notification)
    }
}

#[derive(Debug)]
pub struct Registry {
    inner: HashMap<LanguageServerName, Vec<Arc<Client>>>,
    syn_loader: Arc<helix_core::syntax::Loader>,
    counter: usize,
    pub incoming: SelectAll<UnboundedReceiverStream<(usize, Call)>>,
}

impl Registry {
    pub fn new(syn_loader: Arc<helix_core::syntax::Loader>) -> Self {
        Self {
            inner: HashMap::new(),
            syn_loader,
            counter: 0,
            incoming: SelectAll::new(),
        }
    }

    pub fn get_by_id(&self, id: usize) -> Option<&Client> {
        self.inner
            .values()
            .flatten()
            .find(|client| client.id() == id)
            .map(|client| &**client)
    }

    pub fn remove_by_id(&mut self, id: usize) {
        self.inner.retain(|_, language_servers| {
            language_servers.retain(|ls| id != ls.id());
            !language_servers.is_empty()
        });
    }

    fn start_client(
        &mut self,
        name: String,
        ls_config: &LanguageConfiguration,
        doc_path: Option<&std::path::PathBuf>,
        root_dirs: &[PathBuf],
        enable_snippets: bool,
    ) -> Result<Arc<Client>> {
        let config = self
            .syn_loader
            .language_server_configs()
            .get(&name)
            .ok_or_else(|| anyhow::anyhow!("Language server '{name}' not defined"))?;
        self.counter += 1;
        let id = self.counter;
        let NewClient(client, incoming) = start_client(
            id,
            name,
            ls_config,
            config,
            doc_path,
            root_dirs,
            enable_snippets,
        )?;
        self.incoming.push(UnboundedReceiverStream::new(incoming));
        Ok(client)
    }

    pub fn restart(
        &mut self,
        language_config: &LanguageConfiguration,
        doc_path: Option<&std::path::PathBuf>,
        root_dirs: &[PathBuf],
        enable_snippets: bool,
    ) -> Result<Vec<Arc<Client>>> {
        language_config
            .language_servers
            .iter()
            .filter_map(|config| {
                let name = config.name().clone();

                #[allow(clippy::map_entry)]
                if self.inner.contains_key(&name) {
                    let client = match self.start_client(
                        name.clone(),
                        language_config,
                        doc_path,
                        root_dirs,
                        enable_snippets,
                    ) {
                        Ok(client) => client,
                        error => return Some(error),
                    };
                    let old_clients = self.inner.insert(name, vec![client.clone()]).unwrap();

                    // TODO what if there are different language servers for different workspaces,
                    // I think the language servers will be stopped without being restarted, which is not intended
                    for old_client in old_clients {
                        tokio::spawn(async move {
                            let _ = old_client.force_shutdown().await;
                        });
                    }

                    Some(Ok(client))
                } else {
                    None
                }
            })
            .collect()
    }

    pub fn stop(&mut self, name: &str) {
        if let Some(clients) = self.inner.remove(name) {
            for client in clients {
                tokio::spawn(async move {
                    let _ = client.force_shutdown().await;
                });
            }
        }
    }

    pub fn get(
        &mut self,
        language_config: &LanguageConfiguration,
        doc_path: Option<&std::path::PathBuf>,
        root_dirs: &[PathBuf],
        enable_snippets: bool,
    ) -> Result<Vec<Arc<Client>>> {
        language_config
            .language_servers
            .iter()
            .map(|features| {
                let name = features.name();
                if let Some(clients) = self.inner.get_mut(name) {
                    if let Some((_, client)) = clients.iter_mut().enumerate().find(|(i, client)| {
                        client.try_add_doc(&language_config.roots, root_dirs, doc_path, *i == 0)
                    }) {
                        return Ok(client.clone());
                    }
                }
                let client = self.start_client(
                    name.clone(),
                    language_config,
                    doc_path,
                    root_dirs,
                    enable_snippets,
                )?;
                let clients = self.inner.entry(features.name().clone()).or_default();
                clients.push(client.clone());
                Ok(client)
            })
            .collect()
    }

    pub fn iter_clients(&self) -> impl Iterator<Item = &Arc<Client>> {
        self.inner.values().flatten()
    }
}

#[derive(Debug)]
pub enum ProgressStatus {
    Created,
    Started(lsp::WorkDoneProgress),
}

impl ProgressStatus {
    pub fn progress(&self) -> Option<&lsp::WorkDoneProgress> {
        match &self {
            ProgressStatus::Created => None,
            ProgressStatus::Started(progress) => Some(progress),
        }
    }
}

#[derive(Default, Debug)]
/// Acts as a container for progress reported by language servers. Each server
/// has a unique id assigned at creation through [`Registry`]. This id is then used
/// to store the progress in this map.
pub struct LspProgressMap(HashMap<usize, HashMap<lsp::ProgressToken, ProgressStatus>>);

impl LspProgressMap {
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns a map of all tokens corresponding to the language server with `id`.
    pub fn progress_map(&self, id: usize) -> Option<&HashMap<lsp::ProgressToken, ProgressStatus>> {
        self.0.get(&id)
    }

    pub fn is_progressing(&self, id: usize) -> bool {
        self.0.get(&id).map(|it| !it.is_empty()).unwrap_or_default()
    }

    /// Returns last progress status for a given server with `id` and `token`.
    pub fn progress(&self, id: usize, token: &lsp::ProgressToken) -> Option<&ProgressStatus> {
        self.0.get(&id).and_then(|values| values.get(token))
    }

    /// Checks if progress `token` for server with `id` is created.
    pub fn is_created(&mut self, id: usize, token: &lsp::ProgressToken) -> bool {
        self.0
            .get(&id)
            .map(|values| values.get(token).is_some())
            .unwrap_or_default()
    }

    pub fn create(&mut self, id: usize, token: lsp::ProgressToken) {
        self.0
            .entry(id)
            .or_default()
            .insert(token, ProgressStatus::Created);
    }

    /// Ends the progress by removing the `token` from server with `id`, if removed returns the value.
    pub fn end_progress(
        &mut self,
        id: usize,
        token: &lsp::ProgressToken,
    ) -> Option<ProgressStatus> {
        self.0.get_mut(&id).and_then(|vals| vals.remove(token))
    }

    /// Updates the progress of `token` for server with `id` to `status`, returns the value replaced or `None`.
    pub fn update(
        &mut self,
        id: usize,
        token: lsp::ProgressToken,
        status: lsp::WorkDoneProgress,
    ) -> Option<ProgressStatus> {
        self.0
            .entry(id)
            .or_default()
            .insert(token, ProgressStatus::Started(status))
    }
}

struct NewClient(Arc<Client>, UnboundedReceiver<(usize, Call)>);

/// start_client takes both a LanguageConfiguration and a LanguageServerConfiguration to ensure that
/// it is only called when it makes sense.
fn start_client(
    id: usize,
    name: String,
    config: &LanguageConfiguration,
    ls_config: &LanguageServerConfiguration,
    doc_path: Option<&std::path::PathBuf>,
    root_dirs: &[PathBuf],
    enable_snippets: bool,
) -> Result<NewClient> {
    let (client, incoming, initialize_notify) = Client::start(
        &ls_config.command,
        &ls_config.args,
        ls_config.config.clone(),
        ls_config.environment.clone(),
        &config.roots,
        config.workspace_lsp_roots.as_deref().unwrap_or(root_dirs),
        id,
        name,
        ls_config.timeout,
        doc_path,
    )?;

    let client = Arc::new(client);

    // Initialize the client asynchronously
    let _client = client.clone();
    tokio::spawn(async move {
        use futures_util::TryFutureExt;
        let value = _client
            .capabilities
            .get_or_try_init(|| {
                _client
                    .initialize(enable_snippets)
                    .map_ok(|response| response.capabilities)
            })
            .await;

        if let Err(e) = value {
            log::error!("failed to initialize language server: {}", e);
            return;
        }

        // next up, notify<initialized>
        _client
            .notify::<lsp::notification::Initialized>(lsp::InitializedParams {})
            .await
            .unwrap();

        initialize_notify.notify_one();
    });

    Ok(NewClient(client, incoming))
}

/// Find an LSP workspace of a file using the following mechanism:
/// * if the file is outside `workspace` return `None`
/// * start at `file` and search the file tree upward
/// * stop the search at the first `root_dirs` entry that contains `file`
/// * if no `root_dirs` matches `file` stop at workspace
/// * Returns the top most directory that contains a `root_marker`
/// * If no root marker and we stopped at a `root_dirs` entry, return the directory we stopped at
/// * If we stopped at `workspace` instead and `workspace_is_cwd == false` return `None`
/// * If we stopped at `workspace` instead and `workspace_is_cwd == true` return `workspace`
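///
/// For example (illustrative): with `workspace = /home/user/project`,
/// `root_dirs = ["backend"]`, and a `Cargo.toml` root marker present in
/// `/home/user/project/backend`, the file `/home/user/project/backend/src/main.rs`
/// resolves to `/home/user/project/backend`.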
pub fn find_lsp_workspace(
    file: &str,
    root_markers: &[String],
    root_dirs: &[PathBuf],
    workspace: &Path,
    workspace_is_cwd: bool,
) -> Option<PathBuf> {
    let file = std::path::Path::new(file);
    let mut file = if file.is_absolute() {
        file.to_path_buf()
    } else {
        let current_dir = std::env::current_dir().expect("unable to determine current directory");
        current_dir.join(file)
    };
    file = path::get_normalized_path(&file);

    if !file.starts_with(workspace) {
        return None;
    }

    let mut top_marker = None;
    for ancestor in file.ancestors() {
        if root_markers
            .iter()
            .any(|marker| ancestor.join(marker).exists())
        {
            top_marker = Some(ancestor);
        }

        if root_dirs
            .iter()
            .any(|root_dir| path::get_normalized_path(&workspace.join(root_dir)) == ancestor)
        {
            // if the workspace is the cwd do not search any higher for workspaces
            // but return the topmost marker found so far (or the workspace itself)
            return Some(top_marker.unwrap_or(workspace).to_owned());
        }
        if ancestor == workspace {
            // if the workspace is the CWD, let the LSP decide what the workspace
            // is
            return top_marker
                .or_else(|| (!workspace_is_cwd).then_some(workspace))
                .map(Path::to_owned);
        }
    }

    debug_assert!(false, "workspace must be an ancestor of <file>");
    None
}

#[cfg(test)]
mod tests {
    use super::{lsp, util::*, OffsetEncoding};
    use helix_core::Rope;

    #[test]
    fn converts_lsp_pos_to_pos() {
        macro_rules! test_case {
            ($doc:expr, ($x:expr, $y:expr) => $want:expr) => {
                let doc = Rope::from($doc);
                let pos = lsp::Position::new($x, $y);
                assert_eq!($want, lsp_pos_to_pos(&doc, pos, OffsetEncoding::Utf16));
                assert_eq!($want, lsp_pos_to_pos(&doc, pos, OffsetEncoding::Utf8))
            };
        }

        test_case!("", (0, 0) => Some(0));
        test_case!("", (0, 1) => Some(0));
        test_case!("", (1, 0) => Some(0));
        test_case!("\n\n", (0, 0) => Some(0));
        test_case!("\n\n", (1, 0) => Some(1));
        test_case!("\n\n", (1, 1) => Some(1));
        test_case!("\n\n", (2, 0) => Some(2));
        test_case!("\n\n", (3, 0) => Some(2));
        test_case!("test\n\n\n\ncase", (4, 3) => Some(11));
        test_case!("test\n\n\n\ncase", (4, 4) => Some(12));
        test_case!("test\n\n\n\ncase", (4, 5) => Some(12));
        test_case!("", (u32::MAX, u32::MAX) => Some(0));
    }

    #[test]
    fn emoji_format_gh_4791() {
        use lsp_types::{Position, Range, TextEdit};

        let edits = vec![
            TextEdit {
                range: Range {
                    start: Position {
                        line: 0,
                        character: 1,
                    },
                    end: Position {
                        line: 1,
                        character: 0,
                    },
                },
                new_text: "\n ".to_string(),
            },
            TextEdit {
                range: Range {
                    start: Position {
                        line: 1,
                        character: 7,
                    },
                    end: Position {
                        line: 2,
                        character: 0,
                    },
                },
                new_text: "\n ".to_string(),
            },
        ];

        let mut source = Rope::from_str("[\n\"🇺🇸\",\n\"🎄\",\n]");

        let transaction = generate_transaction_from_edits(&source, edits, OffsetEncoding::Utf8);
        assert!(transaction.apply(&mut source));
    }
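
    // A small round-trip sketch: '𐍈' (U+10348) is four UTF-8 bytes, two UTF-16 code
    // units, and one char, so the same char index maps to a different LSP column
    // depending on the offset encoding.
    #[test]
    fn lsp_pos_round_trip_multibyte() {
        let doc = Rope::from("a𐍈b\n");
        let pos = 2; // char index of 'b'
        assert_eq!(
            pos_to_lsp_pos(&doc, pos, OffsetEncoding::Utf8),
            lsp::Position::new(0, 5)
        );
        assert_eq!(
            pos_to_lsp_pos(&doc, pos, OffsetEncoding::Utf16),
            lsp::Position::new(0, 3)
        );
        assert_eq!(
            lsp_pos_to_pos(&doc, lsp::Position::new(0, 5), OffsetEncoding::Utf8),
            Some(2)
        );
        assert_eq!(
            lsp_pos_to_pos(&doc, lsp::Position::new(0, 3), OffsetEncoding::Utf16),
            Some(2)
        );
    }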
}