Mirror of https://github.com/helix-editor/helix.git (synced 2024-11-25 02:46:17 +04:00)
implement proper lsp-workspace support
fix typo

Co-authored-by: LeoniePhiline <22329650+LeoniePhiline@users.noreply.github.com>
parent: 2d10a429eb
commit: 5b3dd6a678
Cargo.lock (generated): 1 change
@@ -1149,6 +1149,7 @@ dependencies = [
  "helix-parsec",
  "log",
  "lsp-types",
+ "parking_lot",
  "serde",
  "serde_json",
  "thiserror",
@@ -64,7 +64,7 @@ ## Language configuration
 | `grammar` | The tree-sitter grammar to use (defaults to the value of `name`) |
 | `formatter` | The formatter for the language, it will take precedence over the lsp when defined. The formatter must be able to take the original file as input from stdin and write the formatted file to stdout |
 | `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap_at_text_width` is set, defaults to `editor.text-width` |
-| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. | `` |
+| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. |
 
 ### File-type detection and the `file-types` key
 
@@ -9,35 +9,38 @@ pub fn default_lang_config() -> toml::Value {
 
 /// User configured languages.toml file, merged with the default config.
 pub fn user_lang_config() -> Result<toml::Value, toml::de::Error> {
-    let config = [crate::config_dir(), crate::find_workspace().join(".helix")]
-        .into_iter()
-        .map(|path| path.join("languages.toml"))
-        .filter_map(|file| {
-            std::fs::read_to_string(file)
-                .map(|config| toml::from_str(&config))
-                .ok()
-        })
-        .collect::<Result<Vec<_>, _>>()?
-        .into_iter()
-        .fold(default_lang_config(), |a, b| {
-            // combines for example
-            // b:
-            // [[language]]
-            // name = "toml"
-            // language-server = { command = "taplo", args = ["lsp", "stdio"] }
-            //
-            // a:
-            // [[language]]
-            // language-server = { command = "/usr/bin/taplo" }
-            //
-            // into:
-            // [[language]]
-            // name = "toml"
-            // language-server = { command = "/usr/bin/taplo" }
-            //
-            // thus it overrides the third depth-level of b with values of a if they exist, but otherwise merges their values
-            crate::merge_toml_values(a, b, 3)
-        });
+    let config = [
+        crate::config_dir(),
+        crate::find_workspace().0.join(".helix"),
+    ]
+    .into_iter()
+    .map(|path| path.join("languages.toml"))
+    .filter_map(|file| {
+        std::fs::read_to_string(file)
+            .map(|config| toml::from_str(&config))
+            .ok()
+    })
+    .collect::<Result<Vec<_>, _>>()?
+    .into_iter()
+    .fold(default_lang_config(), |a, b| {
+        // combines for example
+        // b:
+        // [[language]]
+        // name = "toml"
+        // language-server = { command = "taplo", args = ["lsp", "stdio"] }
+        //
+        // a:
+        // [[language]]
+        // language-server = { command = "/usr/bin/taplo" }
+        //
+        // into:
+        // [[language]]
+        // name = "toml"
+        // language-server = { command = "/usr/bin/taplo" }
+        //
+        // thus it overrides the third depth-level of b with values of a if they exist, but otherwise merges their values
+        crate::merge_toml_values(a, b, 3)
+    });
 
     Ok(config)
 }
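The fold above merges every discovered languages.toml into the defaults, three levels deep. Below is a minimal sketch of that merge behaviour; it assumes `helix_loader::merge_toml_values` is callable with the `(left, right, depth)` signature used in the diff, and the wrapper function is invented for illustration.

```rust
// Sketch only: reproduces the a/b example from the comments in the diff above.
fn merge_example() -> Result<(), toml::de::Error> {
    let default: toml::Value = toml::from_str(
        r#"
        [[language]]
        name = "toml"
        language-server = { command = "taplo", args = ["lsp", "stdio"] }
        "#,
    )?;
    let user: toml::Value = toml::from_str(
        r#"
        [[language]]
        name = "toml"
        language-server = { command = "/usr/bin/taplo" }
        "#,
    )?;
    // Conflicts up to the third nesting level are resolved in favour of the
    // second argument (the user config), everything else is merged, so the
    // result keeps name = "toml" but ends up with command = "/usr/bin/taplo".
    let merged = helix_loader::merge_toml_values(default, user, 3);
    println!("{merged}");
    Ok(())
}
```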
@@ -129,7 +129,7 @@ pub fn config_file() -> PathBuf {
 }
 
 pub fn workspace_config_file() -> PathBuf {
-    find_workspace().join(".helix").join("config.toml")
+    find_workspace().0.join(".helix").join("config.toml")
 }
 
 pub fn lang_config_file() -> PathBuf {
@@ -283,14 +283,19 @@ fn language_toml_nested_array_merges() {
 }
 
 /// Finds the current workspace folder.
-/// Used as a ceiling dir for root resolve, for the filepicker and other related
-pub fn find_workspace() -> PathBuf {
+/// Used as a ceiling dir for LSP root resolution, the filepicker and potentially as a future filewatching root
+///
+/// This function starts searching the FS upward from the CWD
+/// and returns the first directory that contains either `.git` or `.helix`.
+/// If no workspace was found returns (CWD, true).
+/// Otherwise (workspace, false) is returned
+pub fn find_workspace() -> (PathBuf, bool) {
     let current_dir = std::env::current_dir().expect("unable to determine current directory");
     for ancestor in current_dir.ancestors() {
         if ancestor.join(".git").exists() || ancestor.join(".helix").exists() {
-            return ancestor.to_owned();
+            return (ancestor.to_owned(), false);
         }
     }
 
-    current_dir
+    (current_dir, true)
 }
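`find_workspace` now also reports whether it fell back to the CWD. A minimal usage sketch follows; it assumes the function is reachable as `helix_loader::find_workspace`, and the wrapper function and message are invented for illustration.

```rust
use std::path::PathBuf;

// Sketch: consume the new (PathBuf, bool) return value.
fn workspace_root_or_cwd() -> PathBuf {
    let (workspace, workspace_is_cwd) = helix_loader::find_workspace();
    if workspace_is_cwd {
        // no `.git` or `.helix` marker was found; the CWD itself is the fallback
        eprintln!("no workspace marker found, using {}", workspace.display());
    }
    workspace
}
```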
@@ -27,3 +27,4 @@ thiserror = "1.0"
 tokio = { version = "1.27", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
 tokio-stream = "0.1.12"
 which = "4.4"
+parking_lot = "0.12.1"
@@ -1,13 +1,17 @@
 use crate::{
-    find_root, jsonrpc,
+    find_lsp_workspace, jsonrpc,
     transport::{Payload, Transport},
     Call, Error, OffsetEncoding, Result,
 };
 
-use helix_core::{ChangeSet, Rope};
+use helix_core::{find_workspace, ChangeSet, Rope};
 use helix_loader::{self, VERSION_AND_GIT_HASH};
-use lsp::PositionEncodingKind;
+use lsp::{
+    notification::DidChangeWorkspaceFolders, DidChangeWorkspaceFoldersParams, OneOf,
+    PositionEncodingKind, WorkspaceFolder, WorkspaceFoldersChangeEvent,
+};
 use lsp_types as lsp;
+use parking_lot::Mutex;
 use serde::Deserialize;
 use serde_json::Value;
 use std::future::Future;
@@ -26,6 +30,17 @@
     },
 };
 
+fn workspace_for_uri(uri: lsp::Url) -> WorkspaceFolder {
+    lsp::WorkspaceFolder {
+        name: uri
+            .path_segments()
+            .and_then(|segments| segments.last())
+            .map(|basename| basename.to_string())
+            .unwrap_or_default(),
+        uri,
+    }
+}
+
 #[derive(Debug)]
 pub struct Client {
     id: usize,
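For reference, this is roughly what the private `workspace_for_uri` helper above yields for a concrete input; the path is made up, and the call assumes the same module scope as the helper.

```rust
// Illustrative only (Unix-style path; Url::from_file_path behaves differently on Windows).
fn workspace_for_uri_example() {
    let uri = lsp_types::Url::from_file_path("/home/user/projects/demo").unwrap();
    let folder = workspace_for_uri(uri);
    // the folder name is just the last path segment, the uri is the full file:// URL
    assert_eq!(folder.name, "demo");
    assert_eq!(folder.uri.as_str(), "file:///home/user/projects/demo");
}
```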
@@ -36,11 +51,120 @@ pub struct Client {
     config: Option<Value>,
     root_path: std::path::PathBuf,
     root_uri: Option<lsp::Url>,
-    workspace_folders: Vec<lsp::WorkspaceFolder>,
+    workspace_folders: Mutex<Vec<lsp::WorkspaceFolder>>,
+    initalize_notify: Arc<Notify>,
+    /// workspace folders added while the server is still initalizing
     req_timeout: u64,
 }
 
 impl Client {
+    pub fn try_add_doc(
+        self: &Arc<Self>,
+        root_markers: &[String],
+        manual_roots: &[PathBuf],
+        doc_path: Option<&std::path::PathBuf>,
+        may_support_workspace: bool,
+    ) -> bool {
+        let (workspace, workspace_is_cwd) = find_workspace();
+        let root = find_lsp_workspace(
+            doc_path
+                .and_then(|x| x.parent().and_then(|x| x.to_str()))
+                .unwrap_or("."),
+            root_markers,
+            manual_roots,
+            &workspace,
+            workspace_is_cwd,
+        );
+        let root_uri = root
+            .as_ref()
+            .and_then(|root| lsp::Url::from_file_path(root).ok());
+
+        if self.root_path == root.unwrap_or(workspace)
+            || root_uri.as_ref().map_or(false, |root_uri| {
+                self.workspace_folders
+                    .lock()
+                    .iter()
+                    .any(|workspace| &workspace.uri == root_uri)
+            })
+        {
+            // workspace URI is already registered so we can use this client
+            return true;
+        }
+
+        // this server definitly doesn't support multiple workspace, no need to check capabilities
+        if !may_support_workspace {
+            return false;
+        }
+
+        let Some(capabilities) = self.capabilities.get() else {
+            let client = Arc::clone(self);
+            // initalization hasn't finished yet, deal with this new root later
+            // TODO: In the edgecase that a **new root** is added
+            // for an LSP that **doesn't support workspace_folders** before initaliation is finished
+            // the new roots are ignored.
+            // That particular edgecase would require retroactively spawning new LSP
+            // clients and therefore also require us to retroactively update the corresponding
+            // documents LSP client handle. It's doable but a pretty weird edgecase so let's
+            // wait and see if anyone ever runs into it.
+            tokio::spawn(async move {
+                client.initalize_notify.notified().await;
+                if let Some(workspace_folders_caps) = client
+                    .capabilities()
+                    .workspace
+                    .as_ref()
+                    .and_then(|cap| cap.workspace_folders.as_ref())
+                    .filter(|cap| cap.supported.unwrap_or(false))
+                {
+                    client.add_workspace_folder(
+                        root_uri,
+                        &workspace_folders_caps.change_notifications,
+                    );
+                }
+            });
+            return true;
+        };
+
+        if let Some(workspace_folders_caps) = capabilities
+            .workspace
+            .as_ref()
+            .and_then(|cap| cap.workspace_folders.as_ref())
+            .filter(|cap| cap.supported.unwrap_or(false))
+        {
+            self.add_workspace_folder(root_uri, &workspace_folders_caps.change_notifications);
+            true
+        } else {
+            // the server doesn't support multi workspaces, we need a new client
+            false
+        }
+    }
+
+    fn add_workspace_folder(
+        &self,
+        root_uri: Option<lsp::Url>,
+        change_notifications: &Option<OneOf<bool, String>>,
+    ) {
+        // root_uri is None just means that there isn't really any LSP workspace
+        // associated with this file. For servers that support multiple workspaces
+        // there is just one server so we can always just use that shared instance.
+        // No need to add a new workspace root here as there is no logical root for this file
+        // let the server deal with this
+        let Some(root_uri) = root_uri else {
+            return;
+        };
+
+        // server supports workspace folders, let's add the new root to the list
+        self.workspace_folders
+            .lock()
+            .push(workspace_for_uri(root_uri.clone()));
+        if &Some(OneOf::Left(false)) == change_notifications {
+            // server specifically opted out of DidWorkspaceChange notifications
+            // let's assume the server will request the workspace folders itself
+            // and that we can therefore reuse the client (but are done now)
+            return;
+        }
+        tokio::spawn(self.did_change_workspace(vec![workspace_for_uri(root_uri)], Vec::new()));
+    }
+
     #[allow(clippy::type_complexity)]
     #[allow(clippy::too_many_arguments)]
     pub fn start(
@@ -76,30 +200,25 @@ pub fn start(
 
         let (server_rx, server_tx, initialize_notify) =
             Transport::start(reader, writer, stderr, id);
-        let root_path = find_root(
+        let (workspace, workspace_is_cwd) = find_workspace();
+        let root = find_lsp_workspace(
             doc_path
                 .and_then(|x| x.parent().and_then(|x| x.to_str()))
                 .unwrap_or("."),
             root_markers,
             manual_roots,
+            &workspace,
+            workspace_is_cwd,
         );
 
-        let root_uri = lsp::Url::from_file_path(root_path.clone()).ok();
+        // `root_uri` and `workspace_folder` can be empty in case there is no workspace
+        // `root_url` can not, use `workspace` as a fallback
+        let root_path = root.clone().unwrap_or_else(|| workspace.clone());
+        let root_uri = root.and_then(|root| lsp::Url::from_file_path(root).ok());
 
-        // TODO: support multiple workspace folders
         let workspace_folders = root_uri
             .clone()
-            .map(|root| {
-                vec![lsp::WorkspaceFolder {
-                    name: root
-                        .path_segments()
-                        .and_then(|segments| segments.last())
-                        .map(|basename| basename.to_string())
-                        .unwrap_or_default(),
-                    uri: root,
-                }]
-            })
+            .map(|root| vec![workspace_for_uri(root)])
             .unwrap_or_default();
 
         let client = Self {
@@ -110,10 +229,10 @@ pub fn start(
             capabilities: OnceCell::new(),
             config,
             req_timeout,
-
             root_path,
             root_uri,
-            workspace_folders,
+            workspace_folders: Mutex::new(workspace_folders),
+            initalize_notify: initialize_notify.clone(),
         };
 
         Ok((client, server_rx, initialize_notify))
@@ -169,8 +288,10 @@ pub fn config(&self) -> Option<&Value> {
         self.config.as_ref()
     }
 
-    pub fn workspace_folders(&self) -> &[lsp::WorkspaceFolder] {
-        &self.workspace_folders
+    pub async fn workspace_folders(
+        &self,
+    ) -> parking_lot::MutexGuard<'_, Vec<lsp::WorkspaceFolder>> {
+        self.workspace_folders.lock()
     }
 
     /// Execute a RPC request on the language server.
@@ -298,7 +419,7 @@ pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> {
         #[allow(deprecated)]
         let params = lsp::InitializeParams {
             process_id: Some(std::process::id()),
-            workspace_folders: Some(self.workspace_folders.clone()),
+            workspace_folders: Some(self.workspace_folders.lock().clone()),
             // root_path is obsolete, but some clients like pyright still use it so we specify both.
             // clients will prefer _uri if possible
             root_path: self.root_path.to_str().map(|path| path.to_owned()),
@@ -469,6 +590,16 @@ pub fn did_change_configuration(&self, settings: Value) -> impl Future<Output =
         )
     }
 
+    pub fn did_change_workspace(
+        &self,
+        added: Vec<WorkspaceFolder>,
+        removed: Vec<WorkspaceFolder>,
+    ) -> impl Future<Output = Result<()>> {
+        self.notify::<DidChangeWorkspaceFolders>(DidChangeWorkspaceFoldersParams {
+            event: WorkspaceFoldersChangeEvent { added, removed },
+        })
+    }
+
     // -------------------------------------------------------------------------------------------
     // Text document
     // -------------------------------------------------------------------------------------------
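The new `did_change_workspace` helper above wraps the `workspace/didChangeWorkspaceFolders` notification. Below is a rough sketch of the parameters it sends, serialized with serde_json purely to show the wire shape; the path is invented and the printed JSON is approximate.

```rust
// Sketch only: builds the same params type and prints its JSON form.
fn show_did_change_workspace_payload() {
    let added = vec![workspace_for_uri(
        lsp_types::Url::from_file_path("/home/user/projects/demo").unwrap(),
    )];
    let params = lsp_types::DidChangeWorkspaceFoldersParams {
        event: lsp_types::WorkspaceFoldersChangeEvent {
            added,
            removed: Vec::new(),
        },
    };
    // prints something like:
    // {"event":{"added":[{"uri":"file:///home/user/projects/demo","name":"demo"}],"removed":[]}}
    println!("{}", serde_json::to_string(&params).unwrap());
}
```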
@@ -10,15 +10,12 @@
 pub use lsp_types as lsp;
 
 use futures_util::stream::select_all::SelectAll;
-use helix_core::{
-    find_workspace,
-    syntax::{LanguageConfiguration, LanguageServerConfiguration},
-};
+use helix_core::syntax::{LanguageConfiguration, LanguageServerConfiguration};
 use tokio::sync::mpsc::UnboundedReceiver;
 
 use std::{
     collections::{hash_map::Entry, HashMap},
-    path::PathBuf,
+    path::{Path, PathBuf},
     sync::{
         atomic::{AtomicUsize, Ordering},
         Arc,
@@ -609,7 +606,7 @@ pub fn parse(method: &str, params: jsonrpc::Params) -> Result<Notification> {
 
 #[derive(Debug)]
 pub struct Registry {
-    inner: HashMap<LanguageId, (usize, Arc<Client>)>,
+    inner: HashMap<LanguageId, Vec<(usize, Arc<Client>)>>,
 
     counter: AtomicUsize,
     pub incoming: SelectAll<UnboundedReceiverStream<(usize, Call)>>,
@@ -633,12 +630,16 @@ pub fn new() -> Self {
     pub fn get_by_id(&self, id: usize) -> Option<&Client> {
         self.inner
             .values()
+            .flatten()
             .find(|(client_id, _)| client_id == &id)
             .map(|(_, client)| client.as_ref())
     }
 
     pub fn remove_by_id(&mut self, id: usize) {
-        self.inner.retain(|_, (client_id, _)| client_id != &id)
+        self.inner.retain(|_, clients| {
+            clients.retain(|&(client_id, _)| client_id != id);
+            !clients.is_empty()
+        })
     }
 
     pub fn restart(
@@ -664,11 +665,13 @@ pub fn restart(
                     start_client(id, language_config, config, doc_path, root_dirs)?;
                 self.incoming.push(UnboundedReceiverStream::new(incoming));
 
-                let (_, old_client) = entry.insert((id, client.clone()));
+                let old_clients = entry.insert(vec![(id, client.clone())]);
 
-                tokio::spawn(async move {
-                    let _ = old_client.force_shutdown().await;
-                });
+                for (_, old_client) in old_clients {
+                    tokio::spawn(async move {
+                        let _ = old_client.force_shutdown().await;
+                    });
+                }
 
                 Ok(Some(client))
             }
@@ -678,10 +681,12 @@ pub fn restart(
     pub fn stop(&mut self, language_config: &LanguageConfiguration) {
         let scope = language_config.scope.clone();
 
-        if let Some((_, client)) = self.inner.remove(&scope) {
-            tokio::spawn(async move {
-                let _ = client.force_shutdown().await;
-            });
+        if let Some(clients) = self.inner.remove(&scope) {
+            for (_, client) in clients {
+                tokio::spawn(async move {
+                    let _ = client.force_shutdown().await;
+                });
+            }
         }
     }
 
@@ -696,24 +701,25 @@ pub fn get(
             None => return Ok(None),
         };
 
-        match self.inner.entry(language_config.scope.clone()) {
-            Entry::Occupied(entry) => Ok(Some(entry.get().1.clone())),
-            Entry::Vacant(entry) => {
-                // initialize a new client
-                let id = self.counter.fetch_add(1, Ordering::Relaxed);
-
-                let NewClientResult(client, incoming) =
-                    start_client(id, language_config, config, doc_path, root_dirs)?;
-                self.incoming.push(UnboundedReceiverStream::new(incoming));
-
-                entry.insert((id, client.clone()));
-                Ok(Some(client))
-            }
-        }
+        let clients = self.inner.entry(language_config.scope.clone()).or_default();
+        // check if we already have a client for this documents root that we can reuse
+        if let Some((_, client)) = clients.iter_mut().enumerate().find(|(i, (_, client))| {
+            client.try_add_doc(&language_config.roots, root_dirs, doc_path, *i == 0)
+        }) {
+            return Ok(Some(client.1.clone()));
+        }
+        // initialize a new client
+        let id = self.counter.fetch_add(1, Ordering::Relaxed);
+
+        let NewClientResult(client, incoming) =
+            start_client(id, language_config, config, doc_path, root_dirs)?;
+        clients.push((id, client.clone()));
+        self.incoming.push(UnboundedReceiverStream::new(incoming));
+        Ok(Some(client))
     }
 
     pub fn iter_clients(&self) -> impl Iterator<Item = &Arc<Client>> {
-        self.inner.values().map(|(_, client)| client)
+        self.inner.values().flatten().map(|(_, client)| client)
     }
 }
@@ -850,16 +856,23 @@ fn start_client(
     Ok(NewClientResult(client, incoming))
 }
 
-/// Find an LSP root of a file using the following mechansim:
-/// * start at `file` (either an absolute path or relative to CWD)
-/// * find the top most directory containing a root_marker
-/// * inside the current workspace
-/// * stop the search at the first root_dir that contains `file` or the workspace (obtained from `helix_core::find_workspace`)
-/// * root_dirs only apply inside the workspace. For files outside of the workspace they are ignored
-/// * outside the current workspace: keep searching to the top of the file hiearchy
-pub fn find_root(file: &str, root_markers: &[String], root_dirs: &[PathBuf]) -> PathBuf {
+/// Find an LSP workspace of a file using the following mechanism:
+/// * if the file is outside `workspace` return `None`
+/// * start at `file` and search the file tree upward
+/// * stop the search at the first `root_dirs` entry that contains `file`
+/// * if no `root_dirs` matchs `file` stop at workspace
+/// * Returns the top most directory that contains a `root_marker`
+/// * If no root marker and we stopped at a `root_dirs` entry, return the directory we stopped at
+/// * If we stopped at `workspace` instead and `workspace_is_cwd == false` return `None`
+/// * If we stopped at `workspace` instead and `workspace_is_cwd == true` return `workspace`
+pub fn find_lsp_workspace(
+    file: &str,
+    root_markers: &[String],
+    root_dirs: &[PathBuf],
+    workspace: &Path,
+    workspace_is_cwd: bool,
+) -> Option<PathBuf> {
     let file = std::path::Path::new(file);
-    let workspace = find_workspace();
     let file = if file.is_absolute() {
         file.to_path_buf()
     } else {
@@ -867,7 +880,9 @@ pub fn find_root(file: &str, root_markers: &[String], root_dirs: &[PathBuf]) ->
         current_dir.join(file)
     };
 
-    let inside_workspace = file.strip_prefix(&workspace).is_ok();
+    if !file.starts_with(workspace) {
+        return None;
+    }
 
     let mut top_marker = None;
     for ancestor in file.ancestors() {
@@ -878,18 +893,25 @@ pub fn find_root(file: &str, root_markers: &[String], root_dirs: &[PathBuf]) ->
             top_marker = Some(ancestor);
         }
 
-        if inside_workspace
-            && (ancestor == workspace
-                || root_dirs
-                    .iter()
-                    .any(|root_dir| root_dir == ancestor.strip_prefix(&workspace).unwrap()))
+        if root_dirs
+            .iter()
+            .any(|root_dir| root_dir == ancestor.strip_prefix(workspace).unwrap())
         {
-            return top_marker.unwrap_or(ancestor).to_owned();
+            // if the worskapce is the cwd do not search any higher for workspaces
+            // but specify
+            return Some(top_marker.unwrap_or(workspace).to_owned());
+        }
+        if ancestor == workspace {
+            // if the workspace is the CWD, let the LSP decide what the workspace
+            // is
+            return top_marker
+                .or_else(|| (!workspace_is_cwd).then_some(workspace))
+                .map(Path::to_owned);
         }
     }
 
-    // If no root was found use the workspace as a fallback
-    workspace
+    debug_assert!(false, "workspace must be an ancestor of <file>");
+    None
 }
 
 #[cfg(test)]
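Putting the `find_lsp_workspace` pieces together, here is a hedged usage sketch of the resolution rules from the doc comment above. All paths are invented and the real result depends on what exists on disk, so the noted outcomes describe the intended behaviour rather than asserted output.

```rust
fn find_lsp_workspace_example() {
    use std::path::{Path, PathBuf};

    // workspace found via `.git`, so workspace_is_cwd == false
    let workspace = Path::new("/home/user/mono");
    // corresponds to `workspace-lsp-roots = ["frontend", "backend"]`
    let root_dirs = [PathBuf::from("frontend"), PathBuf::from("backend")];
    let markers = [String::from("Cargo.toml")];

    // A file under a configured root stops the upward search at that root;
    // expected: Some(".../mono/frontend") or a marker directory inside it.
    let _inside = find_lsp_workspace(
        "/home/user/mono/frontend/src/main.rs",
        &markers,
        &root_dirs,
        workspace,
        false,
    );

    // A file outside the workspace yields None; the caller then falls back to
    // the workspace itself as root_path (see the `start` hunk above).
    let _outside = find_lsp_workspace("/tmp/scratch.rs", &markers, &root_dirs, workspace, false);
}
```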
@@ -1018,7 +1018,7 @@ pub async fn handle_language_server_message(
                 let language_server =
                     self.editor.language_servers.get_by_id(server_id).unwrap();
 
-                Ok(json!(language_server.workspace_folders()))
+                Ok(json!(&*language_server.workspace_folders().await))
             }
             Ok(MethodCall::WorkspaceConfiguration(params)) => {
                 let result: Vec<_> = params
@@ -2419,7 +2419,7 @@ fn append_mode(cx: &mut Context) {
 }
 
 fn file_picker(cx: &mut Context) {
-    let root = find_workspace();
+    let root = find_workspace().0;
     let picker = ui::file_picker(root, &cx.editor.config());
     cx.push_layer(Box::new(overlayed(picker)));
 }