Compare commits

5 commits: 7b47ce32aa, 0557e88eae, 79ac545873, 5c4b93b4b4, 57fde30a09
Cargo.lock (generated, 9 changes)
@@ -471,8 +471,10 @@ dependencies = [
 "futures-executor",
 "futures-util",
 "helix-core",
 "helix-parsec",
 "log",
 "lsp-types",
 "once_cell",
 "serde",
 "serde_json",
 "thiserror",
@@ -481,6 +483,13 @@ dependencies = [
 "which",
]

[[package]]
name = "helix-parsec"
version = "0.6.0"
dependencies = [
 "regex",
]

[[package]]
name = "helix-term"
version = "0.6.0"
Cargo.toml
@@ -7,6 +7,7 @@ members = [
  "helix-lsp",
  "helix-dap",
  "helix-loader",
  "helix-parsec",
  "xtask",
]
TODO.xit (new file, 9 lines)
@@ -0,0 +1,9 @@
[x] Parse snippet syntax
[x] Generate transaction from snippet
[ ] Store snippet on the Document
[ ] Map snippet ranges through Document changes
[ ] Delete placeholder values on-type in `helix_term::commands::insert::insert`
[ ] Jump between tabstops with Tab and BackTab
[ ] Support variables
[ ] Support variable transforms
[ ] Support choices as completion items
helix-lsp/Cargo.toml
@@ -13,6 +13,7 @@ homepage = "https://helix-editor.com"

[dependencies]
helix-core = { version = "0.6", path = "../helix-core" }
helix-parsec = { version = "0.6", path = "../helix-parsec" }

anyhow = "1.0"
futures-executor = "0.3"
@@ -25,3 +26,4 @@ thiserror = "1.0"
tokio = { version = "1.21", features = ["rt", "rt-multi-thread", "io-util", "io-std", "time", "process", "macros", "fs", "parking_lot", "sync"] }
tokio-stream = "0.1.11"
which = "4.2"
once_cell = "1.15"
helix-lsp/src/client.rs
@@ -303,7 +303,7 @@ pub(crate) async fn initialize(&self) -> Result<lsp::InitializeResult> {
                text_document: Some(lsp::TextDocumentClientCapabilities {
                    completion: Some(lsp::CompletionClientCapabilities {
                        completion_item: Some(lsp::CompletionItemCapability {
-                           snippet_support: Some(false),
+                           snippet_support: Some(true),
                            resolve_support: Some(lsp::CompletionItemCapabilityResolveSupport {
                                properties: vec![
                                    String::from("documentation"),
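As an editorial aside (not part of the diff): with snippet_support advertised as true, servers may return completion items whose insert text uses LSP snippet syntax rather than plain text. A minimal sketch of such an item, assuming the `lsp_types` re-export used in this crate; the label and snippet body are made up for illustration:

```rust
use helix_lsp::lsp;

// Hypothetical completion item of the kind a server may send once
// snippet_support is true: the insert text carries tabstop/placeholder
// syntax and insert_text_format marks it as a snippet.
fn example_snippet_item() -> lsp::CompletionItem {
    lsp::CompletionItem {
        label: "match".to_string(),
        insert_text: Some("match ${1:expr} {\n\t$0\n}".to_string()),
        insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
        ..Default::default()
    }
}
```

Items of this shape are what the new `snippet::parse` / `snippet::into_transaction` pair below is meant to handle.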
helix-lsp/src/lib.rs
@@ -1,5 +1,6 @@
mod client;
pub mod jsonrpc;
pub mod snippet;
mod transport;

pub use client::Client;
helix-lsp/src/snippet.rs (new file, 440 lines)
@@ -0,0 +1,440 @@
use anyhow::{anyhow, Result};

#[derive(Debug, PartialEq, Eq)]
pub enum CaseChange {
    Upcase,
    Downcase,
    Capitalize,
}

#[derive(Debug, PartialEq, Eq)]
pub enum FormatItem<'a> {
    Text(&'a str),
    Capture(usize),
    CaseChange(usize, CaseChange),
    Conditional(usize, Option<&'a str>, Option<&'a str>),
}

#[derive(Debug, PartialEq, Eq)]
pub struct Regex<'a> {
    value: &'a str,
    replacement: Vec<FormatItem<'a>>,
    options: Option<&'a str>,
}

#[derive(Debug, PartialEq, Eq)]
pub enum SnippetElement<'a> {
    Tabstop {
        tabstop: usize,
    },
    Placeholder {
        tabstop: usize,
        value: Box<SnippetElement<'a>>,
    },
    Choice {
        tabstop: usize,
        choices: Vec<&'a str>,
    },
    Variable {
        name: &'a str,
        default: Option<&'a str>,
        regex: Option<Regex<'a>>,
    },
    Text(&'a str),
}

#[derive(Debug, PartialEq, Eq)]
pub struct Snippet<'a> {
    elements: Vec<SnippetElement<'a>>,
}

pub fn parse<'a>(s: &'a str) -> Result<Snippet<'a>> {
    parser::parse(s).map_err(|rest| anyhow!("Failed to parse snippet. Remaining input: {}", rest))
}

pub fn into_transaction<'a>(
    snippet: Snippet<'a>,
    text: &helix_core::Rope,
    trigger_offset: usize,
) -> helix_core::Transaction {
    use helix_core::{smallvec, Range, Selection, Transaction};
    use SnippetElement::*;

    let mut insert = String::new();
    let mut offset = trigger_offset;
    let mut tabstops: Vec<Range> = Vec::new();

    for element in snippet.elements {
        match element {
            Text(text) => {
                offset += text.chars().count();
                insert.push_str(text)
            }
            Variable {
                name: _name,
                regex: None,
                r#default,
            } => {
                // TODO: variables. For now, fall back to the default, which defaults to "".
                let text = r#default.unwrap_or_default();
                offset += text.chars().count();
                insert.push_str(text);
            }
            Tabstop { .. } => {
                // TODO: tabstop indexing: 0 is final cursor position. 1,2,.. are positions.
                // TODO: merge tabstops with the same index
                tabstops.push(Range::point(offset));
            }
            Placeholder {
                tabstop: _tabstop,
                value,
            } => match value.as_ref() {
                // https://doc.rust-lang.org/beta/unstable-book/language-features/box-patterns.html
                // would make this a bit nicer
                Text(text) => {
                    let len_chars = text.chars().count();
                    tabstops.push(Range::new(offset, offset + len_chars));
                    offset += len_chars;
                    insert.push_str(text);
                }
                other => {
                    log::error!(
                        "Discarding snippet: generating a transaction for placeholder contents {:?} is unimplemented.",
                        other
                    );
                    return Transaction::new(text);
                }
            },
            other => {
                log::error!(
                    "Discarding snippet: generating a transaction for {:?} is unimplemented.",
                    other
                );
                return Transaction::new(text);
            }
        }
    }

    let transaction = Transaction::change(
        text,
        std::iter::once((trigger_offset, trigger_offset, Some(insert.into()))),
    );

    if let Some(first) = tabstops.first() {
        transaction.with_selection(Selection::new(smallvec![*first], 0))
    } else {
        transaction
    }
}

mod parser {
    use helix_core::regex;
    use once_cell::sync::Lazy;

    use helix_parsec::*;

    use super::{CaseChange, FormatItem, Regex, Snippet, SnippetElement};

    /*
    https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax

    any         ::= tabstop | placeholder | choice | variable | text
    tabstop     ::= '$' int | '${' int '}'
    placeholder ::= '${' int ':' any '}'
    choice      ::= '${' int '|' text (',' text)* '|}'
    variable    ::= '$' var | '${' var }'
                  | '${' var ':' any '}'
                  | '${' var '/' regex '/' (format | text)+ '/' options '}'
    format      ::= '$' int | '${' int '}'
                  | '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}'
                  | '${' int ':+' if '}'
                  | '${' int ':?' if ':' else '}'
                  | '${' int ':-' else '}' | '${' int ':' else '}'
    regex       ::= Regular Expression value (ctor-string)
    options     ::= Regular Expression option (ctor-options)
    var         ::= [_a-zA-Z] [_a-zA-Z0-9]*
    int         ::= [0-9]+
    text        ::= .*
    if          ::= text
    else        ::= text
    */

    static DIGIT: Lazy<regex::Regex> = Lazy::new(|| regex::Regex::new(r"^[0-9]+").unwrap());
    static VARIABLE: Lazy<regex::Regex> =
        Lazy::new(|| regex::Regex::new(r"^[_a-zA-Z][_a-zA-Z0-9]*").unwrap());
    static TEXT: Lazy<regex::Regex> = Lazy::new(|| regex::Regex::new(r"^[^\$]+").unwrap());

    fn var<'a>() -> impl Parser<'a, Output = &'a str> {
        pattern(&VARIABLE)
    }

    fn digit<'a>() -> impl Parser<'a, Output = usize> {
        filter_map(pattern(&DIGIT), |s| s.parse().ok())
    }

    fn case_change<'a>() -> impl Parser<'a, Output = CaseChange> {
        use CaseChange::*;

        choice!(
            map("upcase", |_| Upcase),
            map("downcase", |_| Downcase),
            map("capitalize", |_| Capitalize),
        )
    }

    fn format<'a>() -> impl Parser<'a, Output = FormatItem<'a>> {
        use FormatItem::*;

        choice!(
            // '$' int
            map(right("$", digit()), Capture),
            // '${' int '}'
            map(seq!("${", digit(), "}"), |seq| Capture(seq.1)),
            // '${' int ':' '/upcase' | '/downcase' | '/capitalize' '}'
            map(seq!("${", digit(), ":/", case_change(), "}"), |seq| {
                CaseChange(seq.1, seq.3)
            }),
            // '${' int ':+' if '}'
            map(
                seq!("${", digit(), ":+", take_until(|c| c == '}'), "}"),
                |seq| { Conditional(seq.1, Some(seq.3), None) }
            ),
            // '${' int ':?' if ':' else '}'
            map(
                seq!(
                    "${",
                    digit(),
                    ":?",
                    take_until(|c| c == ':'),
                    ":",
                    take_until(|c| c == '}'),
                    "}"
                ),
                |seq| { Conditional(seq.1, Some(seq.3), Some(seq.5)) }
            ),
            // '${' int ':-' else '}' | '${' int ':' else '}'
            map(
                seq!(
                    "${",
                    digit(),
                    ":",
                    optional("-"),
                    take_until(|c| c == '}'),
                    "}"
                ),
                |seq| { Conditional(seq.1, None, Some(seq.4)) }
            ),
            // Any text
            map(pattern(&TEXT), Text),
        )
    }

    fn regex<'a>() -> impl Parser<'a, Output = Regex<'a>> {
        let replacement = reparse_as(take_until(|c| c == '/'), one_or_more(format()));

        map(
            seq!(
                "/",
                take_until(|c| c == '/'),
                "/",
                replacement,
                "/",
                optional(take_until(|c| c == '}')),
            ),
            |(_, value, _, replacement, _, options)| Regex {
                value,
                replacement,
                options,
            },
        )
    }

    fn tabstop<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
        map(
            or(
                right("$", digit()),
                map(seq!("${", digit(), "}"), |values| values.1),
            ),
            |digit| SnippetElement::Tabstop { tabstop: digit },
        )
    }

    fn placeholder<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
        // TODO: why doesn't parse_as work?
        // let value = reparse_as(take_until(|c| c == '}'), anything());
        let value = filter_map(take_until(|c| c == '}'), |s| {
            anything().parse(s).map(|parse_result| parse_result.1).ok()
        });

        map(seq!("${", digit(), ":", value, "}"), |seq| {
            SnippetElement::Placeholder {
                tabstop: seq.1,
                value: Box::new(seq.3),
            }
        })
    }

    fn choice<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
        map(
            seq!(
                "${",
                digit(),
                "|",
                sep(take_until(|c| c == ',' || c == '|'), ","),
                "|}",
            ),
            |seq| SnippetElement::Choice {
                tabstop: seq.1,
                choices: seq.3,
            },
        )
    }

    fn variable<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
        choice!(
            // $var
            map(right("$", var()), |name| SnippetElement::Variable {
                name,
                default: None,
                regex: None,
            }),
            // ${var:default}
            map(
                seq!("${", var(), ":", take_until(|c| c == '}'), "}",),
                |values| SnippetElement::Variable {
                    name: values.1,
                    default: Some(values.3),
                    regex: None,
                }
            ),
            // ${var/value/format/options}
            map(seq!("${", var(), regex(), "}"), |values| {
                SnippetElement::Variable {
                    name: values.1,
                    default: None,
                    regex: Some(values.2),
                }
            }),
        )
    }

    fn text<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
        map(pattern(&TEXT), SnippetElement::Text)
    }

    fn anything<'a>() -> impl Parser<'a, Output = SnippetElement<'a>> {
        choice!(tabstop(), placeholder(), choice(), variable(), text())
    }

    fn snippet<'a>() -> impl Parser<'a, Output = Snippet<'a>> {
        map(one_or_more(anything()), |parts| Snippet { elements: parts })
    }

    pub fn parse(s: &str) -> Result<Snippet, &str> {
        snippet().parse(s).map(|(_input, elements)| elements)
    }

    #[cfg(test)]
    mod test {
        use super::SnippetElement::*;
        use super::*;

        #[test]
        fn empty_string_is_error() {
            assert_eq!(Err(""), parse(""));
        }

        #[test]
        fn parse_placeholders_in_function_call() {
            assert_eq!(
                Ok(Snippet {
                    elements: vec![
                        Text("match("),
                        Placeholder {
                            tabstop: 1,
                            value: Box::new(Text("Arg1")),
                        },
                        Text(")")
                    ]
                }),
                parse("match(${1:Arg1})")
            )
        }

        #[test]
        fn parse_placeholders_in_statement() {
            assert_eq!(
                Ok(Snippet {
                    elements: vec![
                        Text("local "),
                        Placeholder {
                            tabstop: 1,
                            value: Box::new(Text("var")),
                        },
                        Text(" = "),
                        Placeholder {
                            tabstop: 1,
                            value: Box::new(Text("value")),
                        },
                    ]
                }),
                parse("local ${1:var} = ${1:value}")
            )
        }

        #[test]
        fn parse_all() {
            assert_eq!(
                Ok(Snippet {
                    elements: vec![
                        Text("hello "),
                        Tabstop { tabstop: 1 },
                        Tabstop { tabstop: 2 },
                        Text(" "),
                        Choice {
                            tabstop: 1,
                            choices: vec!["one", "two", "three"]
                        },
                        Text(" "),
                        Variable {
                            name: "name",
                            default: Some("foo"),
                            regex: None
                        },
                        Text(" "),
                        Variable {
                            name: "var",
                            default: None,
                            regex: None
                        },
                        Text(" "),
                        Variable {
                            name: "TM",
                            default: None,
                            regex: None
                        },
                    ]
                }),
                parse("hello $1${2} ${1|one,two,three|} ${name:foo} $var $TM")
            );
        }

        #[test]
        fn regex_capture_replace() {
            assert_eq!(
                Ok(Snippet {
                    elements: vec![Variable {
                        name: "TM_FILENAME",
                        default: None,
                        regex: Some(Regex {
                            value: "(.*).+$",
                            replacement: vec![FormatItem::Capture(1)],
                            options: None,
                        }),
                    }]
                }),
                parse("${TM_FILENAME/(.*).+$/$1/}")
            );
        }
    }
}
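As an editorial aside (not part of the diff), a minimal sketch of how the two public entry points above fit together, assuming helix-core, helix-lsp, and anyhow are available as dependencies:

```rust
use helix_core::Rope;
use helix_lsp::snippet;

fn main() -> anyhow::Result<()> {
    let doc = Rope::from("let x = ");
    // Parse the LSP snippet text into a Snippet value.
    let parsed = snippet::parse("Some(${1:value})")?;
    // Build a Transaction that inserts "Some(value)" at the trigger offset
    // (here: the end of the line) against the document text.
    let _transaction = snippet::into_transaction(parsed, &doc, doc.len_chars());
    Ok(())
}
```

Per the code above, `into_transaction` inserts the rendered snippet text at `trigger_offset` and, when the snippet contains a tabstop or placeholder, selects the first such range so the cursor lands on it after the edit.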
helix-parsec/Cargo.toml (new file, 14 lines)
@@ -0,0 +1,14 @@
[package]
name = "helix-parsec"
version = "0.6.0"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
edition = "2021"
license = "MPL-2.0"
description = "Parser combinators for Helix"
categories = ["editor"]
repository = "https://github.com/helix-editor/helix"
homepage = "https://helix-editor.com"
include = ["src/**/*", "README.md"]

[dependencies]
regex = "1"
helix-parsec/src/lib.rs (new file, 551 lines)
@@ -0,0 +1,551 @@
//! Parser-combinator functions
//!
//! This module provides parsers and parser combinators which can be used
//! together to build parsers by functional composition.

use regex::Regex;

// This module implements parser combinators following https://bodil.lol/parser-combinators/.
// `sym` (trait implementation for `&'static str`), `map`, `pred` (filter), `one_or_more`,
// `zero_or_more`, as well as the `Parser` trait originate mostly from that post.
// The remaining parsers and parser combinators are either based on
// https://github.com/archseer/snippets.nvim/blob/a583da6ef130d2a4888510afd8c4e5ffd62d0dce/lua/snippet/parser.lua#L5-L138
// or are novel.

// When a parser matches the input successfully, it returns `Ok((next_input, some_value))`
// where the type of the returned value depends on the parser. If the parser fails to match,
// it returns `Err(input)`.
type ParseResult<'a, Output> = Result<(&'a str, Output), &'a str>;

/// A parser or parser-combinator.
///
/// Parser-combinators compose multiple parsers together to parse input.
/// For example, two basic parsers (`&'static str`s) may be combined with
/// a parser-combinator like [or] to produce a new parser.
///
/// ```
/// use helix_parsec::{or, Parser};
/// let foo = "foo"; // matches "foo" literally
/// let bar = "bar"; // matches "bar" literally
/// let foo_or_bar = or(foo, bar); // matches either "foo" or "bar"
/// assert_eq!(Ok(("", "foo")), foo_or_bar.parse("foo"));
/// assert_eq!(Ok(("", "bar")), foo_or_bar.parse("bar"));
/// assert_eq!(Err("baz"), foo_or_bar.parse("baz"));
/// ```
pub trait Parser<'a> {
    type Output;

    fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output>;
}

// Most parser-combinators are written as higher-order functions which take some
// parser(s) as input and return a new parser: a function that takes input and returns
// a parse result. The underlying implementation of [Parser::parse] for these functions
// is simply application.
#[doc(hidden)]
impl<'a, F, T> Parser<'a> for F
where
    F: Fn(&'a str) -> ParseResult<T>,
{
    type Output = T;

    fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
        self(input)
    }
}

/// A parser which matches the string literal exactly.
///
/// This parser succeeds if the next characters in the input are equal to the given
/// string literal.
///
/// Note that [str::parse] interferes with calling [Parser::parse] on string literals
/// directly; this trait implementation works when used within any parser combinator
/// but does not work on its own. To call [Parser::parse] on a parser for a string
/// literal, use the [token] parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
impl<'a> Parser<'a> for &'static str {
    type Output = &'a str;

    fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
        match input.get(0..self.len()) {
            Some(actual) if actual == *self => Ok((&input[self.len()..], &input[0..self.len()])),
            _ => Err(input),
        }
    }
}

// Parsers

/// A parser which matches the given string literally.
///
/// This function is a convenience for interpreting string literals as parsers
/// and is only necessary to avoid conflict with [str::parse]. See the documentation
/// for the `&'static str` implementation of [Parser].
///
/// # Examples
///
/// ```
/// use helix_parsec::{token, Parser};
/// let parser = token("foo");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Err("bar"), parser.parse("bar"));
/// ```
pub fn token<'a>(literal: &'static str) -> impl Parser<'a, Output = &'a str> {
    literal
}

/// A parser which matches the pattern described by the given regular expression.
///
/// The pattern must match from the beginning of the input as if the regular expression
/// included the `^` anchor. Using a `^` anchor in the regular expression is
/// recommended in order to reduce any work done by the regex on non-matching input.
///
/// # Examples
///
/// ```
/// use helix_parsec::{pattern, Parser};
/// use regex::Regex;
/// let regex = Regex::new(r"Hello, \w+!").unwrap();
/// let parser = pattern(&regex);
/// assert_eq!(Ok(("", "Hello, world!")), parser.parse("Hello, world!"));
/// assert_eq!(Err("Hey, you!"), parser.parse("Hey, you!"));
/// assert_eq!(Err("Oh Hello, world!"), parser.parse("Oh Hello, world!"));
/// ```
pub fn pattern<'a>(regex: &'a Regex) -> impl Parser<'a, Output = &'a str> {
    move |input: &'a str| match regex.find(input) {
        Some(match_) if match_.start() == 0 => {
            Ok((&input[match_.end()..], &input[0..match_.end()]))
        }
        _ => Err(input),
    }
}

/// A parser which matches all values until the specified pattern is found.
///
/// If the pattern is not found, this parser does not match. The input up to the
/// character which returns `true` is returned but not that character itself.
///
/// If the pattern function returns true on the first input character, this
/// parser fails.
///
/// # Examples
///
/// ```
/// use helix_parsec::{take_until, Parser};
/// let parser = take_until(|c| c == '.');
/// assert_eq!(Ok((".bar", "foo")), parser.parse("foo.bar"));
/// assert_eq!(Err(".foo"), parser.parse(".foo"));
/// assert_eq!(Err("foo"), parser.parse("foo"));
/// ```
pub fn take_until<'a, F>(pattern: F) -> impl Parser<'a, Output = &'a str>
where
    F: Fn(char) -> bool,
{
    move |input: &'a str| match input.find(&pattern) {
        Some(index) if index != 0 => Ok((&input[index..], &input[0..index])),
        _ => Err(input),
    }
}

// Variadic parser combinators

/// A parser combinator which matches a sequence of parsers in an all-or-nothing fashion.
///
/// The returned value is a tuple containing the outputs of all parsers in order. Each
/// parser in the sequence may be typed differently.
///
/// # Examples
///
/// ```
/// use helix_parsec::{seq, Parser};
/// let parser = seq!("<", "a", ">");
/// assert_eq!(Ok(("", ("<", "a", ">"))), parser.parse("<a>"));
/// assert_eq!(Err("<b>"), parser.parse("<b>"));
/// ```
#[macro_export]
macro_rules! seq {
    ($($parsers: expr),+ $(,)?) => {
        ($($parsers),+)
    }
}

// Seq is implemented using trait-implementations of Parser for various size tuples.
// This allows sequences to be typed heterogeneously.
macro_rules! seq_impl {
    ($($parser:ident),+) => {
        #[allow(non_snake_case)]
        impl<'a, $($parser),+> Parser<'a> for ($($parser),+)
        where
            $($parser: Parser<'a>),+
        {
            type Output = ($($parser::Output),+);

            fn parse(&self, input: &'a str) -> ParseResult<'a, Self::Output> {
                let ($($parser),+) = self;
                seq_body_impl!(input, input, $($parser),+ ; )
            }
        }
    }
}

macro_rules! seq_body_impl {
    ($input:expr, $next_input:expr, $head:ident, $($tail:ident),+ ; $(,)? $($acc:ident),*) => {
        match $head.parse($next_input) {
            Ok((next_input, $head)) => seq_body_impl!($input, next_input, $($tail),+ ; $($acc),*, $head),
            Err(_) => Err($input),
        }
    };
    ($input:expr, $next_input:expr, $last:ident ; $(,)? $($acc:ident),*) => {
        match $last.parse($next_input) {
            Ok((next_input, last)) => Ok((next_input, ($($acc),+, last))),
            Err(_) => Err($input),
        }
    }
}

seq_impl!(A, B);
seq_impl!(A, B, C);
seq_impl!(A, B, C, D);
seq_impl!(A, B, C, D, E);
seq_impl!(A, B, C, D, E, F);
seq_impl!(A, B, C, D, E, F, G);
seq_impl!(A, B, C, D, E, F, G, H);
seq_impl!(A, B, C, D, E, F, G, H, I);
seq_impl!(A, B, C, D, E, F, G, H, I, J);

/// A parser combinator which chooses the first of the input parsers which matches
/// successfully.
///
/// All input parsers must have the same output type. This is a variadic form for [or].
///
/// # Examples
///
/// ```
/// use helix_parsec::{choice, or, Parser};
/// let parser = choice!("foo", "bar", "baz");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("quiz"), parser.parse("quiz"));
/// ```
#[macro_export]
macro_rules! choice {
    ($parser: expr $(,)?) => {
        $parser
    };
    ($parser: expr, $($rest: expr),+ $(,)?) => {
        or($parser, choice!($($rest),+))
    }
}

// Ordinary parser combinators

/// A parser combinator which takes a parser as input and maps the output using the
/// given transformation function.
///
/// This corresponds to [Result::map]. The value is only mapped if the input parser
/// matches against input.
///
/// # Examples
///
/// ```
/// use helix_parsec::{map, Parser};
/// let parser = map("123", |s| s.parse::<i32>().unwrap());
/// assert_eq!(Ok(("", 123)), parser.parse("123"));
/// assert_eq!(Err("abc"), parser.parse("abc"));
/// ```
pub fn map<'a, P, F, T>(parser: P, map_fn: F) -> impl Parser<'a, Output = T>
where
    P: Parser<'a>,
    F: Fn(P::Output) -> T,
{
    move |input| {
        parser
            .parse(input)
            .map(|(next_input, result)| (next_input, map_fn(result)))
    }
}

/// A parser combinator which succeeds if the given parser matches the input and
/// the given `filter_map_fun` returns `Some`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter_map, take_until, Parser};
/// let parser = filter_map(take_until(|c| c == '.'), |s| s.parse::<i32>().ok());
/// assert_eq!(Ok((".456", 123)), parser.parse("123.456"));
/// assert_eq!(Err("abc.def"), parser.parse("abc.def"));
/// ```
pub fn filter_map<'a, P, F, T>(parser: P, filter_map_fn: F) -> impl Parser<'a, Output = T>
where
    P: Parser<'a>,
    F: Fn(P::Output) -> Option<T>,
{
    move |input| match parser.parse(input) {
        Ok((next_input, value)) => match filter_map_fn(value) {
            Some(value) => Ok((next_input, value)),
            None => Err(input),
        },
        Err(_) => Err(input),
    }
}

/// TODO: name
pub fn reparse_as<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
    P1: Parser<'a, Output = &'a str>,
    P2: Parser<'a, Output = T>,
{
    filter_map(parser1, move |str| {
        parser2.parse(str).map(|(_, value)| value).ok()
    })
}

/// A parser combinator which only matches the input when the predicate function
/// returns true.
///
/// # Examples
///
/// ```
/// use helix_parsec::{filter, take_until, Parser};
/// let parser = filter(take_until(|c| c == '.'), |s| s == &"123");
/// assert_eq!(Ok((".456", "123")), parser.parse("123.456"));
/// assert_eq!(Err("456.123"), parser.parse("456.123"));
/// ```
pub fn filter<'a, P, F, T>(parser: P, pred_fn: F) -> impl Parser<'a, Output = T>
where
    P: Parser<'a, Output = T>,
    F: Fn(&P::Output) -> bool,
{
    move |input| {
        if let Ok((next_input, value)) = parser.parse(input) {
            if pred_fn(&value) {
                return Ok((next_input, value));
            }
        }
        Err(input)
    }
}

/// A parser combinator which matches either of the input parsers.
///
/// Both parsers must have the same output type. For a variadic form which
/// can take any number of parsers, use `choice!`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{or, Parser};
/// let parser = or("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foo"));
/// assert_eq!(Ok(("", "bar")), parser.parse("bar"));
/// assert_eq!(Err("baz"), parser.parse("baz"));
/// ```
pub fn or<'a, P1, P2, T>(parser1: P1, parser2: P2) -> impl Parser<'a, Output = T>
where
    P1: Parser<'a, Output = T>,
    P2: Parser<'a, Output = T>,
{
    move |input| match parser1.parse(input) {
        ok @ Ok(_) => ok,
        Err(_) => parser2.parse(input),
    }
}

/// A parser combinator which attempts to match the given parser, returning a
/// `None` output value if the parser does not match.
///
/// The parser produced with this combinator always succeeds. If the given parser
/// succeeds, `Some(value)` is returned where `value` is the output of the given
/// parser. Otherwise, `None`.
///
/// # Examples
///
/// ```
/// use helix_parsec::{optional, Parser};
/// let parser = optional("foo");
/// assert_eq!(Ok(("bar", Some("foo"))), parser.parse("foobar"));
/// assert_eq!(Ok(("bar", None)), parser.parse("bar"));
/// ```
pub fn optional<'a, P, T>(parser: P) -> impl Parser<'a, Output = Option<T>>
where
    P: Parser<'a, Output = T>,
{
    move |input| match parser.parse(input) {
        Ok((next_input, value)) => Ok((next_input, Some(value))),
        Err(_) => Ok((input, None)),
    }
}

/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `left` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `left` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{left, Parser};
/// let parser = left("foo", "bar");
/// assert_eq!(Ok(("", "foo")), parser.parse("foobar"));
/// ```
pub fn left<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
    L: Parser<'a, Output = T>,
    R: Parser<'a>,
{
    map(seq!(left, right), |(left_value, _)| left_value)
}

/// A parser combinator which runs the given parsers in sequence and returns the
/// value of `right` if both are matched.
///
/// This is useful for two-element sequences in which you only want the output
/// value of the `right` parser.
///
/// # Examples
///
/// ```
/// use helix_parsec::{right, Parser};
/// let parser = right("foo", "bar");
/// assert_eq!(Ok(("", "bar")), parser.parse("foobar"));
/// ```
pub fn right<'a, L, R, T>(left: L, right: R) -> impl Parser<'a, Output = T>
where
    L: Parser<'a>,
    R: Parser<'a, Output = T>,
{
    map(seq!(left, right), |(_, right_value)| right_value)
}

/// A parser combinator which matches the given parser against the input zero or
/// more times.
///
/// This parser always succeeds and returns the empty Vec when it matched zero
/// times.
///
/// # Examples
///
/// ```
/// use helix_parsec::{zero_or_more, Parser};
/// let parser = zero_or_more("a");
/// assert_eq!(Ok(("", vec![])), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Ok(("bb", vec![])), parser.parse("bb"));
/// ```
pub fn zero_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
    P: Parser<'a, Output = T>,
{
    move |mut input| {
        let mut values = Vec::new();

        while let Ok((next_input, value)) = parser.parse(input) {
            input = next_input;
            values.push(value);
        }

        Ok((input, values))
    }
}

/// A parser combinator which matches the given parser against the input one or
/// more times.
///
/// This parser combinator acts the same as [zero_or_more] but must match at
/// least once.
///
/// # Examples
///
/// ```
/// use helix_parsec::{one_or_more, Parser};
/// let parser = one_or_more("a");
/// assert_eq!(Err(""), parser.parse(""));
/// assert_eq!(Ok(("", vec!["a"])), parser.parse("a"));
/// assert_eq!(Ok(("", vec!["a", "a"])), parser.parse("aa"));
/// assert_eq!(Err("bb"), parser.parse("bb"));
/// ```
pub fn one_or_more<'a, P, T>(parser: P) -> impl Parser<'a, Output = Vec<T>>
where
    P: Parser<'a, Output = T>,
{
    move |mut input| {
        let mut values = Vec::new();

        match parser.parse(input) {
            Ok((next_input, value)) => {
                input = next_input;
                values.push(value);
            }
            Err(err) => return Err(err),
        }

        while let Ok((next_input, value)) = parser.parse(input) {
            input = next_input;
            values.push(value);
        }

        Ok((input, values))
    }
}

/// A parser combinator which matches one or more instances of the given parser
/// interspersed with the separator parser.
///
/// Output values of the separator parser are discarded.
///
/// This is typically used to parse function arguments or list items.
///
/// # Examples
///
/// ```rust
/// use helix_parsec::{sep, Parser};
/// let parser = sep("a", ",");
/// assert_eq!(Ok(("", vec!["a", "a", "a"])), parser.parse("a,a,a"));
/// ```
pub fn sep<'a, P, S, T>(parser: P, separator: S) -> impl Parser<'a, Output = Vec<T>>
where
    P: Parser<'a, Output = T>,
    S: Parser<'a>,
{
    move |mut input| {
        let mut values = Vec::new();

        match parser.parse(input) {
            Ok((next_input, value)) => {
                input = next_input;
                values.push(value);
            }
            Err(err) => return Err(err),
        }

        loop {
            match separator.parse(input) {
                Ok((next_input, _)) => input = next_input,
                Err(_) => break,
            }

            match parser.parse(input) {
                Ok((next_input, value)) => {
                    input = next_input;
                    values.push(value);
                }
                Err(_) => break,
            }
        }

        Ok((input, values))
    }
}
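As an editorial aside (not part of the diff), here is a minimal sketch of how these combinators compose outside the snippet parser, assuming helix-parsec as a dependency; the key/value format is made up for the example:

```rust
use helix_parsec::{map, seq, take_until, Parser};

// A tiny parser built from the combinators above: it matches "key=value"
// and stops at the first ';', returning the key and value as string slices.
fn key_value<'a>() -> impl Parser<'a, Output = (&'a str, &'a str)> {
    map(
        seq!(take_until(|c| c == '='), "=", take_until(|c| c == ';')),
        |(key, _, value)| (key, value),
    )
}

fn main() {
    // The unparsed remainder (";") is returned alongside the output tuple.
    assert_eq!(Ok((";", ("foo", "bar"))), key_value().parse("foo=bar;"));
}
```

Because every combinator returns an `impl Parser` backed by closures and string literals, small parsers like this compose with static dispatch and no allocation.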
helix-term/src/ui/completion.rs
@@ -109,32 +109,55 @@ fn item_to_transaction(
    start_offset: usize,
    trigger_offset: usize,
) -> Transaction {
    let transaction = if let Some(edit) = &item.text_edit {
        let edit = match edit {
            lsp::CompletionTextEdit::Edit(edit) => edit.clone(),
            lsp::CompletionTextEdit::InsertAndReplace(item) => {
                unimplemented!("completion: insert_and_replace {:?}", item)
    use helix_lsp::snippet;

    match item {
        CompletionItem {
            text_edit: Some(edit),
            ..
        } => {
            let edit = match edit {
                lsp::CompletionTextEdit::Edit(edit) => edit.clone(),
                lsp::CompletionTextEdit::InsertAndReplace(item) => {
                    unimplemented!("completion: insert_and_replace {:?}", item)
                }
            };

            util::generate_transaction_from_edits(
                doc.text(),
                vec![edit],
                offset_encoding, // TODO: should probably transcode in Client
            )
        }
        CompletionItem {
            insert_text: Some(insert_text),
            insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
            ..
        } => match snippet::parse(insert_text) {
            Ok(snippet) => {
                snippet::into_transaction(snippet, doc.text(), trigger_offset)
            }
        };

        util::generate_transaction_from_edits(
            doc.text(),
            vec![edit],
            offset_encoding, // TODO: should probably transcode in Client
        )
    } else {
        let text = item.insert_text.as_ref().unwrap_or(&item.label);
        // Some LSPs just give you an insertText with no offset ¯\_(ツ)_/¯
        // in these cases we need to check for a common prefix and remove it
        let prefix = Cow::from(doc.text().slice(start_offset..trigger_offset));
        let text = text.trim_start_matches::<&str>(&prefix);
        Transaction::change(
            doc.text(),
            vec![(trigger_offset, trigger_offset, Some(text.into()))].into_iter(),
        )
    };

    transaction
            Err(err) => {
                log::error!(
                    "Failed to parse snippet: {:?}, remaining output: {}",
                    insert_text,
                    err
                );
                Transaction::new(doc.text())
            }
        },
        _ => {
            let text = item.insert_text.as_ref().unwrap_or(&item.label);
            // Some LSPs just give you an insertText with no offset ¯\_(ツ)_/¯
            // in these cases we need to check for a common prefix and remove it
            let prefix = Cow::from(doc.text().slice(start_offset..trigger_offset));
            let text = text.trim_start_matches::<&str>(&prefix);
            Transaction::change(
                doc.text(),
                vec![(trigger_offset, trigger_offset, Some(text.into()))].into_iter(),
            )
        }
    }
}

fn completion_changes(transaction: &Transaction, trigger_offset: usize) -> Vec<Change> {