syntax: Split parsing and highlighting
parent 83bde1004d
commit 6728e44490

Cargo.lock (generated)
@@ -381,6 +381,7 @@ dependencies = [
"serde",
"serde_json",
"similar",
"slotmap",
"smallvec",
"tendril",
"toml",
@@ -22,6 +22,7 @@ unicode-segmentation = "1.8"
unicode-width = "0.1"
unicode-general-category = "0.4"
# slab = "0.4.2"
slotmap = "1.0"
tree-sitter = "0.20"
once_cell = "1.9"
arc-swap = "1"
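The new `arc-swap` dependency backs the `highlight_indices: ArcSwap<Vec<Option<Highlight>>>` field further down, which lets `HighlightConfiguration::configure` swap in a new highlight table through a shared reference. A minimal sketch of that pattern, assuming a simplified stand-in type (the `Config` struct and `u32` indices below are illustrative, not the crate's real fields):

```rust
use arc_swap::ArcSwap;
use std::sync::Arc;

// Hypothetical stand-in for the highlight index table.
struct Config {
    indices: ArcSwap<Vec<Option<u32>>>,
}

impl Config {
    fn new(len: usize) -> Self {
        Self { indices: ArcSwap::from_pointee(vec![None; len]) }
    }

    // Reconfigure through &self: existing readers keep their old Arc,
    // new readers observe the swapped-in vector.
    fn configure(&self, new: Vec<Option<u32>>) {
        self.indices.store(Arc::new(new));
    }

    fn lookup(&self, i: usize) -> Option<u32> {
        self.indices.load().get(i).copied().flatten()
    }
}

fn main() {
    let cfg = Config::new(3);
    assert_eq!(cfg.lookup(0), None);
    cfg.configure(vec![Some(7), None, Some(2)]);
    assert_eq!(cfg.lookup(0), Some(7));
}
```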
@@ -454,7 +454,7 @@ pub fn change<I>(document: &Document, changes: I) -> Self
let language_config = loader.language_config_for_scope("source.rust").unwrap();
let highlight_config = language_config.highlight_config(&[]).unwrap();
let syntax = Syntax::new(&doc, highlight_config.clone());
let syntax = Syntax::new(&doc, highlight_config.clone(), std::sync::Arc::new(loader));
let text = doc.slice(..);
let tab_width = 4;
@@ -9,6 +9,7 @@
pub use helix_syntax::get_language;

use arc_swap::ArcSwap;
use slotmap::{DefaultKey as LayerId, HopSlotMap};

use std::{
borrow::Cow,
@@ -388,9 +389,9 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {

#[derive(Debug)]
pub struct Syntax {
config: Arc<HighlightConfiguration>,
root_layer: LanguageLayer,
layers: HopSlotMap<LayerId, LanguageLayer>,
root: LayerId,
loader: Arc<Loader>,
}

fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<str> {
@@ -400,38 +401,36 @@ fn byte_range_to_str(range: std::ops::Range<usize>, source: RopeSlice) -> Cow<st
}

impl Syntax {
// buffer, grammar, config, grammars, sync_timeout?
pub fn new(
/*language: Lang,*/ source: &Rope,
config: Arc<HighlightConfiguration>,
) -> Self {
let root_layer = LanguageLayer { tree: None };

// track markers of injections
// track scope_descriptor: a Vec of scopes for item in tree

let mut syntax = Self {
// grammar,
pub fn new(source: &Rope, config: Arc<HighlightConfiguration>, loader: Arc<Loader>) -> Self {
let root_layer = LanguageLayer {
tree: None,
config,
root_layer,
};

// update root layer
PARSER.with(|ts_parser| {
// TODO: handle the returned `Result` properly.
let _ = syntax.root_layer.parse(
&mut ts_parser.borrow_mut(),
&syntax.config,
source,
0,
vec![Range {
depth: 0,
ranges: vec![Range {
start_byte: 0,
end_byte: usize::MAX,
start_point: Point::new(0, 0),
end_point: Point::new(usize::MAX, usize::MAX),
}],
);
});
};

// track markers of injections
// track scope_descriptor: a Vec of scopes for item in tree

let mut layers = HopSlotMap::default();
let root = layers.insert(root_layer);

let mut syntax = Self {
// grammar,
root,
layers,
loader,
};

syntax
.update(source, source, &ChangeSet::new(&source))
.unwrap();

syntax
}
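`Syntax` now keeps its layers in a `HopSlotMap` keyed by `LayerId`, with the root layer's key stored separately. A rough sketch of the slotmap pattern used here (the `Layer` type below is a simplified stand-in, not the real `LanguageLayer`):

```rust
use slotmap::{DefaultKey as LayerId, HopSlotMap};

// Simplified stand-in for LanguageLayer.
#[derive(Debug)]
struct Layer {
    depth: usize,
}

fn main() {
    let mut layers: HopSlotMap<LayerId, Layer> = HopSlotMap::default();

    // insert() returns a stable key that survives removals of other entries.
    let root: LayerId = layers.insert(Layer { depth: 0 });
    let child = layers.insert(Layer { depth: 1 });

    // Index by key, like the `self.layers[self.root]` accesses in the diff.
    assert_eq!(layers[root].depth, 0);

    // Drop every layer except the root, mirroring the "HAXX" in update().
    layers.retain(|id, _| id == root);
    assert!(layers.get(child).is_none());
    assert_eq!(layers.len(), 1);
}
```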
@@ -441,30 +440,197 @@ pub fn update(
source: &Rope,
changeset: &ChangeSet,
) -> Result<(), Error> {
PARSER.with(|ts_parser| {
self.root_layer.update(
&mut ts_parser.borrow_mut(),
&self.config,
old_source,
source,
changeset,
use std::collections::VecDeque;
let mut queue = VecDeque::new();
// let source = source.slice(..);
let injection_callback = |language: &str| {
self.loader
.language_configuration_for_injection_string(language)
.and_then(|language_config| {
// TODO: get these theme.scopes from somewhere, probably make them settable on Loader
let scopes = &[
"attribute",
"constant",
"function.builtin",
"function",
"keyword",
"operator",
"property",
"punctuation",
"punctuation.bracket",
"punctuation.delimiter",
"string",
"string.special",
"tag",
"type",
"type.builtin",
"variable",
"variable.builtin",
"variable.parameter",
];
language_config.highlight_config(
&scopes
.iter()
.map(|scope| scope.to_string())
.collect::<Vec<_>>(),
)
})
};

// TODO: deal with injections and update them too
queue.push_back(self.root);

// HAXX: for now, clear all layers except root so they get re-parsed
self.layers.retain(|id, _| id == self.root);

// Workaround for Syntax::new() with empty changeset
if !changeset.is_empty() {
// TODO: do this in a recursive way
// Notify the tree about all the changes
let edits = generate_edits(old_source.slice(..), changeset);
let tree = self.layers[self.root].tree.as_mut().unwrap();
for edit in edits.iter().rev() {
// apply the edits in reverse. If we applied them in order then edit 1 would disrupt
// the positioning of edit 2
tree.edit(edit);
}
}

PARSER.with(|ts_parser| {
let ts_parser = &mut ts_parser.borrow_mut();
let mut cursor = ts_parser.cursors.pop().unwrap_or_else(QueryCursor::new);
// TODO: might need to set cursor range

while let Some(layer_id) = queue.pop_front() {
// Re-parse the tree.
self.layers[layer_id].parse(ts_parser, source)?;

let source = source.slice(..);
let layer = &self.layers[layer_id];

// Process injections.
let matches = cursor.matches(
&layer.config.injections_query,
layer.tree().root_node(),
RopeProvider(source),
);
let mut injections = Vec::new();
for mat in matches {
let (language_name, content_node, include_children) = injection_for_match(
&layer.config,
&layer.config.injections_query,
&mat,
source,
);

// Explicitly remove this match so that none of its other captures will remain
// in the stream of captures.
mat.remove(); // TODO: is this still necessary?

// If a language is found with the given name, then add a new language layer
// to the highlighted document.
if let (Some(language_name), Some(content_node)) = (language_name, content_node)
{
if let Some(config) = (injection_callback)(&language_name) {
let ranges =
intersect_ranges(&layer.ranges, &[content_node], include_children);

if !ranges.is_empty() {
log::info!("{} {:?}", language_name, ranges);
injections.push((config, ranges));
}
}
}
}

// Process combined injections.
if let Some(combined_injections_query) = &layer.config.combined_injections_query {
let mut injections_by_pattern_index =
vec![(None, Vec::new(), false); combined_injections_query.pattern_count()];
let matches = cursor.matches(
combined_injections_query,
layer.tree().root_node(),
RopeProvider(source),
);
for mat in matches {
let entry = &mut injections_by_pattern_index[mat.pattern_index];
let (language_name, content_node, include_children) = injection_for_match(
&layer.config,
combined_injections_query,
&mat,
source,
);
if language_name.is_some() {
entry.0 = language_name;
}
if let Some(content_node) = content_node {
entry.1.push(content_node);
}
entry.2 = include_children;
}
for (lang_name, content_nodes, includes_children) in injections_by_pattern_index
{
if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty()) {
if let Some(config) = (injection_callback)(&lang_name) {
let ranges = intersect_ranges(
&layer.ranges,
&content_nodes,
includes_children,
);
if !ranges.is_empty() {
injections.push((config, ranges));
}
}
}
}
}

let depth = layer.depth + 1;
// TODO: can't inline this since matches borrows self.layers
for (config, ranges) in injections {
let layer_id = self.layers.insert(LanguageLayer {
tree: None,
config,
depth,
ranges,
});
queue.push_back(layer_id);
}
}

// Return the cursor back in the pool.
ts_parser.cursors.push(cursor);

Ok(()) // so we can use the try operator
})?;

Ok(())
}

// fn buffer_changed -> call layer.update(range, new_text) on root layer and then all marker layers

// call this on transaction.apply() -> buffer_changed(changes)
//
// fn parse(language, old_tree, ranges)
//

pub fn tree(&self) -> &Tree {
self.root_layer.tree()
self.layers[self.root].tree()
}

// root: Tree
// injections: Vec<(Tree, Range marker)>

// handle updates that go over a part of the layer by truncating them to start/end appropriately

// injections tracked by marker:
// if marker areas match it's fine and update
// if not found add new layer
// if length 0 then area got removed, clean up the layer
//
// <!--update_for_injection(grammar)-->
// layer update:
// if range.len = 0 then remove the layer
// for change in changes { tree.edit(change) }
// tree = parser.parse(.., tree, ..)
// calculate affected range and update injections
// injection update:
// look for existing injections
// if present, range = (first injection start, last injection end)

// Highlighting
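The new `update()` walks layers breadth first: it re-parses a layer, collects the injections found in it, and queues each injection as a deeper child layer. A simplified model of that traversal, assuming a flat `Vec` of toy layers rather than the real slotmap and tree-sitter machinery:

```rust
use std::collections::VecDeque;

// Simplified model of the breadth-first injection walk in Syntax::update().
#[derive(Debug)]
struct Layer {
    depth: usize,
    // Byte ranges this layer injects; each becomes a child layer.
    injections: Vec<(usize, usize)>,
}

fn process(layers: &mut Vec<Layer>, root: usize) {
    let mut queue = VecDeque::new();
    queue.push_back(root);

    while let Some(id) = queue.pop_front() {
        // The real code re-parses the layer's tree and runs the injection queries here.
        let depth = layers[id].depth + 1;
        let injections = layers[id].injections.clone();

        // Every discovered injection becomes a new, deeper layer that is queued in turn.
        for _range in injections {
            layers.push(Layer { depth, injections: Vec::new() });
            queue.push_back(layers.len() - 1);
        }
    }
}

fn main() {
    let mut layers = vec![Layer { depth: 0, injections: vec![(10, 20), (30, 40)] }];
    process(&mut layers, 0);
    assert_eq!(layers.len(), 3);
    assert!(layers.iter().skip(1).all(|l| l.depth == 1));
}
```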
@@ -474,31 +640,37 @@ pub fn highlight_iter<'a>(
source: RopeSlice<'a>,
range: Option<std::ops::Range<usize>>,
cancellation_flag: Option<&'a AtomicUsize>,
injection_callback: impl FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
) -> impl Iterator<Item = Result<HighlightEvent, Error>> + 'a {
// The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
// prevents them from being moved. But both of these values are really just
// pointers, so it's actually ok to move them.

// reuse a cursor from the pool if possible
let mut layers = self
.layers
.iter()
.map(|(_, layer)| {
// Reuse a cursor from the pool if available.
let mut cursor = PARSER.with(|ts_parser| {
let highlighter = &mut ts_parser.borrow_mut();
highlighter.cursors.pop().unwrap_or_else(QueryCursor::new)
});
let tree_ref = self.tree();
let cursor_ref = unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
let query_ref = &self.config.query;
let config_ref = self.config.as_ref();

// The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
// prevents them from being moved. But both of these values are really just
// pointers, so it's actually ok to move them.
let cursor_ref =
unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };

// if reusing cursors & no range this resets to whole range
cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
// TODO: handle intersect (range & layer.range)
// cursor_ref.set_byte_range(range.clone().unwrap_or(0..usize::MAX));
cursor_ref.set_byte_range(0..usize::MAX);

let captures = cursor_ref
.captures(query_ref, tree_ref.root_node(), RopeProvider(source))
.captures(
&layer.config.query,
layer.tree().root_node(),
RopeProvider(source),
)
.peekable();

// manually craft the root layer based on the existing tree
let layer = HighlightIterLayer {
HighlightIterLayer {
highlight_end_stack: Vec::new(),
scope_stack: vec![LocalScope {
inherits: false,

@@ -506,29 +678,38 @@ pub fn highlight_iter<'a>(
local_defs: Vec::new(),
}],
cursor,
depth: 0,
_tree: None,
captures,
config: config_ref,
ranges: vec![Range {
start_byte: 0,
end_byte: usize::MAX,
start_point: Point::new(0, 0),
end_point: Point::new(usize::MAX, usize::MAX),
}],
};
config: layer.config.as_ref(), // TODO: just reuse
depth: layer.depth, // TODO: just reuse
ranges: layer.ranges.clone(),
}
})
.collect::<Vec<_>>();

log::info!("--");

// HAXX: arrange layers by byte range, with deeper layers positioned first
layers.sort_by_key(|layer| {
(
layer.ranges.first().cloned(),
std::cmp::Reverse(layer.depth),
)
});

let mut result = HighlightIter {
source,
byte_offset: range.map_or(0, |r| r.start), // TODO: simplify
injection_callback,
cancellation_flag,
iter_count: 0,
layers: vec![layer],
layers,
next_event: None,
last_highlight_range: None,
};
result.sort_layers();
for layer in &result.layers {
log::info!("> {:?} {:?}", layer.depth, layer.ranges); // <- for some reason layers are reversed here
}
result
}
// on_tokenize
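`highlight_iter` now builds one `HighlightIterLayer` per language layer and arranges them so layers starting earlier come first, with deeper (injected) layers winning ties at the same position. A small sketch of that ordering rule on plain data, with the real `tree_sitter::Range` simplified to a start byte:

```rust
use std::cmp::Reverse;

#[derive(Debug, PartialEq)]
struct Layer {
    start_byte: usize,
    depth: usize,
}

fn main() {
    let mut layers = vec![
        Layer { start_byte: 100, depth: 0 },
        Layer { start_byte: 0, depth: 0 },
        Layer { start_byte: 0, depth: 2 }, // injection nested at the same start
    ];

    // Earlier byte offsets first; at the same start, deeper layers sort first.
    layers.sort_by_key(|layer| (layer.start_byte, Reverse(layer.depth)));

    assert_eq!(
        layers,
        vec![
            Layer { start_byte: 0, depth: 2 },
            Layer { start_byte: 0, depth: 0 },
            Layer { start_byte: 100, depth: 0 },
        ]
    );
}
```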
@@ -556,32 +737,24 @@ pub fn highlight_iter<'a>(
pub struct LanguageLayer {
// mode
// grammar
// depth
pub config: Arc<HighlightConfiguration>,
pub(crate) tree: Option<Tree>,
pub ranges: Vec<Range>,
pub depth: usize,
}

impl LanguageLayer {
// pub fn new() -> Self {
// Self { tree: None }
// }

pub fn tree(&self) -> &Tree {
// TODO: no unwrap
self.tree.as_ref().unwrap()
}

fn parse(
&mut self,
ts_parser: &mut TsParser,
config: &HighlightConfiguration,
source: &Rope,
_depth: usize,
ranges: Vec<Range>,
) -> Result<(), Error> {
if ts_parser.parser.set_included_ranges(&ranges).is_ok() {
fn parse(&mut self, ts_parser: &mut TsParser, source: &Rope) -> Result<(), Error> {
ts_parser.parser.set_included_ranges(&self.ranges).unwrap();

ts_parser
.parser
.set_language(config.language)
.set_language(self.config.language)
.map_err(|_| Error::InvalidLanguage)?;

// unsafe { syntax.parser.set_cancellation_flag(cancellation_flag) };

@@ -600,11 +773,11 @@ fn parse(
self.tree.as_ref(),
)
.ok_or(Error::Cancelled)?;

self.tree = Some(tree)
}
// unsafe { ts_parser.parser.set_cancellation_flag(None) };
self.tree = Some(tree);
Ok(())
}
}

pub(crate) fn generate_edits(
old_text: RopeSlice,
@@ -713,77 +886,6 @@ fn traverse(point: Point, text: &Tendril) -> Point {
edits
}

fn update(
&mut self,
ts_parser: &mut TsParser,
config: &HighlightConfiguration,
old_source: &Rope,
source: &Rope,
changeset: &ChangeSet,
) -> Result<(), Error> {
if changeset.is_empty() {
return Ok(());
}

let edits = Self::generate_edits(old_source.slice(..), changeset);

// Notify the tree about all the changes
for edit in edits.iter().rev() {
// apply the edits in reverse. If we applied them in order then edit 1 would disrupt
// the positioning of edit 2
self.tree.as_mut().unwrap().edit(edit);
}

self.parse(
ts_parser,
config,
source,
0,
// TODO: what to do about this range on update
vec![Range {
start_byte: 0,
end_byte: usize::MAX,
start_point: Point::new(0, 0),
end_point: Point::new(usize::MAX, usize::MAX),
}],
)
}

// fn highlight_iter() -> same as Mode but for this layer. Mode composits these
// fn buffer_changed
// fn update(range)
// fn update_injections()
}

// -- refactored from tree-sitter-highlight to be able to retain state
// TODO: add seek() to iter

// problem: any time a layer is updated it must update it's injections on the parent (potentially
// removing some from use)
// can't modify to vec and exist in it at the same time since that would violate borrows
// maybe we can do with an arena
// maybe just caching on the top layer and nevermind the injections for now?
//
// Grammar {
// layers: Vec<Box<Layer>> to prevent memory moves when vec is modified
// }
// injections tracked by marker:
// if marker areas match it's fine and update
// if not found add new layer
// if length 0 then area got removed, clean up the layer
//
// layer update:
// if range.len = 0 then remove the layer
// for change in changes { tree.edit(change) }
// tree = parser.parse(.., tree, ..)
// calculate affected range and update injections
// injection update:
// look for existing injections
// if present, range = (first injection start, last injection end)
//
// For now cheat and just throw out non-root layers if they exist. This should still improve
// parsing in majority of cases.

use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize};
use tree_sitter::{
@@ -820,8 +922,8 @@ pub enum HighlightEvent {
pub struct HighlightConfiguration {
pub language: Grammar,
pub query: Query,
injections_query: Query,
combined_injections_query: Option<Query>,
locals_pattern_index: usize,
highlights_pattern_index: usize,
highlight_indices: ArcSwap<Vec<Option<Highlight>>>,
non_local_variable_patterns: Vec<bool>,

@@ -848,13 +950,9 @@ struct LocalScope<'a> {
}

#[derive(Debug)]
struct HighlightIter<'a, F>
where
F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
{
struct HighlightIter<'a> {
source: RopeSlice<'a>,
byte_offset: usize,
injection_callback: F,
cancellation_flag: Option<&'a AtomicUsize>,
layers: Vec<HighlightIterLayer<'a>>,
iter_count: usize,

@@ -894,8 +992,8 @@ struct HighlightIterLayer<'a> {
config: &'a HighlightConfiguration,
highlight_end_stack: Vec<usize>,
scope_stack: Vec<LocalScope<'a>>,
ranges: Vec<Range>,
depth: usize,
ranges: Vec<Range>, // TEMP
}

impl<'a> fmt::Debug for HighlightIterLayer<'a> {

@@ -927,38 +1025,32 @@ pub fn new(
) -> Result<Self, QueryError> {
// Concatenate the query strings, keeping track of the start offset of each section.
let mut query_source = String::new();
query_source.push_str(injection_query);
let locals_query_offset = query_source.len();
query_source.push_str(locals_query);
let highlights_query_offset = query_source.len();
query_source.push_str(highlights_query);

// Construct a single query by concatenating the three query strings, but record the
// range of pattern indices that belong to each individual string.
let mut query = Query::new(language, &query_source)?;
let mut locals_pattern_index = 0;
let query = Query::new(language, &query_source)?;
let mut highlights_pattern_index = 0;
for i in 0..(query.pattern_count()) {
let pattern_offset = query.start_byte_for_pattern(i);
if pattern_offset < highlights_query_offset {
if pattern_offset < highlights_query_offset {
highlights_pattern_index += 1;
}
if pattern_offset < locals_query_offset {
locals_pattern_index += 1;
}
}
}

let mut injections_query = Query::new(language, injection_query)?;

// Construct a separate query just for dealing with the 'combined injections'.
// Disable the combined injection patterns in the main query.
let mut combined_injections_query = Query::new(language, injection_query)?;
let mut has_combined_queries = false;
for pattern_index in 0..locals_pattern_index {
let settings = query.property_settings(pattern_index);
for pattern_index in 0..injections_query.pattern_count() {
let settings = injections_query.property_settings(pattern_index);
if settings.iter().any(|s| &*s.key == "injection.combined") {
has_combined_queries = true;
query.disable_pattern(pattern_index);
injections_query.disable_pattern(pattern_index);
} else {
combined_injections_query.disable_pattern(pattern_index);
}
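`HighlightConfiguration::new` now builds a dedicated `injections_query` plus a `combined_injections_query`, enabling each pattern in exactly one of the two depending on whether it carries `injection.combined`. A sketch of that split against the tree-sitter 0.20 query API; the standalone function name is mine and error handling is kept minimal:

```rust
use tree_sitter::{Language, Query, QueryError};

// Split an injections query into "normal" and "combined" halves, as in the diff:
// patterns marked `(#set! injection.combined)` stay enabled only in the second query.
fn split_injection_queries(
    language: Language,
    injection_query: &str,
) -> Result<(Query, Query), QueryError> {
    let mut injections_query = Query::new(language, injection_query)?;
    let mut combined_injections_query = Query::new(language, injection_query)?;

    for pattern_index in 0..injections_query.pattern_count() {
        let settings = injections_query.property_settings(pattern_index);
        let is_combined = settings.iter().any(|s| &*s.key == "injection.combined");
        if is_combined {
            // Combined patterns are handled by the second query only.
            injections_query.disable_pattern(pattern_index);
        } else {
            combined_injections_query.disable_pattern(pattern_index);
        }
    }

    Ok((injections_query, combined_injections_query))
}
```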
@@ -990,8 +1082,6 @@ pub fn new(
for (i, name) in query.capture_names().iter().enumerate() {
let i = Some(i as u32);
match name.as_str() {
"injection.content" => injection_content_capture_index = i,
"injection.language" => injection_language_capture_index = i,
"local.definition" => local_def_capture_index = i,
"local.definition-value" => local_def_value_capture_index = i,
"local.reference" => local_ref_capture_index = i,

@@ -1000,12 +1090,21 @@ pub fn new(
}
}

for (i, name) in injections_query.capture_names().iter().enumerate() {
let i = Some(i as u32);
match name.as_str() {
"injection.content" => injection_content_capture_index = i,
"injection.language" => injection_language_capture_index = i,
_ => {}
}
}

let highlight_indices = ArcSwap::from_pointee(vec![None; query.capture_names().len()]);
Ok(Self {
language,
query,
injections_query,
combined_injections_query,
locals_pattern_index,
highlights_pattern_index,
highlight_indices,
non_local_variable_patterns,

@@ -1070,144 +1169,31 @@ pub fn configure(&self, recognized_names: &[String]) {
}

impl<'a> HighlightIterLayer<'a> {
/// Create a new 'layer' of highlighting for this document.
///
/// In the even that the new layer contains "combined injections" (injections where multiple
/// disjoint ranges are parsed as one syntax tree), these will be eagerly processed and
/// added to the returned vector.
fn new<F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a>(
source: RopeSlice<'a>,
cancellation_flag: Option<&'a AtomicUsize>,
injection_callback: &mut F,
mut config: &'a HighlightConfiguration,
mut depth: usize,
mut ranges: Vec<Range>,
) -> Result<Vec<Self>, Error> {
let mut result = Vec::with_capacity(1);
let mut queue = Vec::new();
loop {
// --> Tree parsing part

PARSER.with(|ts_parser| {
let highlighter = &mut ts_parser.borrow_mut();

if highlighter.parser.set_included_ranges(&ranges).is_ok() {
highlighter
.parser
.set_language(config.language)
.map_err(|_| Error::InvalidLanguage)?;

unsafe { highlighter.parser.set_cancellation_flag(cancellation_flag) };
let tree = highlighter
.parser
.parse_with(
&mut |byte, _| {
if byte <= source.len_bytes() {
let (chunk, start_byte, _, _) = source.chunk_at_byte(byte);
chunk[byte - start_byte..].as_bytes()
// First, sort scope boundaries by their byte offset in the document. At a
// given position, emit scope endings before scope beginnings. Finally, emit
// scope boundaries from deeper layers first.
fn sort_key(&mut self) -> Option<(usize, bool, isize)> {
let depth = -(self.depth as isize);
let next_start = self
.captures
.peek()
.map(|(m, i)| m.captures[*i].node.start_byte());
let next_end = self.highlight_end_stack.last().cloned();
match (next_start, next_end) {
(Some(start), Some(end)) => {
if start < end {
Some((start, true, depth))
} else {
// out of range
&[]
}
},
None,
)
.ok_or(Error::Cancelled)?;
unsafe { highlighter.parser.set_cancellation_flag(None) };
let mut cursor = highlighter.cursors.pop().unwrap_or_else(QueryCursor::new);

// Process combined injections.
if let Some(combined_injections_query) = &config.combined_injections_query {
let mut injections_by_pattern_index = vec![
(None, Vec::new(), false);
combined_injections_query
.pattern_count()
];
let matches = cursor.matches(
combined_injections_query,
tree.root_node(),
RopeProvider(source),
);
for mat in matches {
let entry = &mut injections_by_pattern_index[mat.pattern_index];
let (language_name, content_node, include_children) =
injection_for_match(
config,
combined_injections_query,
&mat,
source,
);
if language_name.is_some() {
entry.0 = language_name;
}
if let Some(content_node) = content_node {
entry.1.push(content_node);
}
entry.2 = include_children;
}
for (lang_name, content_nodes, includes_children) in
injections_by_pattern_index
{
if let (Some(lang_name), false) = (lang_name, content_nodes.is_empty())
{
if let Some(next_config) = (injection_callback)(&lang_name) {
let ranges = Self::intersect_ranges(
&ranges,
&content_nodes,
includes_children,
);
if !ranges.is_empty() {
queue.push((next_config, depth + 1, ranges));
Some((end, false, depth))
}
}
(Some(i), None) => Some((i, true, depth)),
(None, Some(j)) => Some((j, false, depth)),
_ => None,
}
}
}

// --> Highlighting query part

// The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
// prevents them from being moved. But both of these values are really just
// pointers, so it's actually ok to move them.
let tree_ref = unsafe { mem::transmute::<_, &'static Tree>(&tree) };
let cursor_ref =
unsafe { mem::transmute::<_, &'static mut QueryCursor>(&mut cursor) };
let captures = cursor_ref
.captures(&config.query, tree_ref.root_node(), RopeProvider(source))
.peekable();

result.push(HighlightIterLayer {
highlight_end_stack: Vec::new(),
scope_stack: vec![LocalScope {
inherits: false,
range: 0..usize::MAX,
local_defs: Vec::new(),
}],
cursor,
depth,
_tree: Some(tree),
captures,
config,
ranges,
});
}

Ok(()) // so we can use the try operator
})?;

if queue.is_empty() {
break;
}

let (next_config, next_depth, next_ranges) = queue.remove(0);
config = next_config;
depth = next_depth;
ranges = next_ranges;
}

Ok(result)
}

// Compute the ranges that should be included when parsing an injection.
// This takes into account three things:
// * `parent_ranges` - The ranges must all fall within the *current* layer's ranges.
@@ -1302,35 +1288,7 @@ fn intersect_ranges(
result
}

// First, sort scope boundaries by their byte offset in the document. At a
// given position, emit scope endings before scope beginnings. Finally, emit
// scope boundaries from deeper layers first.
fn sort_key(&mut self) -> Option<(usize, bool, isize)> {
let depth = -(self.depth as isize);
let next_start = self
.captures
.peek()
.map(|(m, i)| m.captures[*i].node.start_byte());
let next_end = self.highlight_end_stack.last().cloned();
match (next_start, next_end) {
(Some(start), Some(end)) => {
if start < end {
Some((start, true, depth))
} else {
Some((end, false, depth))
}
}
(Some(i), None) => Some((i, true, depth)),
(None, Some(j)) => Some((j, false, depth)),
_ => None,
}
}
}

impl<'a, F> HighlightIter<'a, F>
where
F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
{
impl<'a> HighlightIter<'a> {
fn emit_event(
&mut self,
offset: usize,

@@ -1361,6 +1319,12 @@ fn sort_layers(&mut self) {
i += 1;
continue;
}
} else {
let layer = self.layers.remove(i + 1);
PARSER.with(|ts_parser| {
let highlighter = &mut ts_parser.borrow_mut();
highlighter.cursors.push(layer.cursor);
});
}
break;
}

@@ -1377,30 +1341,9 @@ fn sort_layers(&mut self) {
}
}
}

fn insert_layer(&mut self, mut layer: HighlightIterLayer<'a>) {
if let Some(sort_key) = layer.sort_key() {
let mut i = 1;
while i < self.layers.len() {
if let Some(sort_key_i) = self.layers[i].sort_key() {
if sort_key_i > sort_key {
self.layers.insert(i, layer);
return;
}
i += 1;
} else {
self.layers.remove(i);
}
}
self.layers.push(layer);
}
}
}

impl<'a, F> Iterator for HighlightIter<'a, F>
where
F: FnMut(&str) -> Option<&'a HighlightConfiguration> + 'a,
{
impl<'a> Iterator for HighlightIter<'a> {
type Item = Result<HighlightEvent, Error>;

fn next(&mut self) -> Option<Self::Item> {

@@ -1460,55 +1403,12 @@ fn next(&mut self) -> Option<Self::Item> {
layer.highlight_end_stack.pop();
return self.emit_event(end_byte, Some(HighlightEvent::HighlightEnd));
} else {
// return self.emit_event(self.source.len(), None);
return None;
return self.emit_event(self.source.len_bytes(), None);
};

let (mut match_, capture_index) = layer.captures.next().unwrap();
let mut capture = match_.captures[capture_index];

// If this capture represents an injection, then process the injection.
if match_.pattern_index < layer.config.locals_pattern_index {
let (language_name, content_node, include_children) =
injection_for_match(layer.config, &layer.config.query, &match_, self.source);

// Explicitly remove this match so that none of its other captures will remain
// in the stream of captures.
match_.remove();

// If a language is found with the given name, then add a new language layer
// to the highlighted document.
if let (Some(language_name), Some(content_node)) = (language_name, content_node) {
if let Some(config) = (self.injection_callback)(&language_name) {
let ranges = HighlightIterLayer::intersect_ranges(
&self.layers[0].ranges,
&[content_node],
include_children,
);
if !ranges.is_empty() {
match HighlightIterLayer::new(
self.source,
self.cancellation_flag,
&mut self.injection_callback,
config,
self.layers[0].depth + 1,
ranges,
) {
Ok(layers) => {
for layer in layers {
self.insert_layer(layer);
}
}
Err(e) => return Some(Err(e)),
}
}
}
}

self.sort_layers();
continue 'main;
}

// Remove from the local scope stack any local scopes that have already ended.
while range.start > layer.scope_stack.last().unwrap().range.end {
layer.scope_stack.pop();

@@ -1703,14 +1603,6 @@ fn injection_for_match<'a>(
(language_name, content_node, include_children)
}

// fn shrink_and_clear<T>(vec: &mut Vec<T>, capacity: usize) {
// if vec.len() > capacity {
// vec.truncate(capacity);
// vec.shrink_to_fit();
// }
// vec.clear();
// }

pub struct Merge<I> {
iter: I,
spans: Box<dyn Iterator<Item = (usize, std::ops::Range<usize>)>>,

@@ -1877,6 +1769,8 @@ fn test_parser() {
.map(String::from)
.collect();

let loader = Loader::new(Configuration { language: vec![] });

let language = get_language(&crate::RUNTIME_DIR, "Rust").unwrap();
let config = HighlightConfiguration::new(
language,

@@ -1899,7 +1793,7 @@ struct Stuff {}
fn main() {}
",
);
let syntax = Syntax::new(&source, Arc::new(config));
let syntax = Syntax::new(&source, Arc::new(config), Arc::new(loader));
let tree = syntax.tree();
let root = tree.root_node();
assert_eq!(root.kind(), "source_file");

@@ -1926,7 +1820,7 @@ fn test_input_edits() {
&doc,
vec![(6, 11, Some("test".into())), (12, 17, None)].into_iter(),
);
let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes());
let edits = generate_edits(doc.slice(..), transaction.changes());
// transaction.apply(&mut state);

assert_eq!(

@@ -1955,7 +1849,7 @@ fn test_input_edits() {
let mut doc = Rope::from("fn test() {}");
let transaction =
Transaction::change(&doc, vec![(8, 8, Some("a: u32".into()))].into_iter());
let edits = LanguageLayer::generate_edits(doc.slice(..), transaction.changes());
let edits = generate_edits(doc.slice(..), transaction.changes());
transaction.apply(&mut doc);

assert_eq!(doc, "fn test(a: u32) {}");
@@ -68,13 +68,12 @@ pub fn render_view(
surface: &mut Surface,
theme: &Theme,
is_focused: bool,
loader: &syntax::Loader,
config: &helix_view::editor::Config,
) {
let inner = view.inner_area();
let area = view.area;

let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme, loader);
let highlights = Self::doc_syntax_highlights(doc, view.offset, inner.height, theme);
let highlights = syntax::merge(highlights, Self::doc_diagnostics_highlights(doc, theme));
let highlights: Box<dyn Iterator<Item = HighlightEvent>> = if is_focused {
Box::new(syntax::merge(

@@ -121,8 +120,7 @@ pub fn doc_syntax_highlights<'doc>(
doc: &'doc Document,
offset: Position,
height: u16,
theme: &Theme,
loader: &syntax::Loader,
_theme: &Theme,
) -> Box<dyn Iterator<Item = HighlightEvent> + 'doc> {
let text = doc.text().slice(..);
let last_line = std::cmp::min(

@@ -142,25 +140,8 @@ pub fn doc_syntax_highlights<'doc>(
// TODO: range doesn't actually restrict source, just highlight range
let highlights = match doc.syntax() {
Some(syntax) => {
let scopes = theme.scopes();
syntax
.highlight_iter(text.slice(..), Some(range), None, |language| {
loader.language_configuration_for_injection_string(language)
.and_then(|language_config| {
let config = language_config.highlight_config(scopes)?;
let config_ref = config.as_ref();
// SAFETY: the referenced `HighlightConfiguration` behind
// the `Arc` is guaranteed to remain valid throughout the
// duration of the highlight.
let config_ref = unsafe {
std::mem::transmute::<
_,
&'static syntax::HighlightConfiguration,
>(config_ref)
};
Some(config_ref)
})
})
.highlight_iter(text.slice(..), Some(range), None)
.map(|event| event.unwrap())
.collect() // TODO: we collect here to avoid holding the lock, fix later
}

@@ -1070,7 +1051,6 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
for (view, is_focused) in cx.editor.tree.views() {
let doc = cx.editor.document(view.doc).unwrap();
let loader = &cx.editor.syn_loader;
self.render_view(
doc,
view,

@@ -1078,7 +1058,6 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
surface,
&cx.editor.theme,
is_focused,
loader,
&cx.editor.config,
);
}
@@ -38,7 +38,7 @@ pub fn new(contents: String, config_loader: Arc<syntax::Loader>) -> Self {
fn parse<'a>(
contents: &'a str,
theme: Option<&Theme>,
loader: &syntax::Loader,
loader: Arc<syntax::Loader>,
) -> tui::text::Text<'a> {
// // also 2021-03-04T16:33:58.553 helix_lsp::transport [INFO] <- {"contents":{"kind":"markdown","value":"\n```rust\ncore::num\n```\n\n```rust\npub const fn saturating_sub(self, rhs:Self) ->Self\n```\n\n---\n\n```rust\n```"},"range":{"end":{"character":61,"line":101},"start":{"character":47,"line":101}}}
// let text = "\n```rust\ncore::iter::traits::iterator::Iterator\n```\n\n```rust\nfn collect<B: FromIterator<Self::Item>>(self) -> B\nwhere\n Self: Sized,\n```\n\n---\n\nTransforms an iterator into a collection.\n\n`collect()` can take anything iterable, and turn it into a relevant\ncollection. This is one of the more powerful methods in the standard\nlibrary, used in a variety of contexts.\n\nThe most basic pattern in which `collect()` is used is to turn one\ncollection into another. You take a collection, call [`iter`](https://doc.rust-lang.org/nightly/core/iter/traits/iterator/trait.Iterator.html) on it,\ndo a bunch of transformations, and then `collect()` at the end.\n\n`collect()` can also create instances of types that are not typical\ncollections. For example, a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html) can be built from [`char`](type@char)s,\nand an iterator of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html) items can be collected\ninto `Result<Collection<T>, E>`. See the examples below for more.\n\nBecause `collect()` is so general, it can cause problems with type\ninference. As such, `collect()` is one of the few times you'll see\nthe syntax affectionately known as the 'turbofish': `::<>`. This\nhelps the inference algorithm understand specifically which collection\nyou're trying to collect into.\n\n# Examples\n\nBasic usage:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled: Vec<i32> = a.iter()\n .map(|&x| x * 2)\n .collect();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nNote that we needed the `: Vec<i32>` on the left-hand side. This is because\nwe could collect into, for example, a [`VecDeque<T>`](https://doc.rust-lang.org/nightly/core/iter/std/collections/struct.VecDeque.html) instead:\n\n```rust\nuse std::collections::VecDeque;\n\nlet a = [1, 2, 3];\n\nlet doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();\n\nassert_eq!(2, doubled[0]);\nassert_eq!(4, doubled[1]);\nassert_eq!(6, doubled[2]);\n```\n\nUsing the 'turbofish' instead of annotating `doubled`:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nBecause `collect()` only cares about what you're collecting into, you can\nstill use a partial type hint, `_`, with the turbofish:\n\n```rust\nlet a = [1, 2, 3];\n\nlet doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();\n\nassert_eq!(vec![2, 4, 6], doubled);\n```\n\nUsing `collect()` to make a [`String`](https://doc.rust-lang.org/nightly/core/iter/std/string/struct.String.html):\n\n```rust\nlet chars = ['g', 'd', 'k', 'k', 'n'];\n\nlet hello: String = chars.iter()\n .map(|&x| x as u8)\n .map(|x| (x + 1) as char)\n .collect();\n\nassert_eq!(\"hello\", hello);\n```\n\nIf you have a list of [`Result<T, E>`](https://doc.rust-lang.org/nightly/core/result/enum.Result.html)s, you can use `collect()` to\nsee if any of them failed:\n\n```rust\nlet results = [Ok(1), Err(\"nope\"), Ok(3), Err(\"bad\")];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the first error\nassert_eq!(Err(\"nope\"), result);\n\nlet results = [Ok(1), Ok(3)];\n\nlet result: Result<Vec<_>, &str> = results.iter().cloned().collect();\n\n// gives us the list of answers\nassert_eq!(Ok(vec![1, 3]), result);\n```";
@@ -98,14 +98,13 @@ fn to_span(text: pulldown_cmark::CowStr) -> Span {
let syntax = loader
.language_configuration_for_injection_string(language)
.and_then(|config| config.highlight_config(theme.scopes()))
.map(|config| Syntax::new(&rope, config));
.map(|config| Syntax::new(&rope, config, loader.clone()));

if let Some(syntax) = syntax {
// if we have a syntax available, highlight_iter and generate spans
let mut highlights = Vec::new();

for event in syntax.highlight_iter(rope.slice(..), None, None, |_| None)
{
for event in syntax.highlight_iter(rope.slice(..), None, None) {
match event.unwrap() {
HighlightEvent::HighlightStart(span) => {
highlights.push(span);

@@ -211,7 +210,11 @@ impl Component for Markdown {
fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {
use tui::widgets::{Paragraph, Widget, Wrap};

let text = parse(&self.contents, Some(&cx.editor.theme), &self.config_loader);
let text = parse(
&self.contents,
Some(&cx.editor.theme),
self.config_loader.clone(),
);

let par = Paragraph::new(text)
.wrap(Wrap { trim: false })

@@ -229,7 +232,7 @@ fn required_size(&mut self, viewport: (u16, u16)) -> Option<(u16, u16)> {
if padding >= viewport.1 || padding >= viewport.0 {
return None;
}
let contents = parse(&self.contents, None, &self.config_loader);
let contents = parse(&self.contents, None, self.config_loader.clone());
// TODO: account for tab width
let max_text_width = (viewport.0 - padding).min(120);
let mut text_width = 0;

@@ -221,13 +221,8 @@ fn render(&mut self, area: Rect, surface: &mut Surface, cx: &mut Context) {

let offset = Position::new(first_line, 0);

let highlights = EditorView::doc_syntax_highlights(
doc,
offset,
area.height,
&cx.editor.theme,
&cx.editor.syn_loader,
);
let highlights =
EditorView::doc_syntax_highlights(doc, offset, area.height, &cx.editor.theme);
EditorView::render_text_highlights(
doc,
offset,
@@ -359,7 +359,7 @@ pub fn open(
path: &Path,
encoding: Option<&'static encoding::Encoding>,
theme: Option<&Theme>,
config_loader: Option<&syntax::Loader>,
config_loader: Option<Arc<syntax::Loader>>,
) -> Result<Self, Error> {
// Open the file if it exists, otherwise assume it is a new file (and thus empty).
let (rope, encoding) = if path.exists() {

@@ -498,12 +498,12 @@ fn save_impl<F: Future<Output = LspFormatting>>(
}

/// Detect the programming language based on the file type.
pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: &syntax::Loader) {
pub fn detect_language(&mut self, theme: Option<&Theme>, config_loader: Arc<syntax::Loader>) {
if let Some(path) = &self.path {
let language_config = config_loader
.language_config_for_file_name(path)
.or_else(|| config_loader.language_config_for_shebang(self.text()));
self.set_language(theme, language_config);
self.set_language(theme, language_config, Some(config_loader));
}
}

@@ -579,11 +579,12 @@ pub fn set_language(
&mut self,
theme: Option<&Theme>,
language_config: Option<Arc<helix_core::syntax::LanguageConfiguration>>,
loader: Option<Arc<helix_core::syntax::Loader>>,
) {
if let Some(language_config) = language_config {
if let (Some(language_config), Some(loader)) = (language_config, loader) {
let scopes = theme.map(|theme| theme.scopes()).unwrap_or(&[]);
if let Some(highlight_config) = language_config.highlight_config(scopes) {
let syntax = Syntax::new(&self.text, highlight_config);
let syntax = Syntax::new(&self.text, highlight_config, loader);
self.syntax = Some(syntax);
// TODO: config.configure(scopes) is now delayed, is that ok?
}

@@ -605,7 +606,7 @@ pub fn set_language2(
) {
let language_config = config_loader.language_config_for_scope(scope);

self.set_language(theme, language_config);
self.set_language(theme, language_config, Some(config_loader));
}

/// Set the LSP.

@@ -283,7 +283,7 @@ pub fn set_theme(&mut self, theme: Theme) {
/// Refreshes the language server for a given document
pub fn refresh_language_server(&mut self, doc_id: DocumentId) -> Option<()> {
let doc = self.documents.get_mut(&doc_id)?;
doc.detect_language(Some(&self.theme), &self.syn_loader);
doc.detect_language(Some(&self.theme), self.syn_loader.clone());
Self::launch_language_server(&mut self.language_servers, doc)
}

@@ -462,7 +462,12 @@ pub fn open(&mut self, path: PathBuf, action: Action) -> Result<DocumentId, Erro
let id = if let Some(id) = id {
id
} else {
let mut doc = Document::open(&path, None, Some(&self.theme), Some(&self.syn_loader))?;
let mut doc = Document::open(
&path,
None,
Some(&self.theme),
Some(self.syn_loader.clone()),
)?;

let _ = Self::launch_language_server(&mut self.language_servers, &mut doc);
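Taken together, the loader now travels with the document: `Syntax::new` takes an `Arc<Loader>` and resolves injected languages itself, so `highlight_iter` no longer accepts an injection callback. A hedged sketch of the updated call shape, assuming the `helix_core` paths and re-exports as they appear in this diff (not a drop-in snippet):

```rust
use std::sync::Arc;

use helix_core::{
    syntax::{Loader, Syntax},
    Rope,
};

fn highlight_rust(loader: Arc<Loader>, source: &Rope) -> Option<Syntax> {
    // Resolve the language configuration and its highlight query set.
    let language_config = loader.language_config_for_scope("source.rust")?;
    let highlight_config = language_config.highlight_config(&[])?;

    // Syntax now owns a Loader handle so it can resolve injections on its own;
    // highlight_iter consequently no longer takes an injection callback.
    let syntax = Syntax::new(source, highlight_config, loader);
    for event in syntax.highlight_iter(source.slice(..), None, None) {
        let _ = event; // Result<HighlightEvent, Error>: Source / HighlightStart / HighlightEnd
    }
    Some(syntax)
}
```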