New way to handle syntax

This commit is contained in:
Dongdong Zhou 2022-03-21 09:10:41 +00:00
parent 08937dbc40
commit fcac68412d
12 changed files with 2425 additions and 2153 deletions

2
Cargo.lock generated
View File

@ -2176,6 +2176,7 @@ name = "lapce-core"
version = "0.0.10"
dependencies = [
"itertools 0.10.3",
"serde 1.0.130",
"serde_json",
"thiserror",
"tree-sitter",
@ -2264,6 +2265,7 @@ dependencies = [
"hotwatch",
"ignore",
"jsonrpc-lite",
"lapce-core",
"lapce-rpc",
"locale_config",
"lsp-types",

View File

@ -7,6 +7,7 @@ edition = "2021"
[dependencies]
thiserror = "1.0"
itertools = "0.10.3"
serde = "1.0"
serde_json = "1.0"
tree-sitter = "0.20.6"
tree-sitter-highlight = "0.20.1"

View File

@ -1,7 +1,8 @@
use std::{collections::HashSet, path::Path};
use tree_sitter::{Parser, Query, TreeCursor};
use tree_sitter_highlight::HighlightConfiguration;
use crate::style::{HighlightConfiguration, SCOPES};
const RUST_CODE_LENS_LIST: &[&str] =
&["source_file", "impl_item", "trait_item", "declaration_list"];
@ -52,15 +53,15 @@ pub(crate) fn new_parser(&self) -> Parser {
parser
}
pub(crate) fn new_highlight_query(&self) -> Query {
pub(crate) fn new_highlight_config(&self) -> HighlightConfiguration {
let language = self.tree_sitter_language();
let query = match self {
LapceLanguage::Rust => tree_sitter_rust::HIGHLIGHT_QUERY,
LapceLanguage::Go => tree_sitter_go::HIGHLIGHT_QUERY,
};
HighlightConfiguration::new(language, query, "", "")
.unwrap()
.query
let mut config =
HighlightConfiguration::new(language, query, "", "").unwrap();
config
}
pub(crate) fn walk_tree(

File diff suppressed because it is too large Load Diff

View File

@ -1,238 +1,277 @@
use std::{
borrow::BorrowMut,
cell::RefCell,
collections::{HashMap, HashSet},
path::Path,
};
use itertools::Itertools;
use tree_sitter::{Parser, Point, Query, QueryCursor, Tree};
use xi_rope::{Rope, RopeDelta};
use crate::{
language::LapceLanguage,
lens::{Lens, LensBuilder},
};
thread_local! {
static PARSER: RefCell<HashMap<LapceLanguage, Parser>> = RefCell::new(HashMap::new());
static QUERY: RefCell<HashMap<LapceLanguage, Query>> = RefCell::new(HashMap::new());
}
#[derive(Clone)]
pub struct Syntax {
    // Buffer revision this parse corresponds to; used to decide whether an
    // incremental re-parse is valid.
    rev: u64,
    language: LapceLanguage,
    pub text: Rope,
    // Most recent tree-sitter parse tree, if any parse succeeded.
    tree: Option<Tree>,
    pub lens: Lens,
    // Lines rendered at full height in code-lens mode (sorted ascending,
    // see `parse`).
    pub normal_lines: Vec<usize>,
    pub line_height: usize,
    pub lens_height: usize,
}
impl Syntax {
    /// Build an empty `Syntax` for `path` if its extension maps to a
    /// supported language; returns `None` otherwise.
    pub fn init(path: &Path) -> Option<Syntax> {
        LapceLanguage::from_path(path).map(|l| Syntax {
            rev: 0,
            language: l,
            text: Rope::from(""),
            tree: None,
            lens: Self::lens_from_normal_lines(0, 0, 0, &Vec::new()),
            line_height: 0,
            lens_height: 0,
            normal_lines: Vec::new(),
        })
    }

    /// Parse `new_text` at revision `new_rev`, producing a new `Syntax`.
    ///
    /// When `new_rev` immediately follows `self.rev` and `delta` is a simple
    /// insert or delete, the previous tree is edited and handed to
    /// tree-sitter so it can reuse unchanged nodes (incremental parse);
    /// otherwise a full parse is performed.
    pub fn parse(
        &self,
        new_rev: u64,
        new_text: Rope,
        delta: Option<RopeDelta>,
    ) -> Syntax {
        let mut old_tree = None;
        // Incremental parsing is only valid for the directly following revision.
        if new_rev == self.rev + 1 {
            if let Some(delta) = delta {
                // Map a byte offset in `text` to a tree-sitter (row, column).
                // NOTE(review): `offset_of_line(line + 1) - offset` is the
                // distance to the start of the NEXT line, not the column
                // within `line` (which would be
                // `offset - text.offset_of_line(line)`) — confirm against
                // xi_rope semantics; this looks like a bug.
                fn point_at_offset(text: &Rope, offset: usize) -> Point {
                    let line = text.line_of_offset(offset);
                    let col = text.offset_of_line(line + 1) - offset;
                    Point::new(line, col)
                }
                let (interval, _) = delta.summary();
                let (start, end) = interval.start_end();
                if let Some(inserted) = delta.as_simple_insert() {
                    // Advance `point` across `text`, resetting the column at
                    // each newline.
                    fn traverse(point: Point, text: &str) -> Point {
                        let Point {
                            mut row,
                            mut column,
                        } = point;
                        for ch in text.chars() {
                            if ch == '\n' {
                                row += 1;
                                column = 0;
                            } else {
                                column += 1;
                            }
                        }
                        Point { row, column }
                    }
                    let start_position = point_at_offset(&self.text, start);
                    let edit = tree_sitter::InputEdit {
                        start_byte: start,
                        old_end_byte: start,
                        new_end_byte: start + inserted.len(),
                        start_position,
                        old_end_position: start_position,
                        new_end_position: traverse(
                            start_position,
                            &inserted.slice_to_cow(0..inserted.len()),
                        ),
                    };
                    // Clone the old tree and record the edit so tree-sitter
                    // can reuse unchanged nodes.
                    old_tree = self.tree.as_ref().map(|tree| {
                        let mut tree = tree.clone();
                        tree.edit(&edit);
                        tree
                    });
                } else if delta.is_simple_delete() {
                    let start_position = point_at_offset(&self.text, start);
                    let end_position = point_at_offset(&self.text, end);
                    let edit = tree_sitter::InputEdit {
                        start_byte: start,
                        old_end_byte: end,
                        new_end_byte: start,
                        start_position,
                        old_end_position: end_position,
                        new_end_position: start_position,
                    };
                    old_tree = self.tree.as_ref().map(|tree| {
                        let mut tree = tree.clone();
                        tree.edit(&edit);
                        tree
                    });
                };
            }
        }
        // One cached parser per language, per thread.
        let new_tree = PARSER.with(|parsers| {
            let mut parsers = parsers.borrow_mut();
            parsers
                .entry(self.language)
                .or_insert_with(|| self.language.new_parser());
            let parser = parsers.get_mut(&self.language).unwrap();
            // Stream the rope to tree-sitter one chunk at a time, starting
            // at the requested byte.
            parser.parse_with(
                &mut |byte, _| {
                    if byte <= new_text.len() {
                        new_text
                            .iter_chunks(byte..)
                            .next()
                            .map(|s| s.as_bytes())
                            .unwrap_or(&[])
                    } else {
                        &[]
                    }
                },
                old_tree.as_ref(),
            )
        });
        if let Some(tree) = new_tree.as_ref() {
            // QUERY.with(|queries| {
            //     let mut queries = queries.borrow_mut();
            //     queries
            //         .entry(self.language)
            //         .or_insert_with(|| self.language.new_highlight_query());
            //     let query = queries.get(&self.language).unwrap();
            //     let mut cursor = QueryCursor::new();
            //     let text = &new_text.slice_to_cow(..);
            //     let bytes = text.as_bytes();
            //     let captures = cursor.captures(query, tree.root_node(), bytes);
            //     for (capture, index) in captures {}
            // });
        }
        // Collect the lines that stay at full height in code-lens mode.
        let normal_lines = if let Some(tree) = new_tree.as_ref() {
            let mut cursor = tree.walk();
            let mut normal_lines = HashSet::new();
            self.language.walk_tree(&mut cursor, &mut normal_lines);
            let normal_lines: Vec<usize> =
                normal_lines.into_iter().sorted().collect();
            normal_lines
        } else {
            Vec::new()
        };
        // Heights start at zero; `update_lens_height` fills them in later.
        let lens = Self::lens_from_normal_lines(
            new_text.line_of_offset(new_text.len()),
            0,
            0,
            &normal_lines,
        );
        Syntax {
            rev: new_rev,
            language: self.language,
            tree: new_tree,
            text: new_text,
            lens,
            line_height: 0,
            lens_height: 0,
            normal_lines,
        }
    }

    /// Rebuild the lens with concrete `line_height`/`lens_height` values and
    /// remember them.
    pub fn update_lens_height(&mut self, line_height: usize, lens_height: usize) {
        self.lens = Self::lens_from_normal_lines(
            self.text.line_of_offset(self.text.len()),
            line_height,
            lens_height,
            &self.normal_lines,
        );
        self.line_height = line_height;
        self.lens_height = lens_height;
    }

    /// Build a `Lens` over `total_lines`: lines listed in `normal_lines` get
    /// `line_height`, every other line is collapsed to `lens_height`.
    /// `normal_lines` must be sorted ascending.
    pub fn lens_from_normal_lines(
        total_lines: usize,
        line_height: usize,
        lens_height: usize,
        normal_lines: &[usize],
    ) -> Lens {
        let mut builder = LensBuilder::new();
        let mut current_line = 0;
        for normal_line in normal_lines.iter() {
            let normal_line = *normal_line;
            if normal_line > current_line {
                // Collapsed run preceding this normal line.
                builder.add_section(normal_line - current_line, lens_height);
            }
            builder.add_section(1, line_height);
            current_line = normal_line + 1;
        }
        if current_line < total_lines {
            // Trailing collapsed run after the last normal line.
            builder.add_section(total_lines - current_line, lens_height);
        }
        builder.build()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Checks cumulative heights produced by `lens_from_normal_lines`:
    // height_of_line(n) is the sum of heights of lines 0..n.
    #[test]
    fn test_lens() {
        // 5 lines, normal height 25, collapsed height 2; line 4 is normal.
        let lens = Syntax::lens_from_normal_lines(5, 25, 2, &[4]);
        assert_eq!(5, lens.len());
        // Lines 0..4 collapsed: 4 * 2 = 8.
        assert_eq!(8, lens.height_of_line(4));
        // Plus line 4 at full height: 8 + 25 = 33.
        assert_eq!(33, lens.height_of_line(5));
        // Same setup with line 3 normal instead.
        let lens = Syntax::lens_from_normal_lines(5, 25, 2, &[3]);
        assert_eq!(5, lens.len());
        // Lines 0..3 collapsed: 3 * 2 = 6.
        assert_eq!(6, lens.height_of_line(3));
        // 6 + 25 (line 3) = 31.
        assert_eq!(31, lens.height_of_line(4));
        // 31 + 2 (line 4 collapsed) = 33.
        assert_eq!(33, lens.height_of_line(5));
    }
}
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
path::Path,
rc::Rc,
sync::Arc,
};
use itertools::Itertools;
use tree_sitter::{Parser, Point, Query, QueryCursor, Tree};
use xi_rope::{
spans::{Spans, SpansBuilder},
Interval, Rope, RopeDelta,
};
use crate::{
language::LapceLanguage,
lens::{Lens, LensBuilder},
style::{Highlight, HighlightEvent, Highlighter, LineStyle, Style, SCOPES},
};
thread_local! {
static PARSER: RefCell<HashMap<LapceLanguage, Parser>> = RefCell::new(HashMap::new());
static HIGHLIGHTS: RefCell<HashMap<LapceLanguage, crate::style::HighlightConfiguration>> = RefCell::new(HashMap::new());
}
#[derive(Clone)]
pub struct Syntax {
    // Buffer revision this parse corresponds to; used to decide whether an
    // incremental re-parse is valid.
    rev: u64,
    language: LapceLanguage,
    pub text: Rope,
    // Most recent tree-sitter parse tree, if any parse succeeded.
    tree: Option<Tree>,
    pub lens: Lens,
    // Lines rendered at full height in code-lens mode (sorted ascending,
    // see `parse`).
    pub normal_lines: Vec<usize>,
    pub line_height: usize,
    pub lens_height: usize,
    // Tree-sitter highlight styles computed during `parse`; `None` until a
    // successful parse has run.
    pub styles: Option<Spans<Style>>,
}
impl Syntax {
    /// Build an empty `Syntax` for `path` if its extension maps to a
    /// supported language; returns `None` otherwise.
    pub fn init(path: &Path) -> Option<Syntax> {
        LapceLanguage::from_path(path).map(|l| Syntax {
            rev: 0,
            language: l,
            text: Rope::from(""),
            tree: None,
            lens: Self::lens_from_normal_lines(0, 0, 0, &[]),
            line_height: 0,
            lens_height: 0,
            normal_lines: Vec::new(),
            styles: None,
        })
    }

    /// Parse `new_text` at revision `new_rev`, producing a new `Syntax` with
    /// a fresh tree, highlight styles, and code-lens data.
    ///
    /// When `new_rev` immediately follows `self.rev` and `delta` is a simple
    /// insert or delete, the previous tree is edited and handed to
    /// tree-sitter so unchanged nodes are reused (incremental parse);
    /// otherwise a full parse is performed.
    pub fn parse(
        &self,
        new_rev: u64,
        new_text: Rope,
        delta: Option<RopeDelta>,
    ) -> Syntax {
        let mut old_tree = None;
        // Incremental parsing is only valid for the directly following revision.
        if new_rev == self.rev + 1 {
            if let Some(delta) = delta {
                // Map a byte offset in `text` to a tree-sitter (row, column).
                fn point_at_offset(text: &Rope, offset: usize) -> Point {
                    let line = text.line_of_offset(offset);
                    // FIX: the column is the distance from the start of the
                    // current line. The previous code used
                    // `offset_of_line(line + 1) - offset`, i.e. the distance
                    // to the NEXT line start, producing wrong edit positions.
                    let col = offset - text.offset_of_line(line);
                    Point::new(line, col)
                }
                let (interval, _) = delta.summary();
                let (start, end) = interval.start_end();
                if let Some(inserted) = delta.as_simple_insert() {
                    // Advance `point` across `text`, resetting the column at
                    // each newline.
                    fn traverse(point: Point, text: &str) -> Point {
                        let Point {
                            mut row,
                            mut column,
                        } = point;
                        for ch in text.chars() {
                            if ch == '\n' {
                                row += 1;
                                column = 0;
                            } else {
                                column += 1;
                            }
                        }
                        Point { row, column }
                    }
                    let start_position = point_at_offset(&self.text, start);
                    let edit = tree_sitter::InputEdit {
                        start_byte: start,
                        old_end_byte: start,
                        new_end_byte: start + inserted.len(),
                        start_position,
                        old_end_position: start_position,
                        new_end_position: traverse(
                            start_position,
                            &inserted.slice_to_cow(0..inserted.len()),
                        ),
                    };
                    // Clone the old tree and record the edit so tree-sitter
                    // can reuse unchanged nodes.
                    old_tree = self.tree.as_ref().map(|tree| {
                        let mut tree = tree.clone();
                        tree.edit(&edit);
                        tree
                    });
                } else if delta.is_simple_delete() {
                    let start_position = point_at_offset(&self.text, start);
                    let end_position = point_at_offset(&self.text, end);
                    let edit = tree_sitter::InputEdit {
                        start_byte: start,
                        old_end_byte: end,
                        new_end_byte: start,
                        start_position,
                        old_end_position: end_position,
                        new_end_position: start_position,
                    };
                    old_tree = self.tree.as_ref().map(|tree| {
                        let mut tree = tree.clone();
                        tree.edit(&edit);
                        tree
                    });
                };
            }
        }
        // One cached parser per language, per thread.
        let new_tree = PARSER.with(|parsers| {
            let mut parsers = parsers.borrow_mut();
            parsers
                .entry(self.language)
                .or_insert_with(|| self.language.new_parser());
            let parser = parsers.get_mut(&self.language).unwrap();
            // Stream the rope to tree-sitter one chunk at a time, starting
            // at the requested byte.
            parser.parse_with(
                &mut |byte, _| {
                    if byte <= new_text.len() {
                        new_text
                            .iter_chunks(byte..)
                            .next()
                            .map(|s| s.as_bytes())
                            .unwrap_or(&[])
                    } else {
                        &[]
                    }
                },
                old_tree.as_ref(),
            )
        });
        // Run tree-sitter highlighting over the fresh tree and collect the
        // resulting scopes into a span tree of styles.
        let styles = if let Some(tree) = new_tree.as_ref() {
            let styles = HIGHLIGHTS.with(|configs| {
                let mut configs = configs.borrow_mut();
                // One cached highlight configuration per language, per thread.
                configs
                    .entry(self.language)
                    .or_insert_with(|| self.language.new_highlight_config());
                let config = configs.get(&self.language).unwrap();

                let mut current_hl: Option<Highlight> = None;
                let mut highlights = SpansBuilder::new(new_text.len());
                let mut highlighter = Highlighter::new();
                for highlight in highlighter
                    .highlight(
                        tree.clone(),
                        config,
                        new_text.slice_to_cow(0..new_text.len()).as_bytes(),
                        None,
                        |_| None,
                    )
                    .flatten()
                {
                    match highlight {
                        HighlightEvent::Source { start, end } => {
                            // Emit a styled span for the currently open
                            // highlight, if its index maps to a known scope.
                            if let Some(hl) = current_hl {
                                if let Some(hl) = SCOPES.get(hl.0) {
                                    highlights.add_span(
                                        Interval::new(start, end),
                                        Style {
                                            fg_color: Some(hl.to_string()),
                                        },
                                    );
                                }
                            }
                        }
                        HighlightEvent::HighlightStart(hl) => {
                            current_hl = Some(hl);
                        }
                        HighlightEvent::HighlightEnd => current_hl = None,
                    }
                }
                highlights.build()
            });
            Some(styles)
        } else {
            None
        };
        // Collect the lines that stay at full height in code-lens mode.
        let normal_lines = if let Some(tree) = new_tree.as_ref() {
            let mut cursor = tree.walk();
            let mut normal_lines = HashSet::new();
            self.language.walk_tree(&mut cursor, &mut normal_lines);
            let normal_lines: Vec<usize> =
                normal_lines.into_iter().sorted().collect();
            normal_lines
        } else {
            Vec::new()
        };
        // Heights start at zero; `update_lens_height` fills them in later.
        let lens = Self::lens_from_normal_lines(
            new_text.line_of_offset(new_text.len()),
            0,
            0,
            &normal_lines,
        );
        Syntax {
            rev: new_rev,
            language: self.language,
            tree: new_tree,
            text: new_text,
            lens,
            line_height: 0,
            lens_height: 0,
            normal_lines,
            styles,
        }
    }

    /// Rebuild the lens with concrete `line_height`/`lens_height` values and
    /// remember them.
    pub fn update_lens_height(&mut self, line_height: usize, lens_height: usize) {
        self.lens = Self::lens_from_normal_lines(
            self.text.line_of_offset(self.text.len()),
            line_height,
            lens_height,
            &self.normal_lines,
        );
        self.line_height = line_height;
        self.lens_height = lens_height;
    }

    /// Build a `Lens` over `total_lines`: lines listed in `normal_lines` get
    /// `line_height`, every other line is collapsed to `lens_height`.
    /// `normal_lines` must be sorted ascending.
    pub fn lens_from_normal_lines(
        total_lines: usize,
        line_height: usize,
        lens_height: usize,
        normal_lines: &[usize],
    ) -> Lens {
        let mut builder = LensBuilder::new();
        let mut current_line = 0;
        for normal_line in normal_lines.iter() {
            let normal_line = *normal_line;
            if normal_line > current_line {
                // Collapsed run preceding this normal line.
                builder.add_section(normal_line - current_line, lens_height);
            }
            builder.add_section(1, line_height);
            current_line = normal_line + 1;
        }
        if current_line < total_lines {
            // Trailing collapsed run after the last normal line.
            builder.add_section(total_lines - current_line, lens_height);
        }
        builder.build()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Checks cumulative heights produced by `lens_from_normal_lines`:
    // height_of_line(n) is the sum of heights of lines 0..n.
    #[test]
    fn test_lens() {
        // 5 lines, normal height 25, collapsed height 2; line 4 is normal.
        let lens = Syntax::lens_from_normal_lines(5, 25, 2, &[4]);
        assert_eq!(5, lens.len());
        // Lines 0..4 collapsed: 4 * 2 = 8.
        assert_eq!(8, lens.height_of_line(4));
        // Plus line 4 at full height: 8 + 25 = 33.
        assert_eq!(33, lens.height_of_line(5));
        // Same setup with line 3 normal instead.
        let lens = Syntax::lens_from_normal_lines(5, 25, 2, &[3]);
        assert_eq!(5, lens.len());
        // Lines 0..3 collapsed: 3 * 2 = 6.
        assert_eq!(6, lens.height_of_line(3));
        // 6 + 25 (line 3) = 31.
        assert_eq!(31, lens.height_of_line(4));
        // 31 + 2 (line 4 collapsed) = 33.
        assert_eq!(33, lens.height_of_line(5));
    }
}

View File

@ -6,6 +6,7 @@
};
use druid::{PaintCtx, Point};
use language::{new_highlight_config, LapceLanguage};
use lapce_core::style::{line_styles, LineStyle, LineStyles};
use lapce_core::syntax::Syntax;
use lapce_proxy::dispatch::{BufferHeadResponse, NewBufferResponse};
use lsp_types::SemanticTokensLegend;
@ -248,6 +249,8 @@ pub struct Buffer {
pub semantic_tokens: bool,
pub language: Option<LapceLanguage>,
pub syntax: Option<Syntax>,
pub new_line_styles: Rc<RefCell<LineStyles>>,
pub semantic_styles: Option<Spans<lapce_core::style::Style>>,
pub highlighter: Arc<Mutex<Highlighter>>,
pub highlight: Option<Arc<Mutex<HighlightConfiguration>>>,
pub max_len: usize,
@ -316,6 +319,8 @@ pub fn new(
.map(|l| Arc::new(Mutex::new(new_highlight_config(l)))),
language,
syntax,
new_line_styles: Rc::new(RefCell::new(HashMap::new())),
semantic_styles: None,
content,
styles: Arc::new(SpansBuilder::new(0).build()),
line_styles: Rc::new(RefCell::new(Vec::new())),
@ -842,6 +847,23 @@ fn get_hisotry_line_styles(
Some(line_styles)
}
fn line_style(&self, line: usize) -> Arc<Vec<LineStyle>> {
if self.new_line_styles.borrow().get(&line).is_none() {
let styles = self
.semantic_styles
.as_ref()
.or_else(|| self.syntax.as_ref().and_then(|s| s.styles.as_ref()));
let line_styles = styles
.map(|styles| line_styles(&self.rope, line, styles))
.unwrap_or_default();
self.new_line_styles
.borrow_mut()
.insert(line, Arc::new(line_styles));
}
self.new_line_styles.borrow().get(&line).cloned().unwrap()
}
fn get_line_styles(&self, line: usize) -> Arc<Vec<(usize, usize, Style)>> {
if let Some(line_styles) =
self.line_styles.borrow().get(line).and_then(|s| s.as_ref())
@ -935,7 +957,7 @@ pub fn new_text_layout(
bounds: [f64; 2],
config: &Config,
) -> PietTextLayout {
let styles = self.get_line_styles(line);
let styles = self.line_style(line);
let mut layout_builder = ctx
.text()
.new_text_layout(line_content.to_string())
@ -957,13 +979,13 @@ pub fn new_text_layout(
);
}
for (start, end, style) in styles.iter() {
if let Some(fg_color) = style.fg_color.as_ref() {
for line_style in styles.iter() {
if let Some(fg_color) = line_style.style.fg_color.as_ref() {
if let Some(fg_color) =
config.get_color(&("style.".to_string() + fg_color))
{
layout_builder = layout_builder.range_attribute(
start..end,
line_style.start..line_style.end,
TextAttribute::TextColor(fg_color.clone()),
);
}
@ -1814,6 +1836,15 @@ fn update_size(&mut self, inval_lines: &InvalLines) {
}
fn update_line_styles(&mut self, delta: &RopeDelta, inval_lines: &InvalLines) {
if let Some(styles) = self.semantic_styles.as_mut() {
styles.apply_shape(delta);
} else if let Some(syntax) = self.syntax.as_mut() {
if let Some(styles) = syntax.styles.as_mut() {
styles.apply_shape(delta);
}
}
self.new_line_styles.borrow_mut().clear();
Arc::make_mut(&mut self.styles).apply_shape(delta);
let mut line_styles = self.line_styles.borrow_mut();
let right = line_styles.split_off(inval_lines.start_line);

View File

@ -3,7 +3,7 @@
use anyhow::Result;
use druid::{Point, Rect, Selector, Size, WidgetId, WindowId};
use indexmap::IndexMap;
use lapce_core::syntax::Syntax;
use lapce_core::{style::LineStyle, syntax::Syntax};
use lapce_proxy::{
dispatch::{DiffInfo, FileNodeItem},
plugin::PluginDescription,
@ -553,6 +553,7 @@ pub enum LapceUICommand {
DocumentFormat(PathBuf, u64, Result<Value>),
DocumentFormatAndSave(PathBuf, u64, Result<Value>),
BufferSave(PathBuf, u64),
UpdateSemanticStyles(BufferId, PathBuf, u64, Spans<lapce_core::style::Style>),
UpdateSemanticTokens(BufferId, PathBuf, u64, Vec<(usize, usize, String)>),
UpdateHighlights(BufferId, u64, Vec<(usize, usize, Highlight)>),
UpdateTerminalTitle(TermId, String),

File diff suppressed because it is too large Load Diff

View File

@ -21,6 +21,7 @@ mio = "0.6.20"
hotwatch = "0.4.6"
notify = "5.0.0-pre.13"
lapce-rpc = { path = "../lapce-rpc" }
lapce-core = { path = "../lapce-core" }
xi-rope = { git = "https://github.com/lapce/xi-editor", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
lsp-types = { version = "0.89.2", features = ["proposed"] }

View File

@ -561,19 +561,20 @@ fn handle_notification(&self, rpc: Notification) {
height,
} => {
let terminals = self.terminals.lock();
let tx = terminals.get(&term_id).unwrap();
let size = SizeInfo::new(
width as f32,
height as f32,
1.0,
1.0,
0.0,
0.0,
true,
);
if let Some(tx) = terminals.get(&term_id) {
let size = SizeInfo::new(
width as f32,
height as f32,
1.0,
1.0,
0.0,
0.0,
true,
);
#[allow(deprecated)]
let _ = tx.send(Msg::Resize(size));
#[allow(deprecated)]
let _ = tx.send(Msg::Resize(size));
}
}
Notification::GitCommit { message, diffs } => {
if let Some(workspace) = self.workspace.lock().clone() {

View File

@ -13,6 +13,7 @@
use anyhow::{anyhow, Result};
use jsonrpc_lite::{Id, JsonRpc, Params};
use lapce_core::style::{LineStyle, Style};
use lapce_rpc::RequestId;
use lsp_types::*;
use parking_lot::Mutex;
@ -116,6 +117,7 @@ pub fn get_semantic_tokens(&self, buffer: &Buffer) {
let buffer_id = buffer.id;
let path = buffer.path.clone();
let rev = buffer.rev;
let len = buffer.len();
if let Some(client) = self.clients.get(&buffer.language_id) {
let uri = client.get_uri(buffer);
let local_dispatcher = self.dispatcher.clone().unwrap();
@ -132,6 +134,22 @@ pub fn get_semantic_tokens(&self, buffer: &Buffer) {
.as_ref()
.unwrap()
.semantic_tokens_provider;
if let Some(styles) = format_semantic_styles(
buffer,
semantic_tokens_provider,
res.clone(),
) {
local_dispatcher.send_notification(
"semantic_styles",
json!({
"rev": rev,
"buffer_id": buffer_id,
"path": path,
"styles": styles,
"len": len,
}),
)
}
if let Some(tokens) =
format_semantic_tokens(buffer, semantic_tokens_provider, res)
{
@ -987,6 +1005,45 @@ pub fn get_change_for_sync_kind(
}
}
/// Decode an LSP `textDocument/semanticTokens` response into absolute-offset
/// `LineStyle`s for `buffer`, using the server's token-type legend as the
/// style name. Returns `None` when the payload cannot be deserialized or no
/// semantic-token capability was advertised.
fn format_semantic_styles(
    buffer: &Buffer,
    semantic_tokens_provider: &Option<SemanticTokensServerCapabilities>,
    value: Value,
) -> Option<Vec<LineStyle>> {
    let tokens: SemanticTokens = serde_json::from_value(value).ok()?;
    let provider = semantic_tokens_provider.as_ref()?;
    let legend = semantic_tokens_lengend(provider);

    let mut styles = Vec::new();
    let mut line = 0;
    let mut start = 0;
    let mut last_start = 0;
    for token in &tokens.data {
        // Token positions are delta-encoded: a non-zero delta_line moves to
        // a new line (start resets to that line's offset), and delta_start
        // is relative within the current line.
        if token.delta_line > 0 {
            line += token.delta_line as usize;
            start = buffer.offset_of_line(line);
        }
        start += token.delta_start as usize;
        let end = start + token.length as usize;
        let kind = legend.token_types[token.token_type as usize]
            .as_str()
            .to_string();
        // Drop tokens whose start would move backwards so offsets stay
        // monotonically non-decreasing.
        if start < last_start {
            continue;
        }
        last_start = start;
        styles.push(LineStyle {
            start,
            end,
            style: Style {
                fg_color: Some(kind),
            },
        });
    }
    Some(styles)
}
fn format_semantic_tokens(
buffer: &Buffer,
semantic_tokens_provider: &Option<SemanticTokensServerCapabilities>,
@ -999,6 +1056,7 @@ fn format_semantic_tokens(
let mut highlights = Vec::new();
let mut line = 0;
let mut start = 0;
let mut last_start = 0;
for semantic_token in &semantic_tokens.data {
if semantic_token.delta_line > 0 {
line += semantic_token.delta_line as usize;
@ -1009,6 +1067,10 @@ fn format_semantic_tokens(
let kind = semantic_lengends.token_types[semantic_token.token_type as usize]
.as_str()
.to_string();
if start < last_start {
continue;
}
last_start = start;
highlights.push((start, end, kind));
}

View File

@ -760,6 +760,16 @@ fn event(
}
ctx.set_handled();
}
LapceUICommand::UpdateSemanticStyles(_id, path, rev, styles) => {
let buffer =
data.main_split.open_files.get_mut(path).unwrap();
if buffer.rev == *rev {
let buffer = Arc::make_mut(buffer);
buffer.semantic_styles = Some(styles.clone());
buffer.new_line_styles.borrow_mut().clear();
}
ctx.set_handled();
}
LapceUICommand::UpdateSemanticTokens(_id, path, rev, tokens) => {
let buffer =
data.main_split.open_files.get_mut(path).unwrap();
@ -877,6 +887,9 @@ fn event(
let buffer = Arc::make_mut(buffer);
if buffer.rev == *rev {
buffer.syntax = Some(syntax.clone());
if buffer.semantic_styles.is_none() {
buffer.new_line_styles.borrow_mut().clear();
}
}
}
#[allow(unused_variables)]