remove stuff from lsp plugin

Dongdong Zhou 2020-11-11 15:22:00 +00:00
parent 450362411f
commit a487c5767d
14 changed files with 876 additions and 1155 deletions
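The change repeated across these files: LapceWorkspaceType::RemoteSSH gains a
user field next to the host, and every constructor, match arm, and SSH entry
point (SshSession::new, get_ssh_session, LspClient::new_ssh, start_plugin_ssh)
now threads both values through. A minimal sketch of the new shape, using the
example values that appear in the palette diff below:

    pub enum LapceWorkspaceType {
        Local,
        RemoteSSH(String, String), // was RemoteSSH(host); now (user, host)
    }

    // callers pass both pieces, e.g.
    let session = SshSession::new("dz", "10.132.0.2:22")?;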

Cargo.lock generated
View File

@@ -1591,7 +1591,6 @@ dependencies = [
"anyhow",
"jsonrpc-lite",
"languageserver-types",
"lapce-core",
"memchr",
"parking_lot 0.11.0",
"serde",

View File

@@ -166,8 +166,8 @@ pub fn save(&mut self) -> Result<()> {
let state = LAPCE_APP_STATE.get_tab_state(&self.window_id, &self.tab_id);
let workspace_type = state.workspace.lock().kind.clone();
match workspace_type {
LapceWorkspaceType::RemoteSSH(host) => {
state.get_ssh_session(&host)?;
LapceWorkspaceType::RemoteSSH(user, host) => {
state.get_ssh_session(&user, &host)?;
let mut ssh_session = state.ssh_session.lock();
let ssh_session = ssh_session.as_mut().unwrap();
let tmp_path = format!("{}.swp", self.path);
@@ -864,8 +864,8 @@ fn load_file(window_id: &WindowId, tab_id: &WidgetId, path: &str) -> Result<Rope
f.read_to_end(&mut bytes)?;
bytes
}
LapceWorkspaceType::RemoteSSH(host) => {
state.get_ssh_session(&host)?;
LapceWorkspaceType::RemoteSSH(user, host) => {
state.get_ssh_session(&user, &host)?;
let mut ssh_session = state.ssh_session.lock();
let ssh_session = ssh_session.as_mut().unwrap();
ssh_session.read_file(path)?

View File

@@ -114,13 +114,14 @@ pub fn start_server(
);
self.clients.insert(language_id.to_string(), client);
}
LapceWorkspaceType::RemoteSSH(host) => {
LapceWorkspaceType::RemoteSSH(user, host) => {
if let Ok(client) = LspClient::new_ssh(
self.window_id,
self.tab_id,
language_id.to_string(),
exec_path,
options,
&user,
&host,
) {
self.clients.insert(language_id.to_string(), client);
@@ -324,9 +325,10 @@ pub fn new_ssh(
language_id: String,
exec_path: &str,
options: Option<Value>,
user: &str,
host: &str,
) -> Result<Arc<LspClient>> {
let mut ssh_session = SshSession::new(host)?;
let mut ssh_session = SshSession::new(user, host)?;
let mut channel = ssh_session.get_channel()?;
ssh_session.channel_exec(&mut channel, exec_path)?;
println!("lsp {}", exec_path);

View File

@@ -314,15 +314,24 @@ fn get_workspaces(&self) -> Vec<PaletteItem> {
path: PathBuf::from("/Users/Lulu/lapce"),
},
LapceWorkspace {
kind: LapceWorkspaceType::RemoteSSH("10.132.0.2:22".to_string()),
kind: LapceWorkspaceType::RemoteSSH(
"dz".to_string(),
"10.132.0.2:22".to_string(),
),
path: PathBuf::from("/home/dz/go/src/galaxy"),
},
LapceWorkspace {
kind: LapceWorkspaceType::RemoteSSH("10.132.0.2:22".to_string()),
kind: LapceWorkspaceType::RemoteSSH(
"dz".to_string(),
"10.132.0.2:22".to_string(),
),
path: PathBuf::from("/home/dz/go/src/tardis"),
},
LapceWorkspace {
kind: LapceWorkspaceType::RemoteSSH("10.132.0.2:22".to_string()),
kind: LapceWorkspaceType::RemoteSSH(
"dz".to_string(),
"10.132.0.2:22".to_string(),
),
path: PathBuf::from("/home/dz/cosmos"),
},
];
@@ -333,8 +342,8 @@ fn get_workspaces(&self) -> Vec<PaletteItem> {
let text = w.path.to_str().unwrap();
let text = match &w.kind {
LapceWorkspaceType::Local => text.to_string(),
LapceWorkspaceType::RemoteSSH(host) => {
format!("[{}] {}", host, text)
LapceWorkspaceType::RemoteSSH(user, host) => {
format!("[{}@{}] {}", user, host, text)
}
};
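// e.g. a RemoteSSH("dz", "10.132.0.2:22") workspace at /home/dz/cosmos now
// renders in the palette as: [dz@10.132.0.2:22] /home/dz/cosmos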
PaletteItem {
@@ -439,16 +448,18 @@ fn get_files(&self) -> Vec<PaletteItem> {
.kind
.clone();
match workspace_type {
LapceWorkspaceType::RemoteSSH(host) => self.get_ssh_files(&host),
LapceWorkspaceType::RemoteSSH(user, host) => {
self.get_ssh_files(&user, &host)
}
LapceWorkspaceType::Local => self.get_local_files(),
}
}
fn get_ssh_files(&self, host: &str) -> Vec<PaletteItem> {
fn get_ssh_files(&self, user: &str, host: &str) -> Vec<PaletteItem> {
let state = LAPCE_APP_STATE.get_tab_state(&self.window_id, &self.tab_id);
let mut ssh_session = state.ssh_session.lock();
if ssh_session.is_none() {
if let Ok(session) = SshSession::new(host) {
if let Ok(session) = SshSession::new(user, host) {
*ssh_session = Some(session);
} else {
return Vec::new();

View File

@@ -136,9 +136,9 @@ pub fn load_from_paths(&mut self, paths: &[PathBuf]) -> Result<()> {
}
}
}
LapceWorkspaceType::RemoteSSH(host) => {
LapceWorkspaceType::RemoteSSH(user, host) => {
println!("load plugins for remote ssh");
if let Err(e) = state.get_ssh_session(&host) {
if let Err(e) = state.get_ssh_session(&user, &host) {
println!("get ssh session error {}", e);
return Err(e);
}
@@ -198,16 +198,17 @@ pub fn start_all(&mut self) -> Result<()> {
);
}
}
LapceWorkspaceType::RemoteSSH(host) => {
LapceWorkspaceType::RemoteSSH(user, host) => {
for (_, manifest) in self.items.clone().iter() {
let manifest = manifest.clone();
let plugin_id = self.next_plugin_id();
let user = user.clone();
let host = host.clone();
let window_id = self.window_id;
let tab_id = self.tab_id;
thread::spawn(move || {
if let Err(e) = start_plugin_ssh(
window_id, tab_id, manifest, plugin_id, &host,
window_id, tab_id, manifest, plugin_id, &user, &host,
) {
println!("start plugin ssh error {}", e);
}
@@ -310,13 +311,14 @@ fn start_plugin_ssh(
tab_id: WidgetId,
plugin_desc: Arc<PluginDescription>,
id: PluginId,
user: &str,
host: &str,
) -> Result<()> {
println!(
"start plugin {:?} {:?}",
plugin_desc.exec_path, plugin_desc.dir
);
let mut ssh_session = SshSession::new(host)?;
let mut ssh_session = SshSession::new(user, host)?;
let mut channel = ssh_session.get_channel()?;
ssh_session
.channel_exec(&mut channel, plugin_desc.exec_path.to_str().unwrap())?;

View File

@@ -29,7 +29,7 @@ pub struct SshPathEntry {
}
impl SshSession {
pub fn new(host: &str) -> Result<SshSession> {
pub fn new(user: &str, host: &str) -> Result<SshSession> {
let mut session = Session::new()?;
let addr: SocketAddr = host.parse()?;
let mut tcp = MioTcpStream::connect(addr)?;
@@ -48,7 +48,7 @@ pub fn new(host: &str) -> Result<SshSession> {
events,
};
ssh_session.handshake()?;
ssh_session.auth()?;
ssh_session.auth(user)?;
Ok(ssh_session)
}
@@ -78,13 +78,13 @@ pub fn handshake(&mut self) -> Result<()> {
}
}
pub fn auth(&mut self) -> Result<()> {
pub fn auth(&mut self, user: &str) -> Result<()> {
println!("start auth");
let path = PathBuf::from_str("/Users/Lulu/.ssh/id_rsa")?;
loop {
if let Err(e) =
self.session
.userauth_pubkey_file("dz", None, path.as_path(), None)
.userauth_pubkey_file(user, None, path.as_path(), None)
{
let e = io::Error::from(e);
if e.kind() == io::ErrorKind::WouldBlock {
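// The loop above polls userauth_pubkey_file on the non-blocking session:
// ssh2 reports WouldBlock through io::Error until the socket is ready, so any
// other error kind is a real authentication failure. A sketch of the pattern,
// with a hypothetical wait_for_io() standing in for the mio poll step:
//
//     loop {
//         if let Err(e) = session.userauth_pubkey_file(user, None, key_path, None) {
//             let e = io::Error::from(e);
//             if e.kind() == io::ErrorKind::WouldBlock {
//                 wait_for_io()?; // hypothetical: wait for socket readiness
//                 continue;
//             }
//             return Err(e.into());
//         }
//         break;
//     }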

View File

@@ -145,7 +145,7 @@ pub fn get_editor(&self, view_id: &WidgetId) -> &EditorUIState {
#[derive(Clone, Debug)]
pub enum LapceWorkspaceType {
Local,
RemoteSSH(String),
RemoteSSH(String, String),
}
#[derive(Clone, Debug)]
@@ -359,10 +359,10 @@ pub fn get_mode(&self) -> Mode {
}
}
pub fn get_ssh_session(&self, host: &str) -> Result<()> {
pub fn get_ssh_session(&self, user: &str, host: &str) -> Result<()> {
let mut ssh_session = self.ssh_session.lock();
if ssh_session.is_none() {
let session = SshSession::new(host)?;
let session = SshSession::new(user, host)?;
*ssh_session = Some(session);
}
Ok(())
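// get_ssh_session lazily opens one connection per tab and caches it in
// state.ssh_session; the save, load_file, palette, and plugin-startup paths
// above all reuse that session instead of reconnecting.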

View File

@@ -404,8 +404,8 @@ fn paint(&mut self, ctx: &mut PaintCtx, data: &LapceUIState, env: &Env) {
let dir = workspace.path.file_name().unwrap().to_str().unwrap();
let dir = match &workspace.kind {
LapceWorkspaceType::Local => dir.to_string(),
LapceWorkspaceType::RemoteSSH(host) => {
format!("{} [{}]", dir, host)
LapceWorkspaceType::RemoteSSH(user, host) => {
format!("{} [{}@{}]", dir, user, host)
}
};
let mut text_layout = TextLayout::<String>::from_text(dir);

View File

@@ -11,7 +11,6 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.59"
xi-rpc = { path = "../../xi-editor/rust/rpc/" }
xi-rope = { path = "../../xi-editor/rust/rope/" }
lapce-core = { path = "../core" }
anyhow = "1.0.32"
parking_lot = "0.11.0"
jsonrpc-lite = "0.5.0"

View File

@@ -1,9 +1,5 @@
use anyhow::{anyhow, Result};
use languageserver_types::Position;
use lapce_core::{
buffer::BufferId,
plugin::{GetDataResponse, PluginBufferInfo, PluginId, TextUnit},
};
use memchr::memchr;
use serde::Deserialize;
use serde_json::json;
@@ -13,405 +9,405 @@
const CHUNK_SIZE: usize = 1024 * 1024;
pub struct Buffer {
pub buffer_id: BufferId,
plugin_id: PluginId,
pub language_id: String,
pub path: String,
peer: RpcPeer,
pub offset: usize,
pub contents: String,
pub first_line: usize,
pub first_line_offset: usize,
pub line_offsets: Vec<usize>,
pub buf_size: usize,
pub num_lines: usize,
pub rev: u64,
}
impl Buffer {
pub fn new(peer: RpcPeer, plugin_id: PluginId, info: PluginBufferInfo) -> Self {
Buffer {
peer,
plugin_id,
language_id: info.language_id,
buffer_id: info.buffer_id,
path: info.path,
line_offsets: Vec::new(),
buf_size: info.buf_size,
num_lines: info.nb_lines,
rev: info.rev,
offset: 0,
first_line: 0,
first_line_offset: 0,
contents: "".to_string(),
}
}
pub fn get_line(&mut self, line_num: usize) -> Result<&str> {
if line_num >= self.num_lines {
return Err(anyhow!("bad request"));
}
// if chunk does not include the start of this line, fetch and reset everything
if self.contents.is_empty()
|| line_num < self.first_line
|| (line_num == self.first_line && self.first_line_offset > 0)
|| (line_num > self.first_line + self.line_offsets.len())
{
let resp =
self.get_data(line_num, TextUnit::Line, CHUNK_SIZE, self.rev)?;
self.reset_chunk(resp);
}
// We now know that the start of this line is contained in self.contents.
let mut start_off =
self.cached_offset_of_line(line_num).unwrap() - self.offset;
// Now we make sure we also contain the end of the line, fetching more
// of the document as necessary.
loop {
if let Some(end_off) = self.cached_offset_of_line(line_num + 1) {
return Ok(&self.contents[start_off..end_off - self.offset]);
}
// if we have a chunk and we're fetching more, discard unnecessary
// portion of our chunk.
if start_off != 0 {
self.clear_up_to(start_off);
start_off = 0;
}
let chunk_end = self.offset + self.contents.len();
let resp =
self.get_data(chunk_end, TextUnit::Utf8, CHUNK_SIZE, self.rev)?;
self.append_chunk(&resp);
}
}
fn get_data(
&self,
start: usize,
unit: TextUnit,
max_size: usize,
rev: u64,
) -> Result<GetDataResponse> {
let params = json!({
"plugin_id": self.plugin_id,
"buffer_id": self.buffer_id,
"start": start,
"unit": unit,
"max_size": max_size,
"rev": rev,
});
let result = self
.peer
.send_rpc_request("get_data", &params)
.map_err(|e| anyhow!(""))?;
GetDataResponse::deserialize(result)
.map_err(|e| anyhow!("wrong return type"))
}
pub fn get_document(&mut self) -> Result<String> {
let mut result = String::new();
let mut cur_idx = 0;
while cur_idx < self.buf_size {
if self.contents.is_empty() || cur_idx != self.offset {
let resp =
self.get_data(cur_idx, TextUnit::Utf8, CHUNK_SIZE, self.rev)?;
self.reset_chunk(resp);
}
result.push_str(&self.contents);
cur_idx = self.offset + self.contents.len();
}
Ok(result)
}
fn append_chunk(&mut self, data: &GetDataResponse) {
self.contents.push_str(data.chunk.as_str());
// this is doing extra work in the case where we're fetching a single
// massive (multiple of CHUNK_SIZE) line, but unclear if it's worth optimizing
self.recalculate_line_offsets();
}
fn reset_chunk(&mut self, data: GetDataResponse) {
self.contents = data.chunk;
self.offset = data.offset;
self.first_line = data.first_line;
self.first_line_offset = data.first_line_offset;
self.recalculate_line_offsets();
}
pub fn update(
&mut self,
delta: &RopeDelta,
new_len: usize,
new_num_lines: usize,
rev: u64,
) {
let is_empty = self.offset == 0 && self.contents.is_empty();
let should_clear = if !is_empty {
self.should_clear(delta)
} else {
true
};
if should_clear {
self.clear();
} else {
// only reached if delta exists
self.update_chunk(delta);
}
self.buf_size = new_len;
self.num_lines = new_num_lines;
self.rev = rev;
}
fn update_chunk(&mut self, delta: &RopeDelta) {
let chunk_start = self.offset;
let chunk_end = chunk_start + self.contents.len();
let mut new_state = String::with_capacity(self.contents.len());
let mut prev_copy_end = 0;
let mut del_before: usize = 0;
let mut ins_before: usize = 0;
for op in delta.els.as_slice() {
match *op {
DeltaElement::Copy(start, end) => {
if start < chunk_start {
del_before += start - prev_copy_end;
if end >= chunk_start {
let cp_end =
(end - chunk_start).min(self.contents.len());
new_state.push_str(&self.contents[0..cp_end]);
}
} else if start <= chunk_end {
if prev_copy_end < chunk_start {
del_before += chunk_start - prev_copy_end;
}
let cp_start = start - chunk_start;
let cp_end = (end - chunk_start).min(self.contents.len());
new_state.push_str(&self.contents[cp_start..cp_end]);
}
prev_copy_end = end;
}
DeltaElement::Insert(ref s) => {
if prev_copy_end < chunk_start {
ins_before += s.len();
} else if prev_copy_end <= chunk_end {
let s: String = s.into();
new_state.push_str(&s);
}
}
}
}
self.offset += ins_before;
self.offset -= del_before;
self.contents = new_state;
}
fn should_clear(&mut self, delta: &RopeDelta) -> bool {
let (iv, _) = delta.summary();
let start = iv.start();
let end = iv.end();
// we only apply the delta if it is a simple edit, which
// begins inside or immediately following our chunk.
// - If it begins _before_ our chunk, we are likely going to
// want to fetch the edited region, which will reset our state;
// - If it's a complex edit the logic is tricky, and this should
// be rare enough we can afford to discard.
// The one 'complex edit' we should probably be handling is
// the replacement of a single range. This could be a new
// convenience method on `Delta`?
if start < self.offset || start > self.offset + self.contents.len() {
true
} else if delta.is_simple_delete() {
// Don't go over cache boundary.
let end = end.min(self.offset + self.contents.len());
self.simple_delete(start, end);
false
} else if let Some(text) = delta.as_simple_insert() {
assert_eq!(iv.size(), 0);
self.simple_insert(text, start);
false
} else {
true
}
}
fn simple_insert(&mut self, text: &Rope, ins_offset: usize) {
let has_newline = text.measure::<LinesMetric>() > 0;
let self_off = self.offset;
assert!(ins_offset >= self_off);
// regardless of if we are inserting newlines we adjust offsets
self.line_offsets.iter_mut().for_each(|off| {
if *off > ins_offset - self_off {
*off += text.len()
}
});
// calculate and insert new newlines if necessary
// we could save some hassle and just rerun memchr on the chunk here?
if has_newline {
let mut new_offsets = Vec::new();
newline_offsets(&String::from(text), &mut new_offsets);
new_offsets
.iter_mut()
.for_each(|off| *off += ins_offset - self_off);
let split_idx = self
.line_offsets
.binary_search(&new_offsets[0])
.err()
.expect("new index cannot be occupied");
self.line_offsets = [
&self.line_offsets[..split_idx],
&new_offsets,
&self.line_offsets[split_idx..],
]
.concat();
}
}
/// Patches up `self.line_offsets` in the simple delete case.
fn simple_delete(&mut self, start: usize, end: usize) {
let del_size = end - start;
let start = start - self.offset;
let end = end - self.offset;
let has_newline =
memchr(b'\n', &self.contents.as_bytes()[start..end]).is_some();
// a bit too fancy: only reallocate if we need to remove an item
if has_newline {
self.line_offsets = self
.line_offsets
.iter()
.filter_map(|off| match *off {
x if x <= start => Some(x),
x if x > start && x <= end => None,
x if x > end => Some(x - del_size),
hmm => panic!("invariant violated {} {} {}?", start, end, hmm),
})
.collect();
} else {
self.line_offsets.iter_mut().for_each(|off| {
if *off >= end {
*off -= del_size
}
});
}
}
fn clear(&mut self) {
self.contents.clear();
self.offset = 0;
self.line_offsets.clear();
self.first_line = 0;
self.first_line_offset = 0;
}
fn clear_up_to(&mut self, offset: usize) {
if offset > self.contents.len() {
panic!(
"offset greater than content length: {} > {}",
offset,
self.contents.len()
)
}
let new_contents = self.contents.split_off(offset);
self.contents = new_contents;
self.offset += offset;
// first find out if offset is a line offset, and set first_line / first_line_offset
let (new_line, new_line_off) = match self.line_offsets.binary_search(&offset)
{
Ok(idx) => (self.first_line + idx + 1, 0),
Err(0) => (self.first_line, self.first_line_offset + offset),
Err(idx) => (self.first_line + idx, offset - self.line_offsets[idx - 1]),
};
// then clear line_offsets up to and including offset
self.line_offsets = self
.line_offsets
.iter()
.filter(|i| **i > offset)
.map(|i| i - offset)
.collect();
self.first_line = new_line;
self.first_line_offset = new_line_off;
}
fn recalculate_line_offsets(&mut self) {
self.line_offsets.clear();
newline_offsets(&self.contents, &mut self.line_offsets);
}
pub fn offset_of_line(&mut self, line_num: usize) -> Result<usize> {
if line_num > self.num_lines {
return Err(anyhow!("bad request"));
}
match self.cached_offset_of_line(line_num) {
Some(offset) => Ok(offset),
None => {
let resp =
self.get_data(line_num, TextUnit::Line, CHUNK_SIZE, self.rev)?;
self.reset_chunk(resp);
self.offset_of_line(line_num)
}
}
}
pub fn line_of_offset(&mut self, offset: usize) -> Result<usize> {
if offset > self.buf_size {
return Err(anyhow!("bad request"));
}
if self.contents.is_empty()
|| offset < self.offset
|| offset > self.offset + self.contents.len()
{
let resp =
self.get_data(offset, TextUnit::Utf8, CHUNK_SIZE, self.rev)?;
self.reset_chunk(resp);
}
let rel_offset = offset - self.offset;
let line_num = match self.line_offsets.binary_search(&rel_offset) {
Ok(ix) => ix + self.first_line + 1,
Err(ix) => ix + self.first_line,
};
Ok(line_num)
}
fn cached_offset_of_line(&self, line_num: usize) -> Option<usize> {
if line_num < self.first_line {
return None;
}
let rel_line_num = line_num - self.first_line;
if rel_line_num == 0 {
return Some(self.offset - self.first_line_offset);
}
if rel_line_num <= self.line_offsets.len() {
return Some(self.offset + self.line_offsets[rel_line_num - 1]);
}
// EOF
if line_num == self.num_lines
&& self.offset + self.contents.len() == self.buf_size
{
return Some(self.offset + self.contents.len());
}
None
}
}
fn newline_offsets(text: &str, storage: &mut Vec<usize>) {
let mut cur_idx = 0;
while let Some(idx) = memchr(b'\n', &text.as_bytes()[cur_idx..]) {
storage.push(cur_idx + idx + 1);
cur_idx += idx + 1;
}
}
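// A quick check of the offset convention: entries point just past each '\n',
// so line_offsets[i] is the chunk-relative offset where line i + 1 starts.
//
//     let mut offs = Vec::new();
//     newline_offsets("ab\ncd\n", &mut offs);
//     assert_eq!(offs, vec![3, 6]);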
//pub struct Buffer {
// pub buffer_id: BufferId,
// plugin_id: PluginId,
// pub language_id: String,
// pub path: String,
// peer: RpcPeer,
//
// pub offset: usize,
// pub contents: String,
// pub first_line: usize,
// pub first_line_offset: usize,
// pub line_offsets: Vec<usize>,
// pub buf_size: usize,
// pub num_lines: usize,
// pub rev: u64,
//}
//
//impl Buffer {
// pub fn new(peer: RpcPeer, plugin_id: PluginId, info: PluginBufferInfo) -> Self {
// Buffer {
// peer,
// plugin_id,
// language_id: info.language_id,
// buffer_id: info.buffer_id,
// path: info.path,
// line_offsets: Vec::new(),
// buf_size: info.buf_size,
// num_lines: info.nb_lines,
// rev: info.rev,
// offset: 0,
// first_line: 0,
// first_line_offset: 0,
// contents: "".to_string(),
// }
// }
//
// pub fn get_line(&mut self, line_num: usize) -> Result<&str> {
// if line_num >= self.num_lines {
// return Err(anyhow!("bad request"));
// }
//
// // if chunk does not include the start of this line, fetch and reset everything
// if self.contents.is_empty()
// || line_num < self.first_line
// || (line_num == self.first_line && self.first_line_offset > 0)
// || (line_num > self.first_line + self.line_offsets.len())
// {
// let resp =
// self.get_data(line_num, TextUnit::Line, CHUNK_SIZE, self.rev)?;
// self.reset_chunk(resp);
// }
//
// // We now know that the start of this line is contained in self.contents.
// let mut start_off =
// self.cached_offset_of_line(line_num).unwrap() - self.offset;
//
// // Now we make sure we also contain the end of the line, fetching more
// // of the document as necessary.
// loop {
// if let Some(end_off) = self.cached_offset_of_line(line_num + 1) {
// return Ok(&self.contents[start_off..end_off - self.offset]);
// }
// // if we have a chunk and we're fetching more, discard unnecessary
// // portion of our chunk.
// if start_off != 0 {
// self.clear_up_to(start_off);
// start_off = 0;
// }
//
// let chunk_end = self.offset + self.contents.len();
// let resp =
// self.get_data(chunk_end, TextUnit::Utf8, CHUNK_SIZE, self.rev)?;
// self.append_chunk(&resp);
// }
// }
//
// fn get_data(
// &self,
// start: usize,
// unit: TextUnit,
// max_size: usize,
// rev: u64,
// ) -> Result<GetDataResponse> {
// let params = json!({
// "plugin_id": self.plugin_id,
// "buffer_id": self.buffer_id,
// "start": start,
// "unit": unit,
// "max_size": max_size,
// "rev": rev,
// });
// let result = self
// .peer
// .send_rpc_request("get_data", &params)
// .map_err(|e| anyhow!(""))?;
// GetDataResponse::deserialize(result)
// .map_err(|e| anyhow!("wrong return type"))
// }
//
// pub fn get_document(&mut self) -> Result<String> {
// let mut result = String::new();
// let mut cur_idx = 0;
// while cur_idx < self.buf_size {
// if self.contents.is_empty() || cur_idx != self.offset {
// let resp =
// self.get_data(cur_idx, TextUnit::Utf8, CHUNK_SIZE, self.rev)?;
// self.reset_chunk(resp);
// }
// result.push_str(&self.contents);
// cur_idx = self.offset + self.contents.len();
// }
// Ok(result)
// }
//
// fn append_chunk(&mut self, data: &GetDataResponse) {
// self.contents.push_str(data.chunk.as_str());
// // this is doing extra work in the case where we're fetching a single
// // massive (multiple of CHUNK_SIZE) line, but unclear if it's worth optimizing
// self.recalculate_line_offsets();
// }
//
// fn reset_chunk(&mut self, data: GetDataResponse) {
// self.contents = data.chunk;
// self.offset = data.offset;
// self.first_line = data.first_line;
// self.first_line_offset = data.first_line_offset;
// self.recalculate_line_offsets();
// }
//
// pub fn update(
// &mut self,
// delta: &RopeDelta,
// new_len: usize,
// new_num_lines: usize,
// rev: u64,
// ) {
// let is_empty = self.offset == 0 && self.contents.is_empty();
// let should_clear = if !is_empty {
// self.should_clear(delta)
// } else {
// true
// };
//
// if should_clear {
// self.clear();
// } else {
// // only reached if delta exists
// self.update_chunk(delta);
// }
// self.buf_size = new_len;
// self.num_lines = new_num_lines;
// self.rev = rev;
// }
//
// fn update_chunk(&mut self, delta: &RopeDelta) {
// let chunk_start = self.offset;
// let chunk_end = chunk_start + self.contents.len();
// let mut new_state = String::with_capacity(self.contents.len());
// let mut prev_copy_end = 0;
// let mut del_before: usize = 0;
// let mut ins_before: usize = 0;
//
// for op in delta.els.as_slice() {
// match *op {
// DeltaElement::Copy(start, end) => {
// if start < chunk_start {
// del_before += start - prev_copy_end;
// if end >= chunk_start {
// let cp_end =
// (end - chunk_start).min(self.contents.len());
// new_state.push_str(&self.contents[0..cp_end]);
// }
// } else if start <= chunk_end {
// if prev_copy_end < chunk_start {
// del_before += chunk_start - prev_copy_end;
// }
// let cp_start = start - chunk_start;
// let cp_end = (end - chunk_start).min(self.contents.len());
// new_state.push_str(&self.contents[cp_start..cp_end]);
// }
// prev_copy_end = end;
// }
// DeltaElement::Insert(ref s) => {
// if prev_copy_end < chunk_start {
// ins_before += s.len();
// } else if prev_copy_end <= chunk_end {
// let s: String = s.into();
// new_state.push_str(&s);
// }
// }
// }
// }
// self.offset += ins_before;
// self.offset -= del_before;
// self.contents = new_state;
// }
//
// fn should_clear(&mut self, delta: &RopeDelta) -> bool {
// let (iv, _) = delta.summary();
// let start = iv.start();
// let end = iv.end();
// // we only apply the delta if it is a simple edit, which
// // begins inside or immediately following our chunk.
// // - If it begins _before_ our chunk, we are likely going to
// // want to fetch the edited region, which will reset our state;
// // - If it's a complex edit the logic is tricky, and this should
// // be rare enough we can afford to discard.
// // The one 'complex edit' we should probably be handling is
// // the replacement of a single range. This could be a new
// // convenience method on `Delta`?
// if start < self.offset || start > self.offset + self.contents.len() {
// true
// } else if delta.is_simple_delete() {
// // Don't go over cache boundary.
// let end = end.min(self.offset + self.contents.len());
//
// self.simple_delete(start, end);
// false
// } else if let Some(text) = delta.as_simple_insert() {
// assert_eq!(iv.size(), 0);
// self.simple_insert(text, start);
// false
// } else {
// true
// }
// }
//
// fn simple_insert(&mut self, text: &Rope, ins_offset: usize) {
// let has_newline = text.measure::<LinesMetric>() > 0;
// let self_off = self.offset;
// assert!(ins_offset >= self_off);
// // regardless of if we are inserting newlines we adjust offsets
// self.line_offsets.iter_mut().for_each(|off| {
// if *off > ins_offset - self_off {
// *off += text.len()
// }
// });
// // calculate and insert new newlines if necessary
// // we could save some hassle and just rerun memchr on the chunk here?
// if has_newline {
// let mut new_offsets = Vec::new();
// newline_offsets(&String::from(text), &mut new_offsets);
// new_offsets
// .iter_mut()
// .for_each(|off| *off += ins_offset - self_off);
//
// let split_idx = self
// .line_offsets
// .binary_search(&new_offsets[0])
// .err()
// .expect("new index cannot be occupied");
//
// self.line_offsets = [
// &self.line_offsets[..split_idx],
// &new_offsets,
// &self.line_offsets[split_idx..],
// ]
// .concat();
// }
// }
//
// /// Patches up `self.line_offsets` in the simple delete case.
// fn simple_delete(&mut self, start: usize, end: usize) {
// let del_size = end - start;
// let start = start - self.offset;
// let end = end - self.offset;
// let has_newline =
// memchr(b'\n', &self.contents.as_bytes()[start..end]).is_some();
// // a bit too fancy: only reallocate if we need to remove an item
// if has_newline {
// self.line_offsets = self
// .line_offsets
// .iter()
// .filter_map(|off| match *off {
// x if x <= start => Some(x),
// x if x > start && x <= end => None,
// x if x > end => Some(x - del_size),
// hmm => panic!("invariant violated {} {} {}?", start, end, hmm),
// })
// .collect();
// } else {
// self.line_offsets.iter_mut().for_each(|off| {
// if *off >= end {
// *off -= del_size
// }
// });
// }
// }
//
// fn clear(&mut self) {
// self.contents.clear();
// self.offset = 0;
// self.line_offsets.clear();
// self.first_line = 0;
// self.first_line_offset = 0;
// }
//
// fn clear_up_to(&mut self, offset: usize) {
// if offset > self.contents.len() {
// panic!(
// "offset greater than content length: {} > {}",
// offset,
// self.contents.len()
// )
// }
//
// let new_contents = self.contents.split_off(offset);
// self.contents = new_contents;
// self.offset += offset;
// // first find out if offset is a line offset, and set first_line / first_line_offset
// let (new_line, new_line_off) = match self.line_offsets.binary_search(&offset)
// {
// Ok(idx) => (self.first_line + idx + 1, 0),
// Err(0) => (self.first_line, self.first_line_offset + offset),
// Err(idx) => (self.first_line + idx, offset - self.line_offsets[idx - 1]),
// };
//
// // then clear line_offsets up to and including offset
// self.line_offsets = self
// .line_offsets
// .iter()
// .filter(|i| **i > offset)
// .map(|i| i - offset)
// .collect();
//
// self.first_line = new_line;
// self.first_line_offset = new_line_off;
// }
//
// fn recalculate_line_offsets(&mut self) {
// self.line_offsets.clear();
// newline_offsets(&self.contents, &mut self.line_offsets);
// }
//
// pub fn offset_of_line(&mut self, line_num: usize) -> Result<usize> {
// if line_num > self.num_lines {
// return Err(anyhow!("bad request"));
// }
// match self.cached_offset_of_line(line_num) {
// Some(offset) => Ok(offset),
// None => {
// let resp =
// self.get_data(line_num, TextUnit::Line, CHUNK_SIZE, self.rev)?;
// self.reset_chunk(resp);
// self.offset_of_line(line_num)
// }
// }
// }
//
// pub fn line_of_offset(&mut self, offset: usize) -> Result<usize> {
// if offset > self.buf_size {
// return Err(anyhow!("bad request"));
// }
// if self.contents.is_empty()
// || offset < self.offset
// || offset > self.offset + self.contents.len()
// {
// let resp =
// self.get_data(offset, TextUnit::Utf8, CHUNK_SIZE, self.rev)?;
// self.reset_chunk(resp);
// }
//
// let rel_offset = offset - self.offset;
// let line_num = match self.line_offsets.binary_search(&rel_offset) {
// Ok(ix) => ix + self.first_line + 1,
// Err(ix) => ix + self.first_line,
// };
// Ok(line_num)
// }
//
// fn cached_offset_of_line(&self, line_num: usize) -> Option<usize> {
// if line_num < self.first_line {
// return None;
// }
//
// let rel_line_num = line_num - self.first_line;
//
// if rel_line_num == 0 {
// return Some(self.offset - self.first_line_offset);
// }
//
// if rel_line_num <= self.line_offsets.len() {
// return Some(self.offset + self.line_offsets[rel_line_num - 1]);
// }
//
// // EOF
// if line_num == self.num_lines
// && self.offset + self.contents.len() == self.buf_size
// {
// return Some(self.offset + self.contents.len());
// }
// None
// }
//}
//
//fn newline_offsets(text: &str, storage: &mut Vec<usize>) {
// let mut cur_idx = 0;
// while let Some(idx) = memchr(b'\n', &text.as_bytes()[cur_idx..]) {
// storage.push(cur_idx + idx + 1);
// cur_idx += idx + 1;
// }
//}

View File

@@ -1,21 +1,29 @@
use std::collections::HashMap;
use crate::{
buffer::Buffer,
plugin::{CoreProxy, Plugin},
};
use lapce_core::{
buffer::BufferId,
plugin::{HostNotification, HostRequest, PluginId},
};
use crate::plugin::PluginId;
use crate::plugin::{CoreProxy, Plugin};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use xi_rpc::{Handler, RpcCtx};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
/// RPC Notifications sent from the host
pub enum HostNotification {
Initialize { plugin_id: PluginId },
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "method", content = "params")]
/// RPC Request sent from the host
pub enum HostRequest {}
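// With #[serde(rename_all = "snake_case", tag = "method", content = "params")],
// HostNotification::Initialize { plugin_id } travels on the wire as
//
//     {"method": "initialize", "params": {"plugin_id": 7}}
//
// (PluginId is the usize newtype defined in plugin.rs, so it serializes as a
// bare number; 7 is an arbitrary example value.)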
pub struct Dispatcher<'a, P: 'a + Plugin> {
plugin: &'a mut P,
plugin_id: Option<PluginId>,
buffers: HashMap<BufferId, Buffer>,
// buffers: HashMap<BufferId, Buffer>,
}
impl<'a, P: 'a + Plugin> Dispatcher<'a, P> {
@@ -23,7 +31,6 @@ pub(crate) fn new(plugin: &'a mut P) -> Self {
Dispatcher {
plugin,
plugin_id: None,
buffers: HashMap::new(),
}
}
}
@@ -42,38 +49,37 @@ fn handle_notification(
self.plugin_id = Some(plugin_id.clone());
let core_proxy = CoreProxy::new(plugin_id, ctx);
self.plugin.initialize(core_proxy);
}
HostNotification::NewBuffer { buffer_info } => {
let buffer_id = buffer_info.buffer_id.clone();
let buffer = Buffer::new(
ctx.get_peer().clone(),
self.plugin_id.as_ref().unwrap().clone(),
buffer_info,
);
self.buffers.insert(buffer_id.clone(), buffer);
}
// HostNotification::NewBuffer { buffer_info } => {
// let buffer_id = buffer_info.buffer_id.clone();
// let buffer = Buffer::new(
// ctx.get_peer().clone(),
// self.plugin_id.as_ref().unwrap().clone(),
// buffer_info,
// );
// self.buffers.insert(buffer_id.clone(), buffer);
let buffer = self.buffers.get_mut(&buffer_id).unwrap();
self.plugin.new_buffer(buffer);
}
HostNotification::Update {
buffer_id,
delta,
new_len,
new_line_count,
rev,
} => {
let buffer = self.buffers.get_mut(&buffer_id).unwrap();
buffer.update(&delta, new_len, new_line_count, rev);
self.plugin.update(buffer, &delta, rev);
}
HostNotification::GetCompletion {
buffer_id,
request_id,
offset,
} => {
let buffer = self.buffers.get_mut(&buffer_id).unwrap();
self.plugin.get_completion(buffer, request_id, offset);
}
// let buffer = self.buffers.get_mut(&buffer_id).unwrap();
// self.plugin.new_buffer(buffer);
// }
// HostNotification::Update {
// buffer_id,
// delta,
// new_len,
// new_line_count,
// rev,
// } => {
// let buffer = self.buffers.get_mut(&buffer_id).unwrap();
// buffer.update(&delta, new_len, new_line_count, rev);
// self.plugin.update(buffer, &delta, rev);
// }
// HostNotification::GetCompletion {
// buffer_id,
// request_id,
// offset,
// } => {
// let buffer = self.buffers.get_mut(&buffer_id).unwrap();
// self.plugin.get_completion(buffer, request_id, offset);
// }
}
}
@@ -86,8 +92,8 @@ fn handle_request(
}
fn idle(&mut self, ctx: &RpcCtx, token: usize) {
let buffer_id: BufferId = BufferId(token);
let buffer = self.buffers.get_mut(&buffer_id).unwrap();
self.plugin.idle(buffer);
// let buffer_id: BufferId = BufferId(token);
// let buffer = self.buffers.get_mut(&buffer_id).unwrap();
// self.plugin.idle(buffer);
}
}

View File

@@ -1,7 +1,6 @@
use anyhow::Result;
use jsonrpc_lite::{Error as JsonRpcError, Id, JsonRpc, Params};
use languageserver_types::*;
use lapce_core::buffer::BufferId;
use parking_lot::Mutex;
use serde_json::{json, to_value, Value};
use std::{
@@ -16,367 +15,367 @@
thread,
};
pub trait Callable: Send {
fn call(
self: Box<Self>,
client: &mut LspClient,
result: Result<Value, JsonRpcError>,
);
}
//pub trait Callable: Send {
// fn call(
// self: Box<Self>,
// client: &mut LspClient,
// result: Result<Value, JsonRpcError>,
// );
//}
//
//impl<F: Send + FnOnce(&mut LspClient, Result<Value, JsonRpcError>)> Callable for F {
// fn call(
// self: Box<F>,
// client: &mut LspClient,
// result: Result<Value, JsonRpcError>,
// ) {
// (*self)(client, result)
// }
//}
//
//pub type Callback = Box<dyn Callable>;
impl<F: Send + FnOnce(&mut LspClient, Result<Value, JsonRpcError>)> Callable for F {
fn call(
self: Box<F>,
client: &mut LspClient,
result: Result<Value, JsonRpcError>,
) {
(*self)(client, result)
}
}
pub type Callback = Box<dyn Callable>;
pub struct LspClient {
writer: Box<dyn Write + Send>,
next_id: u64,
pending: HashMap<u64, Callback>,
pub server_capabilities: Option<ServerCapabilities>,
pub opened_documents: HashMap<BufferId, Url>,
pub is_initialized: bool,
}
impl LspClient {
pub fn new() -> Arc<Mutex<LspClient>> {
let mut process = Command::new("gopls")
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.expect("Error Occurred");
let writer = Box::new(BufWriter::new(process.stdin.take().unwrap()));
let lsp_client = Arc::new(Mutex::new(LspClient {
writer,
next_id: 0,
pending: HashMap::new(),
server_capabilities: None,
opened_documents: HashMap::new(),
is_initialized: false,
}));
let local_lsp_client = lsp_client.clone();
let mut stdout = process.stdout;
thread::spawn(move || {
let mut reader = Box::new(BufReader::new(stdout.take().unwrap()));
loop {
match read_message(&mut reader) {
Ok(message_str) => {
local_lsp_client.lock().handle_message(message_str.as_ref());
}
Err(err) => {
// eprintln!("Error occurred {:?}", err);
}
};
}
});
lsp_client
}
pub fn handle_message(&mut self, message: &str) {
match JsonRpc::parse(message) {
Ok(JsonRpc::Request(obj)) => {
// trace!("client received unexpected request: {:?}", obj)
}
Ok(value @ JsonRpc::Notification(_)) => {
// self.handle_notification(
// value.get_method().unwrap(),
// value.get_params().unwrap(),
// );
}
Ok(value @ JsonRpc::Success(_)) => {
let id = number_from_id(&value.get_id().unwrap());
let result = value.get_result().unwrap();
self.handle_response(id, Ok(result.clone()));
}
Ok(value @ JsonRpc::Error(_)) => {
let id = number_from_id(&value.get_id().unwrap());
let error = value.get_error().unwrap();
self.handle_response(id, Err(error.clone()));
}
Err(err) => eprintln!("Error in parsing incoming string: {}", err),
}
}
pub fn handle_response(
&mut self,
id: u64,
result: Result<Value, jsonrpc_lite::Error>,
) {
let callback = self
.pending
.remove(&id)
.unwrap_or_else(|| panic!("id {} missing from request table", id));
callback.call(self, result);
}
pub fn write(&mut self, msg: &str) {
self.writer
.write_all(msg.as_bytes())
.expect("error writing to stdin");
self.writer.flush().expect("error flushing child stdin");
}
pub fn send_request(
&mut self,
method: &str,
params: Params,
completion: Callback,
) {
let request = JsonRpc::request_with_params(
Id::Num(self.next_id as i64),
method,
params,
);
self.pending.insert(self.next_id, completion);
self.next_id += 1;
self.send_rpc(&to_value(&request).unwrap());
}
fn send_rpc(&mut self, value: &Value) {
let rpc = match prepare_lsp_json(value) {
Ok(r) => r,
Err(err) => panic!("Encoding Error {:?}", err),
};
self.write(rpc.as_ref());
}
pub fn send_notification(&mut self, method: &str, params: Params) {
let notification = JsonRpc::notification_with_params(method, params);
let res = to_value(&notification).unwrap();
self.send_rpc(&res);
}
pub fn send_initialized(&mut self) {
self.send_notification("initialized", Params::from(json!({})));
}
pub fn send_initialize<CB>(&mut self, root_uri: Option<Url>, on_init: CB)
where
CB: 'static + Send + FnOnce(&mut LspClient, Result<Value, JsonRpcError>),
{
let client_capabilities = ClientCapabilities::default();
let init_params = InitializeParams {
process_id: Some(u64::from(process::id())),
root_uri,
root_path: None,
initialization_options: None,
capabilities: client_capabilities,
trace: Some(TraceOption::Verbose),
workspace_folders: None,
};
eprintln!("send initilize");
let params = Params::from(serde_json::to_value(init_params).unwrap());
self.send_request("initialize", params, Box::new(on_init));
}
pub fn request_completion<CB>(
&mut self,
document_uri: Url,
position: Position,
on_completion: CB,
) where
CB: 'static + Send + FnOnce(&mut LspClient, Result<Value, JsonRpcError>),
{
let completion_params = CompletionParams {
text_document: TextDocumentIdentifier { uri: document_uri },
position,
context: None,
};
let params = Params::from(serde_json::to_value(completion_params).unwrap());
self.send_request(
"textDocument/completion",
params,
Box::new(on_completion),
);
}
pub fn send_did_open(
&mut self,
buffer_id: &BufferId,
document_uri: Url,
document_text: String,
) {
self.opened_documents
.insert(buffer_id.clone(), document_uri.clone());
eprintln!("open docuemnts insert {:?}", buffer_id);
let text_document_did_open_params = DidOpenTextDocumentParams {
text_document: TextDocumentItem {
language_id: "rust".to_string(),
uri: document_uri,
version: 0,
text: document_text,
},
};
let params = Params::from(
serde_json::to_value(text_document_did_open_params).unwrap(),
);
self.send_notification("textDocument/didOpen", params);
}
pub fn send_did_change(
&mut self,
buffer_id: &BufferId,
changes: Vec<TextDocumentContentChangeEvent>,
version: u64,
) {
let uri = self.opened_documents.get(buffer_id).unwrap().clone();
let text_document_did_change_params = DidChangeTextDocumentParams {
text_document: VersionedTextDocumentIdentifier {
uri,
version: Some(version),
},
content_changes: changes,
};
let params = Params::from(
serde_json::to_value(text_document_did_change_params).unwrap(),
);
self.send_notification("textDocument/didChange", params);
}
pub fn get_sync_kind(&mut self) -> TextDocumentSyncKind {
match self
.server_capabilities
.as_ref()
.and_then(|c| c.text_document_sync.as_ref())
{
Some(&TextDocumentSyncCapability::Kind(kind)) => kind,
_ => TextDocumentSyncKind::Full,
}
}
}
fn prepare_lsp_json(msg: &Value) -> Result<String, serde_json::error::Error> {
let request = serde_json::to_string(&msg)?;
Ok(format!(
"Content-Length: {}\r\n\r\n{}",
request.len(),
request
))
}
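// The framing is the LSP base protocol: an ASCII Content-Length header, a
// blank line, then the JSON body. For example:
//
//     let body = r#"{"jsonrpc":"2.0","method":"initialized","params":{}}"#;
//     let framed = format!("Content-Length: {}\r\n\r\n{}", body.len(), body);
//     assert!(framed.starts_with("Content-Length: 52\r\n\r\n"));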
const HEADER_CONTENT_LENGTH: &str = "content-length";
const HEADER_CONTENT_TYPE: &str = "content-type";
pub enum LspHeader {
ContentType,
ContentLength(usize),
}
/// Type to represent errors that occur while parsing LSP RPCs
#[derive(Debug)]
pub enum ParseError {
Io(std::io::Error),
ParseInt(std::num::ParseIntError),
Utf8(std::string::FromUtf8Error),
Json(serde_json::Error),
Unknown(String),
}
impl From<std::io::Error> for ParseError {
fn from(err: std::io::Error) -> ParseError {
ParseError::Io(err)
}
}
impl From<std::string::FromUtf8Error> for ParseError {
fn from(err: std::string::FromUtf8Error) -> ParseError {
ParseError::Utf8(err)
}
}
impl From<serde_json::Error> for ParseError {
fn from(err: serde_json::Error) -> ParseError {
ParseError::Json(err)
}
}
impl From<std::num::ParseIntError> for ParseError {
fn from(err: std::num::ParseIntError) -> ParseError {
ParseError::ParseInt(err)
}
}
impl From<String> for ParseError {
fn from(s: String) -> ParseError {
ParseError::Unknown(s)
}
}
/// parse header from the incoming input string
fn parse_header(s: &str) -> Result<LspHeader, ParseError> {
let split: Vec<String> =
s.splitn(2, ": ").map(|s| s.trim().to_lowercase()).collect();
if split.len() != 2 {
return Err(ParseError::Unknown("Malformed".to_string()));
};
match split[0].as_ref() {
HEADER_CONTENT_TYPE => Ok(LspHeader::ContentType),
HEADER_CONTENT_LENGTH => Ok(LspHeader::ContentLength(
usize::from_str_radix(&split[1], 10)?,
)),
_ => Err(ParseError::Unknown(
"Unknown parse error occurred".to_string(),
)),
}
}
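// e.g. parse_header("Content-Length: 52") yields LspHeader::ContentLength(52);
// header names are lowercased first, so "content-length: 52" parses the same.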
/// Blocking call to read a message from the provided BufRead
pub fn read_message<T: BufRead>(reader: &mut T) -> Result<String, ParseError> {
let mut buffer = String::new();
let mut content_length: Option<usize> = None;
loop {
buffer.clear();
let _result = reader.read_line(&mut buffer);
// eprintln!("got message {} {}", buffer, buffer.trim().is_empty());
match &buffer {
s if s.trim().is_empty() => break,
s => {
match parse_header(s)? {
LspHeader::ContentLength(len) => content_length = Some(len),
LspHeader::ContentType => (),
};
}
};
}
let content_length = content_length
.ok_or_else(|| format!("missing content-length header: {}", buffer))?;
let mut body_buffer = vec![0; content_length];
reader.read_exact(&mut body_buffer)?;
let body = String::from_utf8(body_buffer)?;
Ok(body)
}
fn number_from_id(id: &Id) -> u64 {
match *id {
Id::Num(n) => n as u64,
Id::Str(ref s) => {
u64::from_str_radix(s, 10).expect("failed to convert string id to u64")
}
_ => panic!("unexpected value for id: None"),
}
}
//pub struct LspClient {
// writer: Box<dyn Write + Send>,
// next_id: u64,
// pending: HashMap<u64, Callback>,
// pub server_capabilities: Option<ServerCapabilities>,
// pub opened_documents: HashMap<BufferId, Url>,
// pub is_initialized: bool,
//}
//
//impl LspClient {
// pub fn new() -> Arc<Mutex<LspClient>> {
// let mut process = Command::new("gopls")
// .stdin(Stdio::piped())
// .stdout(Stdio::piped())
// .spawn()
// .expect("Error Occurred");
//
// let writer = Box::new(BufWriter::new(process.stdin.take().unwrap()));
//
// let lsp_client = Arc::new(Mutex::new(LspClient {
// writer,
// next_id: 0,
// pending: HashMap::new(),
// server_capabilities: None,
// opened_documents: HashMap::new(),
// is_initialized: false,
// }));
//
// let local_lsp_client = lsp_client.clone();
// let mut stdout = process.stdout;
// thread::spawn(move || {
// let mut reader = Box::new(BufReader::new(stdout.take().unwrap()));
// loop {
// match read_message(&mut reader) {
// Ok(message_str) => {
// local_lsp_client.lock().handle_message(message_str.as_ref());
// }
// Err(err) => {
// // eprintln!("Error occurred {:?}", err);
// }
// };
// }
// });
//
// lsp_client
// }
//
// pub fn handle_message(&mut self, message: &str) {
// match JsonRpc::parse(message) {
// Ok(JsonRpc::Request(obj)) => {
// // trace!("client received unexpected request: {:?}", obj)
// }
// Ok(value @ JsonRpc::Notification(_)) => {
// // self.handle_notification(
// // value.get_method().unwrap(),
// // value.get_params().unwrap(),
// // );
// }
// Ok(value @ JsonRpc::Success(_)) => {
// let id = number_from_id(&value.get_id().unwrap());
// let result = value.get_result().unwrap();
// self.handle_response(id, Ok(result.clone()));
// }
// Ok(value @ JsonRpc::Error(_)) => {
// let id = number_from_id(&value.get_id().unwrap());
// let error = value.get_error().unwrap();
// self.handle_response(id, Err(error.clone()));
// }
// Err(err) => eprintln!("Error in parsing incoming string: {}", err),
// }
// }
//
// pub fn handle_response(
// &mut self,
// id: u64,
// result: Result<Value, jsonrpc_lite::Error>,
// ) {
// let callback = self
// .pending
// .remove(&id)
// .unwrap_or_else(|| panic!("id {} missing from request table", id));
// callback.call(self, result);
// }
//
// pub fn write(&mut self, msg: &str) {
// self.writer
// .write_all(msg.as_bytes())
// .expect("error writing to stdin");
//
// self.writer.flush().expect("error flushing child stdin");
// }
//
// pub fn send_request(
// &mut self,
// method: &str,
// params: Params,
// completion: Callback,
// ) {
// let request = JsonRpc::request_with_params(
// Id::Num(self.next_id as i64),
// method,
// params,
// );
//
// self.pending.insert(self.next_id, completion);
// self.next_id += 1;
//
// self.send_rpc(&to_value(&request).unwrap());
// }
//
// fn send_rpc(&mut self, value: &Value) {
// let rpc = match prepare_lsp_json(value) {
// Ok(r) => r,
// Err(err) => panic!("Encoding Error {:?}", err),
// };
//
// self.write(rpc.as_ref());
// }
//
// pub fn send_notification(&mut self, method: &str, params: Params) {
// let notification = JsonRpc::notification_with_params(method, params);
// let res = to_value(&notification).unwrap();
// self.send_rpc(&res);
// }
//
// pub fn send_initialized(&mut self) {
// self.send_notification("initialized", Params::from(json!({})));
// }
//
// pub fn send_initialize<CB>(&mut self, root_uri: Option<Url>, on_init: CB)
// where
// CB: 'static + Send + FnOnce(&mut LspClient, Result<Value, JsonRpcError>),
// {
// let client_capabilities = ClientCapabilities::default();
//
// let init_params = InitializeParams {
// process_id: Some(u64::from(process::id())),
// root_uri,
// root_path: None,
// initialization_options: None,
// capabilities: client_capabilities,
// trace: Some(TraceOption::Verbose),
// workspace_folders: None,
// };
//
// eprintln!("send initilize");
// let params = Params::from(serde_json::to_value(init_params).unwrap());
// self.send_request("initialize", params, Box::new(on_init));
// }
//
// pub fn request_completion<CB>(
// &mut self,
// document_uri: Url,
// position: Position,
// on_completion: CB,
// ) where
// CB: 'static + Send + FnOnce(&mut LspClient, Result<Value, JsonRpcError>),
// {
// let completion_params = CompletionParams {
// text_document: TextDocumentIdentifier { uri: document_uri },
// position,
// context: None,
// };
// let params = Params::from(serde_json::to_value(completion_params).unwrap());
// self.send_request(
// "textDocument/completion",
// params,
// Box::new(on_completion),
// );
// }
//
// pub fn send_did_open(
// &mut self,
// buffer_id: &BufferId,
// document_uri: Url,
// document_text: String,
// ) {
// self.opened_documents
// .insert(buffer_id.clone(), document_uri.clone());
// eprintln!("open docuemnts insert {:?}", buffer_id);
//
// let text_document_did_open_params = DidOpenTextDocumentParams {
// text_document: TextDocumentItem {
// language_id: "rust".to_string(),
// uri: document_uri,
// version: 0,
// text: document_text,
// },
// };
//
// let params = Params::from(
// serde_json::to_value(text_document_did_open_params).unwrap(),
// );
// self.send_notification("textDocument/didOpen", params);
// }
//
// pub fn send_did_change(
// &mut self,
// buffer_id: &BufferId,
// changes: Vec<TextDocumentContentChangeEvent>,
// version: u64,
// ) {
// let uri = self.opened_documents.get(buffer_id).unwrap().clone();
// let text_document_did_change_params = DidChangeTextDocumentParams {
// text_document: VersionedTextDocumentIdentifier {
// uri,
// version: Some(version),
// },
// content_changes: changes,
// };
//
// let params = Params::from(
// serde_json::to_value(text_document_did_change_params).unwrap(),
// );
// self.send_notification("textDocument/didChange", params);
// }
//
// pub fn get_sync_kind(&mut self) -> TextDocumentSyncKind {
// match self
// .server_capabilities
// .as_ref()
// .and_then(|c| c.text_document_sync.as_ref())
// {
// Some(&TextDocumentSyncCapability::Kind(kind)) => kind,
// _ => TextDocumentSyncKind::Full,
// }
// }
//}
//
//fn prepare_lsp_json(msg: &Value) -> Result<String, serde_json::error::Error> {
// let request = serde_json::to_string(&msg)?;
// Ok(format!(
// "Content-Length: {}\r\n\r\n{}",
// request.len(),
// request
// ))
//}
//
//const HEADER_CONTENT_LENGTH: &str = "content-length";
//const HEADER_CONTENT_TYPE: &str = "content-type";
//
//pub enum LspHeader {
// ContentType,
// ContentLength(usize),
//}
//
///// Type to represent errors that occur while parsing LSP RPCs
//#[derive(Debug)]
//pub enum ParseError {
// Io(std::io::Error),
// ParseInt(std::num::ParseIntError),
// Utf8(std::string::FromUtf8Error),
// Json(serde_json::Error),
// Unknown(String),
//}
//
//impl From<std::io::Error> for ParseError {
// fn from(err: std::io::Error) -> ParseError {
// ParseError::Io(err)
// }
//}
//
//impl From<std::string::FromUtf8Error> for ParseError {
// fn from(err: std::string::FromUtf8Error) -> ParseError {
// ParseError::Utf8(err)
// }
//}
//
//impl From<serde_json::Error> for ParseError {
// fn from(err: serde_json::Error) -> ParseError {
// ParseError::Json(err)
// }
//}
//
//impl From<std::num::ParseIntError> for ParseError {
// fn from(err: std::num::ParseIntError) -> ParseError {
// ParseError::ParseInt(err)
// }
//}
//
//impl From<String> for ParseError {
// fn from(s: String) -> ParseError {
// ParseError::Unknown(s)
// }
//}
//
///// parse header from the incoming input string
//fn parse_header(s: &str) -> Result<LspHeader, ParseError> {
// let split: Vec<String> =
// s.splitn(2, ": ").map(|s| s.trim().to_lowercase()).collect();
// if split.len() != 2 {
// return Err(ParseError::Unknown("Malformed".to_string()));
// };
// match split[0].as_ref() {
// HEADER_CONTENT_TYPE => Ok(LspHeader::ContentType),
// HEADER_CONTENT_LENGTH => Ok(LspHeader::ContentLength(
// usize::from_str_radix(&split[1], 10)?,
// )),
// _ => Err(ParseError::Unknown(
// "Unknown parse error occurred".to_string(),
// )),
// }
//}
//
///// Blocking call to read a message from the provided BufRead
//pub fn read_message<T: BufRead>(reader: &mut T) -> Result<String, ParseError> {
// let mut buffer = String::new();
// let mut content_length: Option<usize> = None;
//
// loop {
// buffer.clear();
// let _result = reader.read_line(&mut buffer);
//
// // eprintln!("got message {} {}", buffer, buffer.trim().is_empty());
// match &buffer {
// s if s.trim().is_empty() => break,
// s => {
// match parse_header(s)? {
// LspHeader::ContentLength(len) => content_length = Some(len),
// LspHeader::ContentType => (),
// };
// }
// };
// }
//
// let content_length = content_length
// .ok_or_else(|| format!("missing content-length header: {}", buffer))?;
//
// let mut body_buffer = vec![0; content_length];
// reader.read_exact(&mut body_buffer)?;
//
// let body = String::from_utf8(body_buffer)?;
// Ok(body)
//}
//
//fn number_from_id(id: &Id) -> u64 {
// match *id {
// Id::Num(n) => n as u64,
// Id::Str(ref s) => {
// u64::from_str_radix(s, 10).expect("failed to convert string id to u64")
// }
// _ => panic!("unexpected value for id: None"),
// }
//}

View File

@@ -6,15 +6,10 @@
request::Completion, Hover, HoverContents, InitializeResult, MarkedString,
Position, Range, TextDocumentContentChangeEvent, TextDocumentSyncKind, Url,
};
use lapce_core::plugin::Hover as CoreHover;
use lapce_core::plugin::PluginBufferInfo;
use lapce_core::plugin::Range as CoreRange;
use parking_lot::Mutex;
use xi_rope::RopeDelta;
use crate::{
buffer::Buffer, lsp_client::LspClient, plugin::CoreProxy, plugin::Plugin,
};
use crate::{plugin::CoreProxy, plugin::Plugin};
#[derive(Debug)]
pub enum LspResponse {
@@ -43,7 +38,6 @@ pub fn pop_result(&mut self) -> Option<(usize, LspResponse)> {
pub struct LspPlugin {
core: Option<CoreProxy>,
lsp_clients: HashMap<String, Arc<Mutex<LspClient>>>,
result_queue: ResultQueue,
}
@@ -51,7 +45,6 @@ impl LspPlugin {
pub fn new() -> LspPlugin {
LspPlugin {
core: None,
lsp_clients: HashMap::new(),
result_queue: ResultQueue::new(),
}
}
@@ -74,290 +67,9 @@ fn initialize(&mut self, core: CoreProxy) {
);
}
fn new_buffer(&mut self, buffer: &mut Buffer) {
if !self.lsp_clients.contains_key(&buffer.language_id) {
let lsp_client = LspClient::new();
self.lsp_clients
.insert(buffer.language_id.clone(), lsp_client);
}
let mut lsp_client =
self.lsp_clients.get(&buffer.language_id).unwrap().lock();
let buffer_id = buffer.buffer_id.clone();
let document_uri = Url::from_file_path(&buffer.path).unwrap();
let document_text = buffer.get_document().unwrap_or("".to_string());
if !lsp_client.is_initialized {
lsp_client.send_initialize(None, move |lsp_client, result| {
if let Ok(result) = result {
eprintln!("lsp initilize got result");
let init_result: InitializeResult =
serde_json::from_value(result).unwrap();
lsp_client.server_capabilities = Some(init_result.capabilities);
lsp_client.is_initialized = true;
lsp_client.send_initialized();
lsp_client.send_did_open(
&buffer_id,
document_uri,
document_text,
);
} else {
eprintln!("lsp initilize error {}", result.err().unwrap());
}
});
}
eprintln!("got new buffer");
}
fn update(&mut self, buffer: &mut Buffer, delta: &RopeDelta, rev: u64) {
let mut lsp_client =
self.lsp_clients.get(&buffer.language_id).unwrap().lock();
let sync_kind = lsp_client.get_sync_kind();
if let Some(changes) = get_change_for_sync_kind(sync_kind, buffer, delta) {
lsp_client.send_did_change(&buffer.buffer_id, changes, rev);
}
}
fn get_completion(
&mut self,
buffer: &mut Buffer,
request_id: usize,
offset: usize,
) {
let mut lsp_client =
self.lsp_clients.get(&buffer.language_id).unwrap().lock();
let buffer_id = buffer.buffer_id.clone();
let mut result_queue = self.result_queue.clone();
let mut core_proxy = self.core.clone().unwrap();
let document_uri = Url::from_file_path(&buffer.path).unwrap();
let position = get_position_of_offset(buffer, offset);
match position {
Ok(position) => lsp_client.request_completion(
document_uri,
position,
move |lsp_client, result| {
if let Ok(res) = result {
result_queue
.push_result(request_id, LspResponse::Completion(res));
core_proxy.schedule_idle(buffer_id);
}
},
),
Err(e) => {}
}
}
fn idle(&mut self, buffer: &mut Buffer) {
let result = self.result_queue.pop_result();
if let Some((request_id, response)) = result {
match response {
LspResponse::Completion(res) => self
.core
.as_mut()
.unwrap()
.show_completion(buffer.buffer_id.clone(), request_id, &res),
LspResponse::Hover(res) => {
// let res = res
// .and_then(|h| core_hover_from_hover(view, h))
// .map_err(|e| e.into());
// self.with_language_server_for_view(view, |ls_client| {
// ls_client
// .core
// .display_hover(view.get_id(), request_id, &res)
// });
}
}
}
}
}
pub fn core_hover_from_hover(
buffer: &mut Buffer,
hover: Hover,
) -> Result<CoreHover> {
Ok(CoreHover {
content: markdown_from_hover_contents(hover.contents)?,
range: match hover.range {
Some(range) => Some(core_range_from_range(buffer, range)?),
None => None,
},
})
}
pub(crate) fn offset_of_position(
buffer: &mut Buffer,
position: Position,
) -> Result<usize> {
let line_offset = buffer.offset_of_line(position.line as usize);
let mut cur_len_utf16 = 0;
let mut cur_len_utf8 = 0;
for u in buffer.get_line(position.line as usize)?.chars() {
if cur_len_utf16 >= (position.character as usize) {
break;
}
cur_len_utf16 += u.len_utf16();
cur_len_utf8 += u.len_utf8();
}
Ok(cur_len_utf8 + line_offset?)
}
pub(crate) fn core_range_from_range(
buffer: &mut Buffer,
range: Range,
) -> Result<CoreRange> {
Ok(CoreRange {
start: offset_of_position(buffer, range.start)?,
end: offset_of_position(buffer, range.end)?,
})
}
pub(crate) fn marked_string_to_string(marked_string: &MarkedString) -> String {
match *marked_string {
MarkedString::String(ref text) => text.to_owned(),
MarkedString::LanguageString(ref d) => {
format!("```{}\n{}\n```", d.language, d.value)
}
}
}
pub(crate) fn markdown_from_hover_contents(
hover_contents: HoverContents,
) -> Result<String> {
let res = match hover_contents {
HoverContents::Scalar(content) => marked_string_to_string(&content),
HoverContents::Array(content) => {
let res: Vec<String> =
content.iter().map(|c| marked_string_to_string(c)).collect();
res.join("\n")
}
HoverContents::Markup(content) => content.value,
};
if res.is_empty() {
Err(anyhow!("no hover contents"))
} else {
Ok(res)
}
}
pub fn get_change_for_sync_kind(
sync_kind: TextDocumentSyncKind,
buffer: &mut Buffer,
delta: &RopeDelta,
) -> Option<Vec<TextDocumentContentChangeEvent>> {
match sync_kind {
TextDocumentSyncKind::None => None,
TextDocumentSyncKind::Full => {
let text_document_content_change_event =
TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: buffer.get_document().unwrap_or("".to_string()),
};
Some(vec![text_document_content_change_event])
}
TextDocumentSyncKind::Incremental => {
match get_document_content_changes(delta, buffer) {
Ok(result) => Some(result),
Err(err) => {
let text_document_content_change_event =
TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: buffer.get_document().unwrap(),
};
Some(vec![text_document_content_change_event])
}
}
}
}
}
pub fn get_document_content_changes(
delta: &RopeDelta,
buffer: &mut Buffer,
) -> Result<Vec<TextDocumentContentChangeEvent>> {
let (interval, _) = delta.summary();
let (start, end) = interval.start_end();
// TODO: Handle more trivial cases like typing when there's a selection or transpose
if let Some(node) = delta.as_simple_insert() {
let text = String::from(node);
let (start, end) = interval.start_end();
let text_document_content_change_event = TextDocumentContentChangeEvent {
range: Some(Range {
start: get_position_of_offset(buffer, start)?,
end: get_position_of_offset(buffer, end)?,
}),
range_length: Some((end - start) as u64),
text,
};
return Ok(vec![text_document_content_change_event]);
}
// Or a simple delete
else if delta.is_simple_delete() {
let mut end_position = get_position_of_offset(buffer, end)?;
// Hack around sending VSCode Style Positions to Language Server.
// See this issue to understand: https://github.com/Microsoft/vscode/issues/23173
if end_position.character == 0 {
// There is an assumption here that the line separator is exactly one byte
// wide, which is true for "\n" but breaks for wider separators such as U+2028.
let mut ep = get_position_of_offset(buffer, end - 1)?;
ep.character += 1;
end_position = ep;
}
let text_document_content_change_event = TextDocumentContentChangeEvent {
range: Some(Range {
start: get_position_of_offset(buffer, start)?,
end: end_position,
}),
range_length: Some((end - start) as u64),
text: String::new(),
};
return Ok(vec![text_document_content_change_event]);
}
let text_document_content_change_event = TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: buffer.get_document()?,
};
Ok(vec![text_document_content_change_event])
}
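// For a simple single-character insert, the incremental path produces roughly:
//
//     {"range": {"start": {"line": 0, "character": 5},
//                "end":   {"line": 0, "character": 5}},
//      "rangeLength": 0,
//      "text": "x"}
//
// A simple insert has an empty interval, so start == end and rangeLength is 0;
// the character positions are UTF-16 based via get_position_of_offset.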
pub(crate) fn get_position_of_offset(
buffer: &mut Buffer,
offset: usize,
) -> Result<Position> {
let line_num = buffer.line_of_offset(offset)?;
let line_offset = buffer.offset_of_line(line_num)?;
let char_offset =
count_utf16(&(buffer.get_line(line_num)?[0..(offset - line_offset)]));
Ok(Position {
line: line_num as u64,
character: char_offset as u64,
})
}
pub(crate) fn count_utf16(s: &str) -> usize {
let mut utf16_count = 0;
for &b in s.as_bytes() {
if (b as i8) >= -0x40 {
utf16_count += 1;
}
if b >= 0xf0 {
utf16_count += 1;
}
}
utf16_count
// fn new_buffer(&mut self, buffer: &mut Buffer) {}
//
// fn update(&mut self, buffer: &mut Buffer, delta: &RopeDelta, rev: u64) {}
//
// fn idle(&mut self, buffer: &mut Buffer) {}
}
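// count_utf16 reads the UTF-16 length straight off the UTF-8 bytes: every
// non-continuation byte ((b as i8) >= -0x40, i.e. not 0b10xxxxxx) starts a
// scalar value, and every four-byte lead (b >= 0xf0) marks a supplementary
// character that needs a surrogate pair, hence the second increment.
//
//     assert_eq!(count_utf16("abc"), 3);
//     assert_eq!(count_utf16("é"), 1);  // 2 UTF-8 bytes, 1 UTF-16 unit
//     assert_eq!(count_utf16("𝄞"), 2);  // U+1D11E: 4 UTF-8 bytes, 2 units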

View File

@@ -1,13 +1,15 @@
use anyhow::Result;
use lapce_core::{
buffer::BufferId,
plugin::{PluginBufferInfo, PluginId},
};
use serde::Deserialize;
use serde::Serialize;
use serde_json::{json, Value};
use xi_rope::RopeDelta;
use xi_rpc::{RpcCtx, RpcPeer};
use crate::buffer::Buffer;
#[derive(Eq, PartialEq, Hash, Clone, Debug, Serialize, Deserialize)]
pub struct PluginId(pub usize);
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug, Serialize, Deserialize)]
pub struct BufferId(pub usize);
#[derive(Clone)]
pub struct CoreProxy {
@@ -65,16 +67,9 @@ pub fn schedule_idle(&mut self, buffer_id: BufferId) {
pub trait Plugin {
fn initialize(&mut self, core: CoreProxy);
fn new_buffer(&mut self, buffer: &mut Buffer);
fn update(&mut self, buffer: &mut Buffer, delta: &RopeDelta, rev: u64);
fn get_completion(
&mut self,
buffer: &mut Buffer,
request_id: usize,
offset: usize,
);
fn idle(&mut self, buffer: &mut Buffer);
// fn new_buffer(&mut self, buffer: &mut Buffer);
//
// fn update(&mut self, buffer: &mut Buffer, delta: &RopeDelta, rev: u64);
//
// fn idle(&mut self, buffer: &mut Buffer);
}