path: root/helix-lsp
author      Blaž Hrastnik    2020-12-03 04:12:40 +0000
committer   GitHub           2020-12-03 04:12:40 +0000
commit      b7a3e525ed7fed5ed79e8580df2e3496bd994419 (patch)
tree        d202637047759b0510a16d8c59fdbbde62b50617 /helix-lsp
parent      2e12fc9a7cd221bb7b5f4b5c1ece599089770ccb (diff)
parent      39bf1ca82514e1dc56dfebdce2558cce662367d1 (diff)
Merge pull request #5 from helix-editor/lsp
LSP: mk1
Diffstat (limited to 'helix-lsp')
-rw-r--r--   helix-lsp/Cargo.toml        |  26
-rw-r--r--   helix-lsp/src/client.rs     | 355
-rw-r--r--   helix-lsp/src/lib.rs        | 117
-rw-r--r--   helix-lsp/src/transport.rs  | 212
4 files changed, 710 insertions, 0 deletions
diff --git a/helix-lsp/Cargo.toml b/helix-lsp/Cargo.toml
new file mode 100644
index 00000000..2ecd0cc1
--- /dev/null
+++ b/helix-lsp/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "helix-lsp"
+version = "0.1.0"
+authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+helix-core = { path = "../helix-core" }
+helix-view = { path = "../helix-view" }
+once_cell = "1.4"
+
+lsp-types = { version = "0.84", features = ["proposed"] }
+smol = "1.2"
+url = "2"
+pathdiff = "0.2"
+shellexpand = "2.0"
+glob = "0.3"
+anyhow = "1"
+serde_json = "1.0"
+serde = { version = "1.0", features = ["derive"] }
+jsonrpc-core = "15.1"
+futures-util = "0.3"
+thiserror = "1"
+log = "0.4"
diff --git a/helix-lsp/src/client.rs b/helix-lsp/src/client.rs
new file mode 100644
index 00000000..1f07cf89
--- /dev/null
+++ b/helix-lsp/src/client.rs
@@ -0,0 +1,355 @@
+use crate::{
+ transport::{Payload, Transport},
+ Call, Error,
+};
+
+type Result<T> = core::result::Result<T, Error>;
+
+use helix_core::{ChangeSet, Transaction};
+use helix_view::Document;
+
+// use std::collections::HashMap;
+use std::sync::atomic::{AtomicU64, Ordering};
+
+use jsonrpc_core as jsonrpc;
+use lsp_types as lsp;
+use serde_json::Value;
+
+use smol::{
+ channel::{Receiver, Sender},
+ io::{BufReader, BufWriter},
+ // prelude::*,
+ process::{Child, ChildStderr, Command, Stdio},
+ Executor,
+};
+
+pub struct Client {
+ _process: Child,
+ stderr: BufReader<ChildStderr>,
+
+ outgoing: Sender<Payload>,
+ pub incoming: Receiver<Call>,
+
+ pub request_counter: AtomicU64,
+
+ capabilities: Option<lsp::ServerCapabilities>,
+ // TODO: handle PublishDiagnostics Version
+ // diagnostics: HashMap<lsp::Url, Vec<lsp::Diagnostic>>,
+}
+
+impl Client {
+ pub fn start(ex: &Executor, cmd: &str, args: &[String]) -> Self {
+ let mut process = Command::new(cmd)
+ .args(args)
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .expect("Failed to start language server");
+ // smol reaps the child process on drop, but maybe we should use kill_on_drop(true) instead?
+
+ // TODO: do we need bufreader/writer here? or do we use async wrappers on unblock?
+ let writer = BufWriter::new(process.stdin.take().expect("Failed to open stdin"));
+ let reader = BufReader::new(process.stdout.take().expect("Failed to open stdout"));
+ let stderr = BufReader::new(process.stderr.take().expect("Failed to open stderr"));
+
+ let (incoming, outgoing) = Transport::start(ex, reader, writer);
+
+ Client {
+ _process: process,
+ stderr,
+
+ outgoing,
+ incoming,
+
+ request_counter: AtomicU64::new(0),
+
+ capabilities: None,
+ // diagnostics: HashMap::new(),
+ }
+ }
+
+ fn next_request_id(&self) -> jsonrpc::Id {
+ let id = self.request_counter.fetch_add(1, Ordering::Relaxed);
+ jsonrpc::Id::Num(id)
+ }
+
+ fn to_params(value: Value) -> Result<jsonrpc::Params> {
+ use jsonrpc::Params;
+
+ let params = match value {
+ Value::Null => Params::None,
+ Value::Bool(_) | Value::Number(_) | Value::String(_) => Params::Array(vec![value]),
+ Value::Array(vec) => Params::Array(vec),
+ Value::Object(map) => Params::Map(map),
+ };
+
+ Ok(params)
+ }
+
+ /// Execute an RPC request on the language server.
+ pub async fn request<R: lsp::request::Request>(&self, params: R::Params) -> Result<R::Result>
+ where
+ R::Params: serde::Serialize,
+ R::Result: core::fmt::Debug, // TODO: temporary
+ {
+ let params = serde_json::to_value(params)?;
+
+ let request = jsonrpc::MethodCall {
+ jsonrpc: Some(jsonrpc::Version::V2),
+ id: self.next_request_id(),
+ method: R::METHOD.to_string(),
+ params: Self::to_params(params)?,
+ };
+
+ let (tx, rx) = smol::channel::bounded::<Result<Value>>(1);
+
+ self.outgoing
+ .send(Payload::Request {
+ chan: tx,
+ value: request,
+ })
+ .await
+ .map_err(|e| Error::Other(e.into()))?;
+
+ let response = rx.recv().await.map_err(|e| Error::Other(e.into()))??;
+
+ let response = serde_json::from_value(response)?;
+
+ // TODO: we should pass the request to a sender thread via a channel
+ // so it can't be interleaved
+
+ // TODO: responses can arrive out of order, so we need to register a single-shot response channel
+
+ Ok(response)
+ }
+
+ /// Send an RPC notification to the language server.
+ pub async fn notify<R: lsp::notification::Notification>(&self, params: R::Params) -> Result<()>
+ where
+ R::Params: serde::Serialize,
+ {
+ let params = serde_json::to_value(params)?;
+
+ let notification = jsonrpc::Notification {
+ jsonrpc: Some(jsonrpc::Version::V2),
+ method: R::METHOD.to_string(),
+ params: Self::to_params(params)?,
+ };
+
+ self.outgoing
+ .send(Payload::Notification(notification))
+ .await
+ .map_err(|e| Error::Other(e.into()))?;
+
+ Ok(())
+ }
+
+ /// Reply to a language server RPC call.
+ pub async fn reply(
+ &self,
+ id: jsonrpc::Id,
+ result: core::result::Result<Value, jsonrpc::Error>,
+ ) -> Result<()> {
+ use jsonrpc::{Failure, Output, Success, Version};
+
+ let output = match result {
+ Ok(result) => Output::Success(Success {
+ jsonrpc: Some(Version::V2),
+ id,
+ result,
+ }),
+ Err(error) => Output::Failure(Failure {
+ jsonrpc: Some(Version::V2),
+ id,
+ error,
+ }),
+ };
+
+ self.outgoing
+ .send(Payload::Response(output))
+ .await
+ .map_err(|e| Error::Other(e.into()))?;
+
+ Ok(())
+ }
+
+ // -------------------------------------------------------------------------------------------
+ // General messages
+ // -------------------------------------------------------------------------------------------
+
+ pub async fn initialize(&mut self) -> Result<()> {
+ // TODO: delay any requests that are triggered prior to initialize
+
+ #[allow(deprecated)]
+ let params = lsp::InitializeParams {
+ process_id: Some(std::process::id()),
+ root_path: None,
+ // root_uri: Some(lsp_types::Url::parse("file://localhost/")?),
+ root_uri: None, // set to project root in the future
+ initialization_options: None,
+ capabilities: lsp::ClientCapabilities {
+ ..Default::default()
+ },
+ trace: None,
+ workspace_folders: None,
+ client_info: None,
+ locale: None, // TODO
+ };
+
+ let response = self.request::<lsp::request::Initialize>(params).await?;
+ self.capabilities = Some(response.capabilities);
+
+ // next up, notify<initialized>
+ self.notify::<lsp::notification::Initialized>(lsp::InitializedParams {})
+ .await?;
+
+ Ok(())
+ }
+
+ pub async fn shutdown(&self) -> Result<()> {
+ self.request::<lsp::request::Shutdown>(()).await
+ }
+
+ pub async fn exit(&self) -> Result<()> {
+ self.notify::<lsp::notification::Exit>(()).await
+ }
+
+ // -------------------------------------------------------------------------------------------
+ // Text document
+ // -------------------------------------------------------------------------------------------
+
+ pub async fn text_document_did_open(&mut self, doc: &Document) -> Result<()> {
+ self.notify::<lsp::notification::DidOpenTextDocument>(lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path(doc.path().unwrap()).unwrap(),
+ language_id: "rust".to_string(), // TODO: hardcoded for now
+ version: doc.version,
+ text: String::from(doc.text()),
+ },
+ })
+ .await
+ }
+
+ fn to_changes(changeset: &ChangeSet) -> Vec<lsp::TextDocumentContentChangeEvent> {
+ let mut iter = changeset.changes().iter().peekable();
+ let mut old_pos = 0;
+
+ let mut changes = Vec::new();
+
+ use crate::util::pos_to_lsp_pos;
+ use helix_core::Operation::*;
+
+ // TEMP: placeholder; this should be the document text before the edit was applied
+ let rope = helix_core::Rope::from("");
+ let old_text = rope.slice(..);
+
+ while let Some(change) = iter.next() {
+ let len = match change {
+ Delete(i) | Retain(i) => *i,
+ Insert(_) => 0,
+ };
+ let old_end = old_pos + len;
+
+ match change {
+ Retain(_) => {}
+ Delete(_) => {
+ let start = pos_to_lsp_pos(&old_text, old_pos);
+ let end = pos_to_lsp_pos(&old_text, old_end);
+
+ // a subsequent insert means a replace, so consume it
+ if let Some(Insert(s)) = iter.peek() {
+ iter.next();
+
+ // replacement
+ changes.push(lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(start, end)),
+ text: s.into(),
+ range_length: None,
+ });
+ } else {
+ // deletion
+ changes.push(lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(start, end)),
+ text: "".to_string(),
+ range_length: None,
+ });
+ };
+ }
+ Insert(s) => {
+ let start = pos_to_lsp_pos(&old_text, old_pos);
+
+ // insert
+ changes.push(lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(start, start)),
+ text: s.into(),
+ range_length: None,
+ });
+ }
+ }
+ old_pos = old_end;
+ }
+
+ changes
+ }
+
+ // TODO: trigger any time history.commit_revision happens
+ pub async fn text_document_did_change(
+ &mut self,
+ doc: &Document,
+ transaction: &Transaction,
+ ) -> Result<()> {
+ // figure out what kind of sync the server supports
+
+ let capabilities = self.capabilities.as_ref().unwrap(); // TODO: needs post init
+
+ let sync_capabilities = match capabilities.text_document_sync {
+ Some(lsp::TextDocumentSyncCapability::Kind(kind)) => kind,
+ Some(lsp::TextDocumentSyncCapability::Options(lsp::TextDocumentSyncOptions {
+ change: Some(kind),
+ ..
+ })) => kind,
+ // None | SyncOptions { change: None }
+ _ => return Ok(()),
+ };
+
+ let changes = match sync_capabilities {
+ lsp::TextDocumentSyncKind::Full => {
+ vec![lsp::TextDocumentContentChangeEvent {
+ // range = None -> whole document
+ range: None, //Some(Range)
+ range_length: None, // u64 apparently deprecated
+ text: "".to_string(),
+ }] // TODO: probably need old_state here too?
+ }
+ lsp::TextDocumentSyncKind::Incremental => Self::to_changes(transaction.changes()),
+ lsp::TextDocumentSyncKind::None => return Ok(()),
+ };
+
+ self.notify::<lsp::notification::DidChangeTextDocument>(lsp::DidChangeTextDocumentParams {
+ text_document: lsp::VersionedTextDocumentIdentifier::new(
+ lsp::Url::from_file_path(doc.path().unwrap()).unwrap(),
+ doc.version,
+ ),
+ content_changes: changes,
+ })
+ .await
+ }
+
+ // TODO: impl into() TextDocumentIdentifier / VersionedTextDocumentIdentifier for Document.
+
+ pub async fn text_document_did_close(&mut self, doc: &Document) -> Result<()> {
+ self.notify::<lsp::notification::DidCloseTextDocument>(lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(
+ lsp::Url::from_file_path(doc.path().unwrap()).unwrap(),
+ ),
+ })
+ .await
+ }
+
+ // will_save / will_save_wait_until
+
+ pub async fn text_document_did_save(&mut self) -> anyhow::Result<()> {
+ unimplemented!()
+ }
+}
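
A minimal sketch of how this client might be driven end to end, assuming a smol executor is running and rust-analyzer is on PATH; `doc` stands in for a real helix_view::Document and is not part of this patch:

    fn main() -> anyhow::Result<()> {
        let ex = smol::Executor::new();

        smol::block_on(ex.run(async {
            // spawn the server process plus the transport task on the executor
            let mut client = helix_lsp::Client::start(&ex, "rust-analyzer", &[]);

            // initialize must complete before any textDocument/* traffic
            client.initialize().await?;

            // client.text_document_did_open(&doc).await?; // `doc` is hypothetical here

            // orderly teardown: shutdown request, then exit notification
            client.shutdown().await?;
            client.exit().await?;
            Ok::<(), anyhow::Error>(())
        }))?;

        Ok(())
    }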
diff --git a/helix-lsp/src/lib.rs b/helix-lsp/src/lib.rs
new file mode 100644
index 00000000..eae6fa86
--- /dev/null
+++ b/helix-lsp/src/lib.rs
@@ -0,0 +1,117 @@
+mod client;
+mod transport;
+
+pub use jsonrpc_core as jsonrpc;
+pub use lsp_types as lsp;
+
+pub use once_cell::sync::{Lazy, OnceCell};
+
+pub use client::Client;
+pub use lsp::{Position, Url};
+
+use thiserror::Error;
+
+use std::{collections::HashMap, sync::Arc};
+
+#[derive(Error, Debug)]
+pub enum Error {
+ #[error("protocol error: {0}")]
+ Rpc(#[from] jsonrpc::Error),
+ #[error("failed to parse: {0}")]
+ Parse(#[from] serde_json::Error),
+ #[error("request timed out")]
+ Timeout,
+ #[error(transparent)]
+ Other(#[from] anyhow::Error),
+}
+
+pub mod util {
+ use super::*;
+
+ pub fn lsp_pos_to_pos(doc: &helix_core::RopeSlice, pos: lsp::Position) -> usize {
+ let line = doc.line_to_char(pos.line as usize);
+ let line_start = doc.char_to_utf16_cu(line);
+ doc.utf16_cu_to_char(pos.character as usize + line_start)
+ }
+ pub fn pos_to_lsp_pos(doc: &helix_core::RopeSlice, pos: usize) -> lsp::Position {
+ let line = doc.char_to_line(pos);
+ let line_start = doc.char_to_utf16_cu(line);
+ let col = doc.char_to_utf16_cu(pos) - line_start;
+
+ lsp::Position::new(line as u32, col as u32)
+ }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum Notification {
+ PublishDiagnostics(lsp::PublishDiagnosticsParams),
+}
+
+impl Notification {
+ pub fn parse(method: &str, params: jsonrpc::Params) -> Notification {
+ use lsp::notification::Notification as _;
+
+ match method {
+ lsp::notification::PublishDiagnostics::METHOD => {
+ let params: lsp::PublishDiagnosticsParams = params
+ .parse()
+ .expect("Failed to parse PublishDiagnostics params");
+
+ // TODO: need to loop over diagnostics and distinguish them by URI
+ Notification::PublishDiagnostics(params)
+ }
+ _ => unimplemented!("unhandled notification: {}", method),
+ }
+ }
+}
+
+pub use jsonrpc::Call;
+
+type LanguageId = String;
+
+pub static REGISTRY: Lazy<Registry> = Lazy::new(Registry::init);
+
+pub struct Registry {
+ inner: HashMap<LanguageId, OnceCell<Arc<Client>>>,
+}
+
+impl Registry {
+ pub fn init() -> Self {
+ Self {
+ inner: HashMap::new(),
+ }
+ }
+
+ pub fn get(&self, id: &str, ex: &smol::Executor) -> Option<Arc<Client>> {
+ // TODO: use get_or_try_init and propagate the error
+ self.inner
+ .get(id)
+ .map(|cell| {
+ cell.get_or_init(|| {
+ // TODO: lookup defaults for id (name, args)
+
+ // initialize a new client
+ let client = Client::start(&ex, "rust-analyzer", &[]);
+ // TODO: also call initialize().await()
+ Arc::new(client)
+ })
+ })
+ .cloned()
+ }
+}
+
+// REGISTRY = HashMap<LanguageId, Lazy/OnceCell<Arc<RwLock<Client>>>>
+// spawn one server per language type, need to spawn one per workspace if server doesn't support
+// workspaces
+//
+// could also be a client per root dir
+//
+// storing a copy of Option<Arc<RwLock<Client>>> on Document would make the LSP client easily
+// accessible during edit/save callbacks
+//
+// the event loop needs to process all incoming streams, maybe we can just have that be a separate
+// task that's continually running and store the state on the client, then use read lock to
+// retrieve data during render
+// -> PROBLEM: how do you trigger an update on the editor side when data updates?
+//
+// -> When data updates, pull all pending events before re-rendering so we don't redraw too frequently
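
A rough sketch of that "pull all events until we run out" idea, assuming the editor holds a started Client and calls something like this once per redraw; the handling inside the match is illustrative only:

    fn drain_lsp_events(client: &helix_lsp::Client) {
        // drain every message already queued so the editor redraws once per batch
        while let Ok(call) = client.incoming.try_recv() {
            match call {
                helix_lsp::Call::Notification(notification) => {
                    let _event = helix_lsp::Notification::parse(
                        &notification.method,
                        notification.params,
                    );
                    // e.g. store PublishDiagnostics per document URI here
                }
                // server -> client requests would be answered via client.reply(...)
                _ => {}
            }
        }
        // request a single redraw after the whole batch has been applied
    }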
diff --git a/helix-lsp/src/transport.rs b/helix-lsp/src/transport.rs
new file mode 100644
index 00000000..4c349a13
--- /dev/null
+++ b/helix-lsp/src/transport.rs
@@ -0,0 +1,212 @@
+use std::collections::HashMap;
+
+use log::debug;
+
+use crate::{Error, Notification};
+
+type Result<T> = core::result::Result<T, Error>;
+
+use jsonrpc_core as jsonrpc;
+use serde_json::Value;
+
+use smol::prelude::*;
+
+use smol::{
+ channel::{Receiver, Sender},
+ io::{BufReader, BufWriter},
+ process::{ChildStderr, ChildStdin, ChildStdout},
+ Executor,
+};
+
+pub(crate) enum Payload {
+ Request {
+ chan: Sender<Result<Value>>,
+ value: jsonrpc::MethodCall,
+ },
+ Notification(jsonrpc::Notification),
+ Response(jsonrpc::Output),
+}
+
+use serde::{Deserialize, Serialize};
+/// A type representing all possible values sent from the server to the client.
+#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
+#[serde(deny_unknown_fields)]
+#[serde(untagged)]
+enum Message {
+ /// A regular JSON-RPC request output (single response).
+ Output(jsonrpc::Output),
+ /// A JSON-RPC request or notification.
+ Call(jsonrpc::Call),
+}
+
+pub(crate) struct Transport {
+ incoming: Sender<jsonrpc::Call>,
+ outgoing: Receiver<Payload>,
+
+ pending_requests: HashMap<jsonrpc::Id, Sender<Result<Value>>>,
+ headers: HashMap<String, String>,
+
+ writer: BufWriter<ChildStdin>,
+ reader: BufReader<ChildStdout>,
+}
+
+impl Transport {
+ pub fn start(
+ ex: &Executor,
+ reader: BufReader<ChildStdout>,
+ writer: BufWriter<ChildStdin>,
+ ) -> (Receiver<jsonrpc::Call>, Sender<Payload>) {
+ let (incoming, rx) = smol::channel::unbounded();
+ let (tx, outgoing) = smol::channel::unbounded();
+
+ let transport = Self {
+ reader,
+ writer,
+ incoming,
+ outgoing,
+ pending_requests: Default::default(),
+ headers: Default::default(),
+ };
+
+ ex.spawn(transport.duplex()).detach();
+
+ (rx, tx)
+ }
+
+ async fn recv(
+ reader: &mut (impl AsyncBufRead + Unpin),
+ headers: &mut HashMap<String, String>,
+ ) -> core::result::Result<Message, std::io::Error> {
+ // read headers
+ loop {
+ let mut header = String::new();
+ // a read of 0 bytes means the pipe was closed
+ reader.read_line(&mut header).await?;
+ let header = header.trim();
+
+ if header.is_empty() {
+ break;
+ }
+
+ let parts: Vec<&str> = header.split(": ").collect();
+ if parts.len() != 2 {
+ return Err(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "Failed to parse header",
+ ));
+ }
+ headers.insert(parts[0].to_string(), parts[1].to_string());
+ }
+
+ // find content-length
+ let content_length = headers.get("Content-Length").unwrap().parse().unwrap();
+
+ let mut content = vec![0; content_length];
+ reader.read_exact(&mut content).await?;
+ let msg = String::from_utf8(content).unwrap();
+
+ // read data
+
+ // try parsing as output (server response) or call (server request)
+ let output: serde_json::Result<Message> = serde_json::from_str(&msg);
+
+ Ok(output?)
+ }
+
+ pub async fn send_payload(&mut self, payload: Payload) -> anyhow::Result<()> {
+ match payload {
+ Payload::Request { chan, value } => {
+ self.pending_requests.insert(value.id.clone(), chan);
+
+ let json = serde_json::to_string(&value)?;
+ self.send(json).await
+ }
+ Payload::Notification(value) => {
+ let json = serde_json::to_string(&value)?;
+ self.send(json).await
+ }
+ Payload::Response(error) => {
+ let json = serde_json::to_string(&error)?;
+ self.send(json).await
+ }
+ }
+ }
+
+ pub async fn send(&mut self, request: String) -> anyhow::Result<()> {
+ debug!("-> {}", request);
+
+ // send the headers
+ self.writer
+ .write_all(format!("Content-Length: {}\r\n\r\n", request.len()).as_bytes())
+ .await?;
+
+ // send the body
+ self.writer.write_all(request.as_bytes()).await?;
+
+ self.writer.flush().await?;
+
+ Ok(())
+ }
+
+ async fn recv_msg(&mut self, msg: Message) -> anyhow::Result<()> {
+ match msg {
+ Message::Output(output) => self.recv_response(output).await?,
+ Message::Call(call) => {
+ self.incoming.send(call).await?;
+ // let notification = Notification::parse(&method, params);
+ }
+ };
+ Ok(())
+ }
+
+ async fn recv_response(&mut self, output: jsonrpc::Output) -> anyhow::Result<()> {
+ match output {
+ jsonrpc::Output::Success(jsonrpc::Success { id, result, .. }) => {
+ debug!("<- {}", result);
+
+ let tx = self
+ .pending_requests
+ .remove(&id)
+ .expect("pending_request with id not found!");
+ tx.send(Ok(result)).await?;
+ }
+ jsonrpc::Output::Failure(jsonrpc::Failure { id, error, .. }) => {
+ let tx = self
+ .pending_requests
+ .remove(&id)
+ .expect("pending_request with id not found!");
+ tx.send(Err(error.into())).await?;
+ }
+ msg => unimplemented!("{:?}", msg),
+ }
+ Ok(())
+ }
+
+ pub async fn duplex(mut self) {
+ use futures_util::{select, FutureExt};
+ loop {
+ select! {
+ // client -> server
+ msg = self.outgoing.next().fuse() => {
+ if msg.is_none() {
+ break;
+ }
+ let msg = msg.unwrap();
+
+ self.send_payload(msg).await.unwrap();
+ }
+ // server -> client
+ msg = Self::recv(&mut self.reader, &mut self.headers).fuse() => {
+ if msg.is_err() {
+ break;
+ }
+ let msg = msg.unwrap();
+
+ debug!("<- {:?}", msg);
+
+ self.recv_msg(msg).await.unwrap();
+ }
+ }
+ }
+ }
+}
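
For reference, the framing that `send` writes (and `recv` parses) is just a Content-Length header, a blank line, and the JSON body. A hand-built example, using an illustrative "initialized" notification as the payload:

    fn main() {
        // illustrative body; any serialized jsonrpc message would do
        let body = r#"{"jsonrpc":"2.0","method":"initialized","params":{}}"#;

        // header, blank line, then exactly `body.len()` bytes of JSON
        let framed = format!("Content-Length: {}\r\n\r\n{}", body.len(), body);
        println!("{}", framed);
    }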