Diffstat (limited to 'support/texlab/src/server.rs')
-rw-r--r--  support/texlab/src/server.rs  1089
1 file changed, 521 insertions(+), 568 deletions(-)
diff --git a/support/texlab/src/server.rs b/support/texlab/src/server.rs
index 2521faba09..5f833b65bb 100644
--- a/support/texlab/src/server.rs
+++ b/support/texlab/src/server.rs
@@ -4,35 +4,15 @@ use std::{
};
use anyhow::Result;
-use cancellation::{CancellationToken, CancellationTokenSource};
-use crossbeam_channel::Sender;
+use crossbeam_channel::{Receiver, Sender};
use log::{error, info, warn};
-use lsp_server::{Connection, ErrorCode, Message, RequestId};
-use lsp_types::{
- notification::{
- Cancel, DidChangeConfiguration, DidChangeTextDocument, DidOpenTextDocument,
- DidSaveTextDocument, PublishDiagnostics,
- },
- request::{
- DocumentLinkRequest, FoldingRangeRequest, Formatting, GotoDefinition, PrepareRenameRequest,
- References, Rename, SemanticTokensRangeRequest,
- },
- *,
-};
-use notification::DidCloseTextDocument;
-use notify::RecursiveMode;
-use request::{
- Completion, DocumentHighlightRequest, DocumentSymbolRequest, HoverRequest,
- ResolveCompletionItem, WorkspaceSymbol,
-};
+use lsp_server::{Connection, Message, RequestId};
+use lsp_types::{notification::*, request::*, *};
use serde::Serialize;
use threadpool::ThreadPool;
use crate::{
- client::send_notification,
- component_db::COMPONENT_DATABASE,
- config::{pull_config, push_config, register_config_capability},
- create_workspace_full,
+ client::{send_notification, send_request},
diagnostics::{DiagnosticsDebouncer, DiagnosticsManager, DiagnosticsMessage},
dispatch::{NotificationDispatcher, RequestDispatcher},
distro::Distribution,
@@ -42,19 +22,27 @@ use crate::{
prepare_rename_all, rename_all, BuildEngine, BuildParams, BuildResult, BuildStatus,
FeatureRequest, ForwardSearchResult, ForwardSearchStatus,
},
- req_queue::{self, IncomingData, ReqQueue},
- ClientCapabilitiesExt, Document, DocumentLanguage, LineIndexExt, ServerContext, Uri, Workspace,
- WorkspaceSource,
+ req_queue::{IncomingData, ReqQueue},
+ ClientCapabilitiesExt, DocumentLanguage, Environment, LineIndex, LineIndexExt, Options,
+ Workspace, WorkspaceEvent,
};
+#[derive(Debug)]
+enum InternalMessage {
+ SetDistro(Distribution),
+ SetOptions(Options),
+}
+
+#[derive(Clone)]
pub struct Server {
- connection: Connection,
- context: Arc<ServerContext>,
+ connection: Arc<Connection>,
+ internal_tx: Sender<InternalMessage>,
+ internal_rx: Receiver<InternalMessage>,
req_queue: Arc<Mutex<ReqQueue>>,
- workspace: Arc<dyn Workspace>,
- static_debouncer: DiagnosticsDebouncer,
- chktex_debouncer: DiagnosticsDebouncer,
- pool: ThreadPool,
+ workspace: Workspace,
+ static_debouncer: Arc<DiagnosticsDebouncer>,
+ chktex_debouncer: Arc<DiagnosticsDebouncer>,
+ pool: Arc<Mutex<ThreadPool>>,
load_resolver: bool,
build_engine: Arc<BuildEngine>,
}
@@ -65,30 +53,38 @@ impl Server {
current_dir: PathBuf,
load_resolver: bool,
) -> Result<Self> {
- let context = Arc::new(ServerContext::new(current_dir));
let req_queue = Arc::default();
- let workspace = Arc::new(create_workspace_full(Arc::clone(&context))?);
+ let workspace = Workspace::new(Environment::new(Arc::new(current_dir)));
let diag_manager = Arc::new(Mutex::new(DiagnosticsManager::default()));
- let static_debouncer =
- create_static_debouncer(Arc::clone(&diag_manager), &connection, Arc::clone(&context));
+ let static_debouncer = Arc::new(create_static_debouncer(
+ Arc::clone(&diag_manager),
+ &connection,
+ ));
- let chktex_debouncer =
- create_chktex_debouncer(diag_manager, &connection, Arc::clone(&context));
+ let chktex_debouncer = Arc::new(create_chktex_debouncer(diag_manager, &connection));
+
+ let (internal_tx, internal_rx) = crossbeam_channel::unbounded();
Ok(Self {
- connection,
- context,
+ connection: Arc::new(connection),
+ internal_tx,
+ internal_rx,
req_queue,
workspace,
static_debouncer,
chktex_debouncer,
- pool: threadpool::Builder::new().build(),
+ pool: Arc::new(Mutex::new(threadpool::Builder::new().build())),
load_resolver,
build_engine: Arc::default(),
})
}
+ fn spawn(&self, job: impl FnOnce(Self) + Send + 'static) {
+ let server = self.clone();
+ self.pool.lock().unwrap().execute(move || job(server));
+ }
+
fn capabilities(&self) -> ServerCapabilities {
ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Options(
@@ -131,18 +127,6 @@ impl Server {
})),
document_highlight_provider: Some(OneOf::Left(true)),
document_formatting_provider: Some(OneOf::Left(true)),
- #[cfg(feature = "semantic")]
- semantic_tokens_provider: Some(
- SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions {
- full: None,
- range: Some(true),
- legend: SemanticTokensLegend {
- token_types: crate::features::legend::SUPPORTED_TYPES.to_vec(),
- token_modifiers: crate::features::legend::SUPPORTED_MODIFIERS.to_vec(),
- },
- work_done_progress_options: WorkDoneProgressOptions::default(),
- }),
- ),
..ServerCapabilities::default()
}
}
@@ -151,8 +135,8 @@ impl Server {
let (id, params) = self.connection.initialize_start()?;
let params: InitializeParams = serde_json::from_value(params)?;
- *self.context.client_capabilities.lock().unwrap() = params.capabilities;
- *self.context.client_info.lock().unwrap() = params.client_info;
+ self.workspace.environment.client_capabilities = Arc::new(params.capabilities);
+ self.workspace.environment.client_info = params.client_info.map(Arc::new);
let result = InitializeResult {
capabilities: self.capabilities(),
@@ -164,49 +148,162 @@ impl Server {
self.connection
.initialize_finish(id, serde_json::to_value(result)?)?;
- let cx = Arc::clone(&self.context);
if self.load_resolver {
- self.pool.execute(move || {
+ self.spawn(move |server| {
let distro = Distribution::detect();
info!("Detected distribution: {}", distro.kind);
- *cx.resolver.lock().unwrap() = distro.resolver;
+
+ server
+ .internal_tx
+ .send(InternalMessage::SetDistro(distro))
+ .unwrap();
});
}
self.register_diagnostics_handler();
- let req_queue = Arc::clone(&self.req_queue);
- let sender = self.connection.sender.clone();
- let context = Arc::clone(&self.context);
- let workspace = Arc::clone(&self.workspace);
- self.pool.execute(move || {
- register_config_capability(&req_queue, &sender, &context.client_capabilities);
- pull_and_reparse_all(req_queue, sender, context, workspace);
+ self.spawn(move |server| {
+ server.register_config_capability();
+ server.register_file_watching();
+ server.pull_config();
});
Ok(())
}
+ fn register_file_watching(&self) {
+ if self
+ .workspace
+ .environment
+ .client_capabilities
+ .has_file_watching_support()
+ {
+ let options = DidChangeWatchedFilesRegistrationOptions {
+ watchers: vec![FileSystemWatcher {
+ glob_pattern: "**/*.{aux,log}".into(),
+ kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete),
+ }],
+ };
+
+ let reg = Registration {
+ id: "build-watch".to_string(),
+ method: DidChangeWatchedFiles::METHOD.to_string(),
+ register_options: Some(serde_json::to_value(options).unwrap()),
+ };
+
+ let params = RegistrationParams {
+ registrations: vec![reg],
+ };
+
+ if let Err(why) =
+ send_request::<RegisterCapability>(&self.req_queue, &self.connection.sender, params)
+ {
+ error!(
+ "Failed to register \"{}\" notification: {}",
+ DidChangeWatchedFiles::METHOD,
+ why
+ );
+ }
+ }
+ }
+
+ fn register_config_capability(&self) {
+ if self
+ .workspace
+ .environment
+ .client_capabilities
+ .has_push_configuration_support()
+ {
+ let reg = Registration {
+ id: "pull-config".to_string(),
+ method: DidChangeConfiguration::METHOD.to_string(),
+ register_options: None,
+ };
+
+ let params = RegistrationParams {
+ registrations: vec![reg],
+ };
+
+ if let Err(why) =
+ send_request::<RegisterCapability>(&self.req_queue, &self.connection.sender, params)
+ {
+ error!(
+ "Failed to register \"{}\" notification: {}",
+ DidChangeConfiguration::METHOD,
+ why
+ );
+ }
+ }
+ }
+
fn register_diagnostics_handler(&mut self) {
- let sender = self.static_debouncer.sender.clone();
- self.workspace
- .register_open_handler(Arc::new(move |workspace, document| {
- let message = DiagnosticsMessage::Analyze {
- workspace,
- document,
+ let (event_sender, event_receiver) = crossbeam_channel::unbounded();
+ let diag_sender = self.static_debouncer.sender.clone();
+ std::thread::spawn(move || {
+ for event in event_receiver {
+ match event {
+ WorkspaceEvent::Changed(workspace, document) => {
+ let message = DiagnosticsMessage::Analyze {
+ workspace,
+ document,
+ };
+
+ if diag_sender.send(message).is_err() {
+ break;
+ }
+ }
};
- sender.send(message).unwrap();
- }));
+ }
+ });
+
+ self.workspace.listeners.push(event_sender);
}
- fn register_incoming_request(&self, id: RequestId) -> Arc<CancellationToken> {
- let token_source = CancellationTokenSource::new();
- let token = Arc::clone(token_source.token());
+ fn register_incoming_request(&self, id: RequestId) {
let mut req_queue = self.req_queue.lock().unwrap();
- req_queue
- .incoming
- .register(id.clone(), IncomingData { token_source });
- token
+ req_queue.incoming.register(id, IncomingData);
+ }
+
+ fn pull_config(&self) {
+ if !self
+ .workspace
+ .environment
+ .client_capabilities
+ .has_pull_configuration_support()
+ {
+ return;
+ }
+
+ let params = ConfigurationParams {
+ items: vec![ConfigurationItem {
+ section: Some("texlab".to_string()),
+ scope_uri: None,
+ }],
+ };
+
+ match send_request::<WorkspaceConfiguration>(
+ &self.req_queue,
+ &self.connection.sender,
+ params,
+ ) {
+ Ok(mut json) => {
+ let value = json.pop().expect("invalid configuration request");
+ let options = match serde_json::from_value(value) {
+ Ok(new_options) => new_options,
+ Err(why) => {
+ warn!("Invalid configuration section \"texlab\": {}", why);
+ Options::default()
+ }
+ };
+
+ self.internal_tx
+ .send(InternalMessage::SetOptions(options))
+ .unwrap();
+ }
+ Err(why) => {
+ error!("Retrieving configuration failed: {}", why);
+ }
+ };
}
fn cancel(&self, params: CancelParams) -> Result<()> {
@@ -216,165 +313,146 @@ impl Server {
};
let mut req_queue = self.req_queue.lock().unwrap();
- if let Some(data) = req_queue.incoming.complete(id.clone()) {
- data.token_source.cancel();
+ req_queue.incoming.complete(id);
+
+ Ok(())
+ }
+
+ fn did_change_watched_files(&mut self, params: DidChangeWatchedFilesParams) -> Result<()> {
+ for change in params.changes {
+ if let Ok(path) = change.uri.to_file_path() {
+ match change.typ {
+ FileChangeType::CREATED | FileChangeType::CHANGED => {
+ self.workspace.reload(path)?;
+ }
+ FileChangeType::DELETED => {
+ self.workspace.documents_by_uri.remove(&change.uri);
+ }
+ _ => {}
+ }
+ }
}
Ok(())
}
- fn did_change_configuration(&self, params: DidChangeConfigurationParams) -> Result<()> {
- let client_capabilities = { self.context.client_capabilities.lock().unwrap().clone() };
- if client_capabilities.has_pull_configuration_support() {
- let req_queue = Arc::clone(&self.req_queue);
- let sender = self.connection.sender.clone();
- let context = Arc::clone(&self.context);
- let workspace = Arc::clone(&self.workspace);
- self.pool.execute(move || {
- pull_and_reparse_all(req_queue, sender, context, workspace);
+ fn did_change_configuration(&mut self, params: DidChangeConfigurationParams) -> Result<()> {
+ if self
+ .workspace
+ .environment
+ .client_capabilities
+ .has_pull_configuration_support()
+ {
+ self.spawn(move |server| {
+ server.pull_config();
});
} else {
- push_config(&self.context.options, params.settings);
- if let Some(path) = { self.context.options.read().unwrap().aux_directory.clone() } {
- let _ = self.workspace.watch(path, RecursiveMode::NonRecursive);
- }
+ match serde_json::from_value(params.settings) {
+ Ok(options) => {
+ self.workspace.environment.options = Arc::new(options);
+ }
+ Err(why) => {
+ error!("Invalid configuration: {}", why);
+ }
+ };
- let workspace = Arc::clone(&self.workspace);
- self.pool.execute(move || {
- reparse_all(workspace.as_ref());
- });
+ self.reparse_all()?;
}
Ok(())
}
- fn did_open(&self, params: DidOpenTextDocumentParams) -> Result<()> {
+ fn did_open(&mut self, params: DidOpenTextDocumentParams) -> Result<()> {
let language_id = &params.text_document.language_id;
let language = DocumentLanguage::by_language_id(language_id);
let document = self.workspace.open(
- Arc::new(params.text_document.uri.into()),
- params.text_document.text,
+ Arc::new(params.text_document.uri),
+ Arc::new(params.text_document.text),
language.unwrap_or(DocumentLanguage::Latex),
- WorkspaceSource::Client,
- );
-
- let should_lint = { self.context.options.read().unwrap().chktex.on_open_and_save };
- if let Some(document) = self
- .workspace
- .get(document.uri.as_ref())
- .filter(|_| should_lint)
- {
- self.chktex_debouncer
- .sender
- .send(DiagnosticsMessage::Analyze {
- workspace: Arc::clone(&self.workspace),
- document,
- })?;
- };
- Ok(())
- }
-
- fn did_change(&self, mut params: DidChangeTextDocumentParams) -> Result<()> {
- let uri = params.text_document.uri.into();
- let old_document = self.workspace.get(&uri);
- let old_text = old_document.as_ref().map(|document| document.text.as_str());
- let uri = Arc::new(uri);
+ )?;
- let language = self
- .workspace
- .get(&uri)
- .map(|document| document.data.language())
- .unwrap_or(DocumentLanguage::Latex);
-
- let new_document = match &old_document {
- Some(old_document) => params
- .content_changes
- .into_iter()
- .fold(Arc::clone(&old_document), |old_document, change| {
- self.merge_text_changes(&old_document, language, change)
- }),
- None => self.workspace.open(
- Arc::clone(&uri),
- params.content_changes.pop().unwrap().text,
- language,
- WorkspaceSource::Client,
- ),
- };
+ self.workspace.viewport.insert(Arc::clone(&document.uri));
- let line = match old_text {
- Some(old_text) => old_text
- .lines()
- .zip(new_document.text.lines())
- .position(|(a, b)| a != b)
- .unwrap_or_default() as u32,
- None => 0,
- };
- self.build_engine
- .positions_by_uri
- .insert(Arc::clone(&uri), Position::new(line, 0));
-
- let should_lint = { self.context.options.read().unwrap().chktex.on_edit };
- if should_lint {
+ if self.workspace.environment.options.chktex.on_open_and_save {
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
- workspace: Arc::clone(&self.workspace),
- document: new_document,
+ workspace: self.workspace.clone(),
+ document,
})?;
- };
+ }
Ok(())
}
- fn merge_text_changes(
- &self,
- old_document: &Document,
- new_language: DocumentLanguage,
- change: TextDocumentContentChangeEvent,
- ) -> Arc<Document> {
- let new_text = match change.range {
- Some(range) => {
- let range = old_document.line_index.offset_lsp_range(range);
- let mut new_text = String::new();
- new_text.push_str(&old_document.text[..range.start().into()]);
- new_text.push_str(&change.text);
- new_text.push_str(&old_document.text[range.end().into()..]);
- new_text
+ fn did_change(&mut self, params: DidChangeTextDocumentParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri);
+ match self.workspace.documents_by_uri.get(&uri).cloned() {
+ Some(old_document) => {
+ let mut text = old_document.text.to_string();
+ apply_document_edit(&mut text, params.content_changes);
+ let language = old_document.data.language();
+ let new_document =
+ self.workspace
+ .open(Arc::clone(&uri), Arc::new(text), language)?;
+ self.workspace
+ .viewport
+ .insert(Arc::clone(&new_document.uri));
+
+ self.build_engine.positions_by_uri.insert(
+ Arc::clone(&uri),
+ Position::new(
+ old_document
+ .text
+ .lines()
+ .zip(new_document.text.lines())
+ .position(|(a, b)| a != b)
+ .unwrap_or_default() as u32,
+ 0,
+ ),
+ );
+
+ if self.workspace.environment.options.chktex.on_edit {
+ self.chktex_debouncer
+ .sender
+ .send(DiagnosticsMessage::Analyze {
+ workspace: self.workspace.clone(),
+ document: new_document,
+ })?;
+ };
}
- None => change.text,
+ None => match uri.to_file_path() {
+ Ok(path) => {
+ self.workspace.load(path)?;
+ }
+ Err(_) => return Ok(()),
+ },
};
- self.workspace.open(
- Arc::clone(&old_document.uri),
- new_text,
- new_language,
- WorkspaceSource::Client,
- )
+ Ok(())
}
fn did_save(&self, params: DidSaveTextDocumentParams) -> Result<()> {
- let uri = params.text_document.uri.into();
-
- let should_build = { self.context.options.read().unwrap().build.on_save };
- if let Some(request) =
- self.workspace
- .get(&uri)
- .filter(|_| should_build)
- .and_then(|document| {
- self.feature_request(
- Arc::clone(&document.uri),
- BuildParams {
- text_document: TextDocumentIdentifier::new(uri.clone().into()),
- },
- )
- })
+ let uri = params.text_document.uri;
+
+ if let Some(request) = self
+ .workspace
+ .documents_by_uri
+ .get(&uri)
+ .filter(|_| self.workspace.environment.options.build.on_save)
+ .map(|document| {
+ self.feature_request(
+ Arc::clone(&document.uri),
+ BuildParams {
+ text_document: TextDocumentIdentifier::new(uri.clone()),
+ },
+ )
+ })
{
- let lsp_sender = self.connection.sender.clone();
- let req_queue = Arc::clone(&self.req_queue);
- let build_engine = Arc::clone(&self.build_engine);
- self.pool.execute(move || {
- build_engine
- .build(request, CancellationToken::none(), &req_queue, &lsp_sender)
+ self.spawn(move |server| {
+ server
+ .build_engine
+ .build(request, &server.req_queue, &server.connection.sender)
.unwrap_or_else(|why| {
error!("Build failed: {}", why);
BuildResult {
@@ -384,357 +462,228 @@ impl Server {
});
}
- let should_lint = { self.context.options.read().unwrap().chktex.on_open_and_save };
- if let Some(document) = self.workspace.get(&uri).filter(|_| should_lint) {
+ if let Some(document) = self
+ .workspace
+ .documents_by_uri
+ .get(&uri)
+ .filter(|_| self.workspace.environment.options.chktex.on_open_and_save)
+ .cloned()
+ {
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
- workspace: Arc::clone(&self.workspace),
+ workspace: self.workspace.clone(),
document,
})?;
};
Ok(())
}
- fn did_close(&self, params: DidCloseTextDocumentParams) -> Result<()> {
- let uri = params.text_document.uri.into();
- self.workspace.close(&uri);
+ fn did_close(&mut self, params: DidCloseTextDocumentParams) -> Result<()> {
+ self.workspace.close(&params.text_document.uri);
Ok(())
}
- fn feature_request<P>(&self, uri: Arc<Uri>, params: P) -> Option<FeatureRequest<P>> {
- Some(FeatureRequest {
- context: Arc::clone(&self.context),
+ fn feature_request<P>(&self, uri: Arc<Url>, params: P) -> FeatureRequest<P> {
+ FeatureRequest {
params,
- workspace: Arc::clone(&self.workspace),
- subset: self.workspace.subset(uri)?,
- })
- }
-
- fn send_feature_error(&self, id: RequestId) -> Result<()> {
- let resp = lsp_server::Response::new_err(
- id,
- ErrorCode::InternalError as i32,
- "unknown document URI".to_string(),
- );
- self.connection.sender.send(resp.into())?;
- Ok(())
+ workspace: self.workspace.slice(&uri),
+ uri,
+ }
}
fn handle_feature_request<P, R, H>(
&self,
id: RequestId,
params: P,
- uri: Arc<Uri>,
- token: &Arc<CancellationToken>,
+ uri: Arc<Url>,
handler: H,
) -> Result<()>
where
P: Send + 'static,
R: Serialize,
- H: FnOnce(FeatureRequest<P>, &CancellationToken) -> R + Send + 'static,
+ H: FnOnce(FeatureRequest<P>) -> R + Send + 'static,
{
- match self.feature_request(uri, params) {
- Some(req) => {
- let sender = self.connection.sender.clone();
- let token = Arc::clone(token);
- self.pool.execute(move || {
- let result = handler(req, &token);
- if token.is_canceled() {
- sender.send(cancel_response(id).into()).unwrap();
- } else {
- sender
- .send(lsp_server::Response::new_ok(id, result).into())
- .unwrap();
- }
- });
- }
- None => {
- self.send_feature_error(id)?;
- }
- };
+ self.spawn(move |server| {
+ let request = server.feature_request(uri, params);
+ let result = handler(request);
+ server
+ .connection
+ .sender
+ .send(lsp_server::Response::new_ok(id, result).into())
+ .unwrap();
+ });
+
Ok(())
}
- fn document_link(
- &self,
- id: RequestId,
- params: DocumentLinkParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(id, params, uri, token, find_document_links)?;
+ fn document_link(&self, id: RequestId, params: DocumentLinkParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, find_document_links)?;
Ok(())
}
- fn document_symbols(
- &self,
- id: RequestId,
- params: DocumentSymbolParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(id, params, uri, token, find_document_symbols)?;
+ fn document_symbols(&self, id: RequestId, params: DocumentSymbolParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, find_document_symbols)?;
Ok(())
}
- fn workspace_symbols(
- &self,
- id: RequestId,
- params: WorkspaceSymbolParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let sender = self.connection.sender.clone();
- let workspace = Arc::clone(&self.workspace);
- let token = Arc::clone(token);
- self.pool.execute(move || {
- let result = find_workspace_symbols(workspace.as_ref(), &params, &token);
- if token.is_canceled() {
- sender.send(cancel_response(id).into()).unwrap();
- } else {
- sender
- .send(lsp_server::Response::new_ok(id, result).into())
- .unwrap();
- }
+ fn workspace_symbols(&self, id: RequestId, params: WorkspaceSymbolParams) -> Result<()> {
+ self.spawn(move |server| {
+ let result = find_workspace_symbols(&server.workspace, &params);
+ server
+ .connection
+ .sender
+ .send(lsp_server::Response::new_ok(id, result).into())
+ .unwrap();
});
Ok(())
}
#[cfg(feature = "completion")]
- fn completion(
- &self,
- id: RequestId,
- params: CompletionParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(
- params
- .text_document_position
- .text_document
- .uri
- .clone()
- .into(),
- );
+ fn completion(&self, id: RequestId, params: CompletionParams) -> Result<()> {
+ let uri = Arc::new(params.text_document_position.text_document.uri.clone());
self.build_engine
.positions_by_uri
.insert(Arc::clone(&uri), params.text_document_position.position);
- self.handle_feature_request(id, params, uri, token, crate::features::complete)?;
+ self.handle_feature_request(id, params, uri, crate::features::complete)?;
Ok(())
}
#[cfg(feature = "completion")]
- fn completion_resolve(
- &self,
- id: RequestId,
- mut item: CompletionItem,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let sender = self.connection.sender.clone();
- let token = Arc::clone(token);
- let workspace = Arc::clone(&self.workspace);
- self.pool.execute(move || {
+ fn completion_resolve(&self, id: RequestId, mut item: CompletionItem) -> Result<()> {
+ use rowan::ast::AstNode;
+
+ use crate::{
+ citation, component_db::COMPONENT_DATABASE, features::CompletionItemData,
+ syntax::bibtex,
+ };
+
+ self.spawn(move |server| {
match serde_json::from_value(item.data.clone().unwrap()).unwrap() {
- crate::features::CompletionItemData::Package
- | crate::features::CompletionItemData::Class => {
+ CompletionItemData::Package | CompletionItemData::Class => {
item.documentation = COMPONENT_DATABASE
.documentation(&item.label)
.map(Documentation::MarkupContent);
}
- #[cfg(feature = "citation")]
- crate::features::CompletionItemData::Citation { uri, key } => {
- if let Some(document) = workspace.get(&uri) {
+ CompletionItemData::Citation { uri, key } => {
+ if let Some(document) = server.workspace.documents_by_uri.get(&uri) {
if let Some(data) = document.data.as_bibtex() {
- let markup = crate::citation::render_citation(&data.root, &key);
- item.documentation = markup.map(Documentation::MarkupContent);
+ let root = bibtex::SyntaxNode::new_root(data.green.clone());
+ item.documentation = bibtex::Root::cast(root)
+ .and_then(|root| root.find_entry(&key))
+ .and_then(|entry| citation::render(&entry))
+ .map(|value| {
+ Documentation::MarkupContent(MarkupContent {
+ kind: MarkupKind::Markdown,
+ value,
+ })
+ });
}
}
}
_ => {}
};
- drop(workspace);
- if token.is_canceled() {
- sender.send(cancel_response(id).into()).unwrap();
- } else {
- sender
- .send(lsp_server::Response::new_ok(id, item).into())
- .unwrap();
- }
+ server
+ .connection
+ .sender
+ .send(lsp_server::Response::new_ok(id, item).into())
+ .unwrap();
});
Ok(())
}
- fn folding_range(
- &self,
- id: RequestId,
- params: FoldingRangeParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(id, params, uri, token, find_foldings)?;
+ fn folding_range(&self, id: RequestId, params: FoldingRangeParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, find_foldings)?;
Ok(())
}
- fn references(
- &self,
- id: RequestId,
- params: ReferenceParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(
- params
- .text_document_position
- .text_document
- .uri
- .clone()
- .into(),
- );
- self.handle_feature_request(id, params, uri, token, find_all_references)?;
+ fn references(&self, id: RequestId, params: ReferenceParams) -> Result<()> {
+ let uri = Arc::new(params.text_document_position.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, find_all_references)?;
Ok(())
}
- fn hover(
- &self,
- id: RequestId,
- params: HoverParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
+ fn hover(&self, id: RequestId, params: HoverParams) -> Result<()> {
let uri = Arc::new(
params
.text_document_position_params
.text_document
.uri
- .clone()
- .into(),
+ .clone(),
);
self.build_engine.positions_by_uri.insert(
Arc::clone(&uri),
params.text_document_position_params.position,
);
- self.handle_feature_request(id, params, uri, token, find_hover)?;
+ self.handle_feature_request(id, params, uri, find_hover)?;
Ok(())
}
- fn goto_definition(
- &self,
- id: RequestId,
- params: GotoDefinitionParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
+ fn goto_definition(&self, id: RequestId, params: GotoDefinitionParams) -> Result<()> {
let uri = Arc::new(
params
.text_document_position_params
.text_document
.uri
- .clone()
- .into(),
+ .clone(),
);
- self.handle_feature_request(id, params, uri, token, goto_definition)?;
+ self.handle_feature_request(id, params, uri, goto_definition)?;
Ok(())
}
- fn prepare_rename(
- &self,
- id: RequestId,
- params: TextDocumentPositionParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(id, params, uri, token, prepare_rename_all)?;
+ fn prepare_rename(&self, id: RequestId, params: TextDocumentPositionParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, prepare_rename_all)?;
Ok(())
}
- fn rename(
- &self,
- id: RequestId,
- params: RenameParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(
- params
- .text_document_position
- .text_document
- .uri
- .clone()
- .into(),
- );
- self.handle_feature_request(id, params, uri, token, rename_all)?;
+ fn rename(&self, id: RequestId, params: RenameParams) -> Result<()> {
+ let uri = Arc::new(params.text_document_position.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, rename_all)?;
Ok(())
}
- fn document_highlight(
- &self,
- id: RequestId,
- params: DocumentHighlightParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
+ fn document_highlight(&self, id: RequestId, params: DocumentHighlightParams) -> Result<()> {
let uri = Arc::new(
params
.text_document_position_params
.text_document
.uri
- .clone()
- .into(),
+ .clone(),
);
- self.handle_feature_request(id, params, uri, token, find_document_highlights)?;
+ self.handle_feature_request(id, params, uri, find_document_highlights)?;
Ok(())
}
- fn formatting(
- &self,
- id: RequestId,
- params: DocumentFormattingParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(id, params, uri, token, format_source_code)?;
+ fn formatting(&self, id: RequestId, params: DocumentFormattingParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, format_source_code)?;
Ok(())
}
- #[cfg(feature = "semantic")]
- fn semantic_tokens_range(
- &self,
- id: RequestId,
- params: SemanticTokensRangeParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(
- id,
- params,
- uri,
- token,
- crate::features::find_semantic_tokens_range,
- )?;
- Ok(())
- }
-
- #[cfg(not(feature = "semantic"))]
fn semantic_tokens_range(
&self,
_id: RequestId,
_params: SemanticTokensRangeParams,
- _token: &Arc<CancellationToken>,
) -> Result<()> {
Ok(())
}
- fn build(
- &self,
- id: RequestId,
- params: BuildParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
+ fn build(&self, id: RequestId, params: BuildParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
let lsp_sender = self.connection.sender.clone();
let req_queue = Arc::clone(&self.req_queue);
let build_engine = Arc::clone(&self.build_engine);
- self.handle_feature_request(id, params, uri, token, move |request, token| {
+ self.handle_feature_request(id, params, uri, move |request| {
build_engine
- .build(request, token, &req_queue, &lsp_sender)
+ .build(request, &req_queue, &lsp_sender)
.unwrap_or_else(|why| {
error!("Build failed: {}", why);
BuildResult {
@@ -745,104 +694,126 @@ impl Server {
Ok(())
}
- fn forward_search(
- &self,
- id: RequestId,
- params: TextDocumentPositionParams,
- token: &Arc<CancellationToken>,
- ) -> Result<()> {
- let uri = Arc::new(params.text_document.uri.clone().into());
- self.handle_feature_request(id, params, uri, token, |req, token| {
- crate::features::execute_forward_search(req, token).unwrap_or_else(|| {
- ForwardSearchResult {
- status: ForwardSearchStatus::ERROR,
- }
+ fn forward_search(&self, id: RequestId, params: TextDocumentPositionParams) -> Result<()> {
+ let uri = Arc::new(params.text_document.uri.clone());
+ self.handle_feature_request(id, params, uri, |req| {
+ crate::features::execute_forward_search(req).unwrap_or(ForwardSearchResult {
+ status: ForwardSearchStatus::ERROR,
})
})?;
Ok(())
}
- fn process_messages(&self) -> Result<()> {
- for msg in &self.connection.receiver {
- match msg {
- Message::Request(request) => {
- if self.connection.handle_shutdown(&request)? {
- return Ok(());
- }
+ fn reparse_all(&mut self) -> Result<()> {
+ for document in self
+ .workspace
+ .documents_by_uri
+ .values()
+ .cloned()
+ .collect::<Vec<_>>()
+ {
+ self.workspace.open(
+ Arc::clone(&document.uri),
+ document.text.clone(),
+ document.data.language(),
+ )?;
+ }
- let token = self.register_incoming_request(request.id.clone());
- if let Some(response) = RequestDispatcher::new(request)
- .on::<DocumentLinkRequest, _>(|id, params| {
- self.document_link(id, params, &token)
- })?
- .on::<FoldingRangeRequest, _>(|id, params| {
- self.folding_range(id, params, &token)
- })?
- .on::<References, _>(|id, params| self.references(id, params, &token))?
- .on::<HoverRequest, _>(|id, params| self.hover(id, params, &token))?
- .on::<DocumentSymbolRequest, _>(|id, params| {
- self.document_symbols(id, params, &token)
- })?
- .on::<WorkspaceSymbol, _>(|id, params| {
- self.workspace_symbols(id, params, &token)
- })?
- .on::<Completion, _>(|id, params| {
- #[cfg(feature = "completion")]
- self.completion(id, params, &token)?;
- Ok(())
- })?
- .on::<ResolveCompletionItem, _>(|id, params| {
- #[cfg(feature = "completion")]
- self.completion_resolve(id, params, &token)?;
- Ok(())
- })?
- .on::<GotoDefinition, _>(|id, params| {
- self.goto_definition(id, params, &token)
- })?
- .on::<PrepareRenameRequest, _>(|id, params| {
- self.prepare_rename(id, params, &token)
- })?
- .on::<Rename, _>(|id, params| self.rename(id, params, &token))?
- .on::<DocumentHighlightRequest, _>(|id, params| {
- self.document_highlight(id, params, &token)
- })?
- .on::<Formatting, _>(|id, params| self.formatting(id, params, &token))?
- .on::<BuildRequest, _>(|id, params| self.build(id, params, &token))?
- .on::<ForwardSearchRequest, _>(|id, params| {
- self.forward_search(id, params, &token)
- })?
- .on::<SemanticTokensRangeRequest, _>(|id, params| {
- self.semantic_tokens_range(id, params, &token)
- })?
- .default()
- {
- self.connection.sender.send(response.into())?;
- }
- }
- Message::Notification(notification) => {
- NotificationDispatcher::new(notification)
- .on::<Cancel, _>(|params| self.cancel(params))?
- .on::<DidChangeConfiguration, _>(|params| {
- self.did_change_configuration(params)
- })?
- .on::<DidOpenTextDocument, _>(|params| self.did_open(params))?
- .on::<DidChangeTextDocument, _>(|params| self.did_change(params))?
- .on::<DidSaveTextDocument, _>(|params| self.did_save(params))?
- .on::<DidCloseTextDocument, _>(|params| self.did_close(params))?
- .default();
- }
- Message::Response(response) => {
- let mut req_queue = self.req_queue.lock().unwrap();
- let data = req_queue.outgoing.complete(response.id);
- let result = match response.error {
- Some(error) => Err(error),
- None => Ok(response.result.unwrap_or_default()),
+ Ok(())
+ }
+
+ fn process_messages(&mut self) -> Result<()> {
+ loop {
+ crossbeam_channel::select! {
+ recv(&self.connection.receiver) -> msg => {
+ match msg? {
+ Message::Request(request) => {
+ if self.connection.handle_shutdown(&request)? {
+ return Ok(());
+ }
+
+ self.register_incoming_request(request.id.clone());
+ if let Some(response) = RequestDispatcher::new(request)
+ .on::<DocumentLinkRequest, _>(|id, params| self.document_link(id, params))?
+ .on::<FoldingRangeRequest, _>(|id, params| self.folding_range(id, params))?
+ .on::<References, _>(|id, params| self.references(id, params))?
+ .on::<HoverRequest, _>(|id, params| self.hover(id, params))?
+ .on::<DocumentSymbolRequest, _>(|id, params| {
+ self.document_symbols(id, params)
+ })?
+ .on::<WorkspaceSymbol, _>(|id, params| self.workspace_symbols(id, params))?
+ .on::<Completion, _>(|id, params| {
+ #[cfg(feature = "completion")]
+ self.completion(id, params)?;
+ Ok(())
+ })?
+ .on::<ResolveCompletionItem, _>(|id, params| {
+ #[cfg(feature = "completion")]
+ self.completion_resolve(id, params)?;
+ Ok(())
+ })?
+ .on::<GotoDefinition, _>(|id, params| self.goto_definition(id, params))?
+ .on::<PrepareRenameRequest, _>(|id, params| {
+ self.prepare_rename(id, params)
+ })?
+ .on::<Rename, _>(|id, params| self.rename(id, params))?
+ .on::<DocumentHighlightRequest, _>(|id, params| {
+ self.document_highlight(id, params)
+ })?
+ .on::<Formatting, _>(|id, params| self.formatting(id, params))?
+ .on::<BuildRequest, _>(|id, params| self.build(id, params))?
+ .on::<ForwardSearchRequest, _>(|id, params| {
+ self.forward_search(id, params)
+ })?
+ .on::<SemanticTokensRangeRequest, _>(|id, params| {
+ self.semantic_tokens_range(id, params)
+ })?
+ .default()
+ {
+ self.connection.sender.send(response.into())?;
+ }
+ }
+ Message::Notification(notification) => {
+ NotificationDispatcher::new(notification)
+ .on::<Cancel, _>(|params| self.cancel(params))?
+ .on::<DidChangeConfiguration, _>(|params| {
+ self.did_change_configuration(params)
+ })?
+ .on::<DidChangeWatchedFiles, _>(|params| {
+ self.did_change_watched_files(params)
+ })?
+ .on::<DidOpenTextDocument, _>(|params| self.did_open(params))?
+ .on::<DidChangeTextDocument, _>(|params| self.did_change(params))?
+ .on::<DidSaveTextDocument, _>(|params| self.did_save(params))?
+ .on::<DidCloseTextDocument, _>(|params| self.did_close(params))?
+ .default();
+ }
+ Message::Response(response) => {
+ let mut req_queue = self.req_queue.lock().unwrap();
+ if let Some(data) = req_queue.outgoing.complete(response.id) {
+ let result = match response.error {
+ Some(error) => Err(error),
+ None => Ok(response.result.unwrap_or_default()),
+ };
+ data.sender.send(result)?;
+ }
+ }
+ };
+ },
+ recv(&self.internal_rx) -> msg => {
+ match msg? {
+ InternalMessage::SetDistro(distro) => {
+ self.workspace.environment.resolver = Arc::new(distro.resolver);
+ self.reparse_all()?;
+ }
+ InternalMessage::SetOptions(options) => {
+ self.workspace.environment.options = Arc::new(options);
+ self.reparse_all()?;
+ }
};
- data.sender.send(result)?;
}
- }
+ };
}
- Ok(())
}
pub fn run(mut self) -> Result<()> {
@@ -850,47 +821,20 @@ impl Server {
self.process_messages()?;
drop(self.static_debouncer);
drop(self.chktex_debouncer);
- self.pool.join();
+ self.pool.lock().unwrap().join();
Ok(())
}
}
-fn pull_and_reparse_all(
- req_queue: Arc<Mutex<lsp_server::ReqQueue<IncomingData, req_queue::OutgoingData>>>,
- sender: Sender<Message>,
- context: Arc<ServerContext>,
- workspace: Arc<dyn Workspace>,
-) {
- let client_capabilities = { context.client_capabilities.lock().unwrap().clone() };
- pull_config(&req_queue, &sender, &context.options, &client_capabilities);
- if let Some(path) = { context.options.read().unwrap().aux_directory.clone() } {
- let _ = workspace.watch(path, RecursiveMode::NonRecursive);
- }
-
- reparse_all(workspace.as_ref());
-}
-
-fn reparse_all(workspace: &dyn Workspace) {
- for document in workspace.documents() {
- workspace.open(
- Arc::clone(&document.uri),
- document.text.clone(),
- document.language(),
- WorkspaceSource::Client,
- );
- }
-}
-
fn create_static_debouncer(
manager: Arc<Mutex<DiagnosticsManager>>,
conn: &Connection,
- context: Arc<ServerContext>,
) -> DiagnosticsDebouncer {
let sender = conn.sender.clone();
- DiagnosticsDebouncer::launch(context, move |workspace, document| {
+ DiagnosticsDebouncer::launch(move |workspace, document| {
let mut manager = manager.lock().unwrap();
- manager.update_static(workspace.as_ref(), Arc::clone(&document.uri));
- if let Err(why) = publish_diagnostics(&sender, workspace.as_ref(), &manager) {
+ manager.update_static(&workspace, Arc::clone(&document.uri));
+ if let Err(why) = publish_diagnostics(&sender, &workspace, &manager) {
warn!("Failed to publish diagnostics: {}", why);
}
})
@@ -899,14 +843,16 @@ fn create_static_debouncer(
fn create_chktex_debouncer(
manager: Arc<Mutex<DiagnosticsManager>>,
conn: &Connection,
- context: Arc<ServerContext>,
) -> DiagnosticsDebouncer {
let sender = conn.sender.clone();
- DiagnosticsDebouncer::launch(Arc::clone(&context), move |workspace, document| {
- let options = { context.options.read().unwrap().clone() };
+ DiagnosticsDebouncer::launch(move |workspace, document| {
let mut manager = manager.lock().unwrap();
- manager.update_chktex(workspace.as_ref(), Arc::clone(&document.uri), &options);
- if let Err(why) = publish_diagnostics(&sender, workspace.as_ref(), &manager) {
+ manager.update_chktex(
+ &workspace,
+ Arc::clone(&document.uri),
+ &workspace.environment.options,
+ );
+ if let Err(why) = publish_diagnostics(&sender, &workspace, &manager) {
warn!("Failed to publish diagnostics: {}", why);
}
})
@@ -914,15 +860,15 @@ fn create_chktex_debouncer(
fn publish_diagnostics(
sender: &Sender<lsp_server::Message>,
- workspace: &dyn Workspace,
+ workspace: &Workspace,
diag_manager: &DiagnosticsManager,
) -> Result<()> {
- for document in workspace.documents() {
+ for document in workspace.documents_by_uri.values() {
let diagnostics = diag_manager.publish(Arc::clone(&document.uri));
send_notification::<PublishDiagnostics>(
sender,
PublishDiagnosticsParams {
- uri: document.uri.as_ref().clone().into(),
+ uri: document.uri.as_ref().clone(),
version: None,
diagnostics,
},
@@ -931,12 +877,19 @@ fn publish_diagnostics(
Ok(())
}
-fn cancel_response(id: RequestId) -> lsp_server::Response {
- lsp_server::Response::new_err(
- id,
- ErrorCode::RequestCanceled as i32,
- "canceled by client".to_string(),
- )
+fn apply_document_edit(old_text: &mut String, changes: Vec<TextDocumentContentChangeEvent>) {
+ for change in changes {
+ let line_index = LineIndex::new(old_text);
+ match change.range {
+ Some(range) => {
+ let range = std::ops::Range::<usize>::from(line_index.offset_lsp_range(range));
+ old_text.replace_range(range, &change.text);
+ }
+ None => {
+ *old_text = change.text;
+ }
+ };
+ }
}
struct BuildRequest;