Diffstat (limited to 'support/texlab/src/features')
-rw-r--r--  support/texlab/src/features/build.rs | 335
-rw-r--r--  support/texlab/src/features/build/progress.rs | 54
-rw-r--r--  support/texlab/src/features/completion.rs | 599
-rw-r--r--  support/texlab/src/features/completion/acronym_ref.rs | 25
-rw-r--r--  support/texlab/src/features/completion/argument.rs | 27
-rw-r--r--  support/texlab/src/features/completion/begin_command.rs | 18
-rw-r--r--  support/texlab/src/features/completion/begin_snippet.rs | 9
-rw-r--r--  support/texlab/src/features/completion/builder.rs | 784
-rw-r--r--  support/texlab/src/features/completion/citation.rs | 82
-rw-r--r--  support/texlab/src/features/completion/color.rs | 19
-rw-r--r--  support/texlab/src/features/completion/color_model.rs | 20
-rw-r--r--  support/texlab/src/features/completion/component_command.rs | 28
-rw-r--r--  support/texlab/src/features/completion/component_environment.rs | 22
-rw-r--r--  support/texlab/src/features/completion/entry_type.rs | 22
-rw-r--r--  support/texlab/src/features/completion/field.rs | 19
-rw-r--r--  support/texlab/src/features/completion/glossary_ref.rs | 27
-rw-r--r--  support/texlab/src/features/completion/import.rs | 57
-rw-r--r--  support/texlab/src/features/completion/include.rs | 78
-rw-r--r--  support/texlab/src/features/completion/label.rs | 112
-rw-r--r--  support/texlab/src/features/completion/theorem.rs | 26
-rw-r--r--  support/texlab/src/features/completion/tikz_library.rs | 24
-rw-r--r--  support/texlab/src/features/completion/types.rs | 151
-rw-r--r--  support/texlab/src/features/completion/user_command.rs | 34
-rw-r--r--  support/texlab/src/features/completion/user_environment.rs | 26
-rw-r--r--  support/texlab/src/features/completion/util.rs | 64
-rw-r--r--  support/texlab/src/features/cursor.rs | 333
-rw-r--r--  support/texlab/src/features/definition.rs | 54
-rw-r--r--  support/texlab/src/features/definition/command.rs | 20
-rw-r--r--  support/texlab/src/features/definition/document.rs | 50
-rw-r--r--  support/texlab/src/features/definition/entry.rs | 24
-rw-r--r--  support/texlab/src/features/definition/label.rs | 37
-rw-r--r--  support/texlab/src/features/definition/string.rs | 33
-rw-r--r--  support/texlab/src/features/execute_command.rs | 103
-rw-r--r--  support/texlab/src/features/folding.rs | 59
-rw-r--r--  support/texlab/src/features/formatting.rs | 35
-rw-r--r--  support/texlab/src/features/formatting/bibtex_internal.rs | 56
-rw-r--r--  support/texlab/src/features/formatting/latexindent.rs | 112
-rw-r--r--  support/texlab/src/features/forward_search.rs | 184
-rw-r--r--  support/texlab/src/features/highlight.rs | 14
-rw-r--r--  support/texlab/src/features/highlight/label.rs | 93
-rw-r--r--  support/texlab/src/features/hover.rs | 39
-rw-r--r--  support/texlab/src/features/hover/citation.rs | 25
-rw-r--r--  support/texlab/src/features/hover/component.rs | 39
-rw-r--r--  support/texlab/src/features/hover/entry_type.rs | 11
-rw-r--r--  support/texlab/src/features/hover/field.rs | 11
-rw-r--r--  support/texlab/src/features/hover/label.rs | 24
-rw-r--r--  support/texlab/src/features/hover/string_ref.rs | 15
-rw-r--r--  support/texlab/src/features/inlay_hint.rs | 45
-rw-r--r--  support/texlab/src/features/inlay_hint/label.rs | 31
-rw-r--r--  support/texlab/src/features/link.rs | 54
-rw-r--r--  support/texlab/src/features/link/include.rs | 42
-rw-r--r--  support/texlab/src/features/lsp_kinds.rs | 94
-rw-r--r--  support/texlab/src/features/reference.rs | 45
-rw-r--r--  support/texlab/src/features/reference/entry.rs | 35
-rw-r--r--  support/texlab/src/features/reference/label.rs | 31
-rw-r--r--  support/texlab/src/features/reference/string.rs | 24
-rw-r--r--  support/texlab/src/features/rename.rs | 58
-rw-r--r--  support/texlab/src/features/rename/command.rs | 30
-rw-r--r--  support/texlab/src/features/rename/entry.rs | 40
-rw-r--r--  support/texlab/src/features/rename/label.rs | 34
-rw-r--r--  support/texlab/src/features/symbol.rs | 82
-rw-r--r--  support/texlab/src/features/symbol/bibtex.rs | 137
-rw-r--r--  support/texlab/src/features/symbol/latex.rs | 236
-rw-r--r--  support/texlab/src/features/symbol/project_order.rs | 307
-rw-r--r--  support/texlab/src/features/symbol/types.rs | 15
-rw-r--r--  support/texlab/src/features/workspace_command.rs | 103
66 files changed, 2434 insertions, 3042 deletions
diff --git a/support/texlab/src/features/build.rs b/support/texlab/src/features/build.rs
index d961052061..645f1e8e0e 100644
--- a/support/texlab/src/features/build.rs
+++ b/support/texlab/src/features/build.rs
@@ -1,28 +1,18 @@
+mod progress;
+
use std::{
io::{BufRead, BufReader, Read},
- path::Path,
- process::{Command, Stdio},
- sync::{Arc, Mutex},
+ path::{Path, PathBuf},
+ process::Stdio,
thread::{self, JoinHandle},
};
-use anyhow::Result;
-use crossbeam_channel::{Receiver, Sender};
-use dashmap::DashMap;
use encoding_rs_io::DecodeReaderBytesBuilder;
-use lsp_types::{
- notification::{LogMessage, Progress},
- LogMessageParams, NumberOrString, Position, ProgressParams, ProgressParamsValue,
- TextDocumentIdentifier, TextDocumentPositionParams, Url, WorkDoneProgress,
- WorkDoneProgressBegin, WorkDoneProgressCreateParams, WorkDoneProgressEnd,
-};
+use lsp_types::{notification::LogMessage, LogMessageParams, TextDocumentIdentifier, Url};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
-use uuid::Uuid;
-use crate::{client, req_queue::ReqQueue, ClientCapabilitiesExt, DocumentLanguage};
-
-use super::{forward_search, FeatureRequest};
+use crate::{client::LspClient, db::Workspace, util::capabilities::ClientCapabilitiesExt, Db};
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -30,6 +20,12 @@ pub struct BuildParams {
pub text_document: TextDocumentIdentifier,
}
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct BuildResult {
+ pub status: BuildStatus,
+}
+
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize_repr, Deserialize_repr)]
#[repr(i32)]
pub enum BuildStatus {
@@ -39,217 +35,132 @@ pub enum BuildStatus {
CANCELLED = 3,
}
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct BuildResult {
- pub status: BuildStatus,
-}
-
-struct ProgressReporter<'a> {
- supports_progress: bool,
- req_queue: &'a Mutex<ReqQueue>,
- lsp_sender: Sender<lsp_server::Message>,
- token: &'a str,
+#[derive(Debug)]
+pub struct Command {
+ uri: Url,
+ progress: bool,
+ executable: String,
+ args: Vec<String>,
+ working_dir: PathBuf,
+ client: LspClient,
}
-impl<'a> ProgressReporter<'a> {
- pub fn start(&self, uri: &Url) -> Result<()> {
- if self.supports_progress {
- client::send_request::<lsp_types::request::WorkDoneProgressCreate>(
- self.req_queue,
- &self.lsp_sender,
- WorkDoneProgressCreateParams {
- token: NumberOrString::String(self.token.to_string()),
- },
- )?;
- client::send_notification::<Progress>(
- &self.lsp_sender,
- ProgressParams {
- token: NumberOrString::String(self.token.to_string()),
- value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
- WorkDoneProgressBegin {
- title: "Building".to_string(),
- message: Some(uri.as_str().to_string()),
- cancellable: Some(false),
- percentage: None,
- },
- )),
- },
- )?;
+impl Command {
+ pub fn new(db: &dyn Db, uri: Url, client: LspClient) -> Option<Self> {
+ let workspace = Workspace::get(db);
+ let document = match workspace.lookup_uri(db, &uri) {
+ Some(child) => workspace
+ .parents(db, child)
+ .iter()
+ .next()
+ .copied()
+ .unwrap_or(child),
+ None => return None,
};
- Ok(())
- }
-}
-impl<'a> Drop for ProgressReporter<'a> {
- fn drop(&mut self) {
- if self.supports_progress {
- drop(client::send_notification::<Progress>(
- &self.lsp_sender,
- ProgressParams {
- token: NumberOrString::String(self.token.to_string()),
- value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
- WorkDoneProgressEnd { message: None },
- )),
- },
- ));
+ if document.location(db).path(db).is_none() {
+ log::warn!("Document {uri} cannot be compiled; skipping...");
+ return None;
}
- }
-}
-
-#[derive(Default)]
-pub struct BuildEngine {
- lock: Mutex<()>,
- pub positions_by_uri: DashMap<Arc<Url>, Position>,
-}
-
-impl BuildEngine {
- pub fn build(
- &self,
- request: FeatureRequest<BuildParams>,
- req_queue: &Mutex<ReqQueue>,
- lsp_sender: &Sender<lsp_server::Message>,
- ) -> Result<BuildResult> {
- let lock = self.lock.lock().unwrap();
-
- let document = request
- .workspace
- .documents_by_uri
- .values()
- .find(|document| {
- if let Some(data) = document.data.as_latex() {
- data.extras.has_document_environment
- } else {
- false
- }
- })
- .unwrap_or_else(|| request.main_document());
-
- if document.data.language() != DocumentLanguage::Latex {
- return Ok(BuildResult {
- status: BuildStatus::SUCCESS,
- });
- }
-
- if document.uri.scheme() != "file" {
- return Ok(BuildResult {
- status: BuildStatus::FAILURE,
- });
- }
- let path = document.uri.to_file_path().unwrap();
-
- let supports_progress = request
- .workspace
- .environment
- .client_capabilities
- .has_work_done_progress_support();
-
- let token = format!("texlab-build-{}", Uuid::new_v4());
- let progress_reporter = ProgressReporter {
- supports_progress,
- req_queue,
- lsp_sender: lsp_sender.clone(),
- token: &token,
- };
- progress_reporter.start(&document.uri)?;
- let options = &request.workspace.environment.options;
-
- let build_dir = options
- .root_directory
- .as_ref()
- .map(AsRef::as_ref)
- .or_else(|| path.parent())
- .unwrap();
-
- let args: Vec<_> = options
- .build
+ let options = &workspace.options(db).build;
+ let executable = options.executable.0.clone();
+ let path = document.location(db).path(db).as_deref().unwrap();
+ let args = options
.args
+ .0
.iter()
- .map(|arg| replace_placeholder(arg.clone(), &path))
+ .map(|arg| replace_placeholder(arg, path))
.collect();
- let mut process = Command::new(&options.build.executable)
- .args(args)
- .stdin(Stdio::null())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .current_dir(build_dir)
- .spawn()?;
-
- let (exit_sender, exit_receiver) = crossbeam_channel::bounded(1);
- let log_handle = capture_output(&mut process, lsp_sender, exit_receiver);
- let success = process.wait().map(|status| status.success())?;
- exit_sender.send(())?;
- drop(exit_sender);
+ let working_dir = workspace
+ .working_dir(db, document.directory(db))
+ .path(db)
+ .clone()?;
+
+ Some(Self {
+ uri: document.location(db).uri(db).clone(),
+ progress: workspace
+ .client_capabilities(db)
+ .has_work_done_progress_support(),
+ executable,
+ args,
+ working_dir,
+ client,
+ })
+ }
- log_handle.join().unwrap();
- let status = if success {
- BuildStatus::SUCCESS
+ pub fn run(self) -> BuildStatus {
+ let reporter = if self.progress {
+ let inner = progress::Reporter::new(&self.client);
+ inner.start(&self.uri).expect("report progress");
+ Some(inner)
} else {
- BuildStatus::ERROR
+ None
};
- drop(progress_reporter);
- drop(lock);
-
- if options.build.forward_search_after {
- let request = FeatureRequest {
- params: TextDocumentPositionParams {
- position: self
- .positions_by_uri
- .get(&request.main_document().uri)
- .map(|guard| *guard)
- .unwrap_or_default(),
- text_document: TextDocumentIdentifier::new(request.uri.as_ref().clone()),
- },
- uri: request.uri,
- workspace: request.workspace,
- };
- forward_search::execute_forward_search(request);
- }
-
- Ok(BuildResult { status })
- }
-}
-
-fn capture_output(
- process: &mut std::process::Child,
- lsp_sender: &Sender<lsp_server::Message>,
- exit_receiver: Receiver<()>,
-) -> JoinHandle<()> {
- let (log_sender, log_receiver) = crossbeam_channel::unbounded();
- track_output(process.stdout.take().unwrap(), log_sender.clone());
- track_output(process.stderr.take().unwrap(), log_sender);
- let lsp_sender = lsp_sender.clone();
- thread::spawn(move || loop {
- crossbeam_channel::select! {
- recv(&log_receiver) -> message => {
- if let Ok(message) = message {
- client::send_notification::<LogMessage>(
- &lsp_sender,
- LogMessageParams {
- message,
- typ: lsp_types::MessageType::LOG,
- },
- )
- .unwrap();
- }
- },
- recv(&exit_receiver) -> _ => break,
+ let mut process = match std::process::Command::new(&self.executable)
+ .args(self.args)
+ .stdin(Stdio::null())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .current_dir(self.working_dir)
+ .spawn()
+ {
+ Ok(process) => process,
+ Err(_) => {
+ log::error!("Failed to spawn process: {:?}", self.executable);
+ return BuildStatus::FAILURE;
+ }
};
- })
-}
-fn replace_placeholder(arg: String, file: &Path) -> String {
- if arg.starts_with('"') || arg.ends_with('"') {
- arg
- } else {
- arg.replace("%f", &file.to_string_lossy())
+ let (line_sender, line_receiver) = flume::unbounded();
+ let (exit_sender, exit_receiver) = flume::unbounded();
+ track_output(process.stderr.take().unwrap(), line_sender.clone());
+ track_output(process.stdout.take().unwrap(), line_sender);
+ let client = self.client.clone();
+ let handle = std::thread::spawn(move || {
+ let typ = lsp_types::MessageType::LOG;
+
+ loop {
+ let done = flume::Selector::new()
+ .recv(&line_receiver, |line| match line {
+ Ok(message) => {
+ let params = LogMessageParams { message, typ };
+ client.send_notification::<LogMessage>(params).unwrap();
+ false
+ }
+ Err(_) => true,
+ })
+ .recv(&exit_receiver, |_| true)
+ .wait();
+
+ if done {
+ break;
+ }
+ }
+ });
+
+ let status = process.wait().map_or(BuildStatus::FAILURE, |result| {
+ if result.success() {
+ BuildStatus::SUCCESS
+ } else {
+ BuildStatus::ERROR
+ }
+ });
+
+ let _ = exit_sender.send(());
+ handle.join().unwrap();
+
+ drop(reporter);
+ status
}
}
-fn track_output(output: impl Read + Send + 'static, sender: Sender<String>) -> JoinHandle<()> {
+fn track_output(
+ output: impl Read + Send + 'static,
+ sender: flume::Sender<String>,
+) -> JoinHandle<()> {
let reader = BufReader::new(
DecodeReaderBytesBuilder::new()
.encoding(Some(encoding_rs::UTF_8))
@@ -264,3 +175,11 @@ fn track_output(output: impl Read + Send + 'static, sender: Sender<String>) -> J
}
})
}
+
+fn replace_placeholder(arg: &str, file: &Path) -> String {
+ if arg.starts_with('"') || arg.ends_with('"') {
+ arg.to_string()
+ } else {
+ arg.replace("%f", &file.to_string_lossy())
+ }
+}
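
The rewritten build command above boils down to a common pattern: spawn the compiler with piped stdout and stderr, forward every line of output to the client from background threads, and map the exit status to a BuildStatus. Below is a minimal, std-only sketch of that pattern under simplified assumptions: println! stands in for LspClient notifications, std::sync::mpsc stands in for flume, and run_build, stream_lines, "latexmk" and its flags are illustrative names rather than texlab's API.

    use std::io::{BufRead, BufReader, Read};
    use std::process::{Command, Stdio};
    use std::sync::mpsc::{self, Sender};
    use std::thread::{self, JoinHandle};

    // Forward each line of a child pipe to a channel; mirrors track_output above.
    fn stream_lines(output: impl Read + Send + 'static, sender: Sender<String>) -> JoinHandle<()> {
        thread::spawn(move || {
            for line in BufReader::new(output).lines().flatten() {
                let _ = sender.send(line);
            }
        })
    }

    // Spawn the compiler, stream its output, and map the exit code to a bool.
    fn run_build(executable: &str, args: &[String], working_dir: &str) -> bool {
        let mut process = match Command::new(executable)
            .args(args)
            .stdin(Stdio::null())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .current_dir(working_dir)
            .spawn()
        {
            Ok(process) => process,
            Err(_) => return false, // the compiler could not be started at all
        };

        let (sender, receiver) = mpsc::channel();
        let readers = [
            stream_lines(process.stdout.take().unwrap(), sender.clone()),
            stream_lines(process.stderr.take().unwrap(), sender),
        ];

        // Stand-in for sending window/logMessage notifications to the client.
        let logger = thread::spawn(move || {
            for line in receiver {
                println!("[build] {line}");
            }
        });

        let success = process.wait().map(|status| status.success()).unwrap_or(false);
        for reader in readers {
            reader.join().unwrap();
        }
        logger.join().unwrap();
        success
    }

    fn main() {
        // Hypothetical invocation; the executable name and flags are placeholders.
        let args = vec!["-pdf".to_string(), "main.tex".to_string()];
        println!("build succeeded: {}", run_build("latexmk", &args, "."));
    }

Because both reader threads drop their channel senders once the pipes close, the logging loop in this sketch ends on its own; the real code additionally uses an explicit exit channel so the forwarding loop stops as soon as the process has been waited on.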
diff --git a/support/texlab/src/features/build/progress.rs b/support/texlab/src/features/build/progress.rs
new file mode 100644
index 0000000000..6f235bebd6
--- /dev/null
+++ b/support/texlab/src/features/build/progress.rs
@@ -0,0 +1,54 @@
+use std::sync::atomic::{AtomicI32, Ordering};
+
+use anyhow::Result;
+use lsp_types::{
+ notification::Progress, request::WorkDoneProgressCreate, NumberOrString, ProgressParams,
+ ProgressParamsValue, Url, WorkDoneProgress, WorkDoneProgressBegin,
+ WorkDoneProgressCreateParams, WorkDoneProgressEnd,
+};
+
+use crate::client::LspClient;
+
+static NEXT_TOKEN: AtomicI32 = AtomicI32::new(1);
+
+pub struct Reporter<'a> {
+ client: &'a LspClient,
+ token: i32,
+}
+
+impl<'a> Reporter<'a> {
+ pub fn new(client: &'a LspClient) -> Self {
+ let token = NEXT_TOKEN.fetch_add(1, Ordering::SeqCst);
+ Self { client, token }
+ }
+
+ pub fn start(&self, uri: &Url) -> Result<()> {
+ self.client
+ .send_request::<WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
+ token: NumberOrString::Number(self.token),
+ })?;
+
+ self.client.send_notification::<Progress>(ProgressParams {
+ token: NumberOrString::Number(self.token),
+ value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
+ title: "Building".to_string(),
+ message: Some(uri.as_str().to_string()),
+ cancellable: Some(false),
+ percentage: None,
+ })),
+ })?;
+
+ Ok(())
+ }
+}
+
+impl<'a> Drop for Reporter<'a> {
+ fn drop(&mut self) {
+ let _ = self.client.send_notification::<Progress>(ProgressParams {
+ token: NumberOrString::Number(self.token),
+ value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
+ message: None,
+ })),
+ });
+ }
+}
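
Reporter is an RAII guard: start sends the work-done progress "begin" notification for a freshly allocated token, and Drop always sends the matching "end", so the client's progress indicator is cleared on every exit path of Command::run. Below is a small sketch of the same Drop-based pattern, with a hypothetical Client trait and plain strings standing in for LspClient and the LSP progress payloads.

    use std::sync::atomic::{AtomicI32, Ordering};

    static NEXT_TOKEN: AtomicI32 = AtomicI32::new(1);

    // Stand-in for LspClient; the real type sends LSP requests and notifications.
    trait Client {
        fn notify(&self, message: String);
    }

    struct Reporter<'a, C: Client> {
        client: &'a C,
        token: i32,
    }

    impl<'a, C: Client> Reporter<'a, C> {
        fn start(client: &'a C, uri: &str) -> Self {
            let token = NEXT_TOKEN.fetch_add(1, Ordering::SeqCst);
            client.notify(format!("progress begin: token={token}, building {uri}"));
            Self { client, token }
        }
    }

    impl<'a, C: Client> Drop for Reporter<'a, C> {
        // Runs on every exit path, so the progress bar never gets stuck.
        fn drop(&mut self) {
            self.client.notify(format!("progress end: token={}", self.token));
        }
    }

    struct StdoutClient;

    impl Client for StdoutClient {
        fn notify(&self, message: String) {
            println!("{message}");
        }
    }

    fn main() {
        let client = StdoutClient;
        {
            let _reporter = Reporter::start(&client, "file:///main.tex");
            // ... run the build here ...
        } // the "end" notification is sent here, even on an early return or panic
    }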
diff --git a/support/texlab/src/features/completion.rs b/support/texlab/src/features/completion.rs
index 3009b437e7..949b5552e5 100644
--- a/support/texlab/src/features/completion.rs
+++ b/support/texlab/src/features/completion.rs
@@ -1,6 +1,7 @@
mod acronym_ref;
mod argument;
-mod begin_command;
+mod begin_snippet;
+pub mod builder;
mod citation;
mod color;
mod color_model;
@@ -14,584 +15,36 @@ mod include;
mod label;
mod theorem;
mod tikz_library;
-mod types;
mod user_command;
mod user_environment;
-mod util;
-use std::borrow::Cow;
+use lsp_types::{CompletionList, Position, Url};
-use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
-use itertools::Itertools;
-use lsp_types::{
- CompletionItem, CompletionList, CompletionParams, CompletionTextEdit, Documentation,
- InsertTextFormat, MarkupContent, MarkupKind, TextEdit,
-};
-use rowan::{ast::AstNode, TextSize};
-use rustc_hash::FxHashSet;
-
-use crate::{
- syntax::{
- bibtex::{self},
- latex,
- },
- LineIndexExt,
-};
-
-use self::{
- acronym_ref::complete_acronyms,
- argument::complete_arguments,
- begin_command::complete_begin_command,
- citation::complete_citations,
- color::complete_colors,
- color_model::complete_color_models,
- component_command::complete_component_commands,
- component_environment::complete_component_environments,
- entry_type::complete_entry_types,
- field::complete_fields,
- glossary_ref::complete_glossary_entries,
- import::complete_imports,
- include::complete_includes,
- label::complete_labels,
- theorem::complete_theorem_environments,
- tikz_library::complete_tikz_libraries,
- types::{InternalCompletionItem, InternalCompletionItemData},
- user_command::complete_user_commands,
- user_environment::complete_user_environments,
- util::{adjust_kind, component_detail, image_documentation},
-};
-
-pub use self::types::CompletionItemData;
-
-use super::{
- cursor::{Cursor, CursorContext},
- lsp_kinds::Structure,
- FeatureRequest,
-};
+use crate::{features::completion::builder::CompletionBuilder, util::cursor::CursorContext, Db};
pub const COMPLETION_LIMIT: usize = 50;
-#[must_use]
-pub fn complete(request: FeatureRequest<CompletionParams>) -> Option<CompletionList> {
- let mut items = Vec::new();
- let context = CursorContext::new(request);
+pub fn complete(db: &dyn Db, uri: &Url, position: Position) -> Option<CompletionList> {
+ let context = CursorContext::new(db, uri, position, ())?;
+ let mut builder = CompletionBuilder::new(&context);
log::debug!("[Completion] Cursor: {:?}", context.cursor);
- complete_entry_types(&context, &mut items);
- complete_fields(&context, &mut items);
- complete_arguments(&context, &mut items);
- complete_citations(&context, &mut items);
- complete_imports(&context, &mut items);
- complete_colors(&context, &mut items);
- complete_color_models(&context, &mut items);
- complete_acronyms(&context, &mut items);
- complete_glossary_entries(&context, &mut items);
- complete_includes(&context, &mut items);
- complete_labels(&context, &mut items);
- complete_tikz_libraries(&context, &mut items);
- complete_component_environments(&context, &mut items);
- complete_theorem_environments(&context, &mut items);
- complete_user_environments(&context, &mut items);
- complete_begin_command(&context, &mut items);
- complete_component_commands(&context, &mut items);
- complete_user_commands(&context, &mut items);
-
- let mut items = dedup(items);
- preselect(&context, &mut items);
- score(&context, &mut items);
-
- items.sort_by(|a, b| {
- a.preselect
- .cmp(&b.preselect)
- .reverse()
- .then_with(|| a.score.cmp(&b.score).reverse())
- .then_with(|| a.data.label().cmp(b.data.label()))
- });
-
- let items: Vec<_> = items
- .into_iter()
- .take(COMPLETION_LIMIT)
- .filter(|item| item.score.is_some())
- .map(|item| convert_internal_items(&context, item))
- .enumerate()
- .map(|(i, item)| append_sort_text(item, i))
- .collect();
-
- let is_incomplete = if context
- .request
- .workspace
- .environment
- .client_info
- .as_ref()
- .as_ref()
- .map_or(false, |info| info.name.as_str() == "Visual Studio Code")
- {
- true
- } else {
- items.len() >= COMPLETION_LIMIT
- };
-
- Some(CompletionList {
- is_incomplete,
- items,
- })
-}
-
-fn dedup(items: Vec<InternalCompletionItem>) -> Vec<InternalCompletionItem> {
- let mut labels = FxHashSet::default();
- let mut insert = vec![false; items.len()];
- for (i, item) in items.iter().enumerate() {
- insert[i] = labels.insert(item.data.label());
- }
- items
- .into_iter()
- .enumerate()
- .filter(|(i, _)| insert[*i])
- .map(|(_, item)| item)
- .collect()
-}
-
-fn score(context: &CursorContext<CompletionParams>, items: &mut Vec<InternalCompletionItem>) {
- let pattern: Cow<str> = match &context.cursor {
- Cursor::Latex(token) if token.kind().is_command_name() => {
- if token.text_range().start() + TextSize::from(1) == context.offset {
- // Handle cases similar to this one correctly:
- // $\|$ % (| is the cursor)
- "\\".into()
- } else {
- token.text().trim_end().into()
- }
- }
- Cursor::Latex(token) if token.kind() == latex::WORD => {
- if let Some(key) = token.parent().and_then(latex::Key::cast) {
- key.words()
- .take_while(|word| word.text_range() != token.text_range())
- .chain(std::iter::once(token.clone()))
- .filter(|word| word.text_range().start() < context.offset)
- .join(" ")
- .into()
- } else {
- token.text().into()
- }
- }
- Cursor::Bibtex(token)
- if matches!(
- token.kind(),
- bibtex::TYPE
- | bibtex::NAME
- | bibtex::WORD
- | bibtex::COMMAND_NAME
- | bibtex::ACCENT_NAME
- ) =>
- {
- token.text().into()
- }
- Cursor::Latex(_) | Cursor::Bibtex(_) | Cursor::Nothing => "".into(),
- };
-
- let file_pattern = pattern.split('/').last().unwrap();
- let matcher = SkimMatcherV2::default().ignore_case();
- for item in items {
- item.score = match &item.data {
- InternalCompletionItemData::EntryType { ty } => {
- matcher.fuzzy_match(&ty.name, &pattern[1..])
- }
- InternalCompletionItemData::Field { field } => {
- matcher.fuzzy_match(&field.name, &pattern)
- }
- InternalCompletionItemData::Argument { name, .. } => {
- matcher.fuzzy_match(name, &pattern)
- }
- InternalCompletionItemData::BeginCommand => matcher.fuzzy_match("begin", &pattern[1..]),
- InternalCompletionItemData::Citation { key, .. } => matcher.fuzzy_match(key, &pattern),
- InternalCompletionItemData::ComponentCommand { name, .. } => {
- matcher.fuzzy_match(name, &pattern[1..])
- }
- InternalCompletionItemData::ComponentEnvironment { name, .. } => {
- matcher.fuzzy_match(name, &pattern)
- }
- InternalCompletionItemData::Class { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::Package { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::Color { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::ColorModel { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::Acronym { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::GlossaryEntry { name } => {
- matcher.fuzzy_match(name, &pattern)
- }
- InternalCompletionItemData::File { name } => matcher.fuzzy_match(name, file_pattern),
- InternalCompletionItemData::Directory { name } => {
- matcher.fuzzy_match(name, file_pattern)
- }
- InternalCompletionItemData::Label { name, .. } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::UserCommand { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::UserEnvironment { name } => {
- matcher.fuzzy_match(name, &pattern)
- }
- InternalCompletionItemData::PgfLibrary { name } => matcher.fuzzy_match(name, &pattern),
- InternalCompletionItemData::TikzLibrary { name } => matcher.fuzzy_match(name, &pattern),
- };
- }
-}
-
-fn preselect(
- context: &CursorContext<CompletionParams>,
- items: &mut [InternalCompletionItem],
-) -> Option<()> {
- let name = context.cursor.as_latex()?;
- let group = latex::CurlyGroupWord::cast(name.parent()?)?;
- let end = latex::End::cast(group.syntax().parent()?)?;
- let environment = latex::Environment::cast(end.syntax().parent()?)?;
- let name = environment.begin()?.name()?.key()?.to_string();
-
- for item in items {
- if item.data.label() == name {
- item.preselect = true;
- }
- }
- Some(())
-}
-
-fn convert_internal_items(
- context: &CursorContext<CompletionParams>,
- item: InternalCompletionItem,
-) -> CompletionItem {
- let range = context
- .request
- .main_document()
- .line_index
- .line_col_lsp_range(item.range);
-
- let mut new_item = match item.data {
- InternalCompletionItemData::EntryType { ty } => {
- let text_edit = TextEdit::new(range, (&ty.name).into());
- let kind = Structure::Entry(ty.category).completion_kind();
- CompletionItem {
- label: (&ty.name).into(),
- kind: Some(adjust_kind(&context.request, kind)),
- documentation: ty.documentation.as_ref().map(|doc| {
- Documentation::MarkupContent(MarkupContent {
- kind: MarkupKind::Markdown,
- value: doc.into(),
- })
- }),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- data: Some(serde_json::to_value(CompletionItemData::EntryType).unwrap()),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Field { field } => {
- let text_edit = TextEdit::new(range, (&field.name).into());
- CompletionItem {
- label: (&field.name).into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::Field.completion_kind(),
- )),
- documentation: Some(Documentation::MarkupContent(MarkupContent {
- kind: MarkupKind::Markdown,
- value: (&field.documentation).into(),
- })),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- data: Some(serde_json::to_value(CompletionItemData::FieldName).unwrap()),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Argument { name, image } => {
- let text_edit = TextEdit::new(range, name.into());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::Argument.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Argument).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- documentation: image
- .and_then(|image| image_documentation(&context.request, name, image)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::BeginCommand => {
- if context
- .request
- .workspace
- .environment
- .client_capabilities
- .text_document
- .as_ref()
- .and_then(|cap| cap.completion.as_ref())
- .and_then(|cap| cap.completion_item.as_ref())
- .and_then(|cap| cap.snippet_support)
- == Some(true)
- {
- let text_edit = TextEdit::new(range, "begin{$1}\n\t$0\n\\end{$1}".into());
- CompletionItem {
- kind: Some(adjust_kind(
- &context.request,
- Structure::Snippet.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::CommandSnippet).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- insert_text_format: Some(InsertTextFormat::SNIPPET),
- ..CompletionItem::new_simple("begin".into(), component_detail(&[]))
- }
- } else {
- let text_edit = TextEdit::new(range, "begin".to_string());
- CompletionItem {
- kind: Some(adjust_kind(
- &context.request,
- Structure::Command.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Command).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::new_simple("begin".to_string(), component_detail(&[]))
- }
- }
- }
- InternalCompletionItemData::Citation { uri, key, text, ty } => {
- let text_edit = TextEdit::new(range, key.to_string());
- CompletionItem {
- label: key.to_string(),
- kind: Some(adjust_kind(&context.request, ty.completion_kind())),
- filter_text: Some(text.clone()),
- sort_text: Some(text),
- data: Some(
- serde_json::to_value(CompletionItemData::Citation {
- uri: uri.as_ref().clone(),
- key: key.into(),
- })
- .unwrap(),
- ),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::ComponentCommand {
- name,
- image,
- glyph,
- file_names,
- } => {
- let detail = glyph.map_or_else(
- || component_detail(file_names),
- |glyph| format!("{}, {}", glyph, component_detail(file_names)),
- );
- let documentation =
- image.and_then(|img| image_documentation(&context.request, name, img));
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- kind: Some(adjust_kind(
- &context.request,
- Structure::Command.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Command).unwrap()),
- documentation,
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::new_simple(name.to_string(), detail)
- }
- }
- InternalCompletionItemData::ComponentEnvironment { name, file_names } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- kind: Some(adjust_kind(
- &context.request,
- Structure::Environment.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Environment).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::new_simple(name.to_string(), component_detail(file_names))
- }
- }
- InternalCompletionItemData::Class { name } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::Class.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Class).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Package { name } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::Package.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Package).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Color { name } => {
- let text_edit = TextEdit::new(range, name.into());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::Color.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Color).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::ColorModel { name } => {
- let text_edit = TextEdit::new(range, name.into());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::ColorModel.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::ColorModel).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Acronym { name } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name,
- kind: Some(adjust_kind(
- &context.request,
- Structure::GlossaryEntry.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Acronym).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::GlossaryEntry { name } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name,
- kind: Some(adjust_kind(
- &context.request,
- Structure::GlossaryEntry.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::GlossaryEntry).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::File { name } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::File.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::File).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Directory { name } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::Folder.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Folder).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::Label {
- name,
- kind,
- header,
- footer,
- text,
- } => {
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- label: name,
- kind: Some(adjust_kind(&context.request, kind.completion_kind())),
- detail: header,
- documentation: footer.map(Documentation::String),
- sort_text: Some(text.clone()),
- filter_text: Some(text),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- data: Some(serde_json::to_value(CompletionItemData::Label).unwrap()),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::UserCommand { name } => {
- let detail = "user-defined".into();
- let name = &name[1..];
- let text_edit = TextEdit::new(range, name.to_string());
- CompletionItem {
- kind: Some(adjust_kind(
- &context.request,
- Structure::Command.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Command).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::new_simple(name.into(), detail)
- }
- }
- InternalCompletionItemData::UserEnvironment { name } => {
- let detail = "user-defined".into();
- let text_edit = TextEdit::new(range, name.clone());
- CompletionItem {
- kind: Some(adjust_kind(
- &context.request,
- Structure::Environment.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::Environment).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::new_simple(name, detail)
- }
- }
- InternalCompletionItemData::PgfLibrary { name } => {
- let text_edit = TextEdit::new(range, name.into());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::PgfLibrary.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::PgfLibrary).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- InternalCompletionItemData::TikzLibrary { name } => {
- let text_edit = TextEdit::new(range, name.into());
- CompletionItem {
- label: name.into(),
- kind: Some(adjust_kind(
- &context.request,
- Structure::TikzLibrary.completion_kind(),
- )),
- data: Some(serde_json::to_value(CompletionItemData::TikzLibrary).unwrap()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- ..CompletionItem::default()
- }
- }
- };
- new_item.preselect = Some(item.preselect);
- new_item
-}
-
-fn append_sort_text(mut item: CompletionItem, index: usize) -> CompletionItem {
- let sort_prefix = format!("{:0>2}", index);
- match &item.sort_text {
- Some(sort_text) => {
- item.sort_text = Some(format!("{} {}", sort_prefix, sort_text));
- }
- None => {
- item.sort_text = Some(sort_prefix);
- }
- };
- item
+ entry_type::complete(&context, &mut builder);
+ field::complete(&context, &mut builder);
+ argument::complete(&context, &mut builder);
+ citation::complete(&context, &mut builder);
+ import::complete(&context, &mut builder);
+ color::complete(&context, &mut builder);
+ color_model::complete(&context, &mut builder);
+ acronym_ref::complete(&context, &mut builder);
+ glossary_ref::complete(&context, &mut builder);
+ include::complete(&context, &mut builder);
+ label::complete(&context, &mut builder);
+ tikz_library::complete(&context, &mut builder);
+ component_environment::complete(&context, &mut builder);
+ theorem::complete(&context, &mut builder);
+ user_environment::complete(&context, &mut builder);
+ begin_snippet::complete(&context, &mut builder);
+ component_command::complete(&context, &mut builder);
+ user_command::complete(&context, &mut builder);
+ Some(builder.finish())
}
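
Every provider above shares the signature fn complete(&CursorContext, &mut CompletionBuilder) -> Option<()>: it bails out with ? as soon as the cursor turns out not to be in its territory, and otherwise pushes candidates into the shared builder, which does the scoring, sorting and truncation once in finish. Below is a rough sketch of that dispatch shape; Context, Builder and the provider bodies are invented stand-ins, not texlab's types.

    // Hypothetical, pre-digested cursor information.
    struct Context {
        environment_name: Option<String>, // Some(..) inside \begin{...} or \end{...}
        command_name: Option<String>,     // Some(..) right after a backslash
    }

    #[derive(Default)]
    struct Builder {
        labels: Vec<String>,
    }

    fn complete_environments(context: &Context, builder: &mut Builder) -> Option<()> {
        // Not inside an environment name: this provider contributes nothing.
        let _name = context.environment_name.as_ref()?;
        builder.labels.extend(["document".to_string(), "figure".to_string()]);
        Some(())
    }

    fn complete_commands(context: &Context, builder: &mut Builder) -> Option<()> {
        let _name = context.command_name.as_ref()?;
        builder.labels.extend(["\\usepackage".to_string(), "\\section".to_string()]);
        Some(())
    }

    fn main() {
        let context = Context {
            environment_name: Some("fig".to_string()),
            command_name: None,
        };

        // The entry point simply calls every provider; the Option<()> result
        // only exists to make early returns with `?` convenient.
        let mut builder = Builder::default();
        let _ = complete_environments(&context, &mut builder);
        let _ = complete_commands(&context, &mut builder);

        println!("{:?}", builder.labels);
    }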
diff --git a/support/texlab/src/features/completion/acronym_ref.rs b/support/texlab/src/features/completion/acronym_ref.rs
index 84cbd8bc23..be7725fa2f 100644
--- a/support/texlab/src/features/completion/acronym_ref.rs
+++ b/support/texlab/src/features/completion/acronym_ref.rs
@@ -1,32 +1,29 @@
-use lsp_types::CompletionParams;
use rowan::ast::AstNode;
-use crate::{features::cursor::CursorContext, syntax::latex};
+use crate::{syntax::latex, util::cursor::CursorContext};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_acronyms<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range, group) = context.find_curly_group_word()?;
latex::AcronymReference::cast(group.syntax().parent()?)?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- for name in latex::SyntaxNode::new_root(data.green.clone())
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
+ for name in data
+ .root(context.db)
.descendants()
.filter_map(latex::AcronymDefinition::cast)
.filter_map(|node| node.name())
.filter_map(|name| name.key())
- .map(|name| name.to_string())
{
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Acronym { name },
- ));
+ builder.glossary_entry(range, name.to_string());
}
}
}
+
Some(())
}
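
The acronym provider aggregates across files: it walks every document related to the current one, parses it, and offers each \newacronym name it finds as a glossary entry. Below is a much simplified sketch of that cross-file aggregation shape, with plain structs standing in for the salsa-backed Workspace and Document types (all field names here are made up).

    // Names already extracted from \newacronym{...}{...}{...} definitions.
    struct Document {
        acronym_definitions: Vec<String>,
    }

    // The documents reachable from the file being edited.
    struct Workspace {
        related: Vec<Document>,
    }

    fn complete_acronyms(workspace: &Workspace, items: &mut Vec<String>) {
        for document in &workspace.related {
            for name in &document.acronym_definitions {
                items.push(name.clone());
            }
        }
    }

    fn main() {
        let workspace = Workspace {
            related: vec![
                Document { acronym_definitions: vec!["lsp".to_string(), "ast".to_string()] },
                Document { acronym_definitions: vec!["gpu".to_string()] },
            ],
        };

        let mut items = Vec::new();
        complete_acronyms(&workspace, &mut items);
        println!("{items:?}");
    }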
diff --git a/support/texlab/src/features/completion/argument.rs b/support/texlab/src/features/completion/argument.rs
index 3ba28f9dea..10b377cb0e 100644
--- a/support/texlab/src/features/completion/argument.rs
+++ b/support/texlab/src/features/completion/argument.rs
@@ -1,15 +1,17 @@
-use lsp_types::CompletionParams;
use rowan::{ast::AstNode, TextRange};
-use crate::{component_db::COMPONENT_DATABASE, features::cursor::CursorContext, syntax::latex};
+use crate::{
+ syntax::latex,
+ util::{components::COMPONENT_DATABASE, cursor::CursorContext},
+};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_arguments<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
- let token = context.cursor.as_latex()?;
+ let token = context.cursor.as_tex()?;
let range = if token.kind() == latex::WORD {
token.text_range()
@@ -37,7 +39,7 @@ pub fn complete_arguments<'a>(
let command_name = command.name()?;
let command_name = &command_name.text()[1..];
- for component in COMPONENT_DATABASE.linked_components(&context.request.workspace) {
+ for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
for component_command in component
.commands
.iter()
@@ -50,14 +52,7 @@ pub fn complete_arguments<'a>(
.filter(|(i, _)| *i == index)
{
for arg in &param.0 {
- let item = InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Argument {
- name: &arg.name,
- image: arg.image.as_deref(),
- },
- );
- items.push(item);
+ builder.generic_argument(range, &arg.name, arg.image.as_deref());
}
}
}
diff --git a/support/texlab/src/features/completion/begin_command.rs b/support/texlab/src/features/completion/begin_command.rs
deleted file mode 100644
index 699b06c067..0000000000
--- a/support/texlab/src/features/completion/begin_command.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-use lsp_types::CompletionParams;
-
-use crate::features::cursor::CursorContext;
-
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
-
-pub fn complete_begin_command(
- context: &CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem>,
-) -> Option<()> {
- let range = context.cursor.command_range(context.offset)?;
-
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::BeginCommand,
- ));
- Some(())
-}
diff --git a/support/texlab/src/features/completion/begin_snippet.rs b/support/texlab/src/features/completion/begin_snippet.rs
new file mode 100644
index 0000000000..06be4faf13
--- /dev/null
+++ b/support/texlab/src/features/completion/begin_snippet.rs
@@ -0,0 +1,9 @@
+use crate::util::cursor::CursorContext;
+
+use super::builder::CompletionBuilder;
+
+pub fn complete(context: &CursorContext, builder: &mut CompletionBuilder) -> Option<()> {
+ let range = context.cursor.command_range(context.offset)?;
+ builder.begin_snippet(range);
+ Some(())
+}
diff --git a/support/texlab/src/features/completion/builder.rs b/support/texlab/src/features/completion/builder.rs
new file mode 100644
index 0000000000..f8ea4c5d15
--- /dev/null
+++ b/support/texlab/src/features/completion/builder.rs
@@ -0,0 +1,784 @@
+use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
+use itertools::Itertools;
+use lsp_types::{
+ CompletionItem, CompletionItemKind, CompletionList, CompletionTextEdit, Documentation,
+ InsertTextFormat, MarkupContent, MarkupKind, TextEdit, Url,
+};
+use once_cell::sync::Lazy;
+use regex::Regex;
+use rowan::{ast::AstNode, TextRange, TextSize};
+use serde::{Deserialize, Serialize};
+use smol_str::SmolStr;
+
+use crate::{
+ db::{Document, Workspace},
+ syntax::{
+ bibtex::{self, HasName, HasType},
+ latex,
+ },
+ util::{
+ capabilities::ClientCapabilitiesExt,
+ cursor::{Cursor, CursorContext},
+ lang_data::{BibtexEntryTypeCategory, BibtexEntryTypeDoc, BibtexFieldDoc, LANGUAGE_DATA},
+ line_index_ext::LineIndexExt,
+ lsp_enums::Structure,
+ },
+};
+
+use super::COMPLETION_LIMIT;
+
+pub struct CompletionBuilder<'db> {
+ context: &'db CursorContext<'db>,
+ items: Vec<Item<'db>>,
+ matcher: SkimMatcherV2,
+ text_pattern: String,
+ file_pattern: String,
+ preselect: Option<String>,
+ snippets: bool,
+ markdown: bool,
+ item_kinds: &'db [CompletionItemKind],
+}
+
+impl<'db> CompletionBuilder<'db> {
+ pub fn new(context: &'db CursorContext) -> Self {
+ let items = Vec::new();
+ let matcher = SkimMatcherV2::default().ignore_case();
+ let text_pattern = match &context.cursor {
+ Cursor::Tex(token) if token.kind().is_command_name() => {
+ if token.text_range().start() + TextSize::from(1) == context.offset {
+ // Handle cases similar to this one correctly:
+ // $\|$ % (| is the cursor)
+ String::from("\\")
+ } else {
+ token.text().trim_end().into()
+ }
+ }
+ Cursor::Tex(token) if token.kind() == latex::WORD => {
+ match token.parent().and_then(latex::Key::cast) {
+ Some(key) => key
+ .words()
+ .take_while(|word| word.text_range() != token.text_range())
+ .chain(std::iter::once(token.clone()))
+ .filter(|word| word.text_range().start() < context.offset)
+ .join(" ")
+ .into(),
+ None => token.text().into(),
+ }
+ }
+ Cursor::Bib(token)
+ if matches!(
+ token.kind(),
+ bibtex::TYPE
+ | bibtex::NAME
+ | bibtex::WORD
+ | bibtex::COMMAND_NAME
+ | bibtex::ACCENT_NAME
+ ) =>
+ {
+ token.text().into()
+ }
+ Cursor::Tex(_) | Cursor::Bib(_) | Cursor::Nothing => "".into(),
+ };
+
+ let file_pattern = text_pattern.split('/').last().unwrap().to_string();
+
+ let preselect = context
+ .cursor
+ .as_tex()
+ .and_then(|name| name.parent())
+ .and_then(latex::CurlyGroupWord::cast)
+ .and_then(|group| group.syntax().parent())
+ .and_then(|end| end.parent())
+ .and_then(latex::Environment::cast)
+ .and_then(|env| env.begin())
+ .and_then(|begin| begin.name())
+ .and_then(|name| name.key())
+ .map(|name| name.to_string());
+
+ let client_capabilities = context.workspace.client_capabilities(context.db);
+ let snippets = client_capabilities.has_snippet_support();
+ let markdown = client_capabilities.has_completion_markdown_support();
+ let item_kinds = client_capabilities
+ .text_document
+ .as_ref()
+ .and_then(|cap| cap.completion.as_ref())
+ .and_then(|cap| cap.completion_item_kind.as_ref())
+ .and_then(|cap| cap.value_set.as_deref())
+ .unwrap_or_default();
+
+ Self {
+ context,
+ items,
+ matcher,
+ text_pattern,
+ file_pattern,
+ preselect,
+ snippets,
+ markdown,
+ item_kinds,
+ }
+ }
+
+ pub fn glossary_entry(&mut self, range: TextRange, name: String) -> Option<()> {
+ let score = self.matcher.fuzzy_match(&name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::GlossaryEntry { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn generic_argument(
+ &mut self,
+ range: TextRange,
+ name: &'db str,
+ image: Option<&'db str>,
+ ) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Argument { name, image },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn begin_snippet(&mut self, range: TextRange) -> Option<()> {
+ let capabilities = Workspace::get(self.context.db).client_capabilities(self.context.db);
+ if capabilities.has_snippet_support() {
+ let score = self.matcher.fuzzy_match("begin", &self.text_pattern[1..])?;
+ self.items.push(Item {
+ range,
+ data: Data::BeginSnippet,
+ preselect: false,
+ score,
+ });
+ }
+
+ Some(())
+ }
+
+ pub fn citation(
+ &mut self,
+ range: TextRange,
+ document: Document,
+ entry: &bibtex::Entry,
+ ) -> Option<()> {
+ let key = entry.name_token()?.to_string();
+
+ let category = LANGUAGE_DATA
+ .find_entry_type(&entry.type_token()?.text()[1..])
+ .map_or(BibtexEntryTypeCategory::Misc, |ty| ty.category);
+
+ let code = entry.syntax().text().to_string();
+ let filter_text = format!(
+ "{} {}",
+ key,
+ WHITESPACE_REGEX
+ .replace_all(
+ &code
+ .replace('{', " ")
+ .replace('}', " ")
+ .replace(',', " ")
+ .replace('=', " "),
+ " "
+ )
+ .trim(),
+ );
+
+ let score = self.matcher.fuzzy_match(&filter_text, &self.text_pattern)?;
+
+ let data = Data::Citation {
+ document,
+ key,
+ filter_text,
+ category,
+ };
+
+ self.items.push(Item {
+ range,
+ data,
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn color_model(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::ColorModel { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn color(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Color { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn component_command(
+ &mut self,
+ range: TextRange,
+ name: &'db str,
+ image: Option<&'db str>,
+ glyph: Option<&'db str>,
+ file_names: &'db [SmolStr],
+ ) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern[1..])?;
+ let data = Data::ComponentCommand {
+ name,
+ image,
+ glyph,
+ file_names,
+ };
+
+ self.items.push(Item {
+ range,
+ data,
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn component_environment(
+ &mut self,
+ range: TextRange,
+ name: &'db str,
+ file_names: &'db [SmolStr],
+ ) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::ComponentEnvironment { name, file_names },
+ preselect: Some(name) == self.preselect.as_deref(),
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn entry_type(
+ &mut self,
+ range: TextRange,
+ entry_type: &'db BibtexEntryTypeDoc,
+ ) -> Option<()> {
+ let score = self
+ .matcher
+ .fuzzy_match(&entry_type.name, &self.text_pattern[1..])?;
+
+ self.items.push(Item {
+ range,
+ data: Data::EntryType { entry_type },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn field(&mut self, range: TextRange, field: &'db BibtexFieldDoc) -> Option<()> {
+ let score = self.matcher.fuzzy_match(&field.name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Field { field },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn class(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Class { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn package(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Package { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn file(&mut self, range: TextRange, name: String) -> Option<()> {
+ let score = self.matcher.fuzzy_match(&name, &self.file_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::File { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn directory(&mut self, range: TextRange, name: String) -> Option<()> {
+ let score = self.matcher.fuzzy_match(&name, &self.file_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Directory { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn label(
+ &mut self,
+ range: TextRange,
+ name: &'db str,
+ kind: Structure,
+ header: Option<String>,
+ footer: Option<String>,
+ text: String,
+ ) -> Option<()> {
+ let score = self.matcher.fuzzy_match(&text, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::Label {
+ name,
+ kind,
+ header,
+ footer,
+ text,
+ },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn tikz_library(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::TikzLibrary { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn user_command(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern[1..])?;
+ self.items.push(Item {
+ range,
+ data: Data::UserCommand { name },
+ preselect: false,
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn user_environment(&mut self, range: TextRange, name: &'db str) -> Option<()> {
+ let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
+ self.items.push(Item {
+ range,
+ data: Data::UserEnvironment { name },
+ preselect: Some(name) == self.preselect.as_deref(),
+ score,
+ });
+
+ Some(())
+ }
+
+ pub fn finish(mut self) -> CompletionList {
+ let mut list = CompletionList::default();
+ list.items = std::mem::take(&mut self.items)
+ .into_iter()
+ .sorted_by(|a, b| {
+ b.preselect
+ .cmp(&a.preselect)
+ .then_with(|| b.score.cmp(&a.score))
+ .then_with(|| a.data.label().cmp(b.data.label()))
+ })
+ .dedup_by(|a, b| a.data.label() == b.data.label())
+ .take(COMPLETION_LIMIT)
+ .enumerate()
+ .map(|(i, item)| self.convert_item(item, i))
+ .collect();
+
+ list.is_incomplete = if self
+ .context
+ .workspace
+ .client_info(self.context.db)
+ .as_ref()
+ .map_or(false, |client| client.name.as_str() == "Visual Studio Code")
+ {
+ true
+ } else {
+ list.items.len() >= COMPLETION_LIMIT
+ };
+
+ list
+ }
+
+ fn convert_item(&self, item: Item, index: usize) -> CompletionItem {
+ let range = self.context.line_index.line_col_lsp_range(item.range);
+ let preselect = item.preselect;
+ let mut item = match item.data {
+ Data::EntryType { entry_type } => CompletionItem {
+ label: entry_type.name.clone(),
+ kind: Some(Structure::Entry(entry_type.category).completion_kind()),
+ documentation: entry_type.documentation.clone().map(|value| {
+ let kind = MarkupKind::Markdown;
+ Documentation::MarkupContent(MarkupContent { kind, value })
+ }),
+ text_edit: Some(TextEdit::new(range, entry_type.name.clone()).into()),
+ ..CompletionItem::default()
+ },
+ Data::Field { field } => CompletionItem {
+ label: field.name.clone(),
+ kind: Some(Structure::Field.completion_kind()),
+ documentation: Some(Documentation::MarkupContent(MarkupContent {
+ kind: MarkupKind::Markdown,
+ value: field.documentation.clone(),
+ })),
+ text_edit: Some(TextEdit::new(range, field.name.clone()).into()),
+ ..CompletionItem::default()
+ },
+ Data::Argument { name, image } => {
+ let text_edit = TextEdit::new(range, String::from(name));
+ CompletionItem {
+ label: name.into(),
+ kind: Some(Structure::Argument.completion_kind()),
+ text_edit: Some(CompletionTextEdit::Edit(text_edit)),
+ documentation: image.and_then(|base64| self.inline_image(name, base64)),
+ ..CompletionItem::default()
+ }
+ }
+ Data::BeginSnippet => {
+ if self.snippets {
+ CompletionItem {
+ kind: Some(Structure::Snippet.completion_kind()),
+ text_edit: Some(
+ TextEdit::new(range, "begin{$1}\n\t\n\\end{$1}".into()).into(),
+ ),
+ insert_text_format: Some(InsertTextFormat::SNIPPET),
+ ..CompletionItem::new_simple("begin".into(), self.component_detail(&[]))
+ }
+ } else {
+ CompletionItem {
+ kind: Some(Structure::Command.completion_kind()),
+ text_edit: Some(TextEdit::new(range, "begin".to_string()).into()),
+ ..CompletionItem::new_simple("begin".into(), self.component_detail(&[]))
+ }
+ }
+ }
+ Data::Citation {
+ document,
+ key,
+ filter_text,
+ category,
+ } => CompletionItem {
+ label: key.clone(),
+ kind: Some(Structure::Entry(category).completion_kind()),
+ filter_text: Some(filter_text.clone()),
+ sort_text: Some(filter_text),
+ data: Some(
+ serde_json::to_value(CompletionItemData::Citation {
+ uri: document
+ .location(self.context.db)
+ .uri(self.context.db)
+ .clone(),
+ key: key.clone(),
+ })
+ .unwrap(),
+ ),
+ text_edit: Some(TextEdit::new(range, key).into()),
+ ..CompletionItem::default()
+ },
+ Data::ComponentCommand {
+ name,
+ image,
+ glyph,
+ file_names,
+ } => CompletionItem {
+ label: name.into(),
+ detail: Some(glyph.map_or_else(
+ || self.component_detail(file_names),
+ |glyph| format!("{}, {}", glyph, self.component_detail(file_names)),
+ )),
+ kind: Some(Structure::Command.completion_kind()),
+ documentation: image.and_then(|base64| self.inline_image(name, base64)),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ Data::ComponentEnvironment { name, file_names } => CompletionItem {
+ kind: Some(Structure::Environment.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::new_simple(name.into(), self.component_detail(file_names))
+ },
+ Data::Class { name } => CompletionItem {
+ label: name.into(),
+ kind: Some(Structure::Class.completion_kind()),
+ data: Some(serde_json::to_value(CompletionItemData::Package).unwrap()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ Data::Package { name } => CompletionItem {
+ label: name.into(),
+ kind: Some(Structure::Package.completion_kind()),
+ data: Some(serde_json::to_value(CompletionItemData::Class).unwrap()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ Data::Color { name } => CompletionItem {
+ label: name.into(),
+ kind: Some(Structure::Color.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ Data::ColorModel { name } => CompletionItem {
+ label: name.into(),
+ kind: Some(Structure::ColorModel.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ Data::GlossaryEntry { name } => CompletionItem {
+ label: name.clone(),
+ kind: Some(Structure::GlossaryEntry.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name).into()),
+ ..CompletionItem::default()
+ },
+ Data::File { name } => CompletionItem {
+ label: name.clone(),
+ kind: Some(Structure::File.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name).into()),
+ ..CompletionItem::default()
+ },
+ Data::Directory { name } => CompletionItem {
+ label: name.clone(),
+ kind: Some(Structure::Folder.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name).into()),
+ ..CompletionItem::default()
+ },
+ Data::Label {
+ name,
+ kind,
+ header,
+ footer,
+ text,
+ } => CompletionItem {
+ label: name.into(),
+ kind: Some(kind.completion_kind()),
+ detail: header,
+ documentation: footer.map(Documentation::String),
+ sort_text: Some(text.clone()),
+ filter_text: Some(text),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ Data::UserCommand { name } => {
+ let detail = "user-defined".into();
+ let name = &name[1..];
+ CompletionItem {
+ kind: Some(Structure::Command.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::new_simple(name.into(), detail)
+ }
+ }
+ Data::UserEnvironment { name } => {
+ let detail = "user-defined".into();
+ CompletionItem {
+ kind: Some(Structure::Environment.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::new_simple(name.into(), detail)
+ }
+ }
+ Data::TikzLibrary { name } => CompletionItem {
+ label: name.into(),
+ kind: Some(Structure::TikzLibrary.completion_kind()),
+ text_edit: Some(TextEdit::new(range, name.into()).into()),
+ ..CompletionItem::default()
+ },
+ };
+
+ item.preselect = Some(preselect);
+
+ if !self.item_kinds.contains(&item.kind.unwrap()) {
+ item.kind = Some(CompletionItemKind::TEXT);
+ }
+
+ let sort_prefix = format!("{:0>2}", index);
+ match &item.sort_text {
+ Some(sort_text) => {
+ item.sort_text = Some(format!("{} {}", sort_prefix, sort_text));
+ }
+ None => {
+ item.sort_text = Some(sort_prefix);
+ }
+ };
+
+ item
+ }
+
+ fn inline_image(&self, name: &str, base64: &str) -> Option<Documentation> {
+ if self.markdown {
+ let kind = MarkupKind::Markdown;
+ let value = format!(
+ "![{}](data:image/png;base64,{}|width=48,height=48)",
+ name, base64
+ );
+
+ Some(Documentation::MarkupContent(MarkupContent { kind, value }))
+ } else {
+ None
+ }
+ }
+
+ fn component_detail(&self, file_names: &[SmolStr]) -> String {
+ if file_names.is_empty() {
+ "built-in".into()
+ } else {
+ file_names.join(", ")
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+struct Item<'db> {
+ range: TextRange,
+ data: Data<'db>,
+ preselect: bool,
+ score: i32,
+}
+
+#[derive(Debug, Clone)]
+enum Data<'db> {
+ EntryType {
+ entry_type: &'db BibtexEntryTypeDoc,
+ },
+ Field {
+ field: &'db BibtexFieldDoc,
+ },
+ Argument {
+ name: &'db str,
+ image: Option<&'db str>,
+ },
+ BeginSnippet,
+ Citation {
+ document: Document,
+ key: String,
+ filter_text: String,
+ category: BibtexEntryTypeCategory,
+ },
+ ComponentCommand {
+ name: &'db str,
+ image: Option<&'db str>,
+ glyph: Option<&'db str>,
+ file_names: &'db [SmolStr],
+ },
+ ComponentEnvironment {
+ name: &'db str,
+ file_names: &'db [SmolStr],
+ },
+ Class {
+ name: &'db str,
+ },
+ Package {
+ name: &'db str,
+ },
+ Color {
+ name: &'db str,
+ },
+ ColorModel {
+ name: &'db str,
+ },
+ GlossaryEntry {
+ name: String,
+ },
+ File {
+ name: String,
+ },
+ Directory {
+ name: String,
+ },
+ Label {
+ name: &'db str,
+ kind: Structure,
+ header: Option<String>,
+ footer: Option<String>,
+ text: String,
+ },
+ UserCommand {
+ name: &'db str,
+ },
+ UserEnvironment {
+ name: &'db str,
+ },
+ TikzLibrary {
+ name: &'db str,
+ },
+}
+
+impl<'db> Data<'db> {
+ pub fn label<'this: 'db>(&'this self) -> &'db str {
+ match self {
+ Self::EntryType { entry_type } => &entry_type.name,
+ Self::Field { field } => &field.name,
+ Self::Argument { name, .. } => name,
+ Self::BeginSnippet => "begin",
+ Self::Citation { key, .. } => key,
+ Self::ComponentCommand { name, .. } => name,
+ Self::ComponentEnvironment { name, .. } => name,
+ Self::Class { name } => name,
+ Self::Package { name } => name,
+ Self::Color { name } => name,
+ Self::ColorModel { name } => name,
+ Self::GlossaryEntry { name } => name,
+ Self::File { name } => name,
+ Self::Directory { name } => name,
+ Self::Label { name, .. } => name,
+ Self::UserCommand { name } => name,
+ Self::UserEnvironment { name } => name,
+ Self::TikzLibrary { name } => name,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum CompletionItemData {
+ Package,
+ Class,
+ Citation { uri: Url, key: String },
+}
+
+static WHITESPACE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("\\s+").unwrap());
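A note on the sort_text handling earlier in this builder: LSP clients order completion items lexicographically by sort_text, so the builder prepends a zero-padded index so that, for example, item 10 does not sort before item 2. A minimal standalone sketch of that scheme (illustrative only, not the texlab API):

    fn sort_key(index: usize, sort_text: Option<&str>) -> String {
        // Zero-pad the index so lexicographic order matches numeric order.
        let prefix = format!("{:0>2}", index);
        match sort_text {
            Some(text) => format!("{} {}", prefix, text),
            None => prefix,
        }
    }

    fn main() {
        assert_eq!(sort_key(3, None), "03");
        assert_eq!(sort_key(12, Some("key smith 2005")), "12 key smith 2005");
    }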
diff --git a/support/texlab/src/features/completion/citation.rs b/support/texlab/src/features/completion/citation.rs
index e6436689d7..68d72717a7 100644
--- a/support/texlab/src/features/completion/citation.rs
+++ b/support/texlab/src/features/completion/citation.rs
@@ -1,26 +1,17 @@
-use std::sync::Arc;
-
-use lsp_types::CompletionParams;
-use once_cell::sync::Lazy;
-use regex::Regex;
use rowan::{ast::AstNode, TextRange};
use crate::{
- features::{cursor::CursorContext, lsp_kinds::Structure},
- syntax::{
- bibtex::{self, HasName, HasType},
- latex,
- },
- BibtexEntryTypeCategory, Document, LANGUAGE_DATA,
+ syntax::{bibtex, latex},
+ util::cursor::CursorContext,
};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_citations<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
- let token = context.cursor.as_latex()?;
+ let token = context.cursor.as_tex()?;
let range = if token.kind() == latex::WORD {
latex::Key::cast(token.parent()?)
@@ -36,15 +27,14 @@ pub fn complete_citations<'a>(
};
check_citation(context).or_else(|| check_acronym(context))?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_bibtex() {
- for entry in bibtex::SyntaxNode::new_root(data.green.clone())
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_bib() {
+ for entry in data
+ .root(context.db)
.children()
.filter_map(bibtex::Entry::cast)
{
- if let Some(item) = make_item(document, &entry, range) {
- items.push(item);
- }
+ builder.citation(range, document, &entry);
}
}
}
@@ -52,14 +42,14 @@ pub fn complete_citations<'a>(
Some(())
}
-fn check_citation(context: &CursorContext<CompletionParams>) -> Option<()> {
+fn check_citation(context: &CursorContext) -> Option<()> {
let (_, _, group) = context.find_curly_group_word_list()?;
latex::Citation::cast(group.syntax().parent()?)?;
Some(())
}
-fn check_acronym(context: &CursorContext<CompletionParams>) -> Option<()> {
- let token = context.cursor.as_latex()?;
+fn check_acronym(context: &CursorContext) -> Option<()> {
+ let token = context.cursor.as_tex()?;
let pair = token
.parent_ancestors()
@@ -71,45 +61,3 @@ fn check_acronym(context: &CursorContext<CompletionParams>) -> Option<()> {
latex::AcronymDeclaration::cast(pair.syntax().parent()?.parent()?.parent()?)?;
Some(())
}
-
-fn make_item<'a>(
- document: &'a Document,
- entry: &bibtex::Entry,
- range: TextRange,
-) -> Option<InternalCompletionItem<'a>> {
- let key = entry.name_token()?.to_string();
- let ty = LANGUAGE_DATA
- .find_entry_type(&entry.type_token()?.text()[1..])
- .map_or_else(
- || Structure::Entry(BibtexEntryTypeCategory::Misc),
- |ty| Structure::Entry(ty.category),
- );
-
- let entry_code = entry.syntax().text().to_string();
- let text = format!(
- "{} {}",
- key,
- WHITESPACE_REGEX
- .replace_all(
- &entry_code
- .replace('{', " ")
- .replace('}', " ")
- .replace(',', " ")
- .replace('=', " "),
- " "
- )
- .trim(),
- );
-
- Some(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Citation {
- uri: Arc::clone(&document.uri),
- key,
- text,
- ty,
- },
- ))
-}
-
-static WHITESPACE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("\\s+").unwrap());
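The make_item logic removed here moves into the builder's citation constructor: the entry key plus a whitespace-collapsed copy of the raw entry source becomes the item's filter text. A self-contained sketch of that transformation (the function name is illustrative; the regex crate is already a texlab dependency):

    use regex::Regex;

    fn filter_text(key: &str, entry_source: &str) -> String {
        // Strip BibTeX punctuation, then collapse runs of whitespace to single spaces.
        let whitespace = Regex::new(r"\s+").unwrap();
        let flattened = entry_source
            .replace('{', " ")
            .replace('}', " ")
            .replace(',', " ")
            .replace('=', " ");
        format!("{} {}", key, whitespace.replace_all(&flattened, " ").trim())
    }

    fn main() {
        let src = "@article{smith2005,\n  author = {Smith, John},\n  year = {2005},\n}";
        assert_eq!(
            filter_text("smith2005", src),
            "smith2005 @article smith2005 author Smith John year 2005"
        );
    }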
diff --git a/support/texlab/src/features/completion/color.rs b/support/texlab/src/features/completion/color.rs
index a9a385c3c9..cce6517cb4 100644
--- a/support/texlab/src/features/completion/color.rs
+++ b/support/texlab/src/features/completion/color.rs
@@ -1,22 +1,21 @@
-use lsp_types::CompletionParams;
use rowan::ast::AstNode;
-use crate::{features::cursor::CursorContext, syntax::latex, LANGUAGE_DATA};
+use crate::{
+ syntax::latex,
+ util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
+};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_colors<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range, group) = context.find_curly_group_word()?;
latex::ColorReference::cast(group.syntax().parent()?)?;
for name in &LANGUAGE_DATA.colors {
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Color { name },
- ));
+ builder.color(range, name);
}
Some(())
diff --git a/support/texlab/src/features/completion/color_model.rs b/support/texlab/src/features/completion/color_model.rs
index cd582d0031..af700a96fb 100644
--- a/support/texlab/src/features/completion/color_model.rs
+++ b/support/texlab/src/features/completion/color_model.rs
@@ -1,29 +1,25 @@
-use lsp_types::CompletionParams;
use rowan::{ast::AstNode, TextRange};
-use crate::{features::cursor::CursorContext, syntax::latex};
+use crate::{syntax::latex, util::cursor::CursorContext};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
const MODEL_NAMES: &[&str] = &["gray", "rgb", "RGB", "HTML", "cmyk"];
-pub fn complete_color_models<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let range = check_color_definition(context).or_else(|| check_color_definition_set(context))?;
for name in MODEL_NAMES {
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::ColorModel { name },
- ));
+ builder.color_model(range, name);
}
Some(())
}
-fn check_color_definition(context: &CursorContext<CompletionParams>) -> Option<TextRange> {
+fn check_color_definition(context: &CursorContext) -> Option<TextRange> {
let (_, range, group) = context.find_curly_group_word()?;
let definition = latex::ColorDefinition::cast(group.syntax().parent()?)?;
@@ -33,7 +29,7 @@ fn check_color_definition(context: &CursorContext<CompletionParams>) -> Option<T
Some(range)
}
-fn check_color_definition_set(context: &CursorContext<CompletionParams>) -> Option<TextRange> {
+fn check_color_definition_set(context: &CursorContext) -> Option<TextRange> {
let (_, range, group) = context.find_curly_group_word_list()?;
let definition = latex::ColorSetDefinition::cast(group.syntax().parent()?)?;
definition
diff --git a/support/texlab/src/features/completion/component_command.rs b/support/texlab/src/features/completion/component_command.rs
index 097b28633f..20cef73848 100644
--- a/support/texlab/src/features/completion/component_command.rs
+++ b/support/texlab/src/features/completion/component_command.rs
@@ -1,26 +1,22 @@
-use lsp_types::CompletionParams;
+use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
-use crate::{component_db::COMPONENT_DATABASE, features::cursor::CursorContext};
+use super::builder::CompletionBuilder;
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
-
-pub fn complete_component_commands<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let range = context.cursor.command_range(context.offset)?;
- for component in COMPONENT_DATABASE.linked_components(&context.request.workspace) {
+ for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
for command in &component.commands {
- items.push(InternalCompletionItem::new(
+ builder.component_command(
range,
- InternalCompletionItemData::ComponentCommand {
- name: &command.name,
- image: command.image.as_deref(),
- glyph: command.glyph.as_deref(),
- file_names: &component.file_names,
- },
- ));
+ &command.name,
+ command.image.as_deref(),
+ command.glyph.as_deref(),
+ &component.file_names,
+ );
}
}
diff --git a/support/texlab/src/features/completion/component_environment.rs b/support/texlab/src/features/completion/component_environment.rs
index a9478694bb..e58e319a46 100644
--- a/support/texlab/src/features/completion/component_environment.rs
+++ b/support/texlab/src/features/completion/component_environment.rs
@@ -1,24 +1,16 @@
-use lsp_types::CompletionParams;
+use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
-use crate::{component_db::COMPONENT_DATABASE, features::cursor::CursorContext};
+use super::builder::CompletionBuilder;
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
-
-pub fn complete_component_environments<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range) = context.find_environment_name()?;
- for component in COMPONENT_DATABASE.linked_components(&context.request.workspace) {
+ for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
for name in &component.environments {
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::ComponentEnvironment {
- name,
- file_names: &component.file_names,
- },
- ));
+ builder.component_environment(range, name, &component.file_names);
}
}
diff --git a/support/texlab/src/features/completion/entry_type.rs b/support/texlab/src/features/completion/entry_type.rs
index 519737ed08..38bc0e3f3a 100644
--- a/support/texlab/src/features/completion/entry_type.rs
+++ b/support/texlab/src/features/completion/entry_type.rs
@@ -1,26 +1,26 @@
-use lsp_types::CompletionParams;
use rowan::{TextRange, TextSize};
-use crate::{features::cursor::CursorContext, syntax::bibtex, LANGUAGE_DATA};
+use crate::{
+ syntax::bibtex,
+ util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
+};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_entry_types<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let range = context
.cursor
- .as_bibtex()
+ .as_bib()
.filter(|token| token.kind() == bibtex::TYPE)
.map(bibtex::SyntaxToken::text_range)
.filter(|range| range.start() != context.offset)
.map(|range| TextRange::new(range.start() + TextSize::from(1), range.end()))?;
- for ty in &LANGUAGE_DATA.entry_types {
- let data = InternalCompletionItemData::EntryType { ty };
- let item = InternalCompletionItem::new(range, data);
- items.push(item);
+ for entry_type in &LANGUAGE_DATA.entry_types {
+ builder.entry_type(range, entry_type);
}
Some(())
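The range arithmetic above skips the first character of the TYPE token, presumably so the completion edit replaces only the type name and leaves the leading "@" in place. A standalone sketch using the text-size types re-exported by rowan:

    use rowan::{TextRange, TextSize};

    fn entry_type_range(token_range: TextRange) -> TextRange {
        // Drop the first character of the token from the completion range.
        TextRange::new(token_range.start() + TextSize::from(1), token_range.end())
    }

    fn main() {
        // A token "@article" spanning offsets 10..18 yields the range 11..18.
        let token = TextRange::new(TextSize::from(10), TextSize::from(18));
        let expected = TextRange::new(TextSize::from(11), TextSize::from(18));
        assert_eq!(entry_type_range(token), expected);
    }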
diff --git a/support/texlab/src/features/completion/field.rs b/support/texlab/src/features/completion/field.rs
index 4f386ed1b1..ff6e54a562 100644
--- a/support/texlab/src/features/completion/field.rs
+++ b/support/texlab/src/features/completion/field.rs
@@ -1,19 +1,17 @@
-use lsp_types::CompletionParams;
use rowan::{ast::AstNode, TextRange};
use crate::{
- features::cursor::CursorContext,
syntax::bibtex::{self, HasName},
- LANGUAGE_DATA,
+ util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_fields<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
- let token = context.cursor.as_bibtex()?;
+ let token = context.cursor.as_bib()?;
let range = if token.kind() == bibtex::NAME {
token.text_range()
@@ -31,9 +29,8 @@ pub fn complete_fields<'a>(
}
for field in &LANGUAGE_DATA.fields {
- let data = InternalCompletionItemData::Field { field };
- let item = InternalCompletionItem::new(range, data);
- items.push(item);
+ builder.field(range, field);
}
+
Some(())
}
diff --git a/support/texlab/src/features/completion/glossary_ref.rs b/support/texlab/src/features/completion/glossary_ref.rs
index 6223079fdf..34d53bf24d 100644
--- a/support/texlab/src/features/completion/glossary_ref.rs
+++ b/support/texlab/src/features/completion/glossary_ref.rs
@@ -1,38 +1,31 @@
-use lsp_types::CompletionParams;
use rowan::ast::AstNode;
-use crate::{features::cursor::CursorContext, syntax::latex};
+use crate::{syntax::latex, util::cursor::CursorContext};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_glossary_entries<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range, group) = context.find_curly_group_word()?;
latex::GlossaryEntryReference::cast(group.syntax().parent()?)?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- for node in latex::SyntaxNode::new_root(data.green.clone()).descendants() {
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
+ for node in data.root(context.db).descendants() {
if let Some(name) = latex::GlossaryEntryDefinition::cast(node.clone())
.and_then(|entry| entry.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::GlossaryEntry { name },
- ));
+ builder.glossary_entry(range, name);
} else if let Some(name) = latex::AcronymDefinition::cast(node)
.and_then(|entry| entry.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Acronym { name },
- ));
+ builder.glossary_entry(range, name);
}
}
}
diff --git a/support/texlab/src/features/completion/import.rs b/support/texlab/src/features/completion/import.rs
index 9cb35b68a7..bf6da78a43 100644
--- a/support/texlab/src/features/completion/import.rs
+++ b/support/texlab/src/features/completion/import.rs
@@ -1,31 +1,24 @@
-use lsp_types::CompletionParams;
use rowan::ast::AstNode;
use rustc_hash::FxHashSet;
-use smol_str::SmolStr;
-use crate::{component_db::COMPONENT_DATABASE, features::cursor::CursorContext, syntax::latex};
+use crate::{
+ syntax::latex,
+ util::{components::COMPONENT_DATABASE, cursor::CursorContext},
+};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_imports<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range, group) = context.find_curly_group_word_list()?;
- let (extension, mut factory): (
- &str,
- Box<dyn FnMut(SmolStr) -> InternalCompletionItemData<'a>>,
- ) = match group.syntax().parent()?.kind() {
- latex::PACKAGE_INCLUDE => (
- "sty",
- Box::new(|name| InternalCompletionItemData::Package { name }),
- ),
- latex::CLASS_INCLUDE => (
- "cls",
- Box::new(|name| InternalCompletionItemData::Class { name }),
- ),
- _ => return None,
+ let kind = group.syntax().parent()?.kind();
+ let extension = match kind {
+ latex::PACKAGE_INCLUDE => "sty",
+ latex::CLASS_INCLUDE => "cls",
+ _ => return Some(()),
};
let mut file_names = FxHashSet::default();
@@ -35,21 +28,27 @@ pub fn complete_imports<'a>(
.flat_map(|comp| comp.file_names.iter())
.filter(|file_name| file_name.ends_with(extension))
{
- file_names.insert(file_name);
+ file_names.insert(file_name.as_str());
let stem = &file_name[0..file_name.len() - 4];
- let data = factory(stem.into());
- items.push(InternalCompletionItem::new(range, data));
+ if kind == latex::PACKAGE_INCLUDE {
+ builder.package(range, stem);
+ } else {
+ builder.class(range, stem);
+ }
}
- let resolver = &context.request.workspace.environment.resolver;
- for file_name in resolver
- .files_by_name
- .keys()
+ let file_name_db = context.workspace.file_name_db(context.db);
+ for file_name in file_name_db
+ .iter()
+ .map(|(file_name, _)| file_name)
.filter(|file_name| file_name.ends_with(extension) && !file_names.contains(file_name))
{
let stem = &file_name[0..file_name.len() - 4];
- let data = factory(stem.into());
- items.push(InternalCompletionItem::new(range, data));
+ if kind == latex::PACKAGE_INCLUDE {
+ builder.package(range, stem);
+ } else {
+ builder.class(range, stem);
+ }
}
Some(())
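Both loops above derive the completion label the same way: keep only file names with the requested extension and cut off the last four characters (".sty" or ".cls") to obtain the stem. A rough standalone sketch of that pattern (the real code deduplicates across the two loops with FxHashSet and dispatches to builder.package or builder.class):

    use std::collections::HashSet;

    fn stems<'a>(file_names: impl Iterator<Item = &'a str>, extension: &str) -> Vec<&'a str> {
        let mut seen = HashSet::new();
        file_names
            .filter(|name| name.ends_with(extension))
            // Skip duplicates while keeping the first occurrence.
            .filter(|name| seen.insert(*name))
            .map(|name| &name[0..name.len() - 4])
            .collect()
    }

    fn main() {
        let names = ["amsmath.sty", "article.cls", "amsmath.sty", "graphicx.sty"];
        assert_eq!(stems(names.into_iter(), "sty"), vec!["amsmath", "graphicx"]);
    }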
diff --git a/support/texlab/src/features/completion/include.rs b/support/texlab/src/features/completion/include.rs
index d851aca820..bde7544570 100644
--- a/support/texlab/src/features/completion/include.rs
+++ b/support/texlab/src/features/completion/include.rs
@@ -4,18 +4,22 @@ use std::{
path::{Path, PathBuf},
};
-use lsp_types::CompletionParams;
use rowan::{ast::AstNode, TextRange, TextSize};
-use crate::{features::cursor::CursorContext, syntax::latex};
+use crate::{syntax::latex, util::cursor::CursorContext};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_includes<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
- if context.request.main_document().uri.scheme() != "file" {
+ if context
+ .document
+ .location(context.db)
+ .path(context.db)
+ .is_none()
+ {
return None;
}
@@ -51,10 +55,15 @@ pub fn complete_includes<'a>(
let mut dirs = vec![current_dir(context, &path_text, None)];
if include.kind() == latex::GRAPHICS_INCLUDE {
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- for graphics_path in &data.extras.graphics_paths {
- dirs.push(current_dir(context, &path_text, Some(graphics_path)));
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
+ for path in data
+ .analyze(context.db)
+ .graphics_paths(context.db)
+ .iter()
+ .map(|node| node.path(context.db))
+ {
+ dirs.push(current_dir(context, &path_text, Some(path)));
}
}
}
@@ -74,15 +83,12 @@ pub fn complete_includes<'a>(
if !include_extension {
remove_extension(&mut path);
}
+
let name = path.file_name()?.to_str()?.into();
- let data = InternalCompletionItemData::File { name };
- let item = InternalCompletionItem::new(segment_range, data);
- items.push(item);
+ builder.file(segment_range, name);
} else if file_type.is_dir() {
let name = path.file_name()?.to_str()?.into();
- let data = InternalCompletionItemData::Directory { name };
- let item = InternalCompletionItem::new(segment_range, data);
- items.push(item);
+ builder.directory(segment_range, name);
}
}
@@ -90,34 +96,24 @@ pub fn complete_includes<'a>(
}
fn current_dir(
- context: &CursorContext<CompletionParams>,
+ context: &CursorContext,
path_text: &str,
graphics_path: Option<&str>,
) -> Option<PathBuf> {
- let mut path = context
- .request
+ let parent = context
.workspace
- .environment
- .options
- .root_directory
- .as_ref()
- .map_or_else(
- || {
- let mut path = context.request.main_document().uri.to_file_path().unwrap();
- path.pop();
- path
- },
- |root_directory| {
- context
- .request
- .workspace
- .environment
- .current_directory
- .join(root_directory)
- },
- );
-
- path = PathBuf::from(path.to_str()?.replace('\\', "/"));
+ .parents(context.db, context.document)
+ .iter()
+ .next()
+ .map_or(context.document, Clone::clone);
+
+ let path = context
+ .workspace
+ .working_dir(context.db, parent.directory(context.db))
+ .path(context.db)
+ .as_deref()?;
+
+ let mut path = PathBuf::from(path.to_str()?.replace('\\', "/"));
if !path_text.is_empty() {
if let Some(graphics_path) = graphics_path {
path.push(graphics_path);
diff --git a/support/texlab/src/features/completion/label.rs b/support/texlab/src/features/completion/label.rs
index b662f98c8a..ae3e010b0e 100644
--- a/support/texlab/src/features/completion/label.rs
+++ b/support/texlab/src/features/completion/label.rs
@@ -1,83 +1,61 @@
-use lsp_types::CompletionParams;
use rowan::{ast::AstNode, TextRange};
use crate::{
- features::{cursor::CursorContext, lsp_kinds::Structure},
- render_label,
syntax::latex,
- LabelledObject,
+ util::{self, cursor::CursorContext, label::LabeledObject, lsp_enums::Structure},
};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_labels<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (range, is_math) = find_reference(context).or_else(|| find_reference_range(context))?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- for label in latex::SyntaxNode::new_root(data.green.clone())
- .descendants()
- .filter_map(latex::LabelDefinition::cast)
+ let db = context.db;
+ for document in context.related() {
+ if let Some(data) = document.parse(db).as_tex() {
+ for label in data
+ .analyze(db)
+ .labels(db)
+ .iter()
+ .filter(|label| label.origin(db).as_definition().is_some())
{
- if let Some(name) = label
- .name()
- .and_then(|name| name.key())
- .map(|name| name.to_string())
- {
- match render_label(&context.request.workspace, &name, Some(label)) {
- Some(rendered_label) => {
- let kind = match &rendered_label.object {
- LabelledObject::Section { .. } => Structure::Section,
- LabelledObject::Float { .. } => Structure::Float,
- LabelledObject::Theorem { .. } => Structure::Theorem,
- LabelledObject::Equation => Structure::Equation,
- LabelledObject::EnumItem => Structure::Item,
- };
+ match util::label::render(db, document, *label) {
+ Some(rendered_label) => {
+ let kind = match &rendered_label.object {
+ LabeledObject::Section { .. } => Structure::Section,
+ LabeledObject::Float { .. } => Structure::Float,
+ LabeledObject::Theorem { .. } => Structure::Theorem,
+ LabeledObject::Equation => Structure::Equation,
+ LabeledObject::EnumItem => Structure::Item,
+ };
- if is_math && kind != Structure::Equation {
- continue;
- }
+ if is_math && kind != Structure::Equation {
+ continue;
+ }
- let header = rendered_label.detail();
- let footer = match &rendered_label.object {
- LabelledObject::Float { caption, .. } => Some(caption.clone()),
- _ => None,
- };
+ let header = rendered_label.detail(db);
+ let footer = match &rendered_label.object {
+ LabeledObject::Float { caption, .. } => Some(caption.clone()),
+ _ => None,
+ };
- let text = format!("{} {}", name, rendered_label.reference());
+ let text = format!(
+ "{} {}",
+ label.name(db).text(db),
+ rendered_label.reference(db)
+ );
- let item = InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Label {
- name,
- kind,
- header,
- footer,
- text,
- },
- );
- items.push(item);
- }
- None => {
- let kind = Structure::Label;
- let header = None;
- let footer = None;
- let text = name.to_string();
- let item = InternalCompletionItem::new(
- range,
- InternalCompletionItemData::Label {
- name,
- kind,
- header,
- footer,
- text,
- },
- );
- items.push(item);
- }
+ builder.label(range, label.name(db).text(db), kind, header, footer, text);
+ }
+ None => {
+ let kind = Structure::Label;
+ let header = None;
+ let footer = None;
+ let text = label.name(db).text(db).clone();
+ builder.label(range, label.name(db).text(db), kind, header, footer, text);
}
}
}
@@ -87,14 +65,14 @@ pub fn complete_labels<'a>(
Some(())
}
-fn find_reference(context: &CursorContext<CompletionParams>) -> Option<(TextRange, bool)> {
+fn find_reference(context: &CursorContext) -> Option<(TextRange, bool)> {
let (_, range, group) = context.find_curly_group_word_list()?;
let reference = latex::LabelReference::cast(group.syntax().parent()?)?;
let is_math = reference.command()?.text() == "\\eqref";
Some((range, is_math))
}
-fn find_reference_range(context: &CursorContext<CompletionParams>) -> Option<(TextRange, bool)> {
+fn find_reference_range(context: &CursorContext) -> Option<(TextRange, bool)> {
let (_, range, group) = context.find_curly_group_word()?;
latex::LabelReferenceRange::cast(group.syntax().parent()?)?;
Some((range, false))
diff --git a/support/texlab/src/features/completion/theorem.rs b/support/texlab/src/features/completion/theorem.rs
index 65b511f7ad..cd0dfe3a84 100644
--- a/support/texlab/src/features/completion/theorem.rs
+++ b/support/texlab/src/features/completion/theorem.rs
@@ -1,24 +1,18 @@
-use lsp_types::CompletionParams;
+use crate::util::cursor::CursorContext;
-use crate::features::cursor::CursorContext;
+use super::builder::CompletionBuilder;
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
-
-pub fn complete_theorem_environments<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range) = context.find_environment_name()?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- for environment in &data.extras.theorem_environments {
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::UserEnvironment {
- name: environment.name.clone(),
- },
- ));
+ let db = context.db;
+ for document in context.related() {
+ if let Some(data) = document.parse(db).as_tex() {
+ for environment in data.analyze(db).theorem_environments(db) {
+ builder.user_environment(range, environment.name(db).text(db));
}
}
}
diff --git a/support/texlab/src/features/completion/tikz_library.rs b/support/texlab/src/features/completion/tikz_library.rs
index bf15695722..7ea26d50c2 100644
--- a/support/texlab/src/features/completion/tikz_library.rs
+++ b/support/texlab/src/features/completion/tikz_library.rs
@@ -1,13 +1,15 @@
-use lsp_types::CompletionParams;
use rowan::ast::AstNode;
-use crate::{features::cursor::CursorContext, syntax::latex, LANGUAGE_DATA};
+use crate::{
+ syntax::latex,
+ util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
+};
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
+use super::builder::CompletionBuilder;
-pub fn complete_tikz_libraries<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (_, range, group) = context.find_curly_group_word_list()?;
@@ -15,17 +17,11 @@ pub fn complete_tikz_libraries<'a>(
if import.command()?.text() == "\\usepgflibrary" {
for name in &LANGUAGE_DATA.pgf_libraries {
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::PgfLibrary { name },
- ));
+ builder.tikz_library(range, name);
}
} else {
for name in &LANGUAGE_DATA.tikz_libraries {
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::TikzLibrary { name },
- ));
+ builder.tikz_library(range, name);
}
}
diff --git a/support/texlab/src/features/completion/types.rs b/support/texlab/src/features/completion/types.rs
deleted file mode 100644
index 3655c6a074..0000000000
--- a/support/texlab/src/features/completion/types.rs
+++ /dev/null
@@ -1,151 +0,0 @@
-use std::sync::Arc;
-
-use lsp_types::Url;
-use rowan::TextRange;
-use serde::{Deserialize, Serialize};
-use smol_str::SmolStr;
-
-use crate::{features::lsp_kinds::Structure, BibtexEntryTypeDoc, BibtexFieldDoc};
-
-#[derive(Debug, Clone)]
-pub struct InternalCompletionItem<'a> {
- pub range: TextRange,
- pub data: InternalCompletionItemData<'a>,
- pub preselect: bool,
- pub score: Option<i64>,
-}
-
-impl<'a> InternalCompletionItem<'a> {
- pub fn new(range: TextRange, data: InternalCompletionItemData<'a>) -> Self {
- Self {
- range,
- data,
- preselect: false,
- score: None,
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub enum InternalCompletionItemData<'a> {
- EntryType {
- ty: &'a BibtexEntryTypeDoc,
- },
- Field {
- field: &'a BibtexFieldDoc,
- },
- Argument {
- name: &'a str,
- image: Option<&'a str>,
- },
- BeginCommand,
- Citation {
- uri: Arc<Url>,
- key: String,
- text: String,
- ty: Structure,
- },
- ComponentCommand {
- name: &'a SmolStr,
- image: Option<&'a str>,
- glyph: Option<&'a str>,
- file_names: &'a [SmolStr],
- },
- ComponentEnvironment {
- name: &'a SmolStr,
- file_names: &'a [SmolStr],
- },
- Class {
- name: SmolStr,
- },
- Package {
- name: SmolStr,
- },
- Color {
- name: &'a str,
- },
- ColorModel {
- name: &'a str,
- },
- Acronym {
- name: String,
- },
- GlossaryEntry {
- name: String,
- },
- File {
- name: SmolStr,
- },
- Directory {
- name: SmolStr,
- },
- Label {
- name: String,
- kind: Structure,
- header: Option<String>,
- footer: Option<String>,
- text: String,
- },
- UserCommand {
- name: SmolStr,
- },
- UserEnvironment {
- name: String,
- },
- PgfLibrary {
- name: &'a str,
- },
- TikzLibrary {
- name: &'a str,
- },
-}
-
-impl<'a> InternalCompletionItemData<'a> {
- pub fn label<'b: 'a>(&'b self) -> &'a str {
- match self {
- Self::EntryType { ty } => &ty.name,
- Self::Field { field } => &field.name,
- Self::Argument { name, .. } => name,
- Self::BeginCommand => "begin",
- Self::Citation { key, .. } => key,
- Self::ComponentCommand { name, .. } => name,
- Self::ComponentEnvironment { name, .. } => name,
- Self::Class { name } => name,
- Self::Package { name } => name,
- Self::Color { name } => name,
- Self::ColorModel { name } => name,
- Self::Acronym { name } => name,
- Self::GlossaryEntry { name } => name,
- Self::File { name } => name,
- Self::Directory { name } => name,
- Self::Label { name, .. } => name,
- Self::UserCommand { name } => name,
- Self::UserEnvironment { name } => name,
- Self::PgfLibrary { name } => name,
- Self::TikzLibrary { name } => name,
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum CompletionItemData {
- Command,
- CommandSnippet,
- Environment,
- Label,
- Folder,
- File,
- PgfLibrary,
- TikzLibrary,
- Color,
- ColorModel,
- Package,
- Class,
- EntryType,
- FieldName,
- Citation { uri: Url, key: SmolStr },
- Argument,
- Acronym,
- GlossaryEntry,
-}
diff --git a/support/texlab/src/features/completion/user_command.rs b/support/texlab/src/features/completion/user_command.rs
index 8c4d593bee..99d9883ee9 100644
--- a/support/texlab/src/features/completion/user_command.rs
+++ b/support/texlab/src/features/completion/user_command.rs
@@ -1,29 +1,27 @@
-use lsp_types::CompletionParams;
+use crate::util::cursor::CursorContext;
-use crate::features::cursor::CursorContext;
+use super::builder::CompletionBuilder;
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
-
-pub fn complete_user_commands<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let range = context.cursor.command_range(context.offset)?;
- let token = context.cursor.as_latex()?;
+ let token = context.cursor.as_tex()?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
+ let db = context.db;
+ for document in context.related() {
+ if let Some(data) = document.parse(db).as_tex() {
+ let text = document.contents(db).text(db);
for name in data
- .extras
- .command_names
+ .analyze(db)
+ .command_name_ranges(db)
.iter()
- .filter(|name| name.as_str() != token.text())
- .cloned()
+ .copied()
+ .filter(|range| *range != token.text_range())
+ .map(|range| &text[std::ops::Range::<usize>::from(range)])
{
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::UserCommand { name },
- ));
+ builder.user_command(range, name);
}
}
}
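The slicing idiom introduced above relies on a rowan TextRange converting directly into std::ops::Range<usize>, so a command name can be read straight out of the document text. A minimal standalone example (the text and offsets are made up):

    use rowan::{TextRange, TextSize};

    fn main() {
        let text = r"Hello \alpha world";
        // The token "\alpha" spans byte offsets 6..12.
        let range = TextRange::new(TextSize::from(6), TextSize::from(12));
        let name = &text[std::ops::Range::<usize>::from(range)];
        assert_eq!(name, r"\alpha");
    }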
diff --git a/support/texlab/src/features/completion/user_environment.rs b/support/texlab/src/features/completion/user_environment.rs
index 91dd5a9ea2..cf6c2a7c78 100644
--- a/support/texlab/src/features/completion/user_environment.rs
+++ b/support/texlab/src/features/completion/user_environment.rs
@@ -1,28 +1,22 @@
-use lsp_types::CompletionParams;
+use crate::util::cursor::CursorContext;
-use crate::features::cursor::CursorContext;
+use super::builder::CompletionBuilder;
-use super::types::{InternalCompletionItem, InternalCompletionItemData};
-
-pub fn complete_user_environments<'a>(
- context: &'a CursorContext<CompletionParams>,
- items: &mut Vec<InternalCompletionItem<'a>>,
+pub fn complete<'db>(
+ context: &'db CursorContext,
+ builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let (name, range) = context.find_environment_name()?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
for name in data
- .extras
- .environment_names
+ .analyze(context.db)
+ .environment_names(context.db)
.iter()
.filter(|n| n.as_str() != name)
- .cloned()
{
- items.push(InternalCompletionItem::new(
- range,
- InternalCompletionItemData::UserEnvironment { name },
- ));
+ builder.user_environment(range, name);
}
}
}
diff --git a/support/texlab/src/features/completion/util.rs b/support/texlab/src/features/completion/util.rs
deleted file mode 100644
index 05827bbf24..0000000000
--- a/support/texlab/src/features/completion/util.rs
+++ /dev/null
@@ -1,64 +0,0 @@
-use lsp_types::{CompletionItemKind, CompletionParams, Documentation, MarkupContent, MarkupKind};
-use smol_str::SmolStr;
-
-use crate::features::FeatureRequest;
-
-pub fn component_detail(file_names: &[SmolStr]) -> String {
- if file_names.is_empty() {
- "built-in".to_owned()
- } else {
- file_names.join(", ")
- }
-}
-
-pub fn image_documentation(
- request: &FeatureRequest<CompletionParams>,
- name: &str,
- image: &str,
-) -> Option<Documentation> {
- if supports_images(request) {
- Some(Documentation::MarkupContent(MarkupContent {
- kind: MarkupKind::Markdown,
- value: format!(
- "![{}](data:image/png;base64,{}|width=48,height=48)",
- name, image
- ),
- }))
- } else {
- None
- }
-}
-
-fn supports_images(request: &FeatureRequest<CompletionParams>) -> bool {
- request
- .workspace
- .environment
- .client_capabilities
- .text_document
- .as_ref()
- .and_then(|cap| cap.completion.as_ref())
- .and_then(|cap| cap.completion_item.as_ref())
- .and_then(|cap| cap.documentation_format.as_ref())
- .map_or(true, |formats| formats.contains(&MarkupKind::Markdown))
-}
-
-pub fn adjust_kind(
- request: &FeatureRequest<CompletionParams>,
- kind: CompletionItemKind,
-) -> CompletionItemKind {
- if let Some(value_set) = request
- .workspace
- .environment
- .client_capabilities
- .text_document
- .as_ref()
- .and_then(|cap| cap.completion.as_ref())
- .and_then(|cap| cap.completion_item_kind.as_ref())
- .and_then(|cap| cap.value_set.as_ref())
- {
- if value_set.contains(&kind) {
- return kind;
- }
- }
- CompletionItemKind::TEXT
-}
diff --git a/support/texlab/src/features/cursor.rs b/support/texlab/src/features/cursor.rs
deleted file mode 100644
index 84ee6e98c4..0000000000
--- a/support/texlab/src/features/cursor.rs
+++ /dev/null
@@ -1,333 +0,0 @@
-use lsp_types::{
- CompletionParams, DocumentHighlightParams, GotoDefinitionParams, HoverParams, Position,
- ReferenceParams, RenameParams, TextDocumentPositionParams,
-};
-use rowan::{ast::AstNode, TextRange, TextSize};
-
-use crate::{
- syntax::{bibtex, latex},
- DocumentData, LineIndexExt,
-};
-
-use super::FeatureRequest;
-
-#[derive(Debug)]
-pub enum Cursor {
- Latex(latex::SyntaxToken),
- Bibtex(bibtex::SyntaxToken),
- Nothing,
-}
-
-impl Cursor {
- pub fn new_latex(
- left: Option<latex::SyntaxToken>,
- right: Option<latex::SyntaxToken>,
- ) -> Option<Self> {
- let left = left?;
- let right = right?;
-
- if left.kind().is_command_name() {
- return Some(Self::Latex(left));
- }
-
- if right.kind() == latex::WORD {
- return Some(Self::Latex(right));
- }
-
- if left.kind() == latex::WORD {
- return Some(Self::Latex(left));
- }
-
- if right.kind().is_command_name() {
- return Some(Self::Latex(right));
- }
-
- if left.kind() == latex::WHITESPACE && left.parent()?.kind() == latex::KEY {
- return Some(Self::Latex(left));
- }
-
- if matches!(right.kind(), latex::WHITESPACE | latex::LINE_BREAK)
- && right.parent()?.kind() == latex::KEY
- {
- return Some(Self::Latex(right));
- }
-
- Some(Self::Latex(right))
- }
-
- pub fn new_bibtex(
- left: Option<bibtex::SyntaxToken>,
- right: Option<bibtex::SyntaxToken>,
- ) -> Option<Self> {
- let left = left?;
- let right = right?;
-
- if right.kind() == bibtex::TYPE {
- return Some(Self::Bibtex(right));
- }
-
- if left.kind() == bibtex::TYPE {
- return Some(Self::Bibtex(left));
- }
-
- if matches!(left.kind(), bibtex::COMMAND_NAME | bibtex::ACCENT_NAME) {
- return Some(Self::Bibtex(left));
- }
-
- if matches!(right.kind(), bibtex::WORD | bibtex::NAME) {
- return Some(Self::Bibtex(right));
- }
-
- if matches!(left.kind(), bibtex::WORD | bibtex::NAME) {
- return Some(Self::Bibtex(left));
- }
-
- if matches!(right.kind(), bibtex::COMMAND_NAME | bibtex::ACCENT_NAME) {
- return Some(Self::Bibtex(right));
- }
-
- Some(Self::Bibtex(right))
- }
-
- pub fn as_latex(&self) -> Option<&latex::SyntaxToken> {
- if let Self::Latex(v) = self {
- Some(v)
- } else {
- None
- }
- }
-
- pub fn as_bibtex(&self) -> Option<&bibtex::SyntaxToken> {
- if let Self::Bibtex(v) = self {
- Some(v)
- } else {
- None
- }
- }
-
- pub fn command_range(&self, offset: TextSize) -> Option<TextRange> {
- self.as_latex()
- .filter(|token| token.kind().is_command_name())
- .filter(|token| token.text_range().start() != offset)
- .map(|token| token.text_range())
- .map(|range| TextRange::new(range.start() + TextSize::from(1), range.end()))
- .or_else(|| {
- self.as_bibtex()
- .filter(|token| {
- matches!(token.kind(), bibtex::COMMAND_NAME | bibtex::ACCENT_NAME)
- })
- .filter(|token| token.text_range().start() != offset)
- .map(|token| token.text_range())
- .map(|range| TextRange::new(range.start() + TextSize::from(1), range.end()))
- })
- }
-}
-
-pub struct CursorContext<P> {
- pub request: FeatureRequest<P>,
- pub cursor: Cursor,
- pub offset: TextSize,
-}
-
-impl<P: HasPosition> CursorContext<P> {
- pub fn new(request: FeatureRequest<P>) -> Self {
- let main_document = request.main_document();
- let offset = main_document
- .line_index
- .offset_lsp(request.params.position());
-
- let cursor = match &main_document.data {
- DocumentData::Latex(data) => {
- let root = latex::SyntaxNode::new_root(data.green.clone());
- let left = root.token_at_offset(offset).left_biased();
- let right = root.token_at_offset(offset).right_biased();
- Cursor::new_latex(left, right)
- }
- DocumentData::Bibtex(data) => {
- let root = bibtex::SyntaxNode::new_root(data.green.clone());
- let left = root.token_at_offset(offset).left_biased();
- let right = root.token_at_offset(offset).right_biased();
- Cursor::new_bibtex(left, right)
- }
- DocumentData::BuildLog(_) => None,
- };
-
- Self {
- request,
- cursor: cursor.unwrap_or(Cursor::Nothing),
- offset,
- }
- }
-
- pub fn is_inside_latex_curly(&self, group: &impl latex::HasCurly) -> bool {
- latex::small_range(group).contains(self.offset) || group.right_curly().is_none()
- }
-
- pub fn find_citation_key_word(&self) -> Option<(String, TextRange)> {
- let word = self
- .cursor
- .as_latex()
- .filter(|token| token.kind() == latex::WORD)?;
-
- let key = latex::Key::cast(word.parent()?)?;
-
- let group = latex::CurlyGroupWordList::cast(key.syntax().parent()?)?;
- latex::Citation::cast(group.syntax().parent()?)?;
- Some((key.to_string(), latex::small_range(&key)))
- }
-
- pub fn find_citation_key_command(&self) -> Option<(String, TextRange)> {
- let command = self.cursor.as_latex()?;
-
- let citation = latex::Citation::cast(command.parent()?)?;
- let key = citation.key_list()?.keys().next()?;
- Some((key.to_string(), latex::small_range(&key)))
- }
-
- pub fn find_entry_key(&self) -> Option<(String, TextRange)> {
- let key = self
- .cursor
- .as_bibtex()
- .filter(|token| token.kind() == bibtex::NAME)?;
-
- bibtex::Entry::cast(key.parent()?)?;
- Some((key.to_string(), key.text_range()))
- }
-
- pub fn find_label_name_key(&self) -> Option<(String, TextRange)> {
- let name = self
- .cursor
- .as_latex()
- .filter(|token| token.kind() == latex::WORD)?;
-
- let key = latex::Key::cast(name.parent()?)?;
-
- if matches!(
- key.syntax().parent()?.parent()?.kind(),
- latex::LABEL_DEFINITION | latex::LABEL_REFERENCE | latex::LABEL_REFERENCE_RANGE
- ) {
- Some((key.to_string(), latex::small_range(&key)))
- } else {
- None
- }
- }
-
- pub fn find_label_name_command(&self) -> Option<(String, TextRange)> {
- let node = self.cursor.as_latex()?.parent()?;
- if let Some(label) = latex::LabelDefinition::cast(node.clone()) {
- let name = label.name()?.key()?;
- Some((name.to_string(), latex::small_range(&name)))
- } else if let Some(label) = latex::LabelReference::cast(node.clone()) {
- let name = label.name_list()?.keys().next()?;
- Some((name.to_string(), latex::small_range(&name)))
- } else if let Some(label) = latex::LabelReferenceRange::cast(node) {
- let name = label.from()?.key()?;
- Some((name.to_string(), latex::small_range(&name)))
- } else {
- None
- }
- }
-
- pub fn find_environment_name(&self) -> Option<(String, TextRange)> {
- let (name, range, group) = self.find_curly_group_word()?;
-
- if !matches!(group.syntax().parent()?.kind(), latex::BEGIN | latex::END) {
- return None;
- }
-
- Some((name, range))
- }
-
- pub fn find_curly_group_word(&self) -> Option<(String, TextRange, latex::CurlyGroupWord)> {
- let token = self.cursor.as_latex()?;
- let key = latex::Key::cast(token.parent()?);
-
- let group = key
- .as_ref()
- .and_then(|key| key.syntax().parent())
- .unwrap_or(token.parent()?);
-
- let group =
- latex::CurlyGroupWord::cast(group).filter(|group| self.is_inside_latex_curly(group))?;
-
- key.map(|key| (key.to_string(), latex::small_range(&key), group.clone()))
- .or_else(|| Some((String::new(), TextRange::empty(self.offset), group)))
- }
-
- pub fn find_curly_group_word_list(
- &self,
- ) -> Option<(String, TextRange, latex::CurlyGroupWordList)> {
- let token = self.cursor.as_latex()?;
- let key = latex::Key::cast(token.parent()?);
-
- let group = key
- .as_ref()
- .and_then(|key| key.syntax().parent())
- .unwrap_or(token.parent()?);
-
- let group = latex::CurlyGroupWordList::cast(group)
- .filter(|group| self.is_inside_latex_curly(group))?;
-
- key.map(|key| {
- let range = if group
- .syntax()
- .last_token()
- .filter(|tok| tok.kind() == latex::MISSING)
- .is_some()
- {
- TextRange::new(latex::small_range(&key).start(), token.text_range().end())
- } else {
- latex::small_range(&key)
- };
-
- (key.to_string(), range, group.clone())
- })
- .or_else(|| Some((String::new(), TextRange::empty(self.offset), group)))
- }
-}
-
-pub trait HasPosition {
- fn position(&self) -> Position;
-}
-
-impl HasPosition for CompletionParams {
- fn position(&self) -> Position {
- self.text_document_position.position
- }
-}
-
-impl HasPosition for TextDocumentPositionParams {
- fn position(&self) -> Position {
- self.position
- }
-}
-
-impl HasPosition for RenameParams {
- fn position(&self) -> Position {
- self.text_document_position.position
- }
-}
-
-impl HasPosition for ReferenceParams {
- fn position(&self) -> Position {
- self.text_document_position.position
- }
-}
-
-impl HasPosition for HoverParams {
- fn position(&self) -> Position {
- self.text_document_position_params.position
- }
-}
-
-impl HasPosition for GotoDefinitionParams {
- fn position(&self) -> Position {
- self.text_document_position_params.position
- }
-}
-
-impl HasPosition for DocumentHighlightParams {
- fn position(&self) -> Position {
- self.text_document_position_params.position
- }
-}
diff --git a/support/texlab/src/features/definition.rs b/support/texlab/src/features/definition.rs
index 20da72a142..7c9e6d4841 100644
--- a/support/texlab/src/features/definition.rs
+++ b/support/texlab/src/features/definition.rs
@@ -4,48 +4,44 @@ mod entry;
mod label;
mod string;
-use std::sync::Arc;
-
-use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, LocationLink, Url};
+use lsp_types::{GotoDefinitionResponse, LocationLink, Position, Url};
use rowan::TextRange;
-use crate::LineIndexExt;
-
-use self::{
- command::goto_command_definition, document::goto_document_definition,
- entry::goto_entry_definition, label::goto_label_definition, string::goto_string_definition,
+use crate::{
+ db::Document,
+ util::{cursor::CursorContext, line_index_ext::LineIndexExt},
+ Db,
};
-use super::{cursor::CursorContext, FeatureRequest};
-
pub fn goto_definition(
- request: FeatureRequest<GotoDefinitionParams>,
+ db: &dyn Db,
+ uri: &Url,
+ position: Position,
) -> Option<GotoDefinitionResponse> {
- let context = CursorContext::new(request);
+ let context = CursorContext::new(db, uri, position, ())?;
log::debug!("[Definition] Cursor: {:?}", context.cursor);
- let origin_document = context.request.main_document();
- let links: Vec<_> = goto_command_definition(&context)
- .or_else(|| goto_document_definition(&context))
- .or_else(|| goto_entry_definition(&context))
- .or_else(|| goto_label_definition(&context))
- .or_else(|| goto_string_definition(&context))?
+ let links: Vec<_> = command::goto_definition(&context)
+ .or_else(|| document::goto_definition(&context))
+ .or_else(|| entry::goto_definition(&context))
+ .or_else(|| label::goto_definition(&context))
+ .or_else(|| string::goto_definition(&context))?
.into_iter()
.map(|result| {
let origin_selection_range = Some(
- origin_document
- .line_index
+ context
+ .document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(result.origin_selection_range),
);
- let target_document = &context.request.workspace.documents_by_uri[&result.target_uri];
- let target_uri = result.target_uri.as_ref().clone();
- let target_range = target_document
- .line_index
- .line_col_lsp_range(result.target_range);
- let target_selection_range = target_document
- .line_index
- .line_col_lsp_range(result.target_selection_range);
+ let target_line_index = result.target.contents(db).line_index(db);
+ let target_uri = result.target.location(context.db).uri(context.db).clone();
+ let target_range = target_line_index.line_col_lsp_range(result.target_range);
+
+ let target_selection_range =
+ target_line_index.line_col_lsp_range(result.target_selection_range);
LocationLink {
origin_selection_range,
@@ -62,7 +58,7 @@ pub fn goto_definition(
#[derive(Debug, Clone)]
struct DefinitionResult {
origin_selection_range: TextRange,
- target_uri: Arc<Url>,
+ target: Document,
target_range: TextRange,
target_selection_range: TextRange,
}
diff --git a/support/texlab/src/features/definition/command.rs b/support/texlab/src/features/definition/command.rs
index 599c792dfd..cf9fe1b511 100644
--- a/support/texlab/src/features/definition/command.rs
+++ b/support/texlab/src/features/definition/command.rs
@@ -1,26 +1,20 @@
-use std::sync::Arc;
-
-use lsp_types::GotoDefinitionParams;
use rowan::ast::AstNode;
-use crate::{features::cursor::CursorContext, syntax::latex};
+use crate::{syntax::latex, util::cursor::CursorContext};
use super::DefinitionResult;
-pub(super) fn goto_command_definition(
- context: &CursorContext<GotoDefinitionParams>,
-) -> Option<Vec<DefinitionResult>> {
+pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
let name = context
.cursor
- .as_latex()
+ .as_tex()
.filter(|token| token.kind().is_command_name())?;
let origin_selection_range = name.text_range();
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- let root = latex::SyntaxNode::new_root(data.green.clone());
-
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
+ let root = data.root(context.db);
if let Some(result) = root
.descendants()
.filter_map(latex::CommandDefinition::cast)
@@ -32,7 +26,7 @@ pub(super) fn goto_command_definition(
.find_map(|def| {
Some(DefinitionResult {
origin_selection_range,
- target_uri: Arc::clone(&document.uri),
+ target: document,
target_range: latex::small_range(&def),
target_selection_range: def.name()?.command()?.text_range(),
})
diff --git a/support/texlab/src/features/definition/document.rs b/support/texlab/src/features/definition/document.rs
index 1c09bbc88d..58237f7521 100644
--- a/support/texlab/src/features/definition/document.rs
+++ b/support/texlab/src/features/definition/document.rs
@@ -1,40 +1,30 @@
-use std::sync::Arc;
-
-use lsp_types::GotoDefinitionParams;
use rowan::TextRange;
-use crate::features::cursor::CursorContext;
+use crate::{db::dependency_graph, util::cursor::CursorContext};
use super::DefinitionResult;
-pub(super) fn goto_document_definition(
- context: &CursorContext<GotoDefinitionParams>,
-) -> Option<Vec<DefinitionResult>> {
- let data = context.request.main_document().data.as_latex()?;
-
- for include in data
- .extras
- .explicit_links
+pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
+ let db = context.db;
+ context
+ .workspace
+ .parents(db, context.document)
.iter()
- .filter(|link| link.stem_range.contains_inclusive(context.offset))
- {
- for target in &include.targets {
- if context
- .request
- .workspace
- .documents_by_uri
- .values()
- .any(|document| document.uri.as_ref() == target.as_ref())
- {
- return Some(vec![DefinitionResult {
- origin_selection_range: include.stem_range,
- target_uri: Arc::clone(target),
+ .copied()
+ .chain(std::iter::once(context.document))
+ .flat_map(|parent| dependency_graph(db, parent).edges)
+ .filter(|edge| edge.source == context.document)
+ .find_map(|edge| {
+ let range = edge.origin?.link.range(db);
+ if range.contains_inclusive(context.offset) {
+ Some(vec![DefinitionResult {
+ origin_selection_range: range,
+ target: edge.target,
target_range: TextRange::default(),
target_selection_range: TextRange::default(),
- }]);
+ }])
+ } else {
+ None
}
- }
- }
-
- None
+ })
}
diff --git a/support/texlab/src/features/definition/entry.rs b/support/texlab/src/features/definition/entry.rs
index 1dcd983af1..a945b18c41 100644
--- a/support/texlab/src/features/definition/entry.rs
+++ b/support/texlab/src/features/definition/entry.rs
@@ -1,24 +1,21 @@
-use std::sync::Arc;
-
-use lsp_types::GotoDefinitionParams;
use rowan::ast::AstNode;
use crate::{
- features::cursor::CursorContext,
syntax::{
bibtex::{self, HasName},
latex,
},
+ util::cursor::CursorContext,
};
use super::DefinitionResult;
-pub(super) fn goto_entry_definition(
- context: &CursorContext<GotoDefinitionParams>,
-) -> Option<Vec<DefinitionResult>> {
+pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
+ let db = context.db;
+
let word = context
.cursor
- .as_latex()
+ .as_tex()
.filter(|token| token.kind() == latex::WORD)?;
let key = latex::Key::cast(word.parent()?)?;
@@ -27,16 +24,13 @@ pub(super) fn goto_entry_definition(
let origin_selection_range = latex::small_range(&key);
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_bibtex() {
- for entry in bibtex::SyntaxNode::new_root(data.green.clone())
- .children()
- .filter_map(bibtex::Entry::cast)
- {
+ for document in context.related() {
+ if let Some(data) = document.parse(db).as_bib() {
+ for entry in data.root(db).children().filter_map(bibtex::Entry::cast) {
if let Some(key) = entry.name_token().filter(|k| k.text() == word.text()) {
return Some(vec![DefinitionResult {
origin_selection_range,
- target_uri: Arc::clone(&document.uri),
+ target: document,
target_selection_range: key.text_range(),
target_range: entry.syntax().text_range(),
}]);
diff --git a/support/texlab/src/features/definition/label.rs b/support/texlab/src/features/definition/label.rs
index 867e8a1076..a1cc4ed15b 100644
--- a/support/texlab/src/features/definition/label.rs
+++ b/support/texlab/src/features/definition/label.rs
@@ -1,31 +1,32 @@
-use std::sync::Arc;
-
-use lsp_types::GotoDefinitionParams;
-
-use crate::{features::cursor::CursorContext, find_label_definition, render_label, syntax::latex};
+use crate::{
+ db::analysis::label,
+ util::{self, cursor::CursorContext},
+};
use super::DefinitionResult;
-pub(super) fn goto_label_definition(
- context: &CursorContext<GotoDefinitionParams>,
-) -> Option<Vec<DefinitionResult>> {
+pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
+ let db = context.db;
let (name_text, origin_selection_range) = context
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- let root = latex::SyntaxNode::new_root(data.green.clone());
- if let Some(definition) = find_label_definition(&root, &name_text) {
- let target_selection_range = latex::small_range(&definition.name()?.key()?);
- let target_range =
- render_label(&context.request.workspace, &name_text, Some(definition))
- .map(|label| label.range)
- .unwrap_or(target_selection_range);
+ for document in context.related() {
+ if let Some(data) = document.parse(db).as_tex() {
+ if let Some(label) = data
+ .analyze(db)
+ .labels(db)
+ .iter()
+ .filter(|label| matches!(label.origin(db), label::Origin::Definition(_)))
+ .find(|label| label.name(db).text(db) == name_text.as_str())
+ {
+ let target_selection_range = label.range(db);
+ let target_range = util::label::render(db, document, *label)
+ .map_or(target_selection_range, |label| label.range);
return Some(vec![DefinitionResult {
origin_selection_range,
- target_uri: Arc::clone(&document.uri),
+ target: document,
target_range,
target_selection_range,
}]);
diff --git a/support/texlab/src/features/definition/string.rs b/support/texlab/src/features/definition/string.rs
index c774e24e1d..44ea1e9094 100644
--- a/support/texlab/src/features/definition/string.rs
+++ b/support/texlab/src/features/definition/string.rs
@@ -1,43 +1,34 @@
-use std::sync::Arc;
-
-use lsp_types::GotoDefinitionParams;
use rowan::ast::AstNode;
use crate::{
- features::cursor::CursorContext,
syntax::bibtex::{self, HasName},
+ util::cursor::CursorContext,
};
use super::DefinitionResult;
-pub(super) fn goto_string_definition(
- context: &CursorContext<GotoDefinitionParams>,
-) -> Option<Vec<DefinitionResult>> {
- let main_document = context.request.main_document();
-
- let data = main_document.data.as_bibtex()?;
+pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
+ let db = context.db;
+ let data = context.document.parse(db).as_bib()?;
let key = context
.cursor
- .as_bibtex()
+ .as_bib()
.filter(|token| token.kind() == bibtex::NAME)?;
bibtex::Value::cast(key.parent()?)?;
let origin_selection_range = key.text_range();
- for string in bibtex::SyntaxNode::new_root(data.green.clone())
+ data.root(db)
.children()
.filter_map(bibtex::StringDef::cast)
- {
- if let Some(string_name) = string.name_token().filter(|k| k.text() == key.text()) {
- return Some(vec![DefinitionResult {
+ .find_map(|string| {
+ let string_name = string.name_token().filter(|k| k.text() == key.text())?;
+ Some(vec![DefinitionResult {
origin_selection_range,
- target_uri: Arc::clone(&main_document.uri),
+ target: context.document,
target_selection_range: string_name.text_range(),
target_range: string.syntax().text_range(),
- }]);
- }
- }
-
- None
+ }])
+ })
}
diff --git a/support/texlab/src/features/execute_command.rs b/support/texlab/src/features/execute_command.rs
deleted file mode 100644
index b1b89d4f50..0000000000
--- a/support/texlab/src/features/execute_command.rs
+++ /dev/null
@@ -1,103 +0,0 @@
-use std::{path::PathBuf, process::Stdio, sync::Arc};
-
-use anyhow::Result;
-use lsp_types::{TextDocumentIdentifier, Url};
-
-use crate::Workspace;
-
-pub fn execute_command(
- workspace: &Workspace,
- name: &str,
- args: Vec<serde_json::Value>,
-) -> Result<()> {
- match name {
- "texlab.cleanAuxiliary" => {
- let params = args
- .into_iter()
- .next()
- .ok_or_else(|| anyhow::anyhow!("texlab.cleanAuxiliary requires one argument"))?;
-
- clean_output_files(workspace, CleanOptions::Auxiliary, params)?;
- }
- "texlab.cleanArtifacts" => {
- let params = args
- .into_iter()
- .next()
- .ok_or_else(|| anyhow::anyhow!("texlab.cleanArtifacts requires one argument"))?;
-
- clean_output_files(workspace, CleanOptions::Artifacts, params)?;
- }
- _ => anyhow::bail!("Unknown command: {}", name),
- }
-
- Ok(())
-}
-
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
-enum CleanOptions {
- Auxiliary,
- Artifacts,
-}
-
-fn clean_output_files(
- workspace: &Workspace,
- options: CleanOptions,
- params: serde_json::Value,
-) -> Result<()> {
- let params: TextDocumentIdentifier = serde_json::from_value(params)?;
-
- let uri = workspace
- .find_parent(&params.uri)
- .map(|document| document.uri)
- .unwrap_or_else(|| Arc::new(params.uri));
-
- if let Some(cx) = BuildContext::find(workspace, &uri) {
- let flag = match options {
- CleanOptions::Auxiliary => "-c",
- CleanOptions::Artifacts => "-C",
- };
-
- std::process::Command::new("latexmk")
- .arg(format!("-outdir={}", cx.output_dir.to_string_lossy()))
- .arg(flag)
- .arg(cx.input_file)
- .stdin(Stdio::null())
- .stdout(Stdio::null())
- .stderr(Stdio::null())
- .status()?;
- }
-
- Ok(())
-}
-
-struct BuildContext {
- input_file: PathBuf,
- output_dir: PathBuf,
-}
-
-impl BuildContext {
- pub fn find(workspace: &Workspace, uri: &Url) -> Option<Self> {
- if uri.scheme() != "file" {
- return None;
- }
-
- let input_file = uri.to_file_path().ok()?;
- let options = &workspace.environment.options;
- let current_dir = &workspace.environment.current_directory;
- let output_dir = match (
- options.root_directory.as_ref(),
- options.aux_directory.as_ref(),
- ) {
- (_, Some(aux_dir)) => current_dir.join(aux_dir),
- (Some(root_dir), None) => current_dir.join(root_dir),
- (None, None) => input_file.parent()?.to_path_buf(),
- };
-
- log::info!("Output = {:#?}", output_dir);
-
- Some(Self {
- input_file,
- output_dir,
- })
- }
-}
diff --git a/support/texlab/src/features/folding.rs b/support/texlab/src/features/folding.rs
index 71fb0bee6f..ea26121f3f 100644
--- a/support/texlab/src/features/folding.rs
+++ b/support/texlab/src/features/folding.rs
@@ -1,49 +1,52 @@
-use lsp_types::{FoldingRange, FoldingRangeKind, FoldingRangeParams, Range};
+use lsp_types::{FoldingRange, FoldingRangeKind, Range, Url};
use rowan::ast::AstNode;
use crate::{
+ db::{parse::DocumentData, Workspace},
syntax::{bibtex, latex},
- DocumentData, LineIndexExt,
+ util::line_index_ext::LineIndexExt,
+ Db,
};
-use super::FeatureRequest;
-
-pub fn find_foldings(request: FeatureRequest<FoldingRangeParams>) -> Vec<FoldingRange> {
- let mut foldings = Vec::new();
- let main_document = request.main_document();
- match &main_document.data {
- DocumentData::Latex(data) => {
- for node in latex::SyntaxNode::new_root(data.green.clone()).descendants() {
+pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<FoldingRange>> {
+ let document = Workspace::get(db).lookup_uri(db, uri)?;
+ let line_index = document.contents(db).line_index(db);
+ let foldings = match document.parse(db) {
+ DocumentData::Tex(data) => {
+ let mut results = Vec::new();
+ let root = data.root(db);
+ for node in root.descendants() {
if let Some(folding) = latex::Environment::cast(node.clone())
.map(|node| latex::small_range(&node))
.or_else(|| {
latex::Section::cast(node.clone()).map(|node| latex::small_range(&node))
})
.or_else(|| latex::EnumItem::cast(node).map(|node| latex::small_range(&node)))
- .map(|node| main_document.line_index.line_col_lsp_range(node))
+ .map(|node| line_index.line_col_lsp_range(node))
.map(create_range)
{
- foldings.push(folding);
+ results.push(folding);
}
}
+
+ results
}
- DocumentData::Bibtex(data) => {
- for node in bibtex::SyntaxNode::new_root(data.green.clone()).descendants() {
- if matches!(
- node.kind(),
- bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY
- ) {
- foldings.push(create_range(
- main_document
- .line_index
- .line_col_lsp_range(node.text_range()),
- ));
- }
- }
+ DocumentData::Bib(data) => {
+ let root = data.root(db);
+ root.descendants()
+ .filter(|node| {
+ matches!(
+ node.kind(),
+ bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY
+ )
+ })
+ .map(|node| create_range(line_index.line_col_lsp_range(node.text_range())))
+ .collect()
}
- DocumentData::BuildLog(_) => {}
- }
- foldings
+ DocumentData::Log(_) => return None,
+ };
+
+ Some(foldings)
}
fn create_range(range: Range) -> FoldingRange {
diff --git a/support/texlab/src/features/formatting.rs b/support/texlab/src/features/formatting.rs
index 1107e5fd38..66375bd576 100644
--- a/support/texlab/src/features/formatting.rs
+++ b/support/texlab/src/features/formatting.rs
@@ -1,26 +1,31 @@
mod bibtex_internal;
mod latexindent;
-use lsp_types::{DocumentFormattingParams, TextEdit};
+use lsp_types::{FormattingOptions, TextEdit, Url};
-use crate::{BibtexFormatter, LatexFormatter};
+use crate::{
+ db::{Language, Workspace},
+ BibtexFormatter, Db, LatexFormatter,
+};
use self::{bibtex_internal::format_bibtex_internal, latexindent::format_with_latexindent};
-use super::FeatureRequest;
-
pub fn format_source_code(
- request: FeatureRequest<DocumentFormattingParams>,
+ db: &dyn Db,
+ uri: &Url,
+ options: &FormattingOptions,
) -> Option<Vec<TextEdit>> {
- let mut edits = None;
- if request.workspace.environment.options.bibtex_formatter == BibtexFormatter::Texlab {
- edits = edits.or_else(|| format_bibtex_internal(&request));
- }
-
- if request.workspace.environment.options.latex_formatter == LatexFormatter::Texlab {
- edits = edits.or_else(|| Some(vec![]));
+ let workspace = Workspace::get(db);
+ let document = workspace.lookup_uri(db, uri)?;
+ match document.language(db) {
+ Language::Tex => match workspace.options(db).latex_formatter {
+ LatexFormatter::Texlab => None,
+ LatexFormatter::Latexindent => format_with_latexindent(db, document),
+ },
+ Language::Bib => match workspace.options(db).bibtex_formatter {
+ BibtexFormatter::Texlab => format_bibtex_internal(db, document, options),
+ BibtexFormatter::Latexindent => format_with_latexindent(db, document),
+ },
+ Language::Log => None,
}
-
- edits = edits.or_else(|| format_with_latexindent(&request));
- edits
}
diff --git a/support/texlab/src/features/formatting/bibtex_internal.rs b/support/texlab/src/features/formatting/bibtex_internal.rs
index 2dcc507be9..8447778d90 100644
--- a/support/texlab/src/features/formatting/bibtex_internal.rs
+++ b/support/texlab/src/features/formatting/bibtex_internal.rs
@@ -1,63 +1,57 @@
-use lsp_types::{DocumentFormattingParams, TextEdit};
+use lsp_types::{FormattingOptions, TextEdit};
use rowan::{ast::AstNode, NodeOrToken};
use crate::{
- features::FeatureRequest,
+ db::{Document, Workspace},
syntax::bibtex::{self, HasName, HasType, HasValue},
- LineIndex, LineIndexExt,
+ util::{line_index::LineIndex, line_index_ext::LineIndexExt},
+ Db,
};
pub fn format_bibtex_internal(
- request: &FeatureRequest<DocumentFormattingParams>,
+ db: &dyn Db,
+ document: Document,
+ options: &FormattingOptions,
) -> Option<Vec<TextEdit>> {
let mut indent = String::new();
- if request.params.options.insert_spaces {
- for _ in 0..request.params.options.tab_size {
+
+ if options.insert_spaces {
+ for _ in 0..options.tab_size {
indent.push(' ');
}
} else {
indent.push('\t');
}
- let line_length = request
- .workspace
- .environment
- .options
+ let line_length = Workspace::get(db)
+ .options(db)
.formatter_line_length
- .map(|value| {
+ .map_or(80, |value| {
if value <= 0 {
usize::MAX
} else {
value as usize
}
- })
- .unwrap_or(80);
+ });
- let document = request.main_document();
- let data = document.data.as_bibtex()?;
+ let line_index = document.contents(db).line_index(db);
+ let data = document.parse(db).as_bib()?;
let mut edits = Vec::new();
- for node in bibtex::SyntaxNode::new_root(data.green.clone())
- .children()
- .filter(|node| {
- matches!(
- node.kind(),
- bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY
- )
- })
- {
+ for node in data.root(db).children().filter(|node| {
+ matches!(
+ node.kind(),
+ bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY
+ )
+ }) {
let range = node.text_range();
- let mut formatter = Formatter::new(
- indent.clone(),
- request.params.options.tab_size,
- line_length,
- &document.line_index,
- );
+ let mut formatter =
+ Formatter::new(indent.clone(), options.tab_size, line_length, line_index);
formatter.visit_node(node);
edits.push(TextEdit {
- range: document.line_index.line_col_lsp_range(range),
+ range: line_index.line_col_lsp_range(range),
new_text: formatter.output,
});
}
diff --git a/support/texlab/src/features/formatting/latexindent.rs b/support/texlab/src/features/formatting/latexindent.rs
index 4898aa4415..3dab10c923 100644
--- a/support/texlab/src/features/formatting/latexindent.rs
+++ b/support/texlab/src/features/formatting/latexindent.rs
@@ -1,94 +1,78 @@
use std::{
- fs,
+ path::Path,
process::{Command, Stdio},
};
-use lsp_types::{DocumentFormattingParams, TextEdit};
+use lsp_types::TextEdit;
use rowan::{TextLen, TextRange};
use tempfile::tempdir;
-use crate::{features::FeatureRequest, DocumentLanguage, LineIndexExt};
-
-pub fn format_with_latexindent(
- request: &FeatureRequest<DocumentFormattingParams>,
-) -> Option<Vec<TextEdit>> {
- let directory = tempdir().ok()?;
- let document = request.main_document();
+use crate::{
+ db::{Document, Language, Workspace},
+ util::line_index_ext::LineIndexExt,
+ Db, LatexindentOptions,
+};
- let options = &request.workspace.environment.options;
- let current_dir = options
- .root_directory
- .as_ref()
- .cloned()
- .or_else(|| {
- if document.uri.scheme() == "file" {
- document
- .uri
- .to_file_path()
- .unwrap()
- .parent()
- .map(ToOwned::to_owned)
- } else {
- None
- }
- })
- .unwrap_or_else(|| ".".into());
+pub fn format_with_latexindent(db: &dyn Db, document: Document) -> Option<Vec<TextEdit>> {
+ let workspace = Workspace::get(db);
+ let options = workspace.options(db);
+ let target_dir = tempdir().ok()?;
+ let source_dir = workspace
+ .working_dir(db, document.directory(db))
+ .path(db)
+ .as_deref()?;
- let local = match &options.latexindent.local {
- Some(local) => format!("--local={}", local),
- None => "-l".to_string(),
- };
+ let target_file = target_dir
+ .path()
+ .join(if document.language(db) == Language::Bib {
+ "file.bib"
+ } else {
+ "file.tex"
+ });
+ std::fs::write(&target_file, document.contents(db).text(db)).ok()?;
- let modify_line_breaks = options.latexindent.modify_line_breaks;
+ let args = build_arguments(&options.latexindent, &target_file);
- let path = directory.path();
- let _ = fs::copy(
- current_dir.join("localSettings.yaml"),
- path.join("localSettings.yaml"),
- );
- let _ = fs::copy(
- current_dir.join(".localSettings.yaml"),
- path.join(".localSettings.yaml"),
- );
- let _ = fs::copy(
- current_dir.join("latexindent.yaml"),
- path.join("latexindent.yaml"),
+ log::debug!(
+ "Running latexindent in folder \"{}\" with args: {:?}",
+ source_dir.display(),
+ args,
);
- let name = if document.data.language() == DocumentLanguage::Bibtex {
- "file.bib"
- } else {
- "file.tex"
- };
-
- fs::write(directory.path().join(name), document.text.as_str()).ok()?;
-
- let mut args = Vec::new();
- if modify_line_breaks {
- args.push("--modifylinebreaks");
- }
- args.push(&local);
- args.push(name);
-
let output = Command::new("latexindent")
.args(&args)
- .current_dir(current_dir)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::null())
- .current_dir(directory.path())
+ .current_dir(source_dir)
.output()
.ok()?;
+ let old_text = document.contents(db).text(db);
let new_text = String::from_utf8_lossy(&output.stdout).into_owned();
if new_text.is_empty() {
None
} else {
+ let line_index = document.contents(db).line_index(db);
Some(vec![TextEdit {
- range: document
- .line_index
- .line_col_lsp_range(TextRange::new(0.into(), document.text.text_len())),
+ range: line_index.line_col_lsp_range(TextRange::new(0.into(), old_text.text_len())),
new_text,
}])
}
}
+
+fn build_arguments(options: &LatexindentOptions, target_file: &Path) -> Vec<String> {
+ let mut args = Vec::new();
+
+ args.push(match &options.local {
+ Some(yaml_file) => format!("--local={yaml_file}"),
+ None => "--local".to_string(),
+ });
+
+ if options.modify_line_breaks {
+ args.push("--modifylinebreaks".to_string());
+ }
+
+ args.push(target_file.display().to_string());
+ args
+}
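
The rewritten latexindent integration writes the document into a temporary file and then runs latexindent from the document's source directory, so YAML settings living next to the sources can still be picked up through --local instead of being copied into the temp folder. Below is a minimal sketch of the argument vector that build_arguments produces; it uses a stand-in Opts struct because the real LatexindentOptions lives elsewhere in texlab's configuration and may carry more fields.

    use std::path::Path;

    // Stand-in for LatexindentOptions; only the two fields used above are mirrored.
    struct Opts {
        local: Option<String>,
        modify_line_breaks: bool,
    }

    fn sketch_args(opts: &Opts, target: &Path) -> Vec<String> {
        let mut args = vec![match &opts.local {
            Some(yaml) => format!("--local={yaml}"), // user-supplied YAML settings file
            None => "--local".to_string(),           // latexindent's default local lookup
        }];
        if opts.modify_line_breaks {
            args.push("--modifylinebreaks".to_string());
        }
        args.push(target.display().to_string());
        args
    }

    fn main() {
        let opts = Opts { local: None, modify_line_breaks: true };
        assert_eq!(
            sketch_args(&opts, Path::new("/tmp/texlab-fmt/file.tex")),
            ["--local", "--modifylinebreaks", "/tmp/texlab-fmt/file.tex"],
        );
    }
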
diff --git a/support/texlab/src/features/forward_search.rs b/support/texlab/src/features/forward_search.rs
index 54629d0dc0..47be702659 100644
--- a/support/texlab/src/features/forward_search.rs
+++ b/support/texlab/src/features/forward_search.rs
@@ -1,94 +1,120 @@
use std::{
io,
- path::Path,
- process::{Command, Stdio},
+ path::{Path, PathBuf},
+ process::Stdio,
};
use log::error;
-use lsp_types::TextDocumentPositionParams;
-use serde::{Deserialize, Serialize};
-use serde_repr::{Deserialize_repr, Serialize_repr};
-
-use super::FeatureRequest;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize_repr, Deserialize_repr)]
-#[repr(i32)]
-pub enum ForwardSearchStatus {
- SUCCESS = 0,
- ERROR = 1,
- FAILURE = 2,
- UNCONFIGURED = 3,
+use lsp_types::{Position, Url};
+use thiserror::Error;
+
+use crate::{db::Workspace, util::line_index_ext::LineIndexExt, Db};
+
+#[derive(Debug, Error)]
+pub enum Error {
+ #[error("TeX document '{0}' not found")]
+ TexNotFound(Url),
+
+ #[error("PDF document '{0}' not found")]
+ PdfNotFound(PathBuf),
+
+ #[error("TeX document '{0}' is not a local file")]
+ NoLocalFile(Url),
+
+ #[error("PDF viewer is not configured")]
+ Unconfigured,
+
+ #[error("Failed to spawn process: {0}")]
+ Spawn(io::Error),
}
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
-pub struct ForwardSearchResult {
- pub status: ForwardSearchStatus,
+pub struct Command {
+ executable: String,
+ args: Vec<String>,
}
-pub fn execute_forward_search(
- request: FeatureRequest<TextDocumentPositionParams>,
-) -> Option<ForwardSearchResult> {
- let options = &request.workspace.environment.options.forward_search;
+impl Command {
+ pub fn configure(db: &dyn Db, uri: &Url, position: Option<Position>) -> Result<Self, Error> {
+ let workspace = Workspace::get(db);
+ let child = workspace
+ .lookup_uri(db, uri)
+ .ok_or_else(|| Error::TexNotFound(uri.clone()))?;
+
+ let parent = workspace
+ .parents(db, child)
+ .iter()
+ .copied()
+ .next()
+ .unwrap_or(child);
+
+ let output_dir = workspace
+ .output_dir(db, workspace.working_dir(db, parent.directory(db)))
+ .path(db)
+ .as_deref()
+ .ok_or_else(|| Error::NoLocalFile(uri.clone()))?;
+
+ let tex_path = child
+ .location(db)
+ .path(db)
+ .as_deref()
+ .ok_or_else(|| Error::NoLocalFile(uri.clone()))?;
+
+ let pdf_name = format!("{}.pdf", parent.location(db).stem(db).unwrap());
+ let pdf_path = output_dir.join(pdf_name);
+ if !pdf_path.exists() {
+ return Err(Error::PdfNotFound(pdf_path));
+ }
- if options.executable.is_none() || options.args.is_none() {
- return Some(ForwardSearchResult {
- status: ForwardSearchStatus::UNCONFIGURED,
+ let position = position.unwrap_or_else(|| {
+ child
+ .contents(db)
+ .line_index(db)
+ .line_col_lsp(child.cursor(db))
});
+
+ let options = &workspace.options(db).forward_search;
+
+ let executable = options
+ .executable
+ .as_deref()
+ .ok_or(Error::Unconfigured)?
+ .to_string();
+
+ let args: Vec<_> = options
+ .args
+ .as_deref()
+ .ok_or(Error::Unconfigured)?
+ .iter()
+ .flat_map(|arg| replace_placeholder(tex_path, &pdf_path, position.line, arg))
+ .collect();
+
+ Ok(Self { executable, args })
}
+}
- let root_document = request
- .workspace
- .documents_by_uri
- .values()
- .find(|document| {
- if let Some(data) = document.data.as_latex() {
- data.extras.has_document_environment
- && !data
- .extras
- .explicit_links
- .iter()
- .filter_map(|link| link.as_component_name())
- .any(|name| name == "subfiles.cls")
- } else {
- false
- }
- })
- .filter(|document| document.uri.scheme() == "file")?;
-
- let data = root_document.data.as_latex()?;
- let pdf_path = data
- .extras
- .implicit_links
- .pdf
- .iter()
- .filter_map(|uri| uri.to_file_path().ok())
- .find(|path| path.exists())?;
-
- let tex_path = request.main_document().uri.to_file_path().ok()?;
-
- let args: Vec<String> = options
- .args
- .as_ref()
- .unwrap()
- .iter()
- .flat_map(|arg| {
- replace_placeholder(&tex_path, &pdf_path, request.params.position.line, arg)
- })
- .collect();
-
- let status = match run_process(options.executable.as_ref().unwrap(), args) {
- Ok(()) => ForwardSearchStatus::SUCCESS,
- Err(why) => {
- error!("Unable to execute forward search: {}", why);
- ForwardSearchStatus::FAILURE
- }
- };
- Some(ForwardSearchResult { status })
+impl Command {
+ pub fn run(self) -> Result<(), Error> {
+ log::debug!(
+ "Executing forward search: {} {:?}",
+ self.executable,
+ self.args
+ );
+
+ std::process::Command::new(self.executable)
+ .args(self.args)
+ .stdin(Stdio::null())
+ .stdout(Stdio::null())
+ .stderr(Stdio::null())
+ .status()
+ .map_err(Error::Spawn)?;
+
+ Ok(())
+ }
}
 /// Iterates over chunks of a string. Either returns a slice of the
/// original string, or the placeholder replacement.
-pub struct PlaceHolderIterator<'a> {
+struct PlaceHolderIterator<'a> {
remainder: &'a str,
tex_file: &'a str,
pdf_file: &'a str,
@@ -168,13 +194,3 @@ fn replace_placeholder(
};
Some(result)
}
-
-fn run_process(executable: &str, args: Vec<String>) -> io::Result<()> {
- Command::new(executable)
- .args(args)
- .stdin(Stdio::null())
- .stdout(Stdio::null())
- .stderr(Stdio::null())
- .status()?;
- Ok(())
-}
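
A hedged usage sketch of the new forward-search API above: a request handler configures the viewer command and runs it, reporting the new error variants instead of the old ForwardSearchStatus codes. The db, uri and position values are assumptions standing in for the server's request state; the placeholders in forwardSearch.args (such as %f for the TeX file, %p for the PDF file and %l for the line) are expanded by replace_placeholder before the process is spawned.

    // Sketch only; Db is texlab's database trait from the imports above.
    fn handle_forward_search(
        db: &dyn Db,
        uri: &lsp_types::Url,
        position: lsp_types::Position,
    ) {
        match Command::configure(db, uri, Some(position)).and_then(Command::run) {
            Ok(()) => log::debug!("forward search viewer launched"),
            Err(why) => log::error!("unable to execute forward search: {}", why),
        }
    }
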
diff --git a/support/texlab/src/features/highlight.rs b/support/texlab/src/features/highlight.rs
index b2c5794461..52746a040f 100644
--- a/support/texlab/src/features/highlight.rs
+++ b/support/texlab/src/features/highlight.rs
@@ -1,14 +1,10 @@
mod label;
-use lsp_types::{DocumentHighlight, DocumentHighlightParams};
+use lsp_types::{DocumentHighlight, Position, Url};
-use self::label::find_label_highlights;
+use crate::{util::cursor::CursorContext, Db};
-use super::{cursor::CursorContext, FeatureRequest};
-
-pub fn find_document_highlights(
- request: FeatureRequest<DocumentHighlightParams>,
-) -> Option<Vec<DocumentHighlight>> {
- let context = CursorContext::new(request);
- find_label_highlights(&context)
+pub fn find_all(db: &dyn Db, uri: &Url, position: Position) -> Option<Vec<DocumentHighlight>> {
+ let context = CursorContext::new(db, uri, position, ())?;
+ label::find_highlights(&context)
}
diff --git a/support/texlab/src/features/highlight/label.rs b/support/texlab/src/features/highlight/label.rs
index cb51abdd68..2ae66d5c47 100644
--- a/support/texlab/src/features/highlight/label.rs
+++ b/support/texlab/src/features/highlight/label.rs
@@ -1,78 +1,31 @@
-use lsp_types::{DocumentHighlight, DocumentHighlightKind, DocumentHighlightParams};
-use rowan::ast::AstNode;
+use lsp_types::{DocumentHighlight, DocumentHighlightKind};
-use crate::{features::cursor::CursorContext, syntax::latex, LineIndexExt};
+use crate::{
+ db::analysis::label,
+ util::{cursor::CursorContext, line_index_ext::LineIndexExt},
+};
-pub fn find_label_highlights(
- context: &CursorContext<DocumentHighlightParams>,
-) -> Option<Vec<DocumentHighlight>> {
+pub fn find_highlights(context: &CursorContext) -> Option<Vec<DocumentHighlight>> {
+ let db = context.db;
let (name_text, _) = context.find_label_name_key()?;
-
- let main_document = context.request.main_document();
- let data = main_document.data.as_latex()?;
+ let data = context.document.parse(db).as_tex()?;
let mut highlights = Vec::new();
- for node in latex::SyntaxNode::new_root(data.green.clone()).descendants() {
- if let Some(label_name) = latex::LabelDefinition::cast(node.clone())
- .and_then(|label| label.name())
- .and_then(|label_name| label_name.key())
- .filter(|label_name| label_name.to_string() == name_text)
- {
- let range = main_document
- .line_index
- .line_col_lsp_range(latex::small_range(&label_name));
-
- highlights.push(DocumentHighlight {
- range,
- kind: Some(DocumentHighlightKind::WRITE),
- });
- } else if let Some(label) = latex::LabelReference::cast(node.clone()) {
- for label_name in label
- .name_list()
- .into_iter()
- .flat_map(|name| name.keys())
- .filter(|label_name| label_name.to_string() == name_text)
- {
- let range = main_document
- .line_index
- .line_col_lsp_range(latex::small_range(&label_name));
-
- highlights.push(DocumentHighlight {
- range,
- kind: Some(DocumentHighlightKind::READ),
- });
- }
- } else if let Some(label) = latex::LabelReferenceRange::cast(node.clone()) {
- if let Some(label_name) = label
- .from()
- .and_then(|label_name| label_name.key())
- .filter(|label_name| label_name.to_string() == name_text)
- {
- let range = main_document
- .line_index
- .line_col_lsp_range(latex::small_range(&label_name));
-
- highlights.push(DocumentHighlight {
- range,
- kind: Some(DocumentHighlightKind::READ),
- });
- }
-
- if let Some(label_name) = label
- .to()
- .and_then(|label_name| label_name.key())
- .filter(|label_name| label_name.to_string() == name_text)
- {
- let range = main_document
- .line_index
- .line_col_lsp_range(latex::small_range(&label_name));
-
- highlights.push(DocumentHighlight {
- range,
- kind: Some(DocumentHighlightKind::READ),
- });
- }
- }
+ let line_index = context.document.contents(db).line_index(db);
+ for label in data
+ .analyze(db)
+ .labels(db)
+ .iter()
+ .filter(|label| label.name(db).text(db) == &name_text)
+ {
+ let range = line_index.line_col_lsp_range(label.range(db));
+ let kind = Some(match label.origin(db) {
+ label::Origin::Definition(_) => DocumentHighlightKind::WRITE,
+ label::Origin::Reference(_) => DocumentHighlightKind::READ,
+ label::Origin::ReferenceRange(_) => DocumentHighlightKind::READ,
+ });
+
+ highlights.push(DocumentHighlight { range, kind });
}
Some(highlights)
diff --git a/support/texlab/src/features/hover.rs b/support/texlab/src/features/hover.rs
index 8663f6256d..27d07f74cd 100644
--- a/support/texlab/src/features/hover.rs
+++ b/support/texlab/src/features/hover.rs
@@ -5,43 +5,32 @@ mod field;
mod label;
mod string_ref;
-use lsp_types::{Hover, HoverContents, HoverParams, MarkupContent, MarkupKind};
+use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind, Position, Url};
use rowan::TextRange;
use crate::{
- features::{cursor::CursorContext, hover::citation::find_citation_hover},
- LineIndexExt,
+ util::{cursor::CursorContext, line_index_ext::LineIndexExt},
+ Db,
};
-use self::{
- component::find_component_hover, entry_type::find_entry_type_hover, field::find_field_hover,
- label::find_label_hover, string_ref::find_string_reference_hover,
-};
-
-use super::FeatureRequest;
-
-pub fn find_hover(request: FeatureRequest<HoverParams>) -> Option<Hover> {
- let context = CursorContext::new(request);
+pub fn find(db: &dyn Db, uri: &Url, position: Position) -> Option<Hover> {
+ let context = CursorContext::new(db, uri, position, ())?;
log::debug!("[Hover] Cursor: {:?}", context.cursor);
- let result = find_label_hover(&context)
- .or_else(|| find_citation_hover(&context))
- .or_else(|| find_component_hover(&context))
- .or_else(|| find_string_reference_hover(&context))
- .or_else(|| find_field_hover(&context))
- .or_else(|| find_entry_type_hover(&context))?;
+ let result = label::find_hover(&context)
+ .or_else(|| citation::find_hover(&context))
+ .or_else(|| component::find_hover(&context))
+ .or_else(|| string_ref::find_hover(&context))
+ .or_else(|| field::find_hover(&context))
+ .or_else(|| entry_type::find_hover(&context))?;
+
+ let line_index = context.document.contents(db).line_index(db);
Some(Hover {
contents: HoverContents::Markup(MarkupContent {
kind: result.value_kind,
value: result.value,
}),
- range: Some(
- context
- .request
- .main_document()
- .line_index
- .line_col_lsp_range(result.range),
- ),
+ range: Some(line_index.line_col_lsp_range(result.range)),
})
}
diff --git a/support/texlab/src/features/hover/citation.rs b/support/texlab/src/features/hover/citation.rs
index 5e86019eee..e0e17176d5 100644
--- a/support/texlab/src/features/hover/citation.rs
+++ b/support/texlab/src/features/hover/citation.rs
@@ -1,28 +1,23 @@
-use lsp_types::{HoverParams, MarkupKind};
+use lsp_types::MarkupKind;
use rowan::ast::AstNode;
-use crate::{citation, features::cursor::CursorContext, syntax::bibtex};
+use crate::{citation, syntax::bibtex, util::cursor::CursorContext};
use super::HoverResult;
-pub(super) fn find_citation_hover(context: &CursorContext<HoverParams>) -> Option<HoverResult> {
+pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
let (key, range) = context
.find_citation_key_word()
.or_else(|| context.find_citation_key_command())
.or_else(|| context.find_entry_key())?;
- let value = context
- .request
- .workspace
- .documents_by_uri
- .values()
- .find_map(|document| {
- let data = document.data.as_bibtex()?;
- let root = bibtex::SyntaxNode::new_root(data.green.clone());
- let root = bibtex::Root::cast(root)?;
- let entry = root.find_entry(&key)?;
- citation::render(&entry)
- })?;
+ let value = context.related().find_map(|document| {
+ let data = document.parse(context.db).as_bib()?;
+ let root = data.root(context.db);
+ let root = bibtex::Root::cast(root)?;
+ let entry = root.find_entry(&key)?;
+ citation::render(&entry)
+ })?;
Some(HoverResult {
range,
diff --git a/support/texlab/src/features/hover/component.rs b/support/texlab/src/features/hover/component.rs
index 737a99d3ea..61a428a9ce 100644
--- a/support/texlab/src/features/hover/component.rs
+++ b/support/texlab/src/features/hover/component.rs
@@ -1,25 +1,28 @@
-use lsp_types::{HoverParams, MarkupKind};
+use lsp_types::MarkupKind;
-use crate::{component_db::COMPONENT_DATABASE, features::cursor::CursorContext, syntax::latex};
+use crate::{
+ db::analysis::TexLinkKind,
+ util::{components::COMPONENT_DATABASE, cursor::CursorContext},
+};
use super::HoverResult;
-pub(super) fn find_component_hover(context: &CursorContext<HoverParams>) -> Option<HoverResult> {
- let data = context.request.main_document().data.as_latex()?;
- for link in &data.extras.explicit_links {
- if matches!(
- link.kind,
- latex::ExplicitLinkKind::Package | latex::ExplicitLinkKind::Class
- ) && link.stem_range.contains_inclusive(context.offset)
- {
- let value = COMPONENT_DATABASE.documentation(&link.stem)?.value;
- return Some(HoverResult {
+pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
+ let db = context.db;
+ let links = context.document.parse(db).as_tex()?.analyze(db).links(db);
+ links
+ .iter()
+ .filter(|link| matches!(link.kind(db), TexLinkKind::Sty | TexLinkKind::Cls))
+ .filter(|link| link.range(db).contains_inclusive(context.offset))
+ .find_map(|link| {
+ let value = COMPONENT_DATABASE
+ .documentation(link.path(db).text(db))?
+ .value;
+
+ Some(HoverResult {
value,
value_kind: MarkupKind::PlainText,
- range: link.stem_range,
- });
- }
- }
-
- None
+ range: link.range(db),
+ })
+ })
}
diff --git a/support/texlab/src/features/hover/entry_type.rs b/support/texlab/src/features/hover/entry_type.rs
index 41a561395c..0f4ccbdb72 100644
--- a/support/texlab/src/features/hover/entry_type.rs
+++ b/support/texlab/src/features/hover/entry_type.rs
@@ -1,13 +1,16 @@
-use lsp_types::{HoverParams, MarkupKind};
+use lsp_types::MarkupKind;
-use crate::{features::cursor::CursorContext, syntax::bibtex, LANGUAGE_DATA};
+use crate::{
+ syntax::bibtex,
+ util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
+};
use super::HoverResult;
-pub(super) fn find_entry_type_hover(context: &CursorContext<HoverParams>) -> Option<HoverResult> {
+pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
let name = context
.cursor
- .as_bibtex()
+ .as_bib()
.filter(|token| token.kind() == bibtex::TYPE)?;
let docs = LANGUAGE_DATA.entry_type_documentation(&name.text()[1..])?;
diff --git a/support/texlab/src/features/hover/field.rs b/support/texlab/src/features/hover/field.rs
index 6102bd6b68..48193997d6 100644
--- a/support/texlab/src/features/hover/field.rs
+++ b/support/texlab/src/features/hover/field.rs
@@ -1,14 +1,17 @@
-use lsp_types::{HoverParams, MarkupKind};
+use lsp_types::MarkupKind;
use rowan::ast::AstNode;
-use crate::{features::cursor::CursorContext, syntax::bibtex, LANGUAGE_DATA};
+use crate::{
+ syntax::bibtex,
+ util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
+};
use super::HoverResult;
-pub(super) fn find_field_hover(context: &CursorContext<HoverParams>) -> Option<HoverResult> {
+pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
let name = context
.cursor
- .as_bibtex()
+ .as_bib()
.filter(|token| token.kind() == bibtex::NAME)?;
bibtex::Field::cast(name.parent()?)?;
diff --git a/support/texlab/src/features/hover/label.rs b/support/texlab/src/features/hover/label.rs
index 2f8489d769..eafc104de6 100644
--- a/support/texlab/src/features/hover/label.rs
+++ b/support/texlab/src/features/hover/label.rs
@@ -1,19 +1,23 @@
-use lsp_types::{HoverParams, MarkupKind};
+use lsp_types::MarkupKind;
-use crate::{features::cursor::CursorContext, render_label};
+use crate::{
+ db::Word,
+ util::{self, cursor::CursorContext},
+};
use super::HoverResult;
-pub(super) fn find_label_hover(context: &CursorContext<HoverParams>) -> Option<HoverResult> {
+pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
let (name_text, range) = context
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
- let label = render_label(&context.request.workspace, &name_text, None)?;
-
- Some(HoverResult {
- range,
- value: label.reference(),
- value_kind: MarkupKind::PlainText,
- })
+ let db = context.db;
+ util::label::find_label_definition(db, context.document, Word::new(db, name_text))
+ .and_then(|(document, label)| util::label::render(db, document, label))
+ .map(|label| HoverResult {
+ range,
+ value: label.reference(db),
+ value_kind: MarkupKind::PlainText,
+ })
}
diff --git a/support/texlab/src/features/hover/string_ref.rs b/support/texlab/src/features/hover/string_ref.rs
index adccdd556c..41c4c5bddb 100644
--- a/support/texlab/src/features/hover/string_ref.rs
+++ b/support/texlab/src/features/hover/string_ref.rs
@@ -1,29 +1,28 @@
-use lsp_types::{HoverParams, MarkupKind};
+use lsp_types::MarkupKind;
use rowan::ast::AstNode;
use crate::{
citation::field::text::TextFieldData,
- features::cursor::CursorContext,
syntax::bibtex::{self, HasName, HasValue},
+ util::cursor::CursorContext,
};
use super::HoverResult;
-pub(super) fn find_string_reference_hover(
- context: &CursorContext<HoverParams>,
-) -> Option<HoverResult> {
- let data = context.request.main_document().data.as_bibtex()?;
+pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
+ let data = context.document.parse(context.db).as_bib()?;
let name = context
.cursor
- .as_bibtex()
+ .as_bib()
.filter(|token| token.kind() == bibtex::NAME)
.filter(|token| {
let parent = token.parent().unwrap();
bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
})?;
- for string in bibtex::SyntaxNode::new_root(data.green.clone())
+ for string in data
+ .root(context.db)
.children()
.filter_map(bibtex::StringDef::cast)
{
diff --git a/support/texlab/src/features/inlay_hint.rs b/support/texlab/src/features/inlay_hint.rs
new file mode 100644
index 0000000000..6d53259900
--- /dev/null
+++ b/support/texlab/src/features/inlay_hint.rs
@@ -0,0 +1,45 @@
+mod label;
+
+use lsp_types::{InlayHint, InlayHintLabel, Range, Url};
+use rowan::TextSize;
+
+use crate::{
+ db::Workspace,
+ util::{line_index::LineIndex, line_index_ext::LineIndexExt},
+ Db,
+};
+
+pub fn find_all(db: &dyn Db, uri: &Url, range: Range) -> Option<Vec<InlayHint>> {
+ let document = Workspace::get(db).lookup_uri(db, uri)?;
+ let line_index = document.contents(db).line_index(db);
+
+ let mut builder = InlayHintBuilder {
+ line_index,
+ hints: Vec::new(),
+ };
+
+ let range = line_index.offset_lsp_range(range);
+ label::find_hints(db, document, range, &mut builder);
+ Some(builder.hints)
+}
+
+struct InlayHintBuilder<'db> {
+ line_index: &'db LineIndex,
+ hints: Vec<InlayHint>,
+}
+
+impl<'db> InlayHintBuilder<'db> {
+ pub fn push(&mut self, offset: TextSize, text: String) {
+ let position = self.line_index.line_col_lsp(offset);
+ self.hints.push(InlayHint {
+ position,
+ label: InlayHintLabel::String(text),
+ kind: None,
+ text_edits: None,
+ tooltip: None,
+ padding_left: Some(true),
+ padding_right: None,
+ data: None,
+ });
+ }
+}
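
A hedged sketch of how the new inlay-hint entry point might be driven: the handler forwards the client's requested range and find_all returns one hint per label definition inside it, rendered by the label module below. The db and uri values are assumptions standing in for the server state.

    use lsp_types::{InlayHint, Position, Range, Url};

    // Sketch only: request hints for the first 200 lines of the document.
    fn collect_hints(db: &dyn Db, uri: &Url) -> Vec<InlayHint> {
        let range = Range::new(Position::new(0, 0), Position::new(200, 0));
        find_all(db, uri, range).unwrap_or_default()
    }
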
diff --git a/support/texlab/src/features/inlay_hint/label.rs b/support/texlab/src/features/inlay_hint/label.rs
new file mode 100644
index 0000000000..5654f79bf4
--- /dev/null
+++ b/support/texlab/src/features/inlay_hint/label.rs
@@ -0,0 +1,31 @@
+use rowan::TextRange;
+
+use crate::{
+ db::{analysis::label, Document},
+ util, Db,
+};
+
+use super::InlayHintBuilder;
+
+pub(super) fn find_hints(
+ db: &dyn Db,
+ document: Document,
+ range: TextRange,
+ builder: &mut InlayHintBuilder,
+) -> Option<()> {
+ let data = document.parse(db).as_tex()?;
+ for label in data
+ .analyze(db)
+ .labels(db)
+ .iter()
+ .copied()
+ .filter(|label| matches!(label.origin(db), label::Origin::Definition(_)))
+ .filter(|label| label.range(db).intersect(range).is_some())
+ {
+ if let Some(rendered) = util::label::render(db, document, label) {
+ builder.push(label.range(db).end(), rendered.reference(db));
+ }
+ }
+
+ Some(())
+}
diff --git a/support/texlab/src/features/link.rs b/support/texlab/src/features/link.rs
index c4ce4413f5..71bba58ba7 100644
--- a/support/texlab/src/features/link.rs
+++ b/support/texlab/src/features/link.rs
@@ -1,33 +1,41 @@
mod include;
-use std::sync::Arc;
-
-use lsp_types::{DocumentLink, DocumentLinkParams, Url};
+use lsp_types::{DocumentLink, Url};
use rowan::TextRange;
-use crate::LineIndexExt;
+use crate::{
+ db::{Document, Workspace},
+ util::{line_index::LineIndex, line_index_ext::LineIndexExt},
+ Db,
+};
-use self::include::find_include_links;
+pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<DocumentLink>> {
+ let document = Workspace::get(db).lookup_uri(db, uri)?;
+ let mut builder = LinkBuilder {
+ db,
+ line_index: document.contents(db).line_index(db),
+ links: Vec::new(),
+ };
-use super::FeatureRequest;
+ include::find_links(db, document, &mut builder);
+ Some(builder.links)
+}
-pub fn find_document_links(request: FeatureRequest<DocumentLinkParams>) -> Vec<DocumentLink> {
- let document = request.main_document();
- let mut results = Vec::new();
- find_include_links(&request, &mut results);
- results
- .into_iter()
- .map(|result| DocumentLink {
- range: document.line_index.line_col_lsp_range(result.range),
- target: Some(result.target.as_ref().clone()),
- tooltip: None,
- data: None,
- })
- .collect()
+struct LinkBuilder<'db> {
+ db: &'db dyn Db,
+ line_index: &'db LineIndex,
+ links: Vec<DocumentLink>,
}
-#[derive(Debug, Clone)]
-struct LinkResult {
- range: TextRange,
- target: Arc<Url>,
+impl<'db> LinkBuilder<'db> {
+ pub fn push(&mut self, range: TextRange, target: Document) {
+ let range = self.line_index.line_col_lsp_range(range);
+ let target = Some(target.location(self.db).uri(self.db).clone());
+ self.links.push(DocumentLink {
+ range,
+ target,
+ tooltip: None,
+ data: None,
+ });
+ }
}
diff --git a/support/texlab/src/features/link/include.rs b/support/texlab/src/features/link/include.rs
index b4d1804cce..f0c9d147fb 100644
--- a/support/texlab/src/features/link/include.rs
+++ b/support/texlab/src/features/link/include.rs
@@ -1,31 +1,23 @@
-use std::sync::Arc;
+use crate::{
+ db::{dependency_graph, Document, Workspace},
+ Db,
+};
-use lsp_types::DocumentLinkParams;
+use super::LinkBuilder;
-use crate::features::FeatureRequest;
+pub(super) fn find_links(db: &dyn Db, document: Document, builder: &mut LinkBuilder) -> Option<()> {
+ let workspace = Workspace::get(db);
+ let parent = workspace
+ .parents(db, document)
+ .iter()
+ .next()
+ .copied()
+ .unwrap_or(document);
-use super::LinkResult;
-
-pub(super) fn find_include_links(
- request: &FeatureRequest<DocumentLinkParams>,
- results: &mut Vec<LinkResult>,
-) -> Option<()> {
- let data = request.main_document().data.as_latex()?;
-
- for include in &data.extras.explicit_links {
- for target in &include.targets {
- if request
- .workspace
- .documents_by_uri
- .values()
- .any(|document| document.uri.as_ref() == target.as_ref())
- {
- results.push(LinkResult {
- range: include.stem_range,
- target: Arc::clone(target),
- });
- break;
- }
+ let graph = dependency_graph(db, parent);
+ for edge in graph.edges.iter().filter(|edge| edge.source == document) {
+ if let Some(origin) = edge.origin {
+ builder.push(origin.link.range(db), edge.target);
}
}
diff --git a/support/texlab/src/features/lsp_kinds.rs b/support/texlab/src/features/lsp_kinds.rs
deleted file mode 100644
index 82706b5aee..0000000000
--- a/support/texlab/src/features/lsp_kinds.rs
+++ /dev/null
@@ -1,94 +0,0 @@
-use lsp_types::{CompletionItemKind, SymbolKind};
-
-use crate::BibtexEntryTypeCategory;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum Structure {
- Command,
- Snippet,
- Environment,
- Section,
- Float,
- Theorem,
- Equation,
- Item,
- Label,
- Folder,
- File,
- PgfLibrary,
- TikzLibrary,
- Color,
- ColorModel,
- Package,
- Class,
- Entry(BibtexEntryTypeCategory),
- Field,
- Argument,
- GlossaryEntry,
-}
-
-impl Structure {
- pub fn completion_kind(self) -> CompletionItemKind {
- match self {
- Self::Command => CompletionItemKind::FUNCTION,
- Self::Snippet => CompletionItemKind::SNIPPET,
- Self::Environment => CompletionItemKind::ENUM,
- Self::Section => CompletionItemKind::MODULE,
- Self::Float => CompletionItemKind::METHOD,
- Self::Theorem => CompletionItemKind::VARIABLE,
- Self::Equation => CompletionItemKind::CONSTANT,
- Self::Item => CompletionItemKind::ENUM_MEMBER,
- Self::Label => CompletionItemKind::CONSTRUCTOR,
- Self::Folder => CompletionItemKind::FOLDER,
- Self::File => CompletionItemKind::FILE,
- Self::PgfLibrary => CompletionItemKind::PROPERTY,
- Self::TikzLibrary => CompletionItemKind::PROPERTY,
- Self::Color => CompletionItemKind::COLOR,
- Self::ColorModel => CompletionItemKind::COLOR,
- Self::Package => CompletionItemKind::CLASS,
- Self::Class => CompletionItemKind::CLASS,
- Self::Entry(BibtexEntryTypeCategory::Misc) => CompletionItemKind::INTERFACE,
- Self::Entry(BibtexEntryTypeCategory::String) => CompletionItemKind::TEXT,
- Self::Entry(BibtexEntryTypeCategory::Article) => CompletionItemKind::EVENT,
- Self::Entry(BibtexEntryTypeCategory::Book) => CompletionItemKind::STRUCT,
- Self::Entry(BibtexEntryTypeCategory::Collection) => CompletionItemKind::TYPE_PARAMETER,
- Self::Entry(BibtexEntryTypeCategory::Part) => CompletionItemKind::OPERATOR,
- Self::Entry(BibtexEntryTypeCategory::Thesis) => CompletionItemKind::UNIT,
- Self::Field => CompletionItemKind::FIELD,
- Self::Argument => CompletionItemKind::VALUE,
- Self::GlossaryEntry => CompletionItemKind::KEYWORD,
- }
- }
-
- pub fn symbol_kind(self) -> SymbolKind {
- match self {
- Self::Command => SymbolKind::FUNCTION,
- Self::Snippet => unimplemented!(),
- Self::Environment => SymbolKind::ENUM,
- Self::Section => SymbolKind::MODULE,
- Self::Float => SymbolKind::METHOD,
- Self::Theorem => SymbolKind::VARIABLE,
- Self::Equation => SymbolKind::CONSTANT,
- Self::Item => SymbolKind::ENUM_MEMBER,
- Self::Label => SymbolKind::CONSTRUCTOR,
- Self::Folder => SymbolKind::NAMESPACE,
- Self::File => SymbolKind::FILE,
- Self::PgfLibrary => SymbolKind::PROPERTY,
- Self::TikzLibrary => SymbolKind::PROPERTY,
- Self::Color => unimplemented!(),
- Self::ColorModel => unimplemented!(),
- Self::Package => SymbolKind::CLASS,
- Self::Class => SymbolKind::CLASS,
- Self::Entry(BibtexEntryTypeCategory::Misc) => SymbolKind::INTERFACE,
- Self::Entry(BibtexEntryTypeCategory::String) => SymbolKind::STRING,
- Self::Entry(BibtexEntryTypeCategory::Article) => SymbolKind::EVENT,
- Self::Entry(BibtexEntryTypeCategory::Book) => SymbolKind::STRUCT,
- Self::Entry(BibtexEntryTypeCategory::Collection) => SymbolKind::TYPE_PARAMETER,
- Self::Entry(BibtexEntryTypeCategory::Part) => SymbolKind::OPERATOR,
- Self::Entry(BibtexEntryTypeCategory::Thesis) => SymbolKind::OBJECT,
- Self::Field => SymbolKind::FIELD,
- Self::Argument => SymbolKind::NUMBER,
- Self::GlossaryEntry => unimplemented!(),
- }
- }
-}
diff --git a/support/texlab/src/features/reference.rs b/support/texlab/src/features/reference.rs
index f84a3f67d5..54a5fefb27 100644
--- a/support/texlab/src/features/reference.rs
+++ b/support/texlab/src/features/reference.rs
@@ -2,40 +2,45 @@ mod entry;
mod label;
mod string;
-use std::sync::Arc;
-
-use lsp_types::{Location, ReferenceParams, Url};
+use lsp_types::{Location, Position, ReferenceContext, Url};
use rowan::TextRange;
-use crate::LineIndexExt;
-
-use self::{
- entry::find_entry_references, label::find_label_references, string::find_string_references,
+use crate::{
+ db::Document,
+ util::{cursor::CursorContext, line_index_ext::LineIndexExt},
+ Db,
};
-use super::{cursor::CursorContext, FeatureRequest};
-
-pub fn find_all_references(request: FeatureRequest<ReferenceParams>) -> Vec<Location> {
+pub fn find_all(
+ db: &dyn Db,
+ uri: &Url,
+ position: Position,
+ params: &ReferenceContext,
+) -> Option<Vec<Location>> {
let mut results = Vec::new();
- let context = CursorContext::new(request);
+ let context = CursorContext::new(db, uri, position, params)?;
log::debug!("[References] Cursor: {:?}", context.cursor);
- find_label_references(&context, &mut results);
- find_entry_references(&context, &mut results);
- find_string_references(&context, &mut results);
+ label::find_all_references(&context, &mut results);
+ entry::find_all_references(&context, &mut results);
+ string::find_all_references(&context, &mut results);
- results
+ let locations = results
.into_iter()
.map(|result| Location {
- uri: result.uri.as_ref().clone(),
- range: context.request.workspace.documents_by_uri[&result.uri]
- .line_index
+ uri: result.document.location(db).uri(db).clone(),
+ range: result
+ .document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(result.range),
})
- .collect()
+ .collect();
+
+ Some(locations)
}
#[derive(Debug, Clone)]
struct ReferenceResult {
- uri: Arc<Url>,
+ document: Document,
range: TextRange,
}
diff --git a/support/texlab/src/features/reference/entry.rs b/support/texlab/src/features/reference/entry.rs
index 0144d2d074..3dfa10da5b 100644
--- a/support/texlab/src/features/reference/entry.rs
+++ b/support/texlab/src/features/reference/entry.rs
@@ -1,32 +1,31 @@
-use std::sync::Arc;
-
-use lsp_types::ReferenceParams;
+use lsp_types::ReferenceContext;
use rowan::ast::AstNode;
use crate::{
- features::cursor::CursorContext,
+ db::parse::DocumentData,
syntax::{
bibtex::{self, HasName},
latex,
},
- DocumentData,
+ util::cursor::CursorContext,
};
use super::ReferenceResult;
-pub(super) fn find_entry_references(
- context: &CursorContext<ReferenceParams>,
+pub(super) fn find_all_references(
+ context: &CursorContext<&ReferenceContext>,
results: &mut Vec<ReferenceResult>,
) -> Option<()> {
+ let db = context.db;
let (key_text, _) = context
.find_citation_key_word()
.or_else(|| context.find_citation_key_command())
.or_else(|| context.find_entry_key())?;
- for document in context.request.workspace.documents_by_uri.values() {
- match &document.data {
- DocumentData::Latex(data) => {
- latex::SyntaxNode::new_root(data.green.clone())
+ for document in context.related() {
+ match document.parse(db) {
+ DocumentData::Tex(data) => {
+ data.root(db)
.descendants()
.filter_map(latex::Citation::cast)
.filter_map(|citation| citation.key_list())
@@ -34,24 +33,22 @@ pub(super) fn find_entry_references(
.filter(|key| key.to_string() == key_text)
.map(|key| latex::small_range(&key))
.for_each(|range| {
- let uri = Arc::clone(&document.uri);
- results.push(ReferenceResult { uri, range });
+ results.push(ReferenceResult { document, range });
});
}
- DocumentData::Bibtex(data) if context.request.params.context.include_declaration => {
- bibtex::SyntaxNode::new_root(data.green.clone())
+ DocumentData::Bib(data) if context.params.include_declaration => {
+ data.root(db)
.children()
.filter_map(bibtex::Entry::cast)
.filter_map(|entry| entry.name_token())
.filter(|key| key.text() == key_text)
.map(|key| key.text_range())
.for_each(|range| {
- let uri = Arc::clone(&document.uri);
- results.push(ReferenceResult { uri, range });
+ results.push(ReferenceResult { document, range });
});
}
- DocumentData::Bibtex(_) | DocumentData::BuildLog(_) => {}
- }
+ DocumentData::Bib(_) | DocumentData::Log(_) => {}
+ };
}
Some(())
diff --git a/support/texlab/src/features/reference/label.rs b/support/texlab/src/features/reference/label.rs
index 3dadada08b..394542320c 100644
--- a/support/texlab/src/features/reference/label.rs
+++ b/support/texlab/src/features/reference/label.rs
@@ -1,33 +1,32 @@
-use std::sync::Arc;
+use lsp_types::ReferenceContext;
-use lsp_types::ReferenceParams;
-
-use crate::features::cursor::CursorContext;
+use crate::util::cursor::CursorContext;
use super::ReferenceResult;
-pub(super) fn find_label_references(
- context: &CursorContext<ReferenceParams>,
+pub(super) fn find_all_references(
+ context: &CursorContext<&ReferenceContext>,
results: &mut Vec<ReferenceResult>,
) -> Option<()> {
+ let db = context.db;
let (name_text, _) = context
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- for name in data
- .extras
- .label_names
+ for document in context.related() {
+ if let Some(data) = document.parse(db).as_tex() {
+ for label in data
+ .analyze(db)
+ .labels(db)
.iter()
- .filter(|name| name.text == name_text)
- .filter(|name| {
- !name.is_definition || context.request.params.context.include_declaration
+ .filter(|label| label.name(db).text(db) == &name_text)
+ .filter(|label| {
+ label.origin(db).as_definition().is_none() || context.params.include_declaration
})
{
results.push(ReferenceResult {
- uri: Arc::clone(&document.uri),
- range: name.range,
+ document,
+ range: label.range(db),
});
}
}
diff --git a/support/texlab/src/features/reference/string.rs b/support/texlab/src/features/reference/string.rs
index c00cf93546..c4bc6ded3d 100644
--- a/support/texlab/src/features/reference/string.rs
+++ b/support/texlab/src/features/reference/string.rs
@@ -1,22 +1,21 @@
-use std::sync::Arc;
-
-use lsp_types::ReferenceParams;
+use lsp_types::ReferenceContext;
use rowan::ast::AstNode;
use crate::{
- features::cursor::CursorContext,
syntax::bibtex::{self, HasName},
+ util::cursor::CursorContext,
};
use super::ReferenceResult;
-pub(super) fn find_string_references(
- context: &CursorContext<ReferenceParams>,
+pub(super) fn find_all_references(
+ context: &CursorContext<&ReferenceContext>,
results: &mut Vec<ReferenceResult>,
) -> Option<()> {
+ let db = context.db;
let name_text = context
.cursor
- .as_bibtex()
+ .as_bib()
.filter(|token| token.kind() == bibtex::NAME)
.filter(|token| {
let parent = token.parent().unwrap();
@@ -24,14 +23,11 @@ pub(super) fn find_string_references(
})?
.text();
- let document = context.request.main_document();
- let data = document.data.as_bibtex()?;
- for node in bibtex::SyntaxNode::new_root(data.green.clone()).descendants() {
+ let data = context.document.parse(db).as_bib()?;
+ for node in data.root(db).descendants() {
if let Some(name) = bibtex::StringDef::cast(node.clone())
.and_then(|string| string.name_token())
- .filter(|name| {
- context.request.params.context.include_declaration && name.text() == name_text
- })
+ .filter(|name| context.params.include_declaration && name.text() == name_text)
.or_else(|| {
bibtex::Value::cast(node)
.and_then(|token| token.syntax().first_token())
@@ -39,7 +35,7 @@ pub(super) fn find_string_references(
})
{
results.push(ReferenceResult {
- uri: Arc::clone(&document.uri),
+ document: context.document,
range: name.text_range(),
});
}
diff --git a/support/texlab/src/features/rename.rs b/support/texlab/src/features/rename.rs
index c21faf262e..57e1ef5763 100644
--- a/support/texlab/src/features/rename.rs
+++ b/support/texlab/src/features/rename.rs
@@ -2,57 +2,61 @@ mod command;
mod entry;
mod label;
-use std::sync::Arc;
-
-use lsp_types::{Range, RenameParams, TextDocumentPositionParams, TextEdit, Url, WorkspaceEdit};
+use lsp_types::{Position, Range, TextEdit, Url, WorkspaceEdit};
use rowan::TextRange;
use rustc_hash::FxHashMap;
-use crate::LineIndexExt;
-
-use self::{
- command::{prepare_command_rename, rename_command},
- entry::{prepare_entry_rename, rename_entry},
- label::{prepare_label_rename, rename_label},
+use crate::{
+ db::Document,
+ util::{cursor::CursorContext, line_index_ext::LineIndexExt},
+ Db,
};
-use super::{cursor::CursorContext, FeatureRequest};
-
-pub fn prepare_rename_all(request: FeatureRequest<TextDocumentPositionParams>) -> Option<Range> {
- let context = CursorContext::new(request);
- let range = prepare_entry_rename(&context)
- .or_else(|| prepare_label_rename(&context))
- .or_else(|| prepare_command_rename(&context))?;
+pub fn prepare_rename_all(db: &dyn Db, uri: &Url, position: Position) -> Option<Range> {
+ let context = CursorContext::new(db, uri, position, ())?;
+ let range = entry::prepare_rename(&context)
+ .or_else(|| label::prepare_rename(&context))
+ .or_else(|| command::prepare_rename(&context))?;
- let line_index = &context.request.main_document().line_index;
+ let line_index = context.document.contents(db).line_index(db);
Some(line_index.line_col_lsp_range(range))
}
-pub fn rename_all(request: FeatureRequest<RenameParams>) -> Option<WorkspaceEdit> {
- let context = CursorContext::new(request);
- let result = rename_entry(&context)
- .or_else(|| rename_label(&context))
- .or_else(|| rename_command(&context))?;
+pub fn rename_all(
+ db: &dyn Db,
+ uri: &Url,
+ position: Position,
+ new_name: String,
+) -> Option<WorkspaceEdit> {
+ let context = CursorContext::new(db, uri, position, Params { new_name })?;
+ let result = entry::rename(&context)
+ .or_else(|| label::rename(&context))
+ .or_else(|| command::rename(&context))?;
let changes = result
.changes
.into_iter()
- .map(|(uri, old_edits)| {
- let document = &context.request.workspace.documents_by_uri[&uri];
+ .map(|(document, old_edits)| {
+ let line_index = document.contents(db).line_index(db);
let new_edits = old_edits
.into_iter()
.map(|Indel { delete, insert }| {
- TextEdit::new(document.line_index.line_col_lsp_range(delete), insert)
+ TextEdit::new(line_index.line_col_lsp_range(delete), insert)
})
.collect();
- (uri.as_ref().clone(), new_edits)
+ (document.location(db).uri(db).clone(), new_edits)
})
.collect();
Some(WorkspaceEdit::new(changes))
}
+#[derive(Debug)]
+struct Params {
+ new_name: String,
+}
+
#[derive(Debug, PartialEq, Eq, Clone)]
struct Indel {
delete: TextRange,
@@ -61,5 +65,5 @@ struct Indel {
#[derive(Debug, PartialEq, Eq, Clone)]
struct RenameResult {
- changes: FxHashMap<Arc<Url>, Vec<Indel>>,
+ changes: FxHashMap<Document, Vec<Indel>>,
}
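
A hedged sketch of the two-step flow the rewritten rename API supports: prepare_rename_all validates the cursor position first, and rename_all then produces a WorkspaceEdit keyed by document URI. The db, uri, position and new_name values are assumptions standing in for the server's request state.

    // Sketch only.
    fn handle_rename(
        db: &dyn Db,
        uri: &lsp_types::Url,
        position: lsp_types::Position,
        new_name: &str,
    ) -> Option<lsp_types::WorkspaceEdit> {
        // Bail out unless the cursor is on a citation key, label or command name.
        prepare_rename_all(db, uri, position)?;
        rename_all(db, uri, position, new_name.to_string())
    }
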
diff --git a/support/texlab/src/features/rename/command.rs b/support/texlab/src/features/rename/command.rs
index aaa08f8cb4..5610b2f7ee 100644
--- a/support/texlab/src/features/rename/command.rs
+++ b/support/texlab/src/features/rename/command.rs
@@ -1,29 +1,21 @@
-use std::sync::Arc;
-
-use lsp_types::RenameParams;
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashMap;
-use crate::{
- features::cursor::{CursorContext, HasPosition},
- syntax::latex,
-};
+use crate::util::cursor::CursorContext;
-use super::{Indel, RenameResult};
+use super::{Indel, Params, RenameResult};
-pub(super) fn prepare_command_rename<P: HasPosition>(
- context: &CursorContext<P>,
-) -> Option<TextRange> {
+pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
context.cursor.command_range(context.offset)
}
-pub(super) fn rename_command(context: &CursorContext<RenameParams>) -> Option<RenameResult> {
- prepare_command_rename(context)?;
- let name = context.cursor.as_latex()?.text();
+pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
+ prepare_rename(context)?;
+ let name = context.cursor.as_tex()?.text();
let mut changes = FxHashMap::default();
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
- let root = latex::SyntaxNode::new_root(data.green.clone());
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
+ let root = data.root(context.db);
let edits = root
.descendants_with_tokens()
.filter_map(|element| element.into_token())
@@ -32,12 +24,12 @@ pub(super) fn rename_command(context: &CursorContext<RenameParams>) -> Option<Re
let range = token.text_range();
Indel {
delete: TextRange::new(range.start() + TextSize::from(1), range.end()),
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
}
})
.collect();
- changes.insert(Arc::clone(&document.uri), edits);
+ changes.insert(document, edits);
}
}
diff --git a/support/texlab/src/features/rename/entry.rs b/support/texlab/src/features/rename/entry.rs
index 6eac79030b..79ee25ac71 100644
--- a/support/texlab/src/features/rename/entry.rs
+++ b/support/texlab/src/features/rename/entry.rs
@@ -1,23 +1,18 @@
-use std::sync::Arc;
-
-use lsp_types::RenameParams;
use rowan::{ast::AstNode, TextRange};
use rustc_hash::FxHashMap;
use crate::{
- features::cursor::{CursorContext, HasPosition},
+ db::parse::DocumentData,
syntax::{
bibtex::{self, HasName},
latex,
},
- DocumentData,
+ util::cursor::CursorContext,
};
-use super::{Indel, RenameResult};
+use super::{Indel, Params, RenameResult};
-pub(super) fn prepare_entry_rename<P: HasPosition>(
- context: &CursorContext<P>,
-) -> Option<TextRange> {
+pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
let (_, range) = context
.find_citation_key_word()
.or_else(|| context.find_entry_key())?;
@@ -25,18 +20,17 @@ pub(super) fn prepare_entry_rename<P: HasPosition>(
Some(range)
}
-pub(super) fn rename_entry(context: &CursorContext<RenameParams>) -> Option<RenameResult> {
- prepare_entry_rename(context)?;
+pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
+ prepare_rename(context)?;
let (key_text, _) = context
.find_citation_key_word()
.or_else(|| context.find_entry_key())?;
let mut changes = FxHashMap::default();
- for document in context.request.workspace.documents_by_uri.values() {
- let uri = Arc::clone(&document.uri);
- match &document.data {
- DocumentData::Latex(data) => {
- let root = latex::SyntaxNode::new_root(data.green.clone());
+ for document in context.related() {
+ match document.parse(context.db) {
+ DocumentData::Tex(data) => {
+ let root = data.root(context.db);
let edits: Vec<_> = root
.descendants()
.filter_map(latex::Citation::cast)
@@ -45,13 +39,13 @@ pub(super) fn rename_entry(context: &CursorContext<RenameParams>) -> Option<Rena
.filter(|key| key.to_string() == key_text)
.map(|key| Indel {
delete: latex::small_range(&key),
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
})
.collect();
- changes.insert(uri, edits);
+ changes.insert(document, edits);
}
- DocumentData::Bibtex(data) => {
- let root = bibtex::SyntaxNode::new_root(data.green.clone());
+ DocumentData::Bib(data) => {
+ let root = data.root(context.db);
let edits: Vec<_> = root
.descendants()
.filter_map(bibtex::Entry::cast)
@@ -59,12 +53,12 @@ pub(super) fn rename_entry(context: &CursorContext<RenameParams>) -> Option<Rena
.filter(|key| key.text() == key_text)
.map(|key| Indel {
delete: key.text_range(),
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
})
.collect();
- changes.insert(uri, edits);
+ changes.insert(document, edits);
}
- DocumentData::BuildLog(_) => {}
+ DocumentData::Log(_) => {}
}
}
diff --git a/support/texlab/src/features/rename/label.rs b/support/texlab/src/features/rename/label.rs
index 30a99c3b36..d38d13ae93 100644
--- a/support/texlab/src/features/rename/label.rs
+++ b/support/texlab/src/features/rename/label.rs
@@ -1,32 +1,24 @@
-use std::sync::Arc;
-
-use lsp_types::RenameParams;
use rowan::{ast::AstNode, TextRange};
use rustc_hash::FxHashMap;
-use crate::{
- features::cursor::{CursorContext, HasPosition},
- syntax::latex,
-};
+use crate::{syntax::latex, util::cursor::CursorContext};
-use super::{Indel, RenameResult};
+use super::{Indel, Params, RenameResult};
-pub(super) fn prepare_label_rename<P: HasPosition>(
- context: &CursorContext<P>,
-) -> Option<TextRange> {
+pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
let (_, range) = context.find_label_name_key()?;
Some(range)
}
-pub(super) fn rename_label(context: &CursorContext<RenameParams>) -> Option<RenameResult> {
- prepare_label_rename(context)?;
+pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
+ prepare_rename(context)?;
let (name_text, _) = context.find_label_name_key()?;
let mut changes = FxHashMap::default();
- for document in context.request.workspace.documents_by_uri.values() {
- if let Some(data) = document.data.as_latex() {
+ for document in context.related() {
+ if let Some(data) = document.parse(context.db).as_tex() {
let mut edits = Vec::new();
- for node in latex::SyntaxNode::new_root(data.green.clone()).descendants() {
+ for node in data.root(context.db).descendants() {
if let Some(range) = latex::LabelDefinition::cast(node.clone())
.and_then(|label| label.name())
.and_then(|name| name.key())
@@ -35,7 +27,7 @@ pub(super) fn rename_label(context: &CursorContext<RenameParams>) -> Option<Rena
{
edits.push(Indel {
delete: range,
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
});
}
@@ -47,7 +39,7 @@ pub(super) fn rename_label(context: &CursorContext<RenameParams>) -> Option<Rena
.for_each(|name| {
edits.push(Indel {
delete: latex::small_range(&name),
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
});
});
@@ -59,7 +51,7 @@ pub(super) fn rename_label(context: &CursorContext<RenameParams>) -> Option<Rena
{
edits.push(Indel {
delete: latex::small_range(&name1),
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
});
}
@@ -70,13 +62,13 @@ pub(super) fn rename_label(context: &CursorContext<RenameParams>) -> Option<Rena
{
edits.push(Indel {
delete: latex::small_range(&name2),
- insert: context.request.params.new_name.clone(),
+ insert: context.params.new_name.clone(),
});
}
}
}
- changes.insert(Arc::clone(&document.uri), edits);
+ changes.insert(document, edits);
}
}
diff --git a/support/texlab/src/features/symbol.rs b/support/texlab/src/features/symbol.rs
index 24d599c3e1..c16193efcb 100644
--- a/support/texlab/src/features/symbol.rs
+++ b/support/texlab/src/features/symbol.rs
@@ -3,49 +3,44 @@ mod latex;
mod project_order;
mod types;
-use std::{cmp::Reverse, sync::Arc};
+use std::cmp::Reverse;
-use lsp_types::{
- DocumentSymbolParams, DocumentSymbolResponse, PartialResultParams, SymbolInformation,
- TextDocumentIdentifier, WorkDoneProgressParams, WorkspaceSymbolParams,
-};
+use lsp_types::{DocumentSymbolResponse, SymbolInformation, Url, WorkspaceSymbolParams};
-use crate::{ClientCapabilitiesExt, Workspace};
+use crate::{db::Workspace, util::capabilities::ClientCapabilitiesExt, Db};
-use self::{
- bibtex::find_bibtex_symbols, latex::find_latex_symbols, project_order::ProjectOrdering,
- types::InternalSymbol,
-};
+use self::project_order::ProjectOrdering;
-use super::FeatureRequest;
+pub fn find_document_symbols(db: &dyn Db, uri: &Url) -> Option<DocumentSymbolResponse> {
+ let workspace = Workspace::get(db);
+ let document = workspace.lookup_uri(db, uri)?;
-#[must_use]
-pub fn find_document_symbols(req: FeatureRequest<DocumentSymbolParams>) -> DocumentSymbolResponse {
let mut buf = Vec::new();
- find_latex_symbols(&req, &mut buf);
- find_bibtex_symbols(&req, &mut buf);
- if req
- .workspace
- .environment
- .client_capabilities
+ latex::find_symbols(db, document, &mut buf);
+ bibtex::find_symbols(db, document, &mut buf);
+ if workspace
+ .client_capabilities(db)
.has_hierarchical_document_symbol_support()
{
- DocumentSymbolResponse::Nested(
- buf.into_iter()
- .map(InternalSymbol::into_document_symbol)
- .collect(),
- )
+ let symbols = buf
+ .into_iter()
+ .map(|symbol| symbol.into_document_symbol(db))
+ .collect();
+
+ Some(DocumentSymbolResponse::Nested(symbols))
} else {
let mut new_buf = Vec::new();
for symbol in buf {
symbol.flatten(&mut new_buf);
}
+
let mut new_buf: Vec<_> = new_buf
.into_iter()
- .map(|symbol| symbol.into_symbol_info(req.main_document().uri.as_ref().clone()))
+ .map(|symbol| symbol.into_symbol_info(uri.clone()))
.collect();
- sort_symbols(&req.workspace, &mut new_buf);
- DocumentSymbolResponse::Flat(new_buf)
+
+ sort_symbols(db, &mut new_buf);
+ Some(DocumentSymbolResponse::Flat(new_buf))
}
}
@@ -57,25 +52,16 @@ struct WorkspaceSymbol {
#[must_use]
pub fn find_workspace_symbols(
- workspace: &Workspace,
+ db: &dyn Db,
params: &WorkspaceSymbolParams,
) -> Vec<SymbolInformation> {
let mut symbols = Vec::new();
- for document in workspace.documents_by_uri.values() {
- let request = FeatureRequest {
- uri: Arc::clone(&document.uri),
- params: DocumentSymbolParams {
- text_document: TextDocumentIdentifier::new(document.uri.as_ref().clone()),
- partial_result_params: PartialResultParams::default(),
- work_done_progress_params: WorkDoneProgressParams::default(),
- },
- workspace: workspace.slice(&document.uri),
- };
-
+ let workspace = Workspace::get(db);
+ for document in workspace.documents(db).iter().copied() {
let mut buf = Vec::new();
- find_latex_symbols(&request, &mut buf);
- find_bibtex_symbols(&request, &mut buf);
+ latex::find_symbols(db, document, &mut buf);
+ bibtex::find_symbols(db, document, &mut buf);
let mut new_buf = Vec::new();
for symbol in buf {
@@ -85,7 +71,7 @@ pub fn find_workspace_symbols(
for symbol in new_buf {
symbols.push(WorkspaceSymbol {
search_text: symbol.search_text(),
- info: symbol.into_symbol_info(document.uri.as_ref().clone()),
+ info: symbol.into_symbol_info(document.location(db).uri(db).clone()),
});
}
}
@@ -95,6 +81,7 @@ pub fn find_workspace_symbols(
.split_whitespace()
.map(str::to_lowercase)
.collect();
+
let mut filtered = Vec::new();
for symbol in symbols {
let mut included = true;
@@ -109,20 +96,21 @@ pub fn find_workspace_symbols(
filtered.push(symbol.info);
}
}
- sort_symbols(workspace, &mut filtered);
+
+ sort_symbols(db, &mut filtered);
filtered
}
-fn sort_symbols(workspace: &Workspace, symbols: &mut [SymbolInformation]) {
- let ordering = ProjectOrdering::from(workspace);
+fn sort_symbols(db: &dyn Db, symbols: &mut [SymbolInformation]) {
+ let ordering = ProjectOrdering::new(db);
symbols.sort_by(|left, right| {
let left_key = (
- ordering.get(&left.location.uri),
+ ordering.get(db, &left.location.uri),
left.location.range.start,
Reverse(left.location.range.end),
);
let right_key = (
- ordering.get(&right.location.uri),
+ ordering.get(db, &right.location.uri),
right.location.range.start,
Reverse(right.location.range.end),
);
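
The tuple key in sort_symbols orders symbols by project position, then by start, and wraps the end in Reverse so that an enclosing symbol sorts before the symbols nested inside it. A standalone, std-only illustration with plain offsets standing in for real locations:

use std::cmp::Reverse;

fn main() {
    // (project_order, start_offset, end_offset) stand in for real locations.
    let mut symbols: Vec<(usize, u32, u32)> = vec![(0, 10, 20), (0, 10, 80), (0, 5, 9), (1, 0, 3)];
    symbols.sort_by_key(|&(order, start, end)| (order, start, Reverse(end)));

    // The enclosing range (10..80) now comes before the nested one (10..20).
    let expected: Vec<(usize, u32, u32)> = vec![(0, 5, 9), (0, 10, 80), (0, 10, 20), (1, 0, 3)];
    assert_eq!(symbols, expected);
}
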
diff --git a/support/texlab/src/features/symbol/bibtex.rs b/support/texlab/src/features/symbol/bibtex.rs
index bbc9026b0b..6a92405f17 100644
--- a/support/texlab/src/features/symbol/bibtex.rs
+++ b/support/texlab/src/features/symbol/bibtex.rs
@@ -1,82 +1,87 @@
-use lsp_types::DocumentSymbolParams;
use rowan::ast::AstNode;
use crate::{
- features::FeatureRequest,
+ db::Document,
syntax::bibtex::{self, HasName, HasType},
- BibtexEntryTypeCategory, LineIndexExt, LANGUAGE_DATA,
+ util::{
+ lang_data::{BibtexEntryTypeCategory, LANGUAGE_DATA},
+ line_index::LineIndex,
+ line_index_ext::LineIndexExt,
+ },
+ Db,
};
use super::types::{InternalSymbol, InternalSymbolKind};
-pub fn find_bibtex_symbols(
- request: &FeatureRequest<DocumentSymbolParams>,
+pub fn find_symbols(db: &dyn Db, document: Document, buf: &mut Vec<InternalSymbol>) -> Option<()> {
+ let data = document.parse(db).as_bib()?;
+ let line_index = document.contents(db).line_index(db);
+ for node in data.root(db).children() {
+ process_string(node.clone(), line_index, buf)
+ .or_else(|| process_entry(node, line_index, buf));
+ }
+
+ Some(())
+}
+
+fn process_string(
+ node: bibtex::SyntaxNode,
+ line_index: &LineIndex,
buf: &mut Vec<InternalSymbol>,
) -> Option<()> {
- let main_document = request.main_document();
- let data = main_document.data.as_bibtex()?;
-
- for node in bibtex::SyntaxNode::new_root(data.green.clone()).children() {
- if let Some(string) = bibtex::StringDef::cast(node.clone()) {
- if let Some(name) = string.name_token() {
- buf.push(InternalSymbol {
- name: name.text().into(),
- label: None,
- kind: InternalSymbolKind::String,
- deprecated: false,
- full_range: main_document
- .line_index
- .line_col_lsp_range(string.syntax().text_range()),
- selection_range: main_document
- .line_index
- .line_col_lsp_range(name.text_range()),
- children: Vec::new(),
- })
- }
- } else if let Some(entry) = bibtex::Entry::cast(node) {
- if let Some(ty) = entry.type_token() {
- if let Some(key) = entry.name_token() {
- let mut children = Vec::new();
- for field in entry.fields() {
- if let Some(name) = field.name_token() {
- let symbol = InternalSymbol {
- name: name.text().to_string(),
- label: None,
- kind: InternalSymbolKind::Field,
- deprecated: false,
- full_range: main_document
- .line_index
- .line_col_lsp_range(field.syntax().text_range()),
- selection_range: main_document
- .line_index
- .line_col_lsp_range(name.text_range()),
- children: Vec::new(),
- };
- children.push(symbol);
- }
- }
+ let string = bibtex::StringDef::cast(node)?;
+ let name = string.name_token()?;
+ buf.push(InternalSymbol {
+ name: name.text().into(),
+ label: None,
+ kind: InternalSymbolKind::String,
+ deprecated: false,
+ full_range: line_index.line_col_lsp_range(string.syntax().text_range()),
+ selection_range: line_index.line_col_lsp_range(name.text_range()),
+ children: Vec::new(),
+ });
- let category = LANGUAGE_DATA
- .find_entry_type(&ty.text()[1..])
- .map(|ty| ty.category)
- .unwrap_or(BibtexEntryTypeCategory::Misc);
+ Some(())
+}
- buf.push(InternalSymbol {
- name: key.to_string(),
- label: None,
- kind: InternalSymbolKind::Entry(category),
- deprecated: false,
- full_range: main_document
- .line_index
- .line_col_lsp_range(entry.syntax().text_range()),
- selection_range: main_document
- .line_index
- .line_col_lsp_range(key.text_range()),
- children,
- });
- }
- }
+fn process_entry(
+ node: bibtex::SyntaxNode,
+ line_index: &LineIndex,
+ buf: &mut Vec<InternalSymbol>,
+) -> Option<()> {
+ let entry = bibtex::Entry::cast(node)?;
+ let ty = entry.type_token()?;
+ let key = entry.name_token()?;
+ let mut children = Vec::new();
+ for field in entry.fields() {
+ if let Some(name) = field.name_token() {
+ let symbol = InternalSymbol {
+ name: name.text().to_string(),
+ label: None,
+ kind: InternalSymbolKind::Field,
+ deprecated: false,
+ full_range: line_index.line_col_lsp_range(field.syntax().text_range()),
+ selection_range: line_index.line_col_lsp_range(name.text_range()),
+ children: Vec::new(),
+ };
+ children.push(symbol);
}
}
+
+ let category = LANGUAGE_DATA
+ .find_entry_type(&ty.text()[1..])
+ .map(|ty| ty.category)
+ .unwrap_or(BibtexEntryTypeCategory::Misc);
+
+ buf.push(InternalSymbol {
+ name: key.to_string(),
+ label: None,
+ kind: InternalSymbolKind::Entry(category),
+ deprecated: false,
+ full_range: line_index.line_col_lsp_range(entry.syntax().text_range()),
+ selection_range: line_index.line_col_lsp_range(key.text_range()),
+ children,
+ });
+
Some(())
}
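
find_symbols dispatches each top-level node through handlers that return Option<()>, bailing out early with ? and falling through via or_else. A self-contained sketch of that pattern with a toy node type (StringDef/Entry/Junk are illustrative stand-ins, not texlab's syntax tree):

enum Node {
    StringDef(String),
    Entry(String),
    Junk,
}

fn process_string(node: &Node, buf: &mut Vec<String>) -> Option<()> {
    let Node::StringDef(name) = node else { return None };
    buf.push(format!("string {name}"));
    Some(())
}

fn process_entry(node: &Node, buf: &mut Vec<String>) -> Option<()> {
    let Node::Entry(key) = node else { return None };
    buf.push(format!("entry {key}"));
    Some(())
}

fn main() {
    let nodes = [
        Node::StringDef("foo".into()),
        Node::Junk,
        Node::Entry("bar".into()),
    ];
    let mut buf = Vec::new();
    for node in &nodes {
        // The first handler that recognizes the node wins; `Junk` matches neither.
        let _ = process_string(node, &mut buf).or_else(|| process_entry(node, &mut buf));
    }
    assert_eq!(buf, ["string foo", "entry bar"]);
}
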
diff --git a/support/texlab/src/features/symbol/latex.rs b/support/texlab/src/features/symbol/latex.rs
index 8a88ffb0d9..81927ab515 100644
--- a/support/texlab/src/features/symbol/latex.rs
+++ b/support/texlab/src/features/symbol/latex.rs
@@ -1,38 +1,30 @@
use std::str::FromStr;
-use lsp_types::{DocumentSymbolParams, Range};
+use lsp_types::Range;
use rowan::ast::AstNode;
-use smol_str::SmolStr;
use titlecase::titlecase;
use crate::{
- features::FeatureRequest,
- find_caption_by_parent, find_label_number,
+ db::{Document, Word, Workspace},
syntax::latex::{self, HasBrack, HasCurly},
- LabelledFloatKind, LatexDocumentData, LineIndexExt, LANGUAGE_DATA,
+ util::{
+ label::{find_caption_by_parent, LabeledFloatKind},
+ lang_data::LANGUAGE_DATA,
+ line_index_ext::LineIndexExt,
+ },
+ Db,
};
use super::types::{InternalSymbol, InternalSymbolKind};
-pub fn find_latex_symbols(
- request: &FeatureRequest<DocumentSymbolParams>,
- buf: &mut Vec<InternalSymbol>,
-) -> Option<()> {
- let data = request.main_document().data.as_latex()?;
- let mut context = Context { request, data };
-
- let root = context.data.green.clone();
- let mut symbols = visit(&mut context, latex::SyntaxNode::new_root(root));
+pub fn find_symbols(db: &dyn Db, document: Document, buf: &mut Vec<InternalSymbol>) -> Option<()> {
+ let data = document.parse(db).as_tex()?;
+ let mut symbols = visit(db, document, data.root(db));
buf.append(&mut symbols);
Some(())
}
-struct Context<'a> {
- request: &'a FeatureRequest<DocumentSymbolParams>,
- data: &'a LatexDocumentData,
-}
-
-fn visit(context: &mut Context, node: latex::SyntaxNode) -> Vec<InternalSymbol> {
+fn visit(db: &dyn Db, document: Document, node: latex::SyntaxNode) -> Vec<InternalSymbol> {
let symbol = match node.kind() {
latex::PART
| latex::CHAPTER
@@ -40,9 +32,9 @@ fn visit(context: &mut Context, node: latex::SyntaxNode) -> Vec<InternalSymbol>
| latex::SUBSECTION
| latex::SUBSUBSECTION
| latex::PARAGRAPH
- | latex::SUBPARAGRAPH => visit_section(context, node.clone()),
- latex::ENUM_ITEM => visit_enum_item(context, node.clone()),
- latex::EQUATION => visit_equation(context, node.clone()),
+ | latex::SUBPARAGRAPH => visit_section(db, document, node.clone()),
+ latex::ENUM_ITEM => visit_enum_item(db, document, node.clone()),
+ latex::EQUATION => visit_equation(db, document, node.clone()),
latex::ENVIRONMENT => latex::Environment::cast(node.clone())
.and_then(|env| env.begin())
.and_then(|begin| begin.name())
@@ -54,17 +46,17 @@ fn visit(context: &mut Context, node: latex::SyntaxNode) -> Vec<InternalSymbol>
.iter()
.any(|env| env == &name)
{
- visit_equation_environment(context, node.clone())
+ visit_equation_environment(db, document, node.clone())
} else if LANGUAGE_DATA
.enum_environments
.iter()
.any(|env| env == &name)
{
- visit_enumeration(context, node.clone(), &name)
- } else if let Ok(float_kind) = LabelledFloatKind::from_str(&name) {
- visit_float(context, node.clone(), float_kind)
+ visit_enumeration(db, document, node.clone(), &name)
+ } else if let Ok(float_kind) = LabeledFloatKind::from_str(&name) {
+ visit_float(db, document, node.clone(), float_kind)
} else {
- visit_theorem(context, node.clone(), &name)
+ visit_theorem(db, document, node.clone(), &name)
}
}),
_ => None,
@@ -73,39 +65,42 @@ fn visit(context: &mut Context, node: latex::SyntaxNode) -> Vec<InternalSymbol>
match symbol {
Some(mut parent) => {
for child in node.children() {
- parent.children.append(&mut visit(context, child));
+ parent.children.append(&mut visit(db, document, child));
}
vec![parent]
}
None => {
let mut symbols = Vec::new();
for child in node.children() {
- symbols.append(&mut visit(context, child));
+ symbols.append(&mut visit(db, document, child));
}
symbols
}
}
}
-fn visit_section(context: &mut Context, node: latex::SyntaxNode) -> Option<InternalSymbol> {
+fn visit_section(
+ db: &dyn Db,
+ document: Document,
+ node: latex::SyntaxNode,
+) -> Option<InternalSymbol> {
let section = latex::Section::cast(node)?;
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&section));
let group = section.name()?;
let group_text = group.content_text()?;
- let symbol = match find_label_by_parent(context, section.syntax()) {
+ let symbol = match find_label_by_parent(db, document, section.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
- Some(number) => format!("{} {}", number, group_text),
+ Some(number) => format!("{} {}", number.text(db), group_text),
None => group_text,
};
@@ -129,10 +124,15 @@ fn visit_section(context: &mut Context, node: latex::SyntaxNode) -> Option<Inter
children: Vec::new(),
},
};
+
Some(symbol)
}
-fn visit_enum_item(context: &mut Context, node: latex::SyntaxNode) -> Option<InternalSymbol> {
+fn visit_enum_item(
+ db: &dyn Db,
+ document: Document,
+ node: latex::SyntaxNode,
+) -> Option<InternalSymbol> {
let enum_item = latex::EnumItem::cast(node.clone())?;
if !enum_item
.syntax()
@@ -151,10 +151,9 @@ fn visit_enum_item(context: &mut Context, node: latex::SyntaxNode) -> Option<Int
return None;
}
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&enum_item));
let name = enum_item
@@ -162,13 +161,15 @@ fn visit_enum_item(context: &mut Context, node: latex::SyntaxNode) -> Option<Int
.and_then(|label| label.content_text())
.unwrap_or_else(|| "Item".to_string());
- let symbol = match find_label_by_parent(context, &node) {
+ let symbol = match find_label_by_parent(db, document, &node) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => InternalSymbol {
- name: number.map(Into::into).unwrap_or_else(|| name.to_string()),
+ name: number
+ .map(|num| num.text(db).clone())
+ .unwrap_or_else(|| name.clone()),
label: Some(label),
kind: InternalSymbolKind::EnumerationItem,
deprecated: false,
@@ -189,46 +190,50 @@ fn visit_enum_item(context: &mut Context, node: latex::SyntaxNode) -> Option<Int
Some(symbol)
}
-fn visit_equation(context: &mut Context, node: latex::SyntaxNode) -> Option<InternalSymbol> {
+fn visit_equation(
+ db: &dyn Db,
+ document: Document,
+ node: latex::SyntaxNode,
+) -> Option<InternalSymbol> {
let equation = latex::Equation::cast(node)?;
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&equation));
- make_equation_symbol(context, equation.syntax(), full_range)
+ make_equation_symbol(db, document, equation.syntax(), full_range)
}
fn visit_equation_environment(
- context: &mut Context,
+ db: &dyn Db,
+ document: Document,
node: latex::SyntaxNode,
) -> Option<InternalSymbol> {
let environment = latex::Environment::cast(node)?;
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&environment));
- make_equation_symbol(context, environment.syntax(), full_range)
+ make_equation_symbol(db, document, environment.syntax(), full_range)
}
fn make_equation_symbol(
- context: &mut Context,
+ db: &dyn Db,
+ document: Document,
node: &latex::SyntaxNode,
full_range: Range,
) -> Option<InternalSymbol> {
- let symbol = match find_label_by_parent(context, node) {
+ let symbol = match find_label_by_parent(db, document, node) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
- Some(number) => format!("Equation ({})", number),
+ Some(number) => format!("Equation ({})", number.text(db)),
None => "Equation".to_string(),
};
@@ -256,26 +261,26 @@ fn make_equation_symbol(
}
fn visit_enumeration(
- context: &mut Context,
+ db: &dyn Db,
+ document: Document,
node: latex::SyntaxNode,
env_name: &str,
) -> Option<InternalSymbol> {
let environment = latex::Environment::cast(node)?;
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&environment));
let name = titlecase(env_name);
- let symbol = match find_label_by_parent(context, environment.syntax()) {
+ let symbol = match find_label_by_parent(db, document, environment.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
- Some(number) => format!("{} {}", name, number),
+ Some(number) => format!("{} {}", name, number.text(db)),
None => name,
};
@@ -303,33 +308,33 @@ fn visit_enumeration(
}
fn visit_float(
- context: &mut Context,
+ db: &dyn Db,
+ document: Document,
node: latex::SyntaxNode,
- float_kind: LabelledFloatKind,
+ float_kind: LabeledFloatKind,
) -> Option<InternalSymbol> {
let environment = latex::Environment::cast(node)?;
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&environment));
let (float_kind, symbol_kind) = match float_kind {
- LabelledFloatKind::Algorithm => ("Algorithm", InternalSymbolKind::Algorithm),
- LabelledFloatKind::Figure => ("Figure", InternalSymbolKind::Figure),
- LabelledFloatKind::Listing => ("Listing", InternalSymbolKind::Listing),
- LabelledFloatKind::Table => ("Table", InternalSymbolKind::Table),
+ LabeledFloatKind::Algorithm => ("Algorithm", InternalSymbolKind::Algorithm),
+ LabeledFloatKind::Figure => ("Figure", InternalSymbolKind::Figure),
+ LabeledFloatKind::Listing => ("Listing", InternalSymbolKind::Listing),
+ LabeledFloatKind::Table => ("Table", InternalSymbolKind::Table),
};
let caption = find_caption_by_parent(environment.syntax())?;
- let symbol = match find_label_by_parent(context, environment.syntax()) {
+ let symbol = match find_label_by_parent(db, document, environment.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
- Some(number) => format!("{} {}: {}", float_kind, number, caption),
+ Some(number) => format!("{} {}: {}", float_kind, number.text(db), caption),
None => format!("{}: {}", float_kind, caption),
};
@@ -358,23 +363,17 @@ fn visit_float(
}
fn visit_theorem(
- context: &mut Context,
+ db: &dyn Db,
+ document: Document,
node: latex::SyntaxNode,
environment_name: &str,
) -> Option<InternalSymbol> {
- let definition = context
- .request
- .workspace
- .documents_by_uri
- .values()
- .filter_map(|document| document.data.as_latex())
- .find_map(|data| {
- data.extras
- .theorem_environments
- .iter()
- .find(|environment| environment.name == environment_name)
- .cloned()
- })?;
+ let definition = Workspace::get(db)
+ .related(db, document)
+ .iter()
+ .filter_map(|document| document.parse(db).as_tex())
+ .flat_map(|data| data.analyze(db).theorem_environments(db))
+ .find(|env| env.name(db).text(db) == environment_name)?;
let node = latex::Environment::cast(node)?;
let theorem_description = node
@@ -382,13 +381,12 @@ fn visit_theorem(
.options()
.and_then(|option| option.content_text());
- let full_range = context
- .request
- .main_document()
- .line_index
+ let full_range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&node));
- let symbol = match find_label_by_parent(context, node.syntax()) {
+ let symbol = match find_label_by_parent(db, document, node.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
@@ -396,11 +394,20 @@ fn visit_theorem(
}) => {
let name = match (number, theorem_description) {
(Some(number), Some(desc)) => {
- format!("{} {} ({})", definition.description, number, desc)
+ format!(
+ "{} {} ({})",
+ definition.description(db).text(db),
+ number.text(db),
+ desc
+ )
}
- (Some(number), None) => format!("{} {}", definition.description, number),
- (None, Some(desc)) => format!("{} ({})", definition.description, desc),
- (None, None) => definition.description,
+ (Some(number), None) => format!(
+ "{} {}",
+ definition.description(db).text(db),
+ number.text(db)
+ ),
+ (None, Some(desc)) => format!("{} ({})", definition.description(db).text(db), desc),
+ (None, None) => definition.description(db).text(db).clone(),
};
InternalSymbol {
@@ -415,8 +422,8 @@ fn visit_theorem(
}
None => {
let name = match theorem_description {
- Some(desc) => format!("{} ({})", definition.description, desc),
- None => definition.description,
+ Some(desc) => format!("{} ({})", definition.description(db).text(db), desc),
+ None => definition.description(db).text(db).clone(),
};
InternalSymbol {
name,
@@ -434,28 +441,27 @@ fn visit_theorem(
#[derive(Debug, PartialEq, Eq, Clone)]
struct NumberedLabel {
- name: String,
+ name: Word,
range: Range,
- number: Option<SmolStr>,
+ number: Option<Word>,
}
fn find_label_by_parent(
- context: &mut Context,
+ db: &dyn Db,
+ document: Document,
parent: &latex::SyntaxNode,
) -> Option<NumberedLabel> {
let node = parent.children().find_map(latex::LabelDefinition::cast)?;
-
- let name = node.name()?.key()?.to_string();
- let range = context
- .request
- .main_document()
- .line_index
+ let name = Word::new(db, node.name()?.key()?.to_string());
+ let range = document
+ .contents(db)
+ .line_index(db)
.line_col_lsp_range(latex::small_range(&node));
- let number = find_label_number(&context.request.workspace, &name);
+ let number = Workspace::get(db).number_of_label(db, document, name);
Some(NumberedLabel {
- name: name.to_string(),
+ name,
range,
- number: number.map(Into::into),
+ number,
})
}
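
The visit function follows a common recursion scheme: if a node yields a symbol, the symbols of its children are nested under it; otherwise the child symbols bubble up to the caller unchanged. A standalone sketch of that scheme with a toy tree (Node/Symbol here are illustrative, not texlab types):

struct Symbol {
    name: String,
    children: Vec<Symbol>,
}

struct Node {
    label: Option<String>, // `Some` if this node yields a symbol
    children: Vec<Node>,
}

fn visit(node: &Node) -> Vec<Symbol> {
    match &node.label {
        Some(name) => {
            let mut parent = Symbol { name: name.clone(), children: Vec::new() };
            for child in &node.children {
                parent.children.append(&mut visit(child));
            }
            vec![parent]
        }
        None => node.children.iter().flat_map(visit).collect(),
    }
}

fn main() {
    // section -> (plain group -> equation) ends up as section -> equation.
    let tree = Node {
        label: Some("Section".into()),
        children: vec![Node {
            label: None,
            children: vec![Node { label: Some("Equation".into()), children: vec![] }],
        }],
    };
    let symbols = visit(&tree);
    assert_eq!(symbols.len(), 1);
    assert_eq!(symbols[0].children[0].name, "Equation");
}
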
diff --git a/support/texlab/src/features/symbol/project_order.rs b/support/texlab/src/features/symbol/project_order.rs
index 794447ecf6..f4aeb63bca 100644
--- a/support/texlab/src/features/symbol/project_order.rs
+++ b/support/texlab/src/features/symbol/project_order.rs
@@ -1,209 +1,202 @@
-use std::{sync::Arc, usize};
-
+use itertools::Itertools;
use lsp_types::Url;
-use petgraph::{algo::tarjan_scc, Directed, Graph};
-use rustc_hash::FxHashSet;
-use crate::{Document, Workspace};
+use crate::{
+ db::{dependency_graph, Document, Workspace},
+ Db,
+};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ProjectOrdering {
- ordering: Vec<Arc<Url>>,
+ ordering: Vec<Document>,
}
impl ProjectOrdering {
- pub fn get(&self, uri: &Url) -> usize {
- self.ordering
- .iter()
- .position(|u| u.as_ref() == uri)
- .unwrap_or(usize::MAX)
- }
-}
-
-impl From<&Workspace> for ProjectOrdering {
- fn from(workspace: &Workspace) -> Self {
- let mut ordering = Vec::new();
- let uris: FxHashSet<Arc<Url>> = workspace
- .documents_by_uri
- .values()
- .map(|document| Arc::clone(&document.uri))
+ pub fn new(db: &dyn Db) -> Self {
+ let workspace = Workspace::get(db);
+
+ let ordering: Vec<_> = workspace
+ .index_files(db)
+ .chain(workspace.documents(db).iter().copied())
+ .flat_map(|document| {
+ dependency_graph(db, document)
+ .preorder()
+ .rev()
+ .collect_vec()
+ })
+ .unique()
.collect();
- let comps = connected_components(workspace);
- for comp in comps {
- let (graph, documents) = build_dependency_graph(&comp);
-
- let mut visited = FxHashSet::default();
- let root_index = *graph.node_weight(tarjan_scc(&graph)[0][0]).unwrap();
- let mut stack = vec![Arc::clone(&documents[root_index].uri)];
-
- while let Some(uri) = stack.pop() {
- if !visited.insert(Arc::clone(&uri)) {
- continue;
- }
-
- ordering.push(Arc::clone(&uri));
- if let Some(document) = workspace.documents_by_uri.get(&uri) {
- if let Some(data) = document.data.as_latex() {
- for link in data.extras.explicit_links.iter().rev() {
- for target in &link.targets {
- if uris.contains(target.as_ref()) {
- stack.push(Arc::clone(target));
- }
- }
- }
- }
- }
- }
- }
-
Self { ordering }
}
-}
-
-fn connected_components(workspace: &Workspace) -> Vec<Workspace> {
- let mut components = Vec::new();
- let mut visited = FxHashSet::default();
- for root_document in workspace.documents_by_uri.values() {
- if !visited.insert(Arc::clone(&root_document.uri)) {
- continue;
- }
-
- let slice = workspace.slice(&root_document.uri);
- for uri in slice.documents_by_uri.keys() {
- visited.insert(Arc::clone(uri));
- }
- components.push(slice);
- }
- components
-}
-fn build_dependency_graph(workspace: &Workspace) -> (Graph<usize, (), Directed>, Vec<&Document>) {
- let mut graph = Graph::new();
- let documents: Vec<_> = workspace.documents_by_uri.values().collect();
- let nodes: Vec<_> = (0..documents.len()).map(|i| graph.add_node(i)).collect();
-
- for (i, document) in documents.iter().enumerate() {
- if let Some(data) = document.data.as_latex() {
- for link in &data.extras.explicit_links {
- for target in &link.targets {
- if let Some(j) = documents
- .iter()
- .position(|document| document.uri.as_ref() == target.as_ref())
- {
- graph.add_edge(nodes[j], nodes[i], ());
- break;
- }
- }
- }
- }
+ pub fn get(&self, db: &dyn Db, uri: &Url) -> usize {
+ self.ordering
+ .iter()
+ .position(|doc| doc.location(db).uri(db) == uri)
+ .unwrap_or(std::usize::MAX)
}
-
- (graph, documents)
}
#[cfg(test)]
mod tests {
- use std::sync::Arc;
-
- use anyhow::Result;
-
- use crate::DocumentLanguage;
+ use crate::{
+ db::{Language, Owner},
+ Database,
+ };
use super::*;
#[test]
- fn test_no_cycles() -> Result<()> {
- let mut workspace = Workspace::default();
+ fn test_no_cycles() {
+ let mut db = Database::default();
+ let workspace = Workspace::get(&db);
let a = workspace.open(
- Arc::new(Url::parse("http://example.com/a.tex")?),
- Arc::new(String::new()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/a.tex").unwrap(),
+ String::new(),
+ Language::Tex,
+ Owner::Client,
+ );
let b = workspace.open(
- Arc::new(Url::parse("http://example.com/b.tex")?),
- Arc::new(String::new()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/b.tex").unwrap(),
+ String::new(),
+ Language::Tex,
+ Owner::Client,
+ );
let c = workspace.open(
- Arc::new(Url::parse("http://example.com/c.tex")?),
- Arc::new(r#"\include{b}\include{a}"#.to_string()),
- DocumentLanguage::Latex,
- )?;
-
- let ordering = ProjectOrdering::from(&workspace);
-
- assert_eq!(ordering.get(&a.uri), 2);
- assert_eq!(ordering.get(&b.uri), 1);
- assert_eq!(ordering.get(&c.uri), 0);
- Ok(())
+ &mut db,
+ Url::parse("http://example.com/c.tex").unwrap(),
+ r#"\documentclass{article}\include{b}\include{a}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
+
+ let ordering = ProjectOrdering::new(&db);
+
+ assert_eq!(ordering.get(&db, a.location(&db).uri(&db)), 0);
+ assert_eq!(ordering.get(&db, b.location(&db).uri(&db)), 1);
+ assert_eq!(ordering.get(&db, c.location(&db).uri(&db)), 2);
}
#[test]
- fn test_cycles() -> Result<()> {
- let mut workspace = Workspace::default();
+ fn test_two_layers() {
+ let mut db = Database::default();
+ let workspace = Workspace::get(&db);
let a = workspace.open(
- Arc::new(Url::parse("http://example.com/a.tex")?),
- Arc::new(r#"\include{b}"#.to_string()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/a.tex").unwrap(),
+ String::new(),
+ Language::Tex,
+ Owner::Client,
+ );
let b = workspace.open(
- Arc::new(Url::parse("http://example.com/b.tex")?),
- Arc::new(r#"\include{a}"#.to_string()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/b.tex").unwrap(),
+ r#"\include{a}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
let c = workspace.open(
- Arc::new(Url::parse("http://example.com/c.tex")?),
- Arc::new(r#"\include{a}"#.to_string()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/c.tex").unwrap(),
+ r#"\documentclass{article}\include{b}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
+
+ let ordering = ProjectOrdering::new(&db);
+
+ assert_eq!(ordering.get(&db, a.location(&db).uri(&db)), 0);
+ assert_eq!(ordering.get(&db, b.location(&db).uri(&db)), 1);
+ assert_eq!(ordering.get(&db, c.location(&db).uri(&db)), 2);
+ }
- let ordering = ProjectOrdering::from(&workspace);
+ #[test]
+ fn test_cycles() {
+ let mut db = Database::default();
+ let workspace = Workspace::get(&db);
- assert_eq!(ordering.get(&a.uri), 1);
- assert_eq!(ordering.get(&b.uri), 2);
- assert_eq!(ordering.get(&c.uri), 0);
- Ok(())
+ let a = workspace.open(
+ &mut db,
+ Url::parse("http://example.com/a.tex").unwrap(),
+ r#"\documentclass{article}\include{b}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
+
+ workspace.open(
+ &mut db,
+ Url::parse("http://example.com/b.tex").unwrap(),
+ r#"\include{a}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
+
+ workspace.open(
+ &mut db,
+ Url::parse("http://example.com/c.tex").unwrap(),
+ r#"\include{a}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
+
+ let ordering = ProjectOrdering::new(&db);
+ assert_ne!(ordering.get(&db, a.location(&db).uri(&db)), 0);
}
#[test]
- fn test_multiple_roots() -> Result<()> {
- let mut workspace = Workspace::default();
+ fn test_multiple_roots() {
+ let mut db = Database::default();
+ let workspace = Workspace::get(&db);
let a = workspace.open(
- Arc::new(Url::parse("http://example.com/a.tex")?),
- Arc::new(r#"\include{b}"#.to_string()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/a.tex").unwrap(),
+ r#"\documentclass{article}\include{b}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
let b = workspace.open(
- Arc::new(Url::parse("http://example.com/b.tex")?),
- Arc::new(r#""#.to_string()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/b.tex").unwrap(),
+ String::new(),
+ Language::Tex,
+ Owner::Client,
+ );
let c = workspace.open(
- Arc::new(Url::parse("http://example.com/c.tex")?),
- Arc::new(r#""#.to_string()),
- DocumentLanguage::Latex,
- )?;
+ &mut db,
+ Url::parse("http://example.com/c.tex").unwrap(),
+ String::new(),
+ Language::Tex,
+ Owner::Client,
+ );
let d = workspace.open(
- Arc::new(Url::parse("http://example.com/d.tex")?),
- Arc::new(r#"\include{c}"#.to_string()),
- DocumentLanguage::Latex,
- )?;
-
- let ordering = ProjectOrdering::from(&workspace);
-
- assert!(ordering.get(&a.uri) < ordering.get(&b.uri));
- assert!(ordering.get(&d.uri) < ordering.get(&c.uri));
- Ok(())
+ &mut db,
+ Url::parse("http://example.com/d.tex").unwrap(),
+ r#"\documentclass{article}\include{c}"#.to_string(),
+ Language::Tex,
+ Owner::Client,
+ );
+
+ let ordering = ProjectOrdering::new(&db);
+ assert!(
+ ordering.get(&db, b.location(&db).uri(&db))
+ < ordering.get(&db, a.location(&db).uri(&db))
+ );
+ assert!(
+ ordering.get(&db, c.location(&db).uri(&db))
+ < ordering.get(&db, d.location(&db).uri(&db))
+ );
}
}
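
ProjectOrdering::new concatenates the reversed preorder of each project's dependency graph and de-duplicates it with Itertools::unique, so included files come before the documents that include them, and get falls back to usize::MAX for unknown URIs. A standalone sketch of that consumption, with plain strings in place of Document handles:

use itertools::Itertools;

fn ordering_position(ordering: &[&str], uri: &str) -> usize {
    ordering.iter().position(|u| *u == uri).unwrap_or(usize::MAX)
}

fn main() {
    // Reversed preorders of two roots: c.tex includes b.tex and a.tex,
    // d.tex includes a.tex.
    let project_c = ["a.tex", "b.tex", "c.tex"];
    let project_d = ["a.tex", "d.tex"];

    let ordering: Vec<&str> = project_c.into_iter().chain(project_d).unique().collect();
    assert_eq!(ordering, ["a.tex", "b.tex", "c.tex", "d.tex"]);

    assert_eq!(ordering_position(&ordering, "a.tex"), 0);
    assert_eq!(ordering_position(&ordering, "missing.tex"), usize::MAX);
}
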
diff --git a/support/texlab/src/features/symbol/types.rs b/support/texlab/src/features/symbol/types.rs
index 88e7ffcb86..be0ffeceda 100644
--- a/support/texlab/src/features/symbol/types.rs
+++ b/support/texlab/src/features/symbol/types.rs
@@ -1,6 +1,10 @@
use lsp_types::{DocumentSymbol, Location, Range, SymbolInformation, SymbolKind, Url};
-use crate::{features::lsp_kinds::Structure, BibtexEntryTypeCategory};
+use crate::{
+ db::Word,
+ util::{lang_data::BibtexEntryTypeCategory, lsp_enums::Structure},
+ Db,
+};
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InternalSymbolKind {
@@ -39,7 +43,7 @@ impl InternalSymbolKind {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct InternalSymbol {
pub name: String,
- pub label: Option<String>,
+ pub label: Option<Word>,
pub kind: InternalSymbolKind,
pub deprecated: bool,
pub full_range: Range,
@@ -76,16 +80,17 @@ impl InternalSymbol {
buffer.push(self);
}
- pub fn into_document_symbol(self) -> DocumentSymbol {
+ pub fn into_document_symbol(self, db: &dyn Db) -> DocumentSymbol {
let children = self
.children
.into_iter()
- .map(|child| child.into_document_symbol())
+ .map(|child| child.into_document_symbol(db))
.collect();
+
#[allow(deprecated)]
DocumentSymbol {
name: self.name,
- detail: self.label,
+ detail: self.label.map(|word| word.text(db).clone()),
kind: self.kind.into_symbol_kind(),
deprecated: Some(self.deprecated),
range: self.full_range,
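
into_document_symbol consumes the internal symbol and converts its children recursively, so the nested output tree is built without cloning the child vectors. A minimal standalone sketch of that consuming conversion (Internal/Output are illustrative stand-ins, not the real types):

struct Internal {
    name: String,
    children: Vec<Internal>,
}

struct Output {
    name: String,
    children: Vec<Output>,
}

impl Internal {
    // Takes `self` by value so each child can be moved into the output tree.
    fn into_output(self) -> Output {
        Output {
            name: self.name,
            children: self.children.into_iter().map(Internal::into_output).collect(),
        }
    }
}

fn main() {
    let tree = Internal {
        name: "Section".into(),
        children: vec![Internal { name: "Figure".into(), children: Vec::new() }],
    };
    let out = tree.into_output();
    assert_eq!(out.children.len(), 1);
    assert_eq!(out.children[0].name, "Figure");
}
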
diff --git a/support/texlab/src/features/workspace_command.rs b/support/texlab/src/features/workspace_command.rs
new file mode 100644
index 0000000000..8d6732cd1c
--- /dev/null
+++ b/support/texlab/src/features/workspace_command.rs
@@ -0,0 +1,103 @@
+use std::process::Stdio;
+
+use anyhow::{bail, Result};
+use lsp_types::{TextDocumentIdentifier, Url};
+use thiserror::Error;
+
+use crate::{db::Workspace, normalize_uri, Db};
+
+pub fn select(db: &dyn Db, name: &str, args: Vec<serde_json::Value>) -> Result<CleanCommand> {
+ Ok(match name {
+ "texlab.cleanAuxiliary" => CleanCommand::new(db, CleanOptions::Auxiliary, args)?,
+ "texlab.cleanArtifacts" => CleanCommand::new(db, CleanOptions::Artifacts, args)?,
+ _ => bail!("Unknown command: {}", name),
+ })
+}
+
+#[derive(Debug, Error)]
+pub enum CleanError {
+ #[error("document '{0}' not found")]
+ DocumentNotFound(Url),
+
+ #[error("document '{0}' is not a local file")]
+ NoLocalFile(Url),
+
+ #[error("document was not provided as an argument")]
+ MissingArg,
+
+ #[error("invalid argument: {0}")]
+ InvalidArg(serde_json::Error),
+
+ #[error("failed to spawn process: {0}")]
+ Spawn(std::io::Error),
+}
+
+#[derive(Debug)]
+pub struct CleanCommand {
+ executable: String,
+ args: Vec<String>,
+}
+
+impl CleanCommand {
+ fn new(db: &dyn Db, options: CleanOptions, args: Vec<serde_json::Value>) -> Result<Self> {
+ let params: TextDocumentIdentifier =
+ serde_json::from_value(args.into_iter().next().ok_or(CleanError::MissingArg)?)
+ .map_err(CleanError::InvalidArg)?;
+
+ let mut uri = params.uri;
+ normalize_uri(&mut uri);
+
+ let workspace = Workspace::get(db);
+
+ let document = workspace
+ .lookup_uri(db, &uri)
+ .ok_or_else(|| CleanError::DocumentNotFound(uri.clone()))?;
+
+ let working_dir = workspace.working_dir(db, document.directory(db));
+
+ let output_dir = workspace
+ .output_dir(db, working_dir)
+ .path(db)
+ .as_deref()
+ .ok_or_else(|| CleanError::NoLocalFile(uri.clone()))?;
+
+ let path = document
+ .location(db)
+ .path(db)
+ .as_deref()
+ .ok_or_else(|| CleanError::NoLocalFile(uri.clone()))?;
+
+ let flag = match options {
+ CleanOptions::Auxiliary => "-c",
+ CleanOptions::Artifacts => "-C",
+ };
+
+ let executable = "latexmk".to_string();
+ let args = vec![
+ format!("-outdir={}", output_dir.display()),
+ flag.to_string(),
+ path.display().to_string(),
+ ];
+
+ Ok(Self { executable, args })
+ }
+
+ pub fn run(self) -> Result<(), CleanError> {
+ log::debug!("Cleaning output files: {} {:?}", self.executable, self.args);
+ std::process::Command::new(self.executable)
+ .args(self.args)
+ .stdin(Stdio::null())
+ .stdout(Stdio::null())
+ .stderr(Stdio::null())
+ .status()
+ .map_err(CleanError::Spawn)?;
+
+ Ok(())
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
+enum CleanOptions {
+ Auxiliary,
+ Artifacts,
+}
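
CleanCommand::new takes the first JSON argument, deserializes it into an lsp_types::TextDocumentIdentifier, and maps failures to the MissingArg/InvalidArg variants before assembling the latexmk -c/-C invocation. A standalone sketch of that argument handling, using anyhow instead of the patch's CleanError enum for brevity; the file paths shown are made up.

use anyhow::{anyhow, Result};
use lsp_types::TextDocumentIdentifier;
use serde_json::json;

fn first_document(args: Vec<serde_json::Value>) -> Result<TextDocumentIdentifier> {
    let value = args
        .into_iter()
        .next()
        .ok_or_else(|| anyhow!("document was not provided as an argument"))?;
    serde_json::from_value(value).map_err(|err| anyhow!("invalid argument: {}", err))
}

fn main() -> Result<()> {
    let args = vec![json!({ "uri": "file:///tmp/main.tex" })];
    let id = first_document(args)?;
    assert_eq!(id.uri.as_str(), "file:///tmp/main.tex");

    // The kind of command line that would be spawned for `texlab.cleanAuxiliary`:
    let argv = ["-outdir=/tmp".to_string(), "-c".to_string(), "/tmp/main.tex".to_string()];
    println!("latexmk {}", argv.join(" "));
    Ok(())
}
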