path: root/support/texlab/src/features
author     Norbert Preining <norbert@preining.info>    2023-04-12 03:01:14 +0000
committer  Norbert Preining <norbert@preining.info>    2023-04-12 03:01:14 +0000
commit     45c7bac9080d91b53c686e776fc6217d7f139b86 (patch)
tree       9dd0ad4713ddbc97580545398e8a3c84ac52bf49 /support/texlab/src/features
parent     e6c62f5e4d4a4d5ab654dad1652e83a5a4a42891 (diff)
CTAN sync 202304120301
Diffstat (limited to 'support/texlab/src/features')
-rw-r--r--  support/texlab/src/features/build.rs  190
-rw-r--r--  support/texlab/src/features/build/progress.rs  54
-rw-r--r--  support/texlab/src/features/completion.rs  50
-rw-r--r--  support/texlab/src/features/completion/acronym_ref.rs  29
-rw-r--r--  support/texlab/src/features/completion/argument.rs  62
-rw-r--r--  support/texlab/src/features/completion/begin_snippet.rs  9
-rw-r--r--  support/texlab/src/features/completion/builder.rs  767
-rw-r--r--  support/texlab/src/features/completion/citation.rs  63
-rw-r--r--  support/texlab/src/features/completion/color.rs  22
-rw-r--r--  support/texlab/src/features/completion/color_model.rs  39
-rw-r--r--  support/texlab/src/features/completion/component_command.rs  24
-rw-r--r--  support/texlab/src/features/completion/component_environment.rs  18
-rw-r--r--  support/texlab/src/features/completion/entry_type.rs  27
-rw-r--r--  support/texlab/src/features/completion/field.rs  36
-rw-r--r--  support/texlab/src/features/completion/glossary_ref.rs  35
-rw-r--r--  support/texlab/src/features/completion/import.rs  55
-rw-r--r--  support/texlab/src/features/completion/include.rs  149
-rw-r--r--  support/texlab/src/features/completion/label.rs  79
-rw-r--r--  support/texlab/src/features/completion/theorem.rs  21
-rw-r--r--  support/texlab/src/features/completion/tikz_library.rs  29
-rw-r--r--  support/texlab/src/features/completion/user_command.rs  30
-rw-r--r--  support/texlab/src/features/completion/user_environment.rs  25
-rw-r--r--  support/texlab/src/features/definition.rs  63
-rw-r--r--  support/texlab/src/features/definition/command.rs  41
-rw-r--r--  support/texlab/src/features/definition/document.rs  30
-rw-r--r--  support/texlab/src/features/definition/entry.rs  43
-rw-r--r--  support/texlab/src/features/definition/label.rs  38
-rw-r--r--  support/texlab/src/features/definition/string.rs  34
-rw-r--r--  support/texlab/src/features/folding.rs  63
-rw-r--r--  support/texlab/src/features/formatting.rs  33
-rw-r--r--  support/texlab/src/features/formatting/bibtex_internal.rs  200
-rw-r--r--  support/texlab/src/features/formatting/latexindent.rs  78
-rw-r--r--  support/texlab/src/features/forward_search.rs  195
-rw-r--r--  support/texlab/src/features/highlight.rs  10
-rw-r--r--  support/texlab/src/features/highlight/label.rs  32
-rw-r--r--  support/texlab/src/features/hover.rs  42
-rw-r--r--  support/texlab/src/features/hover/citation.rs  27
-rw-r--r--  support/texlab/src/features/hover/component.rs  28
-rw-r--r--  support/texlab/src/features/hover/entry_type.rs  22
-rw-r--r--  support/texlab/src/features/hover/field.rs  25
-rw-r--r--  support/texlab/src/features/hover/label.rs  23
-rw-r--r--  support/texlab/src/features/hover/string_ref.rs  43
-rw-r--r--  support/texlab/src/features/inlay_hint.rs  45
-rw-r--r--  support/texlab/src/features/inlay_hint/label.rs  48
-rw-r--r--  support/texlab/src/features/link.rs  41
-rw-r--r--  support/texlab/src/features/link/include.rs  25
-rw-r--r--  support/texlab/src/features/reference.rs  45
-rw-r--r--  support/texlab/src/features/reference/entry.rs  58
-rw-r--r--  support/texlab/src/features/reference/label.rs  36
-rw-r--r--  support/texlab/src/features/reference/string.rs  45
-rw-r--r--  support/texlab/src/features/rename.rs  69
-rw-r--r--  support/texlab/src/features/rename/command.rs  37
-rw-r--r--  support/texlab/src/features/rename/entry.rs  66
-rw-r--r--  support/texlab/src/features/rename/label.rs  76
-rw-r--r--  support/texlab/src/features/symbol.rs  124
-rw-r--r--  support/texlab/src/features/symbol/bibtex.rs  87
-rw-r--r--  support/texlab/src/features/symbol/latex.rs  446
-rw-r--r--  support/texlab/src/features/symbol/project_order.rs  202
-rw-r--r--  support/texlab/src/features/symbol/types.rs  136
-rw-r--r--  support/texlab/src/features/workspace_command.rs  3
-rw-r--r--  support/texlab/src/features/workspace_command/change_environment.rs  108
-rw-r--r--  support/texlab/src/features/workspace_command/clean.rs  95
-rw-r--r--  support/texlab/src/features/workspace_command/dep_graph.rs  57
63 files changed, 0 insertions, 4732 deletions
diff --git a/support/texlab/src/features/build.rs b/support/texlab/src/features/build.rs
deleted file mode 100644
index 83a1d4cb32..0000000000
--- a/support/texlab/src/features/build.rs
+++ /dev/null
@@ -1,190 +0,0 @@
-mod progress;
-
-use std::{
- io::{BufRead, BufReader, Read},
- path::{Path, PathBuf},
- process::Stdio,
- thread::{self, JoinHandle},
-};
-
-use encoding_rs_io::DecodeReaderBytesBuilder;
-use lsp_types::{notification::LogMessage, LogMessageParams, TextDocumentIdentifier, Url};
-use serde::{Deserialize, Serialize};
-use serde_repr::{Deserialize_repr, Serialize_repr};
-
-use crate::{client::LspClient, db::Workspace, util::capabilities::ClientCapabilitiesExt, Db};
-
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct BuildParams {
- pub text_document: TextDocumentIdentifier,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct BuildResult {
- pub status: BuildStatus,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize_repr, Deserialize_repr)]
-#[repr(i32)]
-pub enum BuildStatus {
- SUCCESS = 0,
- ERROR = 1,
- FAILURE = 2,
- CANCELLED = 3,
-}
-
-#[derive(Debug)]
-pub struct Command {
- uri: Url,
- progress: bool,
- program: String,
- args: Vec<String>,
- working_dir: PathBuf,
- client: LspClient,
-}
-
-impl Command {
- pub fn new(db: &dyn Db, uri: Url, client: LspClient) -> Option<Self> {
- let workspace = Workspace::get(db);
- let document = match workspace.lookup_uri(db, &uri) {
- Some(child) => workspace
- .parents(db, child)
- .iter()
- .next()
- .copied()
- .unwrap_or(child),
- None => return None,
- };
-
- if document.location(db).path(db).is_none() {
- log::warn!("Document {uri} cannot be compiled; skipping...");
- return None;
- }
-
- let config = &db.config().build;
- let program = config.program.clone();
- let path = document.location(db).path(db).as_deref().unwrap();
- let args = config
- .args
- .iter()
- .map(|arg| replace_placeholder(arg, path))
- .collect();
-
- let working_dir = workspace
- .working_dir(db, document.directory(db))
- .path(db)
- .clone()?;
-
- Some(Self {
- uri: document.location(db).uri(db).clone(),
- progress: workspace
- .client_capabilities(db)
- .has_work_done_progress_support(),
- program,
- args,
- working_dir,
- client,
- })
- }
-
- pub fn run(self) -> BuildStatus {
- let reporter = if self.progress {
- let inner = progress::Reporter::new(&self.client);
- inner.start(&self.uri).expect("report progress");
- Some(inner)
- } else {
- None
- };
-
- let mut process = match std::process::Command::new(&self.program)
- .args(self.args)
- .stdin(Stdio::null())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped())
- .current_dir(&self.working_dir)
- .spawn()
- {
- Ok(process) => process,
- Err(why) => {
- log::error!(
- "Failed to spawn process {:?} in directory {}: {}",
- self.program,
- self.working_dir.display(),
- why
- );
- return BuildStatus::FAILURE;
- }
- };
-
- let (line_sender, line_receiver) = flume::unbounded();
- let (exit_sender, exit_receiver) = flume::unbounded();
- track_output(process.stderr.take().unwrap(), line_sender.clone());
- track_output(process.stdout.take().unwrap(), line_sender);
- let client = self.client.clone();
- let handle = std::thread::spawn(move || {
- let typ = lsp_types::MessageType::LOG;
-
- loop {
- let done = flume::Selector::new()
- .recv(&line_receiver, |line| match line {
- Ok(message) => {
- let params = LogMessageParams { message, typ };
- let _ = client.send_notification::<LogMessage>(params);
- false
- }
- Err(_) => true,
- })
- .recv(&exit_receiver, |_| true)
- .wait();
-
- if done {
- break;
- }
- }
- });
-
- let status = process.wait().map_or(BuildStatus::FAILURE, |result| {
- if result.success() {
- BuildStatus::SUCCESS
- } else {
- BuildStatus::ERROR
- }
- });
-
- let _ = exit_sender.send(());
- handle.join().unwrap();
-
- drop(reporter);
- status
- }
-}
-
-fn track_output(
- output: impl Read + Send + 'static,
- sender: flume::Sender<String>,
-) -> JoinHandle<()> {
- let reader = BufReader::new(
- DecodeReaderBytesBuilder::new()
- .encoding(Some(encoding_rs::UTF_8))
- .utf8_passthru(true)
- .strip_bom(true)
- .build(output),
- );
-
- thread::spawn(move || {
- let _ = reader
- .lines()
- .flatten()
- .try_for_each(|line| sender.send(line));
- })
-}
-
-fn replace_placeholder(arg: &str, file: &Path) -> String {
- if arg.starts_with('"') || arg.ends_with('"') {
- arg.to_string()
- } else {
- arg.replace("%f", &file.to_string_lossy())
- }
-}
diff --git a/support/texlab/src/features/build/progress.rs b/support/texlab/src/features/build/progress.rs
deleted file mode 100644
index 6f235bebd6..0000000000
--- a/support/texlab/src/features/build/progress.rs
+++ /dev/null
@@ -1,54 +0,0 @@
-use std::sync::atomic::{AtomicI32, Ordering};
-
-use anyhow::Result;
-use lsp_types::{
- notification::Progress, request::WorkDoneProgressCreate, NumberOrString, ProgressParams,
- ProgressParamsValue, Url, WorkDoneProgress, WorkDoneProgressBegin,
- WorkDoneProgressCreateParams, WorkDoneProgressEnd,
-};
-
-use crate::client::LspClient;
-
-static NEXT_TOKEN: AtomicI32 = AtomicI32::new(1);
-
-pub struct Reporter<'a> {
- client: &'a LspClient,
- token: i32,
-}
-
-impl<'a> Reporter<'a> {
- pub fn new(client: &'a LspClient) -> Self {
- let token = NEXT_TOKEN.fetch_add(1, Ordering::SeqCst);
- Self { client, token }
- }
-
- pub fn start(&self, uri: &Url) -> Result<()> {
- self.client
- .send_request::<WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
- token: NumberOrString::Number(self.token),
- })?;
-
- self.client.send_notification::<Progress>(ProgressParams {
- token: NumberOrString::Number(self.token),
- value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
- title: "Building".to_string(),
- message: Some(uri.as_str().to_string()),
- cancellable: Some(false),
- percentage: None,
- })),
- })?;
-
- Ok(())
- }
-}
-
-impl<'a> Drop for Reporter<'a> {
- fn drop(&mut self) {
- let _ = self.client.send_notification::<Progress>(ProgressParams {
- token: NumberOrString::Number(self.token),
- value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
- message: None,
- })),
- });
- }
-}
diff --git a/support/texlab/src/features/completion.rs b/support/texlab/src/features/completion.rs
deleted file mode 100644
index 949b5552e5..0000000000
--- a/support/texlab/src/features/completion.rs
+++ /dev/null
@@ -1,50 +0,0 @@
-mod acronym_ref;
-mod argument;
-mod begin_snippet;
-pub mod builder;
-mod citation;
-mod color;
-mod color_model;
-mod component_command;
-mod component_environment;
-mod entry_type;
-mod field;
-mod glossary_ref;
-mod import;
-mod include;
-mod label;
-mod theorem;
-mod tikz_library;
-mod user_command;
-mod user_environment;
-
-use lsp_types::{CompletionList, Position, Url};
-
-use crate::{features::completion::builder::CompletionBuilder, util::cursor::CursorContext, Db};
-
-pub const COMPLETION_LIMIT: usize = 50;
-
-pub fn complete(db: &dyn Db, uri: &Url, position: Position) -> Option<CompletionList> {
- let context = CursorContext::new(db, uri, position, ())?;
- let mut builder = CompletionBuilder::new(&context);
- log::debug!("[Completion] Cursor: {:?}", context.cursor);
- entry_type::complete(&context, &mut builder);
- field::complete(&context, &mut builder);
- argument::complete(&context, &mut builder);
- citation::complete(&context, &mut builder);
- import::complete(&context, &mut builder);
- color::complete(&context, &mut builder);
- color_model::complete(&context, &mut builder);
- acronym_ref::complete(&context, &mut builder);
- glossary_ref::complete(&context, &mut builder);
- include::complete(&context, &mut builder);
- label::complete(&context, &mut builder);
- tikz_library::complete(&context, &mut builder);
- component_environment::complete(&context, &mut builder);
- theorem::complete(&context, &mut builder);
- user_environment::complete(&context, &mut builder);
- begin_snippet::complete(&context, &mut builder);
- component_command::complete(&context, &mut builder);
- user_command::complete(&context, &mut builder);
- Some(builder.finish())
-}
diff --git a/support/texlab/src/features/completion/acronym_ref.rs b/support/texlab/src/features/completion/acronym_ref.rs
deleted file mode 100644
index be7725fa2f..0000000000
--- a/support/texlab/src/features/completion/acronym_ref.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range, group) = context.find_curly_group_word()?;
- latex::AcronymReference::cast(group.syntax().parent()?)?;
-
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- for name in data
- .root(context.db)
- .descendants()
- .filter_map(latex::AcronymDefinition::cast)
- .filter_map(|node| node.name())
- .filter_map(|name| name.key())
- {
- builder.glossary_entry(range, name.to_string());
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/argument.rs b/support/texlab/src/features/completion/argument.rs
deleted file mode 100644
index 10b377cb0e..0000000000
--- a/support/texlab/src/features/completion/argument.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-
-use crate::{
- syntax::latex,
- util::{components::COMPONENT_DATABASE, cursor::CursorContext},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let token = context.cursor.as_tex()?;
-
- let range = if token.kind() == latex::WORD {
- token.text_range()
- } else {
- TextRange::empty(context.offset)
- };
-
- let group = latex::CurlyGroup::cast(token.parent()?)
- .or_else(|| {
- token
- .parent()
- .and_then(|node| node.parent())
- .and_then(latex::CurlyGroup::cast)
- })
- .filter(|group| context.is_inside_latex_curly(group))?;
-
- let command = latex::GenericCommand::cast(group.syntax().parent()?)?;
-
- let index = command
- .syntax()
- .children()
- .filter_map(latex::CurlyGroup::cast)
- .position(|g| g.syntax().text_range() == group.syntax().text_range())?;
-
- let command_name = command.name()?;
- let command_name = &command_name.text()[1..];
-
- for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
- for component_command in component
- .commands
- .iter()
- .filter(|command| command.name == command_name)
- {
- for (_, param) in component_command
- .parameters
- .iter()
- .enumerate()
- .filter(|(i, _)| *i == index)
- {
- for arg in &param.0 {
- builder.generic_argument(range, &arg.name, arg.image.as_deref());
- }
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/begin_snippet.rs b/support/texlab/src/features/completion/begin_snippet.rs
deleted file mode 100644
index 06be4faf13..0000000000
--- a/support/texlab/src/features/completion/begin_snippet.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-use crate::util::cursor::CursorContext;
-
-use super::builder::CompletionBuilder;
-
-pub fn complete(context: &CursorContext, builder: &mut CompletionBuilder) -> Option<()> {
- let range = context.cursor.command_range(context.offset)?;
- builder.begin_snippet(range);
- Some(())
-}
diff --git a/support/texlab/src/features/completion/builder.rs b/support/texlab/src/features/completion/builder.rs
deleted file mode 100644
index 0373587a06..0000000000
--- a/support/texlab/src/features/completion/builder.rs
+++ /dev/null
@@ -1,767 +0,0 @@
-use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
-use itertools::Itertools;
-use lsp_types::{
- CompletionItem, CompletionItemKind, CompletionList, CompletionTextEdit, Documentation,
- InsertTextFormat, MarkupContent, MarkupKind, TextEdit, Url,
-};
-use once_cell::sync::Lazy;
-use regex::Regex;
-use rowan::{ast::AstNode, TextRange, TextSize};
-use serde::{Deserialize, Serialize};
-use smol_str::SmolStr;
-
-use crate::{
- db::{Document, ServerContext, Workspace},
- syntax::{
- bibtex::{self, HasName, HasType},
- latex,
- },
- util::{
- capabilities::ClientCapabilitiesExt,
- cursor::{Cursor, CursorContext},
- lang_data::{BibtexEntryTypeCategory, BibtexEntryTypeDoc, BibtexFieldDoc, LANGUAGE_DATA},
- line_index_ext::LineIndexExt,
- lsp_enums::Structure,
- },
-};
-
-use super::COMPLETION_LIMIT;
-
-pub struct CompletionBuilder<'db> {
- context: &'db CursorContext<'db>,
- items: Vec<Item<'db>>,
- matcher: SkimMatcherV2,
- text_pattern: String,
- file_pattern: String,
- preselect: Option<String>,
- snippets: bool,
- markdown: bool,
- item_kinds: &'db [CompletionItemKind],
-}
-
-impl<'db> CompletionBuilder<'db> {
- pub fn new(context: &'db CursorContext) -> Self {
- let items = Vec::new();
- let matcher = SkimMatcherV2::default().ignore_case();
- let text_pattern = match &context.cursor {
- Cursor::Tex(token) if token.kind() == latex::COMMAND_NAME => {
- if token.text_range().start() + TextSize::from(1) == context.offset {
- // Handle cases similar to this one correctly:
- // $\|$ % (| is the cursor)
- String::from("\\")
- } else {
- token.text().trim_end().into()
- }
- }
- Cursor::Tex(token) if token.kind() == latex::WORD => {
- match token.parent().and_then(latex::Key::cast) {
- Some(key) => key
- .words()
- .take_while(|word| word.text_range() != token.text_range())
- .chain(std::iter::once(token.clone()))
- .filter(|word| word.text_range().start() < context.offset)
- .join(" "),
- None => token.text().into(),
- }
- }
- Cursor::Bib(token)
- if matches!(
- token.kind(),
- bibtex::TYPE
- | bibtex::NAME
- | bibtex::WORD
- | bibtex::COMMAND_NAME
- | bibtex::ACCENT_NAME
- ) =>
- {
- token.text().into()
- }
- Cursor::Tex(_) | Cursor::Bib(_) | Cursor::Nothing => "".into(),
- };
-
- let file_pattern = text_pattern.split('/').last().unwrap().to_string();
-
- let preselect = context
- .cursor
- .as_tex()
- .and_then(|name| name.parent())
- .and_then(latex::CurlyGroupWord::cast)
- .and_then(|group| group.syntax().parent())
- .and_then(|end| end.parent())
- .and_then(latex::Environment::cast)
- .and_then(|env| env.begin())
- .and_then(|begin| begin.name())
- .and_then(|name| name.key())
- .map(|name| name.to_string());
-
- let client_capabilities = context.workspace.client_capabilities(context.db);
- let snippets = client_capabilities.has_snippet_support();
- let markdown = client_capabilities.has_completion_markdown_support();
- let item_kinds = client_capabilities
- .text_document
- .as_ref()
- .and_then(|cap| cap.completion.as_ref())
- .and_then(|cap| cap.completion_item_kind.as_ref())
- .and_then(|cap| cap.value_set.as_deref())
- .unwrap_or_default();
-
- Self {
- context,
- items,
- matcher,
- text_pattern,
- file_pattern,
- preselect,
- snippets,
- markdown,
- item_kinds,
- }
- }
-
- pub fn glossary_entry(&mut self, range: TextRange, name: String) -> Option<()> {
- let score = self.matcher.fuzzy_match(&name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::GlossaryEntry { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn generic_argument(
- &mut self,
- range: TextRange,
- name: &'db str,
- image: Option<&'db str>,
- ) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Argument { name, image },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn begin_snippet(&mut self, range: TextRange) -> Option<()> {
- let capabilities = Workspace::get(self.context.db).client_capabilities(self.context.db);
- if capabilities.has_snippet_support() {
- let score = self.matcher.fuzzy_match("begin", &self.text_pattern[1..])?;
- self.items.push(Item {
- range,
- data: Data::BeginSnippet,
- preselect: false,
- score,
- });
- }
-
- Some(())
- }
-
- pub fn citation(
- &mut self,
- range: TextRange,
- document: Document,
- entry: &bibtex::Entry,
- ) -> Option<()> {
- let key = entry.name_token()?.to_string();
-
- let category = LANGUAGE_DATA
- .find_entry_type(&entry.type_token()?.text()[1..])
- .map_or(BibtexEntryTypeCategory::Misc, |ty| ty.category);
-
- let code = entry.syntax().text().to_string();
- let filter_text = format!(
- "{} {}",
- key,
- WHITESPACE_REGEX
- .replace_all(&code.replace(['{', '}', ',', '='], " "), " ")
- .trim(),
- );
-
- let score = self.matcher.fuzzy_match(&filter_text, &self.text_pattern)?;
-
- let data = Data::Citation {
- document,
- key,
- filter_text,
- category,
- };
-
- self.items.push(Item {
- range,
- data,
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn color_model(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::ColorModel { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn color(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Color { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn component_command(
- &mut self,
- range: TextRange,
- name: &'db str,
- image: Option<&'db str>,
- glyph: Option<&'db str>,
- file_names: &'db [SmolStr],
- ) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern[1..])?;
- let data = Data::ComponentCommand {
- name,
- image,
- glyph,
- file_names,
- };
-
- self.items.push(Item {
- range,
- data,
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn component_environment(
- &mut self,
- range: TextRange,
- name: &'db str,
- file_names: &'db [SmolStr],
- ) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::ComponentEnvironment { name, file_names },
- preselect: Some(name) == self.preselect.as_deref(),
- score,
- });
-
- Some(())
- }
-
- pub fn entry_type(
- &mut self,
- range: TextRange,
- entry_type: &'db BibtexEntryTypeDoc,
- ) -> Option<()> {
- let score = self
- .matcher
- .fuzzy_match(&entry_type.name, &self.text_pattern[1..])?;
-
- self.items.push(Item {
- range,
- data: Data::EntryType { entry_type },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn field(&mut self, range: TextRange, field: &'db BibtexFieldDoc) -> Option<()> {
- let score = self.matcher.fuzzy_match(&field.name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Field { field },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn class(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Class { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn package(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Package { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn file(&mut self, range: TextRange, name: String) -> Option<()> {
- let score = self.matcher.fuzzy_match(&name, &self.file_pattern)?;
- self.items.push(Item {
- range,
- data: Data::File { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn directory(&mut self, range: TextRange, name: String) -> Option<()> {
- let score = self.matcher.fuzzy_match(&name, &self.file_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Directory { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn label(
- &mut self,
- range: TextRange,
- name: &'db str,
- kind: Structure,
- header: Option<String>,
- footer: Option<String>,
- text: String,
- ) -> Option<()> {
- let score = self.matcher.fuzzy_match(&text, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::Label {
- name,
- kind,
- header,
- footer,
- text,
- },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn tikz_library(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::TikzLibrary { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn user_command(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern[1..])?;
- self.items.push(Item {
- range,
- data: Data::UserCommand { name },
- preselect: false,
- score,
- });
-
- Some(())
- }
-
- pub fn user_environment(&mut self, range: TextRange, name: &'db str) -> Option<()> {
- let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
- self.items.push(Item {
- range,
- data: Data::UserEnvironment { name },
- preselect: Some(name) == self.preselect.as_deref(),
- score,
- });
-
- Some(())
- }
-
- pub fn finish(mut self) -> CompletionList {
- let mut list = CompletionList::default();
- list.items = std::mem::take(&mut self.items)
- .into_iter()
- .sorted_by(|a, b| {
- b.preselect
- .cmp(&a.preselect)
- .then_with(|| b.score.cmp(&a.score))
- .then_with(|| a.data.label().cmp(b.data.label()))
- })
- .dedup_by(|a, b| a.data.label() == b.data.label())
- .take(COMPLETION_LIMIT)
- .enumerate()
- .map(|(i, item)| self.convert_item(item, i))
- .collect();
-
- let db = self.context.db;
- let always_incomplete = ServerContext::get(db).always_incomplete_completion_list(db);
- list.is_incomplete = always_incomplete || list.items.len() >= COMPLETION_LIMIT;
- list
- }
-
- fn convert_item(&self, item: Item, index: usize) -> CompletionItem {
- let range = self.context.line_index.line_col_lsp_range(item.range);
- let preselect = item.preselect;
- let mut item = match item.data {
- Data::EntryType { entry_type } => CompletionItem {
- label: entry_type.name.clone(),
- kind: Some(Structure::Entry(entry_type.category).completion_kind()),
- documentation: entry_type.documentation.clone().map(|value| {
- let kind = MarkupKind::Markdown;
- Documentation::MarkupContent(MarkupContent { kind, value })
- }),
- text_edit: Some(TextEdit::new(range, entry_type.name.clone()).into()),
- ..CompletionItem::default()
- },
- Data::Field { field } => CompletionItem {
- label: field.name.clone(),
- kind: Some(Structure::Field.completion_kind()),
- documentation: Some(Documentation::MarkupContent(MarkupContent {
- kind: MarkupKind::Markdown,
- value: field.documentation.clone(),
- })),
- text_edit: Some(TextEdit::new(range, field.name.clone()).into()),
- ..CompletionItem::default()
- },
- Data::Argument { name, image } => {
- let text_edit = TextEdit::new(range, String::from(name));
- CompletionItem {
- label: name.into(),
- kind: Some(Structure::Argument.completion_kind()),
- text_edit: Some(CompletionTextEdit::Edit(text_edit)),
- documentation: image.and_then(|base64| self.inline_image(name, base64)),
- ..CompletionItem::default()
- }
- }
- Data::BeginSnippet => {
- if self.snippets {
- CompletionItem {
- kind: Some(Structure::Snippet.completion_kind()),
- text_edit: Some(
- TextEdit::new(range, "begin{$1}\n\t\n\\end{$1}".into()).into(),
- ),
- insert_text_format: Some(InsertTextFormat::SNIPPET),
- ..CompletionItem::new_simple("begin".into(), self.component_detail(&[]))
- }
- } else {
- CompletionItem {
- kind: Some(Structure::Command.completion_kind()),
- text_edit: Some(TextEdit::new(range, "begin".to_string()).into()),
- ..CompletionItem::new_simple("begin".into(), self.component_detail(&[]))
- }
- }
- }
- Data::Citation {
- document,
- key,
- filter_text,
- category,
- } => CompletionItem {
- label: key.clone(),
- kind: Some(Structure::Entry(category).completion_kind()),
- filter_text: Some(filter_text.clone()),
- sort_text: Some(filter_text),
- data: Some(
- serde_json::to_value(CompletionItemData::Citation {
- uri: document
- .location(self.context.db)
- .uri(self.context.db)
- .clone(),
- key: key.clone(),
- })
- .unwrap(),
- ),
- text_edit: Some(TextEdit::new(range, key).into()),
- ..CompletionItem::default()
- },
- Data::ComponentCommand {
- name,
- image,
- glyph,
- file_names,
- } => CompletionItem {
- label: name.into(),
- detail: Some(glyph.map_or_else(
- || self.component_detail(file_names),
- |glyph| format!("{}, {}", glyph, self.component_detail(file_names)),
- )),
- kind: Some(Structure::Command.completion_kind()),
- documentation: image.and_then(|base64| self.inline_image(name, base64)),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- Data::ComponentEnvironment { name, file_names } => CompletionItem {
- kind: Some(Structure::Environment.completion_kind()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::new_simple(name.into(), self.component_detail(file_names))
- },
- Data::Class { name } => CompletionItem {
- label: name.into(),
- kind: Some(Structure::Class.completion_kind()),
- data: Some(serde_json::to_value(CompletionItemData::Package).unwrap()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- Data::Package { name } => CompletionItem {
- label: name.into(),
- kind: Some(Structure::Package.completion_kind()),
- data: Some(serde_json::to_value(CompletionItemData::Class).unwrap()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- Data::Color { name } => CompletionItem {
- label: name.into(),
- kind: Some(Structure::Color.completion_kind()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- Data::ColorModel { name } => CompletionItem {
- label: name.into(),
- kind: Some(Structure::ColorModel.completion_kind()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- Data::GlossaryEntry { name } => CompletionItem {
- label: name.clone(),
- kind: Some(Structure::GlossaryEntry.completion_kind()),
- text_edit: Some(TextEdit::new(range, name).into()),
- ..CompletionItem::default()
- },
- Data::File { name } => CompletionItem {
- label: name.clone(),
- kind: Some(Structure::File.completion_kind()),
- text_edit: Some(TextEdit::new(range, name).into()),
- ..CompletionItem::default()
- },
- Data::Directory { name } => CompletionItem {
- label: name.clone(),
- kind: Some(Structure::Folder.completion_kind()),
- text_edit: Some(TextEdit::new(range, name).into()),
- ..CompletionItem::default()
- },
- Data::Label {
- name,
- kind,
- header,
- footer,
- text,
- } => CompletionItem {
- label: name.into(),
- kind: Some(kind.completion_kind()),
- detail: header,
- documentation: footer.map(Documentation::String),
- sort_text: Some(text.clone()),
- filter_text: Some(text),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- Data::UserCommand { name } => {
- let detail = "user-defined".into();
- let name = &name[1..];
- CompletionItem {
- kind: Some(Structure::Command.completion_kind()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::new_simple(name.into(), detail)
- }
- }
- Data::UserEnvironment { name } => {
- let detail = "user-defined".into();
- CompletionItem {
- kind: Some(Structure::Environment.completion_kind()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::new_simple(name.into(), detail)
- }
- }
- Data::TikzLibrary { name } => CompletionItem {
- label: name.into(),
- kind: Some(Structure::TikzLibrary.completion_kind()),
- text_edit: Some(TextEdit::new(range, name.into()).into()),
- ..CompletionItem::default()
- },
- };
-
- item.preselect = Some(preselect);
-
- if !self.item_kinds.contains(&item.kind.unwrap()) {
- item.kind = Some(CompletionItemKind::TEXT);
- }
-
- let sort_prefix = format!("{:0>2}", index);
- match &item.sort_text {
- Some(sort_text) => {
- item.sort_text = Some(format!("{} {}", sort_prefix, sort_text));
- }
- None => {
- item.sort_text = Some(sort_prefix);
- }
- };
-
- item
- }
-
- fn inline_image(&self, name: &str, base64: &str) -> Option<Documentation> {
- if self.markdown {
- let kind = MarkupKind::Markdown;
- let value = format!(
- "![{}](data:image/png;base64,{}|width=48,height=48)",
- name, base64
- );
-
- Some(Documentation::MarkupContent(MarkupContent { kind, value }))
- } else {
- None
- }
- }
-
- fn component_detail(&self, file_names: &[SmolStr]) -> String {
- if file_names.is_empty() {
- "built-in".into()
- } else {
- file_names.join(", ")
- }
- }
-}
-
-#[derive(Debug, Clone)]
-struct Item<'db> {
- range: TextRange,
- data: Data<'db>,
- preselect: bool,
- score: i32,
-}
-
-#[derive(Debug, Clone)]
-enum Data<'db> {
- EntryType {
- entry_type: &'db BibtexEntryTypeDoc,
- },
- Field {
- field: &'db BibtexFieldDoc,
- },
- Argument {
- name: &'db str,
- image: Option<&'db str>,
- },
- BeginSnippet,
- Citation {
- document: Document,
- key: String,
- filter_text: String,
- category: BibtexEntryTypeCategory,
- },
- ComponentCommand {
- name: &'db str,
- image: Option<&'db str>,
- glyph: Option<&'db str>,
- file_names: &'db [SmolStr],
- },
- ComponentEnvironment {
- name: &'db str,
- file_names: &'db [SmolStr],
- },
- Class {
- name: &'db str,
- },
- Package {
- name: &'db str,
- },
- Color {
- name: &'db str,
- },
- ColorModel {
- name: &'db str,
- },
- GlossaryEntry {
- name: String,
- },
- File {
- name: String,
- },
- Directory {
- name: String,
- },
- Label {
- name: &'db str,
- kind: Structure,
- header: Option<String>,
- footer: Option<String>,
- text: String,
- },
- UserCommand {
- name: &'db str,
- },
- UserEnvironment {
- name: &'db str,
- },
- TikzLibrary {
- name: &'db str,
- },
-}
-
-impl<'db> Data<'db> {
- pub fn label<'this: 'db>(&'this self) -> &'db str {
- match self {
- Self::EntryType { entry_type } => &entry_type.name,
- Self::Field { field } => &field.name,
- Self::Argument { name, .. } => name,
- Self::BeginSnippet => "begin",
- Self::Citation { key, .. } => key,
- Self::ComponentCommand { name, .. } => name,
- Self::ComponentEnvironment { name, .. } => name,
- Self::Class { name } => name,
- Self::Package { name } => name,
- Self::Color { name } => name,
- Self::ColorModel { name } => name,
- Self::GlossaryEntry { name } => name,
- Self::File { name } => name,
- Self::Directory { name } => name,
- Self::Label { name, .. } => name,
- Self::UserCommand { name } => name,
- Self::UserEnvironment { name } => name,
- Self::TikzLibrary { name } => name,
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) enum CompletionItemData {
- Package,
- Class,
- Citation { uri: Url, key: String },
-}
-
-static WHITESPACE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("\\s+").unwrap());
diff --git a/support/texlab/src/features/completion/citation.rs b/support/texlab/src/features/completion/citation.rs
deleted file mode 100644
index 68d72717a7..0000000000
--- a/support/texlab/src/features/completion/citation.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-
-use crate::{
- syntax::{bibtex, latex},
- util::cursor::CursorContext,
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let token = context.cursor.as_tex()?;
-
- let range = if token.kind() == latex::WORD {
- latex::Key::cast(token.parent()?)
- .map(|key| latex::small_range(&key))
- .or_else(|| {
- token
- .parent()
- .and_then(latex::Text::cast)
- .map(|text| latex::small_range(&text))
- })?
- } else {
- TextRange::empty(context.offset)
- };
-
- check_citation(context).or_else(|| check_acronym(context))?;
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_bib() {
- for entry in data
- .root(context.db)
- .children()
- .filter_map(bibtex::Entry::cast)
- {
- builder.citation(range, document, &entry);
- }
- }
- }
-
- Some(())
-}
-
-fn check_citation(context: &CursorContext) -> Option<()> {
- let (_, _, group) = context.find_curly_group_word_list()?;
- latex::Citation::cast(group.syntax().parent()?)?;
- Some(())
-}
-
-fn check_acronym(context: &CursorContext) -> Option<()> {
- let token = context.cursor.as_tex()?;
-
- let pair = token
- .parent_ancestors()
- .find_map(latex::KeyValuePair::cast)?;
- if pair.key()?.to_string() != "cite" {
- return None;
- }
-
- latex::AcronymDeclaration::cast(pair.syntax().parent()?.parent()?.parent()?)?;
- Some(())
-}
diff --git a/support/texlab/src/features/completion/color.rs b/support/texlab/src/features/completion/color.rs
deleted file mode 100644
index cce6517cb4..0000000000
--- a/support/texlab/src/features/completion/color.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{
- syntax::latex,
- util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range, group) = context.find_curly_group_word()?;
- latex::ColorReference::cast(group.syntax().parent()?)?;
-
- for name in &LANGUAGE_DATA.colors {
- builder.color(range, name);
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/color_model.rs b/support/texlab/src/features/completion/color_model.rs
deleted file mode 100644
index af700a96fb..0000000000
--- a/support/texlab/src/features/completion/color_model.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::builder::CompletionBuilder;
-
-const MODEL_NAMES: &[&str] = &["gray", "rgb", "RGB", "HTML", "cmyk"];
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let range = check_color_definition(context).or_else(|| check_color_definition_set(context))?;
-
- for name in MODEL_NAMES {
- builder.color_model(range, name);
- }
-
- Some(())
-}
-
-fn check_color_definition(context: &CursorContext) -> Option<TextRange> {
- let (_, range, group) = context.find_curly_group_word()?;
-
- let definition = latex::ColorDefinition::cast(group.syntax().parent()?)?;
- definition
- .model()
- .filter(|model| model.syntax().text_range() == group.syntax().text_range())?;
- Some(range)
-}
-
-fn check_color_definition_set(context: &CursorContext) -> Option<TextRange> {
- let (_, range, group) = context.find_curly_group_word_list()?;
- let definition = latex::ColorSetDefinition::cast(group.syntax().parent()?)?;
- definition
- .model_list()
- .filter(|model| model.syntax().text_range() == group.syntax().text_range())?;
- Some(range)
-}
diff --git a/support/texlab/src/features/completion/component_command.rs b/support/texlab/src/features/completion/component_command.rs
deleted file mode 100644
index 20cef73848..0000000000
--- a/support/texlab/src/features/completion/component_command.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let range = context.cursor.command_range(context.offset)?;
-
- for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
- for command in &component.commands {
- builder.component_command(
- range,
- &command.name,
- command.image.as_deref(),
- command.glyph.as_deref(),
- &component.file_names,
- );
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/component_environment.rs b/support/texlab/src/features/completion/component_environment.rs
deleted file mode 100644
index e58e319a46..0000000000
--- a/support/texlab/src/features/completion/component_environment.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range) = context.find_environment_name()?;
-
- for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
- for name in &component.environments {
- builder.component_environment(range, name, &component.file_names);
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/entry_type.rs b/support/texlab/src/features/completion/entry_type.rs
deleted file mode 100644
index 38bc0e3f3a..0000000000
--- a/support/texlab/src/features/completion/entry_type.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use rowan::{TextRange, TextSize};
-
-use crate::{
- syntax::bibtex,
- util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let range = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::TYPE)
- .map(bibtex::SyntaxToken::text_range)
- .filter(|range| range.start() != context.offset)
- .map(|range| TextRange::new(range.start() + TextSize::from(1), range.end()))?;
-
- for entry_type in &LANGUAGE_DATA.entry_types {
- builder.entry_type(range, entry_type);
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/field.rs b/support/texlab/src/features/completion/field.rs
deleted file mode 100644
index ff6e54a562..0000000000
--- a/support/texlab/src/features/completion/field.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-
-use crate::{
- syntax::bibtex::{self, HasName},
- util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let token = context.cursor.as_bib()?;
-
- let range = if token.kind() == bibtex::NAME {
- token.text_range()
- } else {
- TextRange::empty(context.offset)
- };
-
- let parent = token.parent()?;
- if let Some(entry) = bibtex::Entry::cast(parent.clone()) {
- if entry.name_token()?.text_range() == token.text_range() {
- return None;
- }
- } else {
- bibtex::Field::cast(parent)?;
- }
-
- for field in &LANGUAGE_DATA.fields {
- builder.field(range, field);
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/glossary_ref.rs b/support/texlab/src/features/completion/glossary_ref.rs
deleted file mode 100644
index 34d53bf24d..0000000000
--- a/support/texlab/src/features/completion/glossary_ref.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range, group) = context.find_curly_group_word()?;
- latex::GlossaryEntryReference::cast(group.syntax().parent()?)?;
-
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- for node in data.root(context.db).descendants() {
- if let Some(name) = latex::GlossaryEntryDefinition::cast(node.clone())
- .and_then(|entry| entry.name())
- .and_then(|name| name.key())
- .map(|name| name.to_string())
- {
- builder.glossary_entry(range, name);
- } else if let Some(name) = latex::AcronymDefinition::cast(node)
- .and_then(|entry| entry.name())
- .and_then(|name| name.key())
- .map(|name| name.to_string())
- {
- builder.glossary_entry(range, name);
- }
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/import.rs b/support/texlab/src/features/completion/import.rs
deleted file mode 100644
index bf6da78a43..0000000000
--- a/support/texlab/src/features/completion/import.rs
+++ /dev/null
@@ -1,55 +0,0 @@
-use rowan::ast::AstNode;
-use rustc_hash::FxHashSet;
-
-use crate::{
- syntax::latex,
- util::{components::COMPONENT_DATABASE, cursor::CursorContext},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range, group) = context.find_curly_group_word_list()?;
-
- let kind = group.syntax().parent()?.kind();
- let extension = match kind {
- latex::PACKAGE_INCLUDE => "sty",
- latex::CLASS_INCLUDE => "cls",
- _ => return Some(()),
- };
-
- let mut file_names = FxHashSet::default();
- for file_name in COMPONENT_DATABASE
- .components
- .iter()
- .flat_map(|comp| comp.file_names.iter())
- .filter(|file_name| file_name.ends_with(extension))
- {
- file_names.insert(file_name.as_str());
- let stem = &file_name[0..file_name.len() - 4];
- if kind == latex::PACKAGE_INCLUDE {
- builder.package(range, stem);
- } else {
- builder.class(range, stem);
- }
- }
-
- let file_name_db = context.workspace.file_name_db(context.db);
- for file_name in file_name_db
- .iter()
- .map(|(file_name, _)| file_name)
- .filter(|file_name| file_name.ends_with(extension) && !file_names.contains(file_name))
- {
- let stem = &file_name[0..file_name.len() - 4];
- if kind == latex::PACKAGE_INCLUDE {
- builder.package(range, stem);
- } else {
- builder.class(range, stem);
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/include.rs b/support/texlab/src/features/completion/include.rs
deleted file mode 100644
index 5d7654a208..0000000000
--- a/support/texlab/src/features/completion/include.rs
+++ /dev/null
@@ -1,149 +0,0 @@
-use std::{
- convert::TryFrom,
- fs,
- path::{Path, PathBuf},
-};
-
-use rowan::{ast::AstNode, TextRange, TextSize};
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- if context
- .document
- .location(context.db)
- .path(context.db)
- .is_none()
- {
- return None;
- }
-
- let (path_text, path_range, group) = context.find_curly_group_word_list()?;
-
- let include = group.syntax().parent()?;
- let (include_extension, extensions): (bool, &[&str]) = match include.kind() {
- latex::PACKAGE_INCLUDE => (false, &["sty"]),
- latex::CLASS_INCLUDE => (false, &["cls"]),
- latex::LATEX_INCLUDE => {
- let include = latex::Include::cast(include.clone())?;
- (
- matches!(include.command()?.text(), "\\input" | "\\subfile"),
- &["tex"],
- )
- }
- latex::BIBLATEX_INCLUDE => (true, &["bib"]),
- latex::BIBTEX_INCLUDE => (false, &["bib"]),
- latex::GRAPHICS_INCLUDE => (true, &["pdf", "png", "jpg", "jpeg", "bmp"]),
- latex::SVG_INCLUDE => (true, &["svg"]),
- latex::INKSCAPE_INCLUDE => (true, &["pdf", "eps", "ps", "png"]),
- latex::VERBATIM_INCLUDE => (true, &[]),
- _ => return None,
- };
-
- let segment_range = if path_text.is_empty() {
- path_range
- } else {
- let start =
- path_range.end() - TextSize::try_from(path_text.split('/').last()?.len()).ok()?;
- TextRange::new(start, path_range.end())
- };
-
- let mut dirs = vec![current_dir(context, &path_text, None)];
- if include.kind() == latex::GRAPHICS_INCLUDE {
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- for path in data
- .analyze(context.db)
- .graphics_paths(context.db)
- .iter()
- .map(|node| node.path(context.db))
- {
- dirs.push(current_dir(context, &path_text, Some(path)));
- }
- }
- }
- }
-
- for entry in dirs
- .into_iter()
- .flatten()
- .filter_map(|dir| fs::read_dir(dir).ok())
- .flatten()
- .flatten()
- {
- let mut path = entry.path();
-
- let file_type = entry.file_type().ok()?;
- if file_type.is_file() && is_included(&path, extensions) {
- if !include_extension {
- remove_extension(&mut path);
- }
-
- let name = path.file_name()?.to_str()?.into();
- builder.file(segment_range, name);
- } else if file_type.is_dir() {
- let name = path.file_name()?.to_str()?.into();
- builder.directory(segment_range, name);
- }
- }
-
- Some(())
-}
-
-fn current_dir(
- context: &CursorContext,
- path_text: &str,
- graphics_path: Option<&str>,
-) -> Option<PathBuf> {
- let parent = context
- .workspace
- .parents(context.db, context.document)
- .iter()
- .next()
- .map_or(context.document, Clone::clone);
-
- let path = context
- .workspace
- .working_dir(context.db, parent.directory(context.db))
- .path(context.db)
- .as_deref()?;
-
- let mut path = PathBuf::from(path.to_str()?.replace('\\', "/"));
- if !path_text.is_empty() {
- if let Some(graphics_path) = graphics_path {
- path.push(graphics_path);
- }
-
- path.push(path_text);
- if !path_text.ends_with('/') {
- path.pop();
- }
- }
- Some(path)
-}
-
-fn is_included(file: &Path, allowed_extensions: &[&str]) -> bool {
- allowed_extensions.is_empty()
- || file
- .extension()
- .and_then(std::ffi::OsStr::to_str)
- .map(str::to_lowercase)
- .map(|ext| allowed_extensions.contains(&ext.as_str()))
- .unwrap_or_default()
-}
-
-fn remove_extension(path: &mut PathBuf) {
- if let Some(stem) = path
- .file_stem()
- .and_then(std::ffi::OsStr::to_str)
- .map(ToOwned::to_owned)
- {
- path.pop();
- path.push(stem);
- }
-}
diff --git a/support/texlab/src/features/completion/label.rs b/support/texlab/src/features/completion/label.rs
deleted file mode 100644
index ae3e010b0e..0000000000
--- a/support/texlab/src/features/completion/label.rs
+++ /dev/null
@@ -1,79 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-
-use crate::{
- syntax::latex,
- util::{self, cursor::CursorContext, label::LabeledObject, lsp_enums::Structure},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (range, is_math) = find_reference(context).or_else(|| find_reference_range(context))?;
-
- let db = context.db;
- for document in context.related() {
- if let Some(data) = document.parse(db).as_tex() {
- for label in data
- .analyze(db)
- .labels(db)
- .iter()
- .filter(|label| label.origin(db).as_definition().is_some())
- {
- match util::label::render(db, document, *label) {
- Some(rendered_label) => {
- let kind = match &rendered_label.object {
- LabeledObject::Section { .. } => Structure::Section,
- LabeledObject::Float { .. } => Structure::Float,
- LabeledObject::Theorem { .. } => Structure::Theorem,
- LabeledObject::Equation => Structure::Equation,
- LabeledObject::EnumItem => Structure::Item,
- };
-
- if is_math && kind != Structure::Equation {
- continue;
- }
-
- let header = rendered_label.detail(db);
- let footer = match &rendered_label.object {
- LabeledObject::Float { caption, .. } => Some(caption.clone()),
- _ => None,
- };
-
- let text = format!(
- "{} {}",
- label.name(db).text(db),
- rendered_label.reference(db)
- );
-
- builder.label(range, label.name(db).text(db), kind, header, footer, text);
- }
- None => {
- let kind = Structure::Label;
- let header = None;
- let footer = None;
- let text = label.name(db).text(db).clone();
- builder.label(range, label.name(db).text(db), kind, header, footer, text);
- }
- }
- }
- }
- }
-
- Some(())
-}
-
-fn find_reference(context: &CursorContext) -> Option<(TextRange, bool)> {
- let (_, range, group) = context.find_curly_group_word_list()?;
- let reference = latex::LabelReference::cast(group.syntax().parent()?)?;
- let is_math = reference.command()?.text() == "\\eqref";
- Some((range, is_math))
-}
-
-fn find_reference_range(context: &CursorContext) -> Option<(TextRange, bool)> {
- let (_, range, group) = context.find_curly_group_word()?;
- latex::LabelReferenceRange::cast(group.syntax().parent()?)?;
- Some((range, false))
-}
diff --git a/support/texlab/src/features/completion/theorem.rs b/support/texlab/src/features/completion/theorem.rs
deleted file mode 100644
index cd0dfe3a84..0000000000
--- a/support/texlab/src/features/completion/theorem.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use crate::util::cursor::CursorContext;
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range) = context.find_environment_name()?;
-
- let db = context.db;
- for document in context.related() {
- if let Some(data) = document.parse(db).as_tex() {
- for environment in data.analyze(db).theorem_environments(db) {
- builder.user_environment(range, environment.name(db).text(db));
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/tikz_library.rs b/support/texlab/src/features/completion/tikz_library.rs
deleted file mode 100644
index 7ea26d50c2..0000000000
--- a/support/texlab/src/features/completion/tikz_library.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{
- syntax::latex,
- util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
-};
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (_, range, group) = context.find_curly_group_word_list()?;
-
- let import = latex::TikzLibraryImport::cast(group.syntax().parent()?)?;
-
- if import.command()?.text() == "\\usepgflibrary" {
- for name in &LANGUAGE_DATA.pgf_libraries {
- builder.tikz_library(range, name);
- }
- } else {
- for name in &LANGUAGE_DATA.tikz_libraries {
- builder.tikz_library(range, name);
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/user_command.rs b/support/texlab/src/features/completion/user_command.rs
deleted file mode 100644
index 14c2222701..0000000000
--- a/support/texlab/src/features/completion/user_command.rs
+++ /dev/null
@@ -1,30 +0,0 @@
-use crate::util::cursor::CursorContext;
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let range = context.cursor.command_range(context.offset)?;
- let token = context.cursor.as_tex()?;
-
- let db = context.db;
- for document in context.related() {
- if let Some(data) = document.parse(db).as_tex() {
- let text = document.text(db);
- for name in data
- .analyze(db)
- .command_name_ranges(db)
- .iter()
- .copied()
- .filter(|range| *range != token.text_range())
- .map(|range| &text[std::ops::Range::<usize>::from(range)])
- {
- builder.user_command(range, name);
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/completion/user_environment.rs b/support/texlab/src/features/completion/user_environment.rs
deleted file mode 100644
index cf6c2a7c78..0000000000
--- a/support/texlab/src/features/completion/user_environment.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use crate::util::cursor::CursorContext;
-
-use super::builder::CompletionBuilder;
-
-pub fn complete<'db>(
- context: &'db CursorContext,
- builder: &mut CompletionBuilder<'db>,
-) -> Option<()> {
- let (name, range) = context.find_environment_name()?;
-
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- for name in data
- .analyze(context.db)
- .environment_names(context.db)
- .iter()
- .filter(|n| n.as_str() != name)
- {
- builder.user_environment(range, name);
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/definition.rs b/support/texlab/src/features/definition.rs
deleted file mode 100644
index 5843882bd0..0000000000
--- a/support/texlab/src/features/definition.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-mod command;
-mod document;
-mod entry;
-mod label;
-mod string;
-
-use lsp_types::{GotoDefinitionResponse, LocationLink, Position, Url};
-use rowan::TextRange;
-
-use crate::{
- db::Document,
- util::{cursor::CursorContext, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn goto_definition(
- db: &dyn Db,
- uri: &Url,
- position: Position,
-) -> Option<GotoDefinitionResponse> {
- let context = CursorContext::new(db, uri, position, ())?;
- log::debug!("[Definition] Cursor: {:?}", context.cursor);
-
- let links: Vec<_> = command::goto_definition(&context)
- .or_else(|| document::goto_definition(&context))
- .or_else(|| entry::goto_definition(&context))
- .or_else(|| label::goto_definition(&context))
- .or_else(|| string::goto_definition(&context))?
- .into_iter()
- .map(|result| {
- let origin_selection_range = Some(
- context
- .document
- .line_index(db)
- .line_col_lsp_range(result.origin_selection_range),
- );
-
- let target_line_index = result.target.line_index(db);
- let target_uri = result.target.location(context.db).uri(context.db).clone();
- let target_range = target_line_index.line_col_lsp_range(result.target_range);
-
- let target_selection_range =
- target_line_index.line_col_lsp_range(result.target_selection_range);
-
- LocationLink {
- origin_selection_range,
- target_uri,
- target_range,
- target_selection_range,
- }
- })
- .collect();
-
- Some(GotoDefinitionResponse::Link(links))
-}
-
-#[derive(Debug, Clone)]
-struct DefinitionResult {
- origin_selection_range: TextRange,
- target: Document,
- target_range: TextRange,
- target_selection_range: TextRange,
-}
diff --git a/support/texlab/src/features/definition/command.rs b/support/texlab/src/features/definition/command.rs
deleted file mode 100644
index 743a0f6d3c..0000000000
--- a/support/texlab/src/features/definition/command.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
- let name = context
- .cursor
- .as_tex()
- .filter(|token| token.kind() == latex::COMMAND_NAME)?;
-
- let origin_selection_range = name.text_range();
-
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- let root = data.root(context.db);
- if let Some(result) = root
- .descendants()
- .filter_map(latex::CommandDefinition::cast)
- .filter(|def| {
- def.name()
- .and_then(|name| name.command())
- .map_or(false, |node| node.text() == name.text())
- })
- .find_map(|def| {
- Some(DefinitionResult {
- origin_selection_range,
- target: document,
- target_range: latex::small_range(&def),
- target_selection_range: def.name()?.command()?.text_range(),
- })
- })
- {
- return Some(vec![result]);
- }
- }
- }
-
- None
-}
diff --git a/support/texlab/src/features/definition/document.rs b/support/texlab/src/features/definition/document.rs
deleted file mode 100644
index 40cfdf1d38..0000000000
--- a/support/texlab/src/features/definition/document.rs
+++ /dev/null
@@ -1,30 +0,0 @@
-use rowan::TextRange;
-
-use crate::{db::dependency_graph, util::cursor::CursorContext};
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
- let db = context.db;
- context
- .workspace
- .parents(db, context.document)
- .iter()
- .copied()
- .chain(std::iter::once(context.document))
- .flat_map(|parent| dependency_graph(db, parent).edges.iter())
- .filter(|edge| edge.source == context.document)
- .find_map(|edge| {
- let range = edge.origin?.link.range(db);
- if range.contains_inclusive(context.offset) {
- Some(vec![DefinitionResult {
- origin_selection_range: range,
- target: edge.target,
- target_range: TextRange::default(),
- target_selection_range: TextRange::default(),
- }])
- } else {
- None
- }
- })
-}
diff --git a/support/texlab/src/features/definition/entry.rs b/support/texlab/src/features/definition/entry.rs
deleted file mode 100644
index a945b18c41..0000000000
--- a/support/texlab/src/features/definition/entry.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{
- syntax::{
- bibtex::{self, HasName},
- latex,
- },
- util::cursor::CursorContext,
-};
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
- let db = context.db;
-
- let word = context
- .cursor
- .as_tex()
- .filter(|token| token.kind() == latex::WORD)?;
-
- let key = latex::Key::cast(word.parent()?)?;
-
- latex::Citation::cast(key.syntax().parent()?.parent()?)?;
-
- let origin_selection_range = latex::small_range(&key);
-
- for document in context.related() {
- if let Some(data) = document.parse(db).as_bib() {
- for entry in data.root(db).children().filter_map(bibtex::Entry::cast) {
- if let Some(key) = entry.name_token().filter(|k| k.text() == word.text()) {
- return Some(vec![DefinitionResult {
- origin_selection_range,
- target: document,
- target_selection_range: key.text_range(),
- target_range: entry.syntax().text_range(),
- }]);
- }
- }
- }
- }
-
- None
-}
diff --git a/support/texlab/src/features/definition/label.rs b/support/texlab/src/features/definition/label.rs
deleted file mode 100644
index a1cc4ed15b..0000000000
--- a/support/texlab/src/features/definition/label.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-use crate::{
- db::analysis::label,
- util::{self, cursor::CursorContext},
-};
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
- let db = context.db;
- let (name_text, origin_selection_range) = context
- .find_label_name_key()
- .or_else(|| context.find_label_name_command())?;
-
- for document in context.related() {
- if let Some(data) = document.parse(db).as_tex() {
- if let Some(label) = data
- .analyze(db)
- .labels(db)
- .iter()
- .filter(|label| matches!(label.origin(db), label::Origin::Definition(_)))
- .find(|label| label.name(db).text(db) == name_text.as_str())
- {
- let target_selection_range = label.range(db);
- let target_range = util::label::render(db, document, *label)
- .map_or(target_selection_range, |label| label.range);
-
- return Some(vec![DefinitionResult {
- origin_selection_range,
- target: document,
- target_range,
- target_selection_range,
- }]);
- }
- }
- }
-
- None
-}
diff --git a/support/texlab/src/features/definition/string.rs b/support/texlab/src/features/definition/string.rs
deleted file mode 100644
index 44ea1e9094..0000000000
--- a/support/texlab/src/features/definition/string.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{
- syntax::bibtex::{self, HasName},
- util::cursor::CursorContext,
-};
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
- let db = context.db;
- let data = context.document.parse(db).as_bib()?;
- let key = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)?;
-
- bibtex::Value::cast(key.parent()?)?;
-
- let origin_selection_range = key.text_range();
-
- data.root(db)
- .children()
- .filter_map(bibtex::StringDef::cast)
- .find_map(|string| {
- let string_name = string.name_token().filter(|k| k.text() == key.text())?;
- Some(vec![DefinitionResult {
- origin_selection_range,
- target: context.document,
- target_selection_range: string_name.text_range(),
- target_range: string.syntax().text_range(),
- }])
- })
-}
diff --git a/support/texlab/src/features/folding.rs b/support/texlab/src/features/folding.rs
deleted file mode 100644
index 44ddc0c4fd..0000000000
--- a/support/texlab/src/features/folding.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-use lsp_types::{FoldingRange, FoldingRangeKind, Range, Url};
-use rowan::ast::AstNode;
-
-use crate::{
- db::{parse::DocumentData, Workspace},
- syntax::{bibtex, latex},
- util::line_index_ext::LineIndexExt,
- Db,
-};
-
-pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<FoldingRange>> {
- let document = Workspace::get(db).lookup_uri(db, uri)?;
- let line_index = document.line_index(db);
- let foldings = match document.parse(db) {
- DocumentData::Tex(data) => {
- let mut results = Vec::new();
- let root = data.root(db);
- for node in root.descendants() {
- if let Some(folding) = latex::Environment::cast(node.clone())
- .map(|node| latex::small_range(&node))
- .or_else(|| {
- latex::Section::cast(node.clone()).map(|node| latex::small_range(&node))
- })
- .or_else(|| latex::EnumItem::cast(node).map(|node| latex::small_range(&node)))
- .map(|node| line_index.line_col_lsp_range(node))
- .map(create_range)
- {
- results.push(folding);
- }
- }
-
- results
- }
- DocumentData::Bib(data) => {
- let root = data.root(db);
- root.descendants()
- .filter(|node| {
- matches!(
- node.kind(),
- bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY
- )
- })
- .map(|node| create_range(line_index.line_col_lsp_range(node.text_range())))
- .collect()
- }
- DocumentData::Log(_) | DocumentData::TexlabRoot(_) | DocumentData::Tectonic(_) => {
- return None;
- }
- };
-
- Some(foldings)
-}
-
-fn create_range(range: Range) -> FoldingRange {
- FoldingRange {
- start_line: range.start.line,
- start_character: Some(range.start.character),
- end_line: range.end.line,
- end_character: Some(range.end.character),
- collapsed_text: None,
- kind: Some(FoldingRangeKind::Region),
- }
-}
diff --git a/support/texlab/src/features/formatting.rs b/support/texlab/src/features/formatting.rs
deleted file mode 100644
index 4a98df3f88..0000000000
--- a/support/texlab/src/features/formatting.rs
+++ /dev/null
@@ -1,33 +0,0 @@
-mod bibtex_internal;
-mod latexindent;
-
-use lsp_types::{FormattingOptions, TextEdit, Url};
-
-use crate::{
- db::{Language, Workspace},
- Db, Formatter,
-};
-
-use self::{bibtex_internal::format_bibtex_internal, latexindent::format_with_latexindent};
-
-pub fn format_source_code(
- db: &dyn Db,
- uri: &Url,
- options: &FormattingOptions,
-) -> Option<Vec<TextEdit>> {
- let workspace = Workspace::get(db);
- let document = workspace.lookup_uri(db, uri)?;
- match document.language(db) {
- Language::Tex => match db.config().formatting.tex_formatter {
- Formatter::Null => None,
- Formatter::Server => None,
- Formatter::LatexIndent => format_with_latexindent(db, document),
- },
- Language::Bib => match db.config().formatting.bib_formatter {
- Formatter::Null => None,
- Formatter::Server => format_bibtex_internal(db, document, options),
- Formatter::LatexIndent => format_with_latexindent(db, document),
- },
- Language::Log | Language::TexlabRoot | Language::Tectonic => None,
- }
-}
diff --git a/support/texlab/src/features/formatting/bibtex_internal.rs b/support/texlab/src/features/formatting/bibtex_internal.rs
deleted file mode 100644
index 5026c512f2..0000000000
--- a/support/texlab/src/features/formatting/bibtex_internal.rs
+++ /dev/null
@@ -1,200 +0,0 @@
-use lsp_types::{FormattingOptions, TextEdit};
-use rowan::{ast::AstNode, NodeOrToken};
-
-use crate::{
- db::Document,
- syntax::bibtex::{self, HasName, HasType, HasValue},
- util::{line_index::LineIndex, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn format_bibtex_internal(
- db: &dyn Db,
- document: Document,
- options: &FormattingOptions,
-) -> Option<Vec<TextEdit>> {
- let mut indent = String::new();
-
- if options.insert_spaces {
- for _ in 0..options.tab_size {
- indent.push(' ');
- }
- } else {
- indent.push('\t');
- }
-
- let line_length = db.config().formatting.line_length;
-
- let line_index = document.line_index(db);
- let data = document.parse(db).as_bib()?;
- let mut edits = Vec::new();
-
- for node in data.root(db).children().filter(|node| {
- matches!(
- node.kind(),
- bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY
- )
- }) {
- let range = node.text_range();
-
- let mut formatter =
- Formatter::new(indent.clone(), options.tab_size, line_length, line_index);
-
- formatter.visit_node(node);
- edits.push(TextEdit {
- range: line_index.line_col_lsp_range(range),
- new_text: formatter.output,
- });
- }
-
- Some(edits)
-}
-
-struct Formatter<'a> {
- indent: String,
- tab_size: u32,
- line_length: usize,
- output: String,
- align: Vec<usize>,
- line_index: &'a LineIndex,
-}
-
-impl<'a> Formatter<'a> {
- fn new(indent: String, tab_size: u32, line_length: usize, line_index: &'a LineIndex) -> Self {
- Self {
- indent,
- tab_size,
- line_length,
- output: String::new(),
- align: Vec::new(),
- line_index,
- }
- }
-
- fn visit_token_lowercase(&mut self, token: &bibtex::SyntaxToken) {
- self.output.push_str(&token.text().to_lowercase());
- }
-
- fn should_insert_space(
- &self,
- previous: &bibtex::SyntaxToken,
- current: &bibtex::SyntaxToken,
- ) -> bool {
- let previous_range = self.line_index.line_col_lsp_range(previous.text_range());
- let current_range = self.line_index.line_col_lsp_range(current.text_range());
- previous_range.start.line != current_range.start.line
- || previous_range.end.character < current_range.start.character
- }
-
- fn base_align(&self) -> usize {
- self.output[self.output.rfind('\n').unwrap_or(0)..]
- .chars()
- .count()
- }
-
- fn visit_node(&mut self, parent: bibtex::SyntaxNode) {
- match parent.kind() {
- bibtex::PREAMBLE => {
- let preamble = bibtex::Preamble::cast(parent).unwrap();
- self.visit_token_lowercase(&preamble.type_token().unwrap());
- self.output.push('{');
- if preamble.syntax().children().next().is_some() {
- self.align.push(self.base_align());
- for node in preamble.syntax().children() {
- self.visit_node(node);
- }
- self.output.push('}');
- }
- }
- bibtex::STRING => {
- let string = bibtex::StringDef::cast(parent).unwrap();
- self.visit_token_lowercase(&string.type_token().unwrap());
- self.output.push('{');
- if let Some(name) = string.name_token() {
- self.output.push_str(name.text());
- self.output.push_str(" = ");
- if let Some(value) = string.value() {
- self.align.push(self.base_align());
- self.visit_node(value.syntax().clone());
- self.output.push('}');
- }
- }
- }
- bibtex::ENTRY => {
- let entry = bibtex::Entry::cast(parent).unwrap();
- self.visit_token_lowercase(&entry.type_token().unwrap());
- self.output.push('{');
- if let Some(key) = entry.name_token() {
- self.output.push_str(&key.to_string());
- self.output.push(',');
- self.output.push('\n');
- for field in entry.fields() {
- self.visit_node(field.syntax().clone());
- }
- self.output.push('}');
- }
- }
- bibtex::FIELD => {
- let field = bibtex::Field::cast(parent).unwrap();
- self.output.push_str(&self.indent);
- let name = field.name_token().unwrap();
- self.output.push_str(name.text());
- self.output.push_str(" = ");
- if let Some(value) = field.value() {
- let count = name.text().chars().count();
- self.align.push(self.tab_size as usize + count + 3);
- self.visit_node(value.syntax().clone());
- self.output.push(',');
- self.output.push('\n');
- }
- }
- kind if bibtex::Value::can_cast(kind) => {
- let tokens: Vec<_> = parent
- .descendants_with_tokens()
- .filter_map(|element| element.into_token())
- .filter(|token| token.kind() != bibtex::WHITESPACE)
- .collect();
-
- self.output.push_str(tokens[0].text());
-
- let align = self.align.pop().unwrap_or_default();
- let mut length = align + tokens[0].text().chars().count();
- for i in 1..tokens.len() {
- let previous = &tokens[i - 1];
- let current = &tokens[i];
- let current_length = current.text().chars().count();
-
- let insert_space = self.should_insert_space(previous, current);
- let space_length = if insert_space { 1 } else { 0 };
-
- if length + current_length + space_length > self.line_length {
- self.output.push('\n');
- self.output.push_str(self.indent.as_ref());
- for _ in 0..=align - self.tab_size as usize {
- self.output.push(' ');
- }
- length = align;
- } else if insert_space {
- self.output.push(' ');
- length += 1;
- }
- self.output.push_str(current.text());
- length += current_length;
- }
- }
- bibtex::ROOT | bibtex::JUNK => {
- for element in parent.children_with_tokens() {
- match element {
- NodeOrToken::Token(token) => {
- self.output.push_str(token.text());
- }
- NodeOrToken::Node(node) => {
- self.visit_node(node);
- }
- }
- }
- }
- _ => unreachable!(),
- }
- }
-}
diff --git a/support/texlab/src/features/formatting/latexindent.rs b/support/texlab/src/features/formatting/latexindent.rs
deleted file mode 100644
index 087b8f0941..0000000000
--- a/support/texlab/src/features/formatting/latexindent.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-use std::{
- path::Path,
- process::{Command, Stdio},
-};
-
-use lsp_types::TextEdit;
-use rowan::{TextLen, TextRange};
-use tempfile::tempdir;
-
-use crate::{
- db::{Document, Language, Workspace},
- util::line_index_ext::LineIndexExt,
- Db, LatexIndentConfig,
-};
-
-pub fn format_with_latexindent(db: &dyn Db, document: Document) -> Option<Vec<TextEdit>> {
- let workspace = Workspace::get(db);
- let config = db.config();
- let target_dir = tempdir().ok()?;
- let source_dir = workspace
- .working_dir(db, document.directory(db))
- .path(db)
- .as_deref()?;
-
- let target_file = target_dir
- .path()
- .join(if document.language(db) == Language::Bib {
- "file.bib"
- } else {
- "file.tex"
- });
- std::fs::write(&target_file, document.text(db)).ok()?;
-
- let args = build_arguments(&config.formatting.latex_indent, &target_file);
-
- log::debug!(
- "Running latexindent in folder \"{}\" with args: {:?}",
- source_dir.display(),
- args,
- );
-
- let output = Command::new("latexindent")
- .args(&args)
- .stdin(Stdio::null())
- .stdout(Stdio::piped())
- .stderr(Stdio::null())
- .current_dir(source_dir)
- .output()
- .ok()?;
-
- let old_text = document.text(db);
- let new_text = String::from_utf8_lossy(&output.stdout).into_owned();
- if new_text.is_empty() {
- None
- } else {
- let line_index = document.line_index(db);
- Some(vec![TextEdit {
- range: line_index.line_col_lsp_range(TextRange::new(0.into(), old_text.text_len())),
- new_text,
- }])
- }
-}
-
-fn build_arguments(config: &LatexIndentConfig, target_file: &Path) -> Vec<String> {
- let mut args = Vec::new();
-
- args.push(match &config.local {
- Some(yaml_file) => format!("--local={yaml_file}"),
- None => "--local".to_string(),
- });
-
- if config.modify_line_breaks {
- args.push("--modifylinebreaks".to_string());
- }
-
- args.push(target_file.display().to_string());
- args
-}
diff --git a/support/texlab/src/features/forward_search.rs b/support/texlab/src/features/forward_search.rs
deleted file mode 100644
index 350fdd02b9..0000000000
--- a/support/texlab/src/features/forward_search.rs
+++ /dev/null
@@ -1,195 +0,0 @@
-use std::{
- io,
- path::{Path, PathBuf},
- process::Stdio,
-};
-
-use log::error;
-use lsp_types::{Position, Url};
-use thiserror::Error;
-
-use crate::{db::Workspace, util::line_index_ext::LineIndexExt, Db};
-
-#[derive(Debug, Error)]
-pub enum Error {
- #[error("TeX document '{0}' not found")]
- TexNotFound(Url),
-
- #[error("TeX document '{0}' is invalid")]
- InvalidTexFile(Url),
-
- #[error("PDF document '{0}' not found")]
- PdfNotFound(PathBuf),
-
- #[error("TeX document '{0}' is not a local file")]
- NoLocalFile(Url),
-
- #[error("PDF viewer is not configured")]
- Unconfigured,
-
- #[error("Failed to spawn process: {0}")]
- Spawn(io::Error),
-}
-
-pub struct Command {
- program: String,
- args: Vec<String>,
-}
-
-impl Command {
- pub fn configure(db: &dyn Db, uri: &Url, position: Option<Position>) -> Result<Self, Error> {
- let workspace = Workspace::get(db);
- let child = workspace
- .lookup_uri(db, uri)
- .ok_or_else(|| Error::TexNotFound(uri.clone()))?;
-
- let parent = workspace
- .parents(db, child)
- .iter()
- .copied()
- .next()
- .unwrap_or(child);
-
- let output_dir = workspace
- .output_dir(db, workspace.working_dir(db, parent.directory(db)))
- .path(db)
- .as_deref()
- .ok_or_else(|| Error::NoLocalFile(uri.clone()))?;
-
- let tex_path = child
- .location(db)
- .path(db)
- .as_deref()
- .ok_or_else(|| Error::NoLocalFile(uri.clone()))?;
-
- let pdf_path = match parent.location(db).stem(db) {
- Some(stem) => {
- let pdf_name = format!("{}.pdf", stem);
- output_dir.join(pdf_name)
- }
- None => {
- return Err(Error::InvalidTexFile(uri.clone()));
- }
- };
-
- if !pdf_path.exists() {
- return Err(Error::PdfNotFound(pdf_path));
- }
-
- let position =
- position.unwrap_or_else(|| child.line_index(db).line_col_lsp(child.cursor(db)));
-
- let Some(config) = &db.config().synctex else {
- return Err(Error::Unconfigured);
- };
-
- let program = config.program.clone();
-
- let args: Vec<_> = config
- .args
- .iter()
- .flat_map(|arg| replace_placeholder(tex_path, &pdf_path, position.line, arg))
- .collect();
-
- Ok(Self { program, args })
- }
-}
-
-impl Command {
- pub fn run(self) -> Result<(), Error> {
- log::debug!("Executing forward search: {} {:?}", self.program, self.args);
-
- std::process::Command::new(self.program)
- .args(self.args)
- .stdin(Stdio::null())
- .stdout(Stdio::null())
- .stderr(Stdio::null())
- .status()
- .map_err(Error::Spawn)?;
-
- Ok(())
- }
-}
-
-/// Iterates over chunks of a string. Either returns a slice of the
-/// original string or the placeholder replacement.
-struct PlaceHolderIterator<'a> {
- remainder: &'a str,
- tex_file: &'a str,
- pdf_file: &'a str,
- line_number: &'a str,
-}
-
-impl<'a> PlaceHolderIterator<'a> {
- pub fn new(s: &'a str, tex_file: &'a str, pdf_file: &'a str, line_number: &'a str) -> Self {
- Self {
- remainder: s,
- tex_file,
- pdf_file,
- line_number,
- }
- }
-
- pub fn yield_remainder(&mut self) -> Option<&'a str> {
- let chunk = self.remainder;
- self.remainder = "";
- Some(chunk)
- }
-
- pub fn yield_placeholder(&mut self) -> Option<&'a str> {
- if self.remainder.len() >= 2 {
- let placeholder = self.remainder;
- self.remainder = &self.remainder[2..];
- match &placeholder[1..2] {
- "f" => Some(self.tex_file),
- "p" => Some(self.pdf_file),
- "l" => Some(self.line_number),
- "%" => Some("%"), // escape %
- _ => Some(&placeholder[0..2]),
- }
- } else {
- self.remainder = &self.remainder[1..];
- Some("%")
- }
- }
-
- pub fn yield_str(&mut self, end: usize) -> Option<&'a str> {
- let chunk = &self.remainder[..end];
- self.remainder = &self.remainder[end..];
- Some(chunk)
- }
-}
-
-impl<'a> Iterator for PlaceHolderIterator<'a> {
- type Item = &'a str;
-
- fn next(&mut self) -> Option<Self::Item> {
- return if self.remainder.is_empty() {
- None
- } else if self.remainder.starts_with('%') {
- self.yield_placeholder()
- } else {
- // yield up to the next % or to the end
- match self.remainder.find('%') {
- None => self.yield_remainder(),
- Some(end) => self.yield_str(end),
- }
- };
- }
-}
-
-fn replace_placeholder(
- tex_file: &Path,
- pdf_file: &Path,
- line_number: u32,
- argument: &str,
-) -> Option<String> {
- let result = if argument.starts_with('"') || argument.ends_with('"') {
- argument.to_string()
- } else {
- let line = &(line_number + 1).to_string();
- let it = PlaceHolderIterator::new(argument, tex_file.to_str()?, pdf_file.to_str()?, line);
- it.collect::<Vec<&str>>().join("")
- };
- Some(result)
-}
diff --git a/support/texlab/src/features/highlight.rs b/support/texlab/src/features/highlight.rs
deleted file mode 100644
index 52746a040f..0000000000
--- a/support/texlab/src/features/highlight.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-mod label;
-
-use lsp_types::{DocumentHighlight, Position, Url};
-
-use crate::{util::cursor::CursorContext, Db};
-
-pub fn find_all(db: &dyn Db, uri: &Url, position: Position) -> Option<Vec<DocumentHighlight>> {
- let context = CursorContext::new(db, uri, position, ())?;
- label::find_highlights(&context)
-}
diff --git a/support/texlab/src/features/highlight/label.rs b/support/texlab/src/features/highlight/label.rs
deleted file mode 100644
index e362295b04..0000000000
--- a/support/texlab/src/features/highlight/label.rs
+++ /dev/null
@@ -1,32 +0,0 @@
-use lsp_types::{DocumentHighlight, DocumentHighlightKind};
-
-use crate::{
- db::analysis::label,
- util::{cursor::CursorContext, line_index_ext::LineIndexExt},
-};
-
-pub fn find_highlights(context: &CursorContext) -> Option<Vec<DocumentHighlight>> {
- let db = context.db;
- let (name_text, _) = context.find_label_name_key()?;
- let data = context.document.parse(db).as_tex()?;
-
- let mut highlights = Vec::new();
- let line_index = context.document.line_index(db);
- for label in data
- .analyze(db)
- .labels(db)
- .iter()
- .filter(|label| label.name(db).text(db) == &name_text)
- {
- let range = line_index.line_col_lsp_range(label.range(db));
- let kind = Some(match label.origin(db) {
- label::Origin::Definition(_) => DocumentHighlightKind::WRITE,
- label::Origin::Reference(_) => DocumentHighlightKind::READ,
- label::Origin::ReferenceRange(_) => DocumentHighlightKind::READ,
- });
-
- highlights.push(DocumentHighlight { range, kind });
- }
-
- Some(highlights)
-}
diff --git a/support/texlab/src/features/hover.rs b/support/texlab/src/features/hover.rs
deleted file mode 100644
index 6baeab8ad6..0000000000
--- a/support/texlab/src/features/hover.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-mod citation;
-mod component;
-mod entry_type;
-mod field;
-mod label;
-mod string_ref;
-
-use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind, Position, Url};
-use rowan::TextRange;
-
-use crate::{
- util::{cursor::CursorContext, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn find(db: &dyn Db, uri: &Url, position: Position) -> Option<Hover> {
- let context = CursorContext::new(db, uri, position, ())?;
- log::debug!("[Hover] Cursor: {:?}", context.cursor);
-
- let result = label::find_hover(&context)
- .or_else(|| citation::find_hover(&context))
- .or_else(|| component::find_hover(&context))
- .or_else(|| string_ref::find_hover(&context))
- .or_else(|| field::find_hover(&context))
- .or_else(|| entry_type::find_hover(&context))?;
-
- let line_index = context.document.line_index(db);
- Some(Hover {
- contents: HoverContents::Markup(MarkupContent {
- kind: result.value_kind,
- value: result.value,
- }),
- range: Some(line_index.line_col_lsp_range(result.range)),
- })
-}
-
-#[derive(Debug, Clone)]
-struct HoverResult {
- range: TextRange,
- value: String,
- value_kind: MarkupKind,
-}
diff --git a/support/texlab/src/features/hover/citation.rs b/support/texlab/src/features/hover/citation.rs
deleted file mode 100644
index e0e17176d5..0000000000
--- a/support/texlab/src/features/hover/citation.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use lsp_types::MarkupKind;
-use rowan::ast::AstNode;
-
-use crate::{citation, syntax::bibtex, util::cursor::CursorContext};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let (key, range) = context
- .find_citation_key_word()
- .or_else(|| context.find_citation_key_command())
- .or_else(|| context.find_entry_key())?;
-
- let value = context.related().find_map(|document| {
- let data = document.parse(context.db).as_bib()?;
- let root = data.root(context.db);
- let root = bibtex::Root::cast(root)?;
- let entry = root.find_entry(&key)?;
- citation::render(&entry)
- })?;
-
- Some(HoverResult {
- range,
- value,
- value_kind: MarkupKind::Markdown,
- })
-}
diff --git a/support/texlab/src/features/hover/component.rs b/support/texlab/src/features/hover/component.rs
deleted file mode 100644
index 61a428a9ce..0000000000
--- a/support/texlab/src/features/hover/component.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-use lsp_types::MarkupKind;
-
-use crate::{
- db::analysis::TexLinkKind,
- util::{components::COMPONENT_DATABASE, cursor::CursorContext},
-};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let db = context.db;
- let links = context.document.parse(db).as_tex()?.analyze(db).links(db);
- links
- .iter()
- .filter(|link| matches!(link.kind(db), TexLinkKind::Sty | TexLinkKind::Cls))
- .filter(|link| link.range(db).contains_inclusive(context.offset))
- .find_map(|link| {
- let value = COMPONENT_DATABASE
- .documentation(link.path(db).text(db))?
- .value;
-
- Some(HoverResult {
- value,
- value_kind: MarkupKind::PlainText,
- range: link.range(db),
- })
- })
-}
diff --git a/support/texlab/src/features/hover/entry_type.rs b/support/texlab/src/features/hover/entry_type.rs
deleted file mode 100644
index 0f4ccbdb72..0000000000
--- a/support/texlab/src/features/hover/entry_type.rs
+++ /dev/null
@@ -1,22 +0,0 @@
-use lsp_types::MarkupKind;
-
-use crate::{
- syntax::bibtex,
- util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
-};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let name = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::TYPE)?;
-
- let docs = LANGUAGE_DATA.entry_type_documentation(&name.text()[1..])?;
- Some(HoverResult {
- range: name.text_range(),
- value: docs.to_string(),
- value_kind: MarkupKind::Markdown,
- })
-}
diff --git a/support/texlab/src/features/hover/field.rs b/support/texlab/src/features/hover/field.rs
deleted file mode 100644
index 48193997d6..0000000000
--- a/support/texlab/src/features/hover/field.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use lsp_types::MarkupKind;
-use rowan::ast::AstNode;
-
-use crate::{
- syntax::bibtex,
- util::{cursor::CursorContext, lang_data::LANGUAGE_DATA},
-};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let name = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)?;
-
- bibtex::Field::cast(name.parent()?)?;
-
- let docs = LANGUAGE_DATA.field_documentation(name.text())?;
- Some(HoverResult {
- range: name.text_range(),
- value: docs.to_string(),
- value_kind: MarkupKind::Markdown,
- })
-}
diff --git a/support/texlab/src/features/hover/label.rs b/support/texlab/src/features/hover/label.rs
deleted file mode 100644
index eafc104de6..0000000000
--- a/support/texlab/src/features/hover/label.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-use lsp_types::MarkupKind;
-
-use crate::{
- db::Word,
- util::{self, cursor::CursorContext},
-};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let (name_text, range) = context
- .find_label_name_key()
- .or_else(|| context.find_label_name_command())?;
-
- let db = context.db;
- util::label::find_label_definition(db, context.document, Word::new(db, name_text))
- .and_then(|(document, label)| util::label::render(db, document, label))
- .map(|label| HoverResult {
- range,
- value: label.reference(db),
- value_kind: MarkupKind::PlainText,
- })
-}
diff --git a/support/texlab/src/features/hover/string_ref.rs b/support/texlab/src/features/hover/string_ref.rs
deleted file mode 100644
index 41c4c5bddb..0000000000
--- a/support/texlab/src/features/hover/string_ref.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-use lsp_types::MarkupKind;
-use rowan::ast::AstNode;
-
-use crate::{
- citation::field::text::TextFieldData,
- syntax::bibtex::{self, HasName, HasValue},
- util::cursor::CursorContext,
-};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let data = context.document.parse(context.db).as_bib()?;
-
- let name = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)
- .filter(|token| {
- let parent = token.parent().unwrap();
- bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
- })?;
-
- for string in data
- .root(context.db)
- .children()
- .filter_map(bibtex::StringDef::cast)
- {
- if string
- .name_token()
- .map_or(false, |token| token.text() == name.text())
- {
- let value = TextFieldData::parse(&string.value()?)?.text;
- return Some(HoverResult {
- range: name.text_range(),
- value,
- value_kind: MarkupKind::PlainText,
- });
- }
- }
-
- None
-}
diff --git a/support/texlab/src/features/inlay_hint.rs b/support/texlab/src/features/inlay_hint.rs
deleted file mode 100644
index 5d2f5d4e11..0000000000
--- a/support/texlab/src/features/inlay_hint.rs
+++ /dev/null
@@ -1,45 +0,0 @@
-mod label;
-
-use lsp_types::{InlayHint, InlayHintLabel, Range, Url};
-use rowan::TextSize;
-
-use crate::{
- db::Workspace,
- util::{line_index::LineIndex, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn find_all(db: &dyn Db, uri: &Url, range: Range) -> Option<Vec<InlayHint>> {
- let document = Workspace::get(db).lookup_uri(db, uri)?;
- let line_index = document.line_index(db);
-
- let mut builder = InlayHintBuilder {
- line_index,
- hints: Vec::new(),
- };
-
- let range = line_index.offset_lsp_range(range);
- label::find_hints(db, document, range, &mut builder);
- Some(builder.hints)
-}
-
-struct InlayHintBuilder<'db> {
- line_index: &'db LineIndex,
- hints: Vec<InlayHint>,
-}
-
-impl<'db> InlayHintBuilder<'db> {
- pub fn push(&mut self, offset: TextSize, text: String) {
- let position = self.line_index.line_col_lsp(offset);
- self.hints.push(InlayHint {
- position,
- label: InlayHintLabel::String(text),
- kind: None,
- text_edits: None,
- tooltip: None,
- padding_left: Some(true),
- padding_right: None,
- data: None,
- });
- }
-}
diff --git a/support/texlab/src/features/inlay_hint/label.rs b/support/texlab/src/features/inlay_hint/label.rs
deleted file mode 100644
index ca52433e7b..0000000000
--- a/support/texlab/src/features/inlay_hint/label.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-use rowan::TextRange;
-
-use crate::{
- db::{analysis::label, Document},
- util::{self, label::LabeledObject},
- Db,
-};
-
-use super::InlayHintBuilder;
-
-pub(super) fn find_hints(
- db: &dyn Db,
- document: Document,
- range: TextRange,
- builder: &mut InlayHintBuilder,
-) -> Option<()> {
- let data = document.parse(db).as_tex()?;
- for label in data
- .analyze(db)
- .labels(db)
- .iter()
- .copied()
- .filter(|label| matches!(label.origin(db), label::Origin::Definition(_)))
- .filter(|label| label.range(db).intersect(range).is_some())
- {
- if let Some(rendered) = util::label::render(db, document, label) {
- if let Some(number) = &rendered.number {
- let text = match &rendered.object {
- LabeledObject::Section { prefix, .. } => {
- format!("{} {}", prefix, number.text(db))
- }
- LabeledObject::Float { kind, .. } => {
- format!("{} {}", kind.as_str(), number.text(db))
- }
- LabeledObject::Theorem { kind, .. } => {
- format!("{} {}", kind.text(db), number.text(db))
- }
- LabeledObject::Equation => format!("Equation ({})", number.text(db)),
- LabeledObject::EnumItem => format!("Item {}", number.text(db)),
- };
-
- builder.push(label.range(db).end(), text);
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/link.rs b/support/texlab/src/features/link.rs
deleted file mode 100644
index c252fc4a91..0000000000
--- a/support/texlab/src/features/link.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-mod include;
-
-use lsp_types::{DocumentLink, Url};
-use rowan::TextRange;
-
-use crate::{
- db::{Document, Workspace},
- util::{line_index::LineIndex, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<DocumentLink>> {
- let document = Workspace::get(db).lookup_uri(db, uri)?;
- let mut builder = LinkBuilder {
- db,
- line_index: document.line_index(db),
- links: Vec::new(),
- };
-
- include::find_links(db, document, &mut builder);
- Some(builder.links)
-}
-
-struct LinkBuilder<'db> {
- db: &'db dyn Db,
- line_index: &'db LineIndex,
- links: Vec<DocumentLink>,
-}
-
-impl<'db> LinkBuilder<'db> {
- pub fn push(&mut self, range: TextRange, target: Document) {
- let range = self.line_index.line_col_lsp_range(range);
- let target = Some(target.location(self.db).uri(self.db).clone());
- self.links.push(DocumentLink {
- range,
- target,
- tooltip: None,
- data: None,
- });
- }
-}
diff --git a/support/texlab/src/features/link/include.rs b/support/texlab/src/features/link/include.rs
deleted file mode 100644
index f0c9d147fb..0000000000
--- a/support/texlab/src/features/link/include.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use crate::{
- db::{dependency_graph, Document, Workspace},
- Db,
-};
-
-use super::LinkBuilder;
-
-pub(super) fn find_links(db: &dyn Db, document: Document, builder: &mut LinkBuilder) -> Option<()> {
- let workspace = Workspace::get(db);
- let parent = workspace
- .parents(db, document)
- .iter()
- .next()
- .copied()
- .unwrap_or(document);
-
- let graph = dependency_graph(db, parent);
- for edge in graph.edges.iter().filter(|edge| edge.source == document) {
- if let Some(origin) = edge.origin {
- builder.push(origin.link.range(db), edge.target);
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/reference.rs b/support/texlab/src/features/reference.rs
deleted file mode 100644
index 3815accc2f..0000000000
--- a/support/texlab/src/features/reference.rs
+++ /dev/null
@@ -1,45 +0,0 @@
-mod entry;
-mod label;
-mod string;
-
-use lsp_types::{Location, Position, ReferenceContext, Url};
-use rowan::TextRange;
-
-use crate::{
- db::Document,
- util::{cursor::CursorContext, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn find_all(
- db: &dyn Db,
- uri: &Url,
- position: Position,
- params: &ReferenceContext,
-) -> Option<Vec<Location>> {
- let mut results = Vec::new();
- let context = CursorContext::new(db, uri, position, params)?;
- log::debug!("[References] Cursor: {:?}", context.cursor);
- label::find_all_references(&context, &mut results);
- entry::find_all_references(&context, &mut results);
- string::find_all_references(&context, &mut results);
-
- let locations = results
- .into_iter()
- .map(|result| Location {
- uri: result.document.location(db).uri(db).clone(),
- range: result
- .document
- .line_index(db)
- .line_col_lsp_range(result.range),
- })
- .collect();
-
- Some(locations)
-}
-
-#[derive(Debug, Clone)]
-struct ReferenceResult {
- document: Document,
- range: TextRange,
-}
diff --git a/support/texlab/src/features/reference/entry.rs b/support/texlab/src/features/reference/entry.rs
deleted file mode 100644
index 0c8f374cb7..0000000000
--- a/support/texlab/src/features/reference/entry.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-use lsp_types::ReferenceContext;
-use rowan::ast::AstNode;
-
-use crate::{
- db::parse::DocumentData,
- syntax::{
- bibtex::{self, HasName},
- latex,
- },
- util::cursor::CursorContext,
-};
-
-use super::ReferenceResult;
-
-pub(super) fn find_all_references(
- context: &CursorContext<&ReferenceContext>,
- results: &mut Vec<ReferenceResult>,
-) -> Option<()> {
- let db = context.db;
- let (key_text, _) = context
- .find_citation_key_word()
- .or_else(|| context.find_citation_key_command())
- .or_else(|| context.find_entry_key())?;
-
- for document in context.related() {
- match document.parse(db) {
- DocumentData::Tex(data) => {
- data.root(db)
- .descendants()
- .filter_map(latex::Citation::cast)
- .filter_map(|citation| citation.key_list())
- .flat_map(|keys| keys.keys())
- .filter(|key| key.to_string() == key_text)
- .map(|key| latex::small_range(&key))
- .for_each(|range| {
- results.push(ReferenceResult { document, range });
- });
- }
- DocumentData::Bib(data) if context.params.include_declaration => {
- data.root(db)
- .children()
- .filter_map(bibtex::Entry::cast)
- .filter_map(|entry| entry.name_token())
- .filter(|key| key.text() == key_text)
- .map(|key| key.text_range())
- .for_each(|range| {
- results.push(ReferenceResult { document, range });
- });
- }
- DocumentData::Bib(_)
- | DocumentData::Log(_)
- | DocumentData::TexlabRoot(_)
- | DocumentData::Tectonic(_) => {}
- };
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/reference/label.rs b/support/texlab/src/features/reference/label.rs
deleted file mode 100644
index 394542320c..0000000000
--- a/support/texlab/src/features/reference/label.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-use lsp_types::ReferenceContext;
-
-use crate::util::cursor::CursorContext;
-
-use super::ReferenceResult;
-
-pub(super) fn find_all_references(
- context: &CursorContext<&ReferenceContext>,
- results: &mut Vec<ReferenceResult>,
-) -> Option<()> {
- let db = context.db;
- let (name_text, _) = context
- .find_label_name_key()
- .or_else(|| context.find_label_name_command())?;
-
- for document in context.related() {
- if let Some(data) = document.parse(db).as_tex() {
- for label in data
- .analyze(db)
- .labels(db)
- .iter()
- .filter(|label| label.name(db).text(db) == &name_text)
- .filter(|label| {
- label.origin(db).as_definition().is_none() || context.params.include_declaration
- })
- {
- results.push(ReferenceResult {
- document,
- range: label.range(db),
- });
- }
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/reference/string.rs b/support/texlab/src/features/reference/string.rs
deleted file mode 100644
index c4bc6ded3d..0000000000
--- a/support/texlab/src/features/reference/string.rs
+++ /dev/null
@@ -1,45 +0,0 @@
-use lsp_types::ReferenceContext;
-use rowan::ast::AstNode;
-
-use crate::{
- syntax::bibtex::{self, HasName},
- util::cursor::CursorContext,
-};
-
-use super::ReferenceResult;
-
-pub(super) fn find_all_references(
- context: &CursorContext<&ReferenceContext>,
- results: &mut Vec<ReferenceResult>,
-) -> Option<()> {
- let db = context.db;
- let name_text = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)
- .filter(|token| {
- let parent = token.parent().unwrap();
- bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
- })?
- .text();
-
- let data = context.document.parse(db).as_bib()?;
- for node in data.root(db).descendants() {
- if let Some(name) = bibtex::StringDef::cast(node.clone())
- .and_then(|string| string.name_token())
- .filter(|name| context.params.include_declaration && name.text() == name_text)
- .or_else(|| {
- bibtex::Value::cast(node)
- .and_then(|token| token.syntax().first_token())
- .filter(|name| name.text() == name_text)
- })
- {
- results.push(ReferenceResult {
- document: context.document,
- range: name.text_range(),
- });
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/src/features/rename.rs b/support/texlab/src/features/rename.rs
deleted file mode 100644
index 51781517b6..0000000000
--- a/support/texlab/src/features/rename.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-mod command;
-mod entry;
-mod label;
-
-use lsp_types::{Position, Range, TextEdit, Url, WorkspaceEdit};
-use rowan::TextRange;
-use rustc_hash::FxHashMap;
-
-use crate::{
- db::Document,
- util::{cursor::CursorContext, line_index_ext::LineIndexExt},
- Db,
-};
-
-pub fn prepare_rename_all(db: &dyn Db, uri: &Url, position: Position) -> Option<Range> {
- let context = CursorContext::new(db, uri, position, ())?;
- let range = entry::prepare_rename(&context)
- .or_else(|| label::prepare_rename(&context))
- .or_else(|| command::prepare_rename(&context))?;
-
- let line_index = context.document.line_index(db);
- Some(line_index.line_col_lsp_range(range))
-}
-
-pub fn rename_all(
- db: &dyn Db,
- uri: &Url,
- position: Position,
- new_name: String,
-) -> Option<WorkspaceEdit> {
- let context = CursorContext::new(db, uri, position, Params { new_name })?;
- let result = entry::rename(&context)
- .or_else(|| label::rename(&context))
- .or_else(|| command::rename(&context))?;
-
- let changes = result
- .changes
- .into_iter()
- .map(|(document, old_edits)| {
- let line_index = document.line_index(db);
- let new_edits = old_edits
- .into_iter()
- .map(|Indel { delete, insert }| {
- TextEdit::new(line_index.line_col_lsp_range(delete), insert)
- })
- .collect();
-
- (document.location(db).uri(db).clone(), new_edits)
- })
- .collect();
-
- Some(WorkspaceEdit::new(changes))
-}
-
-#[derive(Debug)]
-struct Params {
- new_name: String,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-struct Indel {
- delete: TextRange,
- insert: String,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-struct RenameResult {
- changes: FxHashMap<Document, Vec<Indel>>,
-}
diff --git a/support/texlab/src/features/rename/command.rs b/support/texlab/src/features/rename/command.rs
deleted file mode 100644
index 1e5f2ff0ca..0000000000
--- a/support/texlab/src/features/rename/command.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-use rowan::{TextRange, TextSize};
-use rustc_hash::FxHashMap;
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::{Indel, Params, RenameResult};
-
-pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
- context.cursor.command_range(context.offset)
-}
-
-pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
- prepare_rename(context)?;
- let name = context.cursor.as_tex()?.text();
- let mut changes = FxHashMap::default();
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- let root = data.root(context.db);
- let edits = root
- .descendants_with_tokens()
- .filter_map(|element| element.into_token())
- .filter(|token| token.kind() == latex::COMMAND_NAME && token.text() == name)
- .map(|token| {
- let range = token.text_range();
- Indel {
- delete: TextRange::new(range.start() + TextSize::from(1), range.end()),
- insert: context.params.new_name.clone(),
- }
- })
- .collect();
-
- changes.insert(document, edits);
- }
- }
-
- Some(RenameResult { changes })
-}
diff --git a/support/texlab/src/features/rename/entry.rs b/support/texlab/src/features/rename/entry.rs
deleted file mode 100644
index 67c0388f4f..0000000000
--- a/support/texlab/src/features/rename/entry.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-use rustc_hash::FxHashMap;
-
-use crate::{
- db::parse::DocumentData,
- syntax::{
- bibtex::{self, HasName},
- latex,
- },
- util::cursor::CursorContext,
-};
-
-use super::{Indel, Params, RenameResult};
-
-pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
- let (_, range) = context
- .find_citation_key_word()
- .or_else(|| context.find_entry_key())?;
-
- Some(range)
-}
-
-pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
- prepare_rename(context)?;
- let (key_text, _) = context
- .find_citation_key_word()
- .or_else(|| context.find_entry_key())?;
-
- let mut changes = FxHashMap::default();
- for document in context.related() {
- match document.parse(context.db) {
- DocumentData::Tex(data) => {
- let root = data.root(context.db);
- let edits: Vec<_> = root
- .descendants()
- .filter_map(latex::Citation::cast)
- .filter_map(|citation| citation.key_list())
- .flat_map(|keys| keys.keys())
- .filter(|key| key.to_string() == key_text)
- .map(|key| Indel {
- delete: latex::small_range(&key),
- insert: context.params.new_name.clone(),
- })
- .collect();
- changes.insert(document, edits);
- }
- DocumentData::Bib(data) => {
- let root = data.root(context.db);
- let edits: Vec<_> = root
- .descendants()
- .filter_map(bibtex::Entry::cast)
- .filter_map(|entry| entry.name_token())
- .filter(|key| key.text() == key_text)
- .map(|key| Indel {
- delete: key.text_range(),
- insert: context.params.new_name.clone(),
- })
- .collect();
- changes.insert(document, edits);
- }
- DocumentData::Log(_) | DocumentData::TexlabRoot(_) | DocumentData::Tectonic(_) => {}
- }
- }
-
- Some(RenameResult { changes })
-}
diff --git a/support/texlab/src/features/rename/label.rs b/support/texlab/src/features/rename/label.rs
deleted file mode 100644
index 7e596209d8..0000000000
--- a/support/texlab/src/features/rename/label.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-use rustc_hash::FxHashMap;
-
-use crate::{syntax::latex, util::cursor::CursorContext};
-
-use super::{Indel, Params, RenameResult};
-
-pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
- let (_, range) = context.find_label_name_key()?;
- Some(range)
-}
-
-pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
- prepare_rename(context)?;
- let (name_text, _) = context.find_label_name_key()?;
-
- let mut changes = FxHashMap::default();
- for document in context.related() {
- if let Some(data) = document.parse(context.db).as_tex() {
- let mut edits = Vec::new();
- for node in data.root(context.db).descendants() {
- if let Some(range) = latex::LabelDefinition::cast(node.clone())
- .and_then(|label| label.name())
- .and_then(|name| name.key())
- .filter(|name| name.to_string() == name_text)
- .map(|name| latex::small_range(&name))
- {
- edits.push(Indel {
- delete: range,
- insert: context.params.new_name.clone(),
- });
- }
-
- latex::LabelReference::cast(node.clone())
- .and_then(|label| label.name_list())
- .into_iter()
- .flat_map(|label| label.keys())
- .filter(|name| name.to_string() == name_text)
- .for_each(|name| {
- edits.push(Indel {
- delete: latex::small_range(&name),
- insert: context.params.new_name.clone(),
- });
- });
-
- if let Some(label) = latex::LabelReferenceRange::cast(node.clone()) {
- if let Some(name_from) = label
- .from()
- .and_then(|name| name.key())
- .filter(|name| name.to_string() == name_text)
- {
- edits.push(Indel {
- delete: latex::small_range(&name_from),
- insert: context.params.new_name.clone(),
- });
- }
-
- if let Some(name_to) = label
- .to()
- .and_then(|name| name.key())
- .filter(|name| name.to_string() == name_text)
- {
- edits.push(Indel {
- delete: latex::small_range(&name_to),
- insert: context.params.new_name.clone(),
- });
- }
- }
- }
-
- changes.insert(document, edits);
- }
- }
-
- Some(RenameResult { changes })
-}
diff --git a/support/texlab/src/features/symbol.rs b/support/texlab/src/features/symbol.rs
deleted file mode 100644
index 26a0ec87ef..0000000000
--- a/support/texlab/src/features/symbol.rs
+++ /dev/null
@@ -1,124 +0,0 @@
-mod bibtex;
-mod latex;
-mod project_order;
-mod types;
-
-use std::cmp::Reverse;
-
-use lsp_types::{DocumentSymbolResponse, SymbolInformation, Url, WorkspaceSymbolParams};
-
-use crate::{db::Workspace, util::capabilities::ClientCapabilitiesExt, Db};
-
-use self::{project_order::ProjectOrdering, types::InternalSymbol};
-
-pub fn find_document_symbols(db: &dyn Db, uri: &Url) -> Option<DocumentSymbolResponse> {
- let workspace = Workspace::get(db);
- let document = workspace.lookup_uri(db, uri)?;
-
- let mut buf = Vec::new();
- latex::find_symbols(db, document, &mut buf);
- bibtex::find_symbols(db, document, &mut buf);
-
- let config = &db.config().symbols;
-
- InternalSymbol::filter(&mut buf, &config);
-
- if workspace
- .client_capabilities(db)
- .has_hierarchical_document_symbol_support()
- {
- let symbols = buf
- .into_iter()
- .map(|symbol| symbol.into_document_symbol(db))
- .collect();
-
- Some(DocumentSymbolResponse::Nested(symbols))
- } else {
- let mut new_buf = Vec::new();
- for symbol in buf {
- symbol.flatten(&mut new_buf);
- }
-
- let mut new_buf: Vec<_> = new_buf
- .into_iter()
- .map(|symbol| symbol.into_symbol_info(uri.clone()))
- .collect();
-
- sort_symbols(db, &mut new_buf);
- Some(DocumentSymbolResponse::Flat(new_buf))
- }
-}
-
-#[derive(Debug, Clone)]
-struct WorkspaceSymbol {
- info: SymbolInformation,
- search_text: String,
-}
-
-#[must_use]
-pub fn find_workspace_symbols(
- db: &dyn Db,
- params: &WorkspaceSymbolParams,
-) -> Vec<SymbolInformation> {
- let mut symbols = Vec::new();
-
- let workspace = Workspace::get(db);
- for document in workspace.documents(db).iter().copied() {
- let mut buf = Vec::new();
- latex::find_symbols(db, document, &mut buf);
- bibtex::find_symbols(db, document, &mut buf);
- let mut new_buf = Vec::new();
-
- for symbol in buf {
- symbol.flatten(&mut new_buf);
- }
-
- for symbol in new_buf {
- symbols.push(WorkspaceSymbol {
- search_text: symbol.search_text(),
- info: symbol.into_symbol_info(document.location(db).uri(db).clone()),
- });
- }
- }
-
- let query_words: Vec<_> = params
- .query
- .split_whitespace()
- .map(str::to_lowercase)
- .collect();
-
- let mut filtered = Vec::new();
- for symbol in symbols {
- let mut included = true;
- for word in &query_words {
- if !symbol.search_text.contains(word) {
- included = false;
- break;
- }
- }
-
- if included {
- filtered.push(symbol.info);
- }
- }
-
- sort_symbols(db, &mut filtered);
- filtered
-}
-
-fn sort_symbols(db: &dyn Db, symbols: &mut [SymbolInformation]) {
- let ordering = ProjectOrdering::new(db);
- symbols.sort_by(|left, right| {
- let left_key = (
- ordering.get(db, &left.location.uri),
- left.location.range.start,
- Reverse(left.location.range.end),
- );
- let right_key = (
- ordering.get(db, &right.location.uri),
- right.location.range.start,
- Reverse(right.location.range.end),
- );
- left_key.cmp(&right_key)
- });
-}
diff --git a/support/texlab/src/features/symbol/bibtex.rs b/support/texlab/src/features/symbol/bibtex.rs
deleted file mode 100644
index 80c10ee1ba..0000000000
--- a/support/texlab/src/features/symbol/bibtex.rs
+++ /dev/null
@@ -1,87 +0,0 @@
-use rowan::ast::AstNode;
-
-use crate::{
- db::Document,
- syntax::bibtex::{self, HasName, HasType},
- util::{
- lang_data::{BibtexEntryTypeCategory, LANGUAGE_DATA},
- line_index::LineIndex,
- line_index_ext::LineIndexExt,
- },
- Db,
-};
-
-use super::types::{InternalSymbol, InternalSymbolKind};
-
-pub fn find_symbols(db: &dyn Db, document: Document, buf: &mut Vec<InternalSymbol>) -> Option<()> {
- let data = document.parse(db).as_bib()?;
- let line_index = document.line_index(db);
- for node in data.root(db).children() {
- process_string(node.clone(), line_index, buf)
- .or_else(|| process_entry(node, line_index, buf));
- }
-
- Some(())
-}
-
-fn process_string(
- node: bibtex::SyntaxNode,
- line_index: &LineIndex,
- buf: &mut Vec<InternalSymbol>,
-) -> Option<()> {
- let string = bibtex::StringDef::cast(node)?;
- let name = string.name_token()?;
- buf.push(InternalSymbol {
- name: name.text().into(),
- label: None,
- kind: InternalSymbolKind::String,
- deprecated: false,
- full_range: line_index.line_col_lsp_range(string.syntax().text_range()),
- selection_range: line_index.line_col_lsp_range(name.text_range()),
- children: Vec::new(),
- });
-
- Some(())
-}
-
-fn process_entry(
- node: bibtex::SyntaxNode,
- line_index: &LineIndex,
- buf: &mut Vec<InternalSymbol>,
-) -> Option<()> {
- let entry = bibtex::Entry::cast(node)?;
- let ty = entry.type_token()?;
- let key = entry.name_token()?;
- let mut children = Vec::new();
- for field in entry.fields() {
- if let Some(name) = field.name_token() {
- let symbol = InternalSymbol {
- name: name.text().to_string(),
- label: None,
- kind: InternalSymbolKind::Field,
- deprecated: false,
- full_range: line_index.line_col_lsp_range(field.syntax().text_range()),
- selection_range: line_index.line_col_lsp_range(name.text_range()),
- children: Vec::new(),
- };
- children.push(symbol);
- }
- }
-
- let category = LANGUAGE_DATA
- .find_entry_type(&ty.text()[1..])
- .map(|ty| ty.category)
- .unwrap_or(BibtexEntryTypeCategory::Misc);
-
- buf.push(InternalSymbol {
- name: key.to_string(),
- label: None,
- kind: InternalSymbolKind::Entry(category),
- deprecated: false,
- full_range: line_index.line_col_lsp_range(entry.syntax().text_range()),
- selection_range: line_index.line_col_lsp_range(key.text_range()),
- children,
- });
-
- Some(())
-}
diff --git a/support/texlab/src/features/symbol/latex.rs b/support/texlab/src/features/symbol/latex.rs
deleted file mode 100644
index 8bb5680483..0000000000
--- a/support/texlab/src/features/symbol/latex.rs
+++ /dev/null
@@ -1,446 +0,0 @@
-use std::str::FromStr;
-
-use lsp_types::Range;
-use rowan::ast::AstNode;
-use titlecase::titlecase;
-
-use crate::{
- db::{Document, Word, Workspace},
- syntax::latex::{self, HasBrack, HasCurly},
- util::{
- label::{find_caption_by_parent, LabeledFloatKind},
- line_index_ext::LineIndexExt,
- },
- Db,
-};
-
-use super::types::{InternalSymbol, InternalSymbolKind};
-
-pub fn find_symbols(db: &dyn Db, document: Document, buf: &mut Vec<InternalSymbol>) -> Option<()> {
- let data = document.parse(db).as_tex()?;
- let mut symbols = visit(db, document, data.root(db));
- buf.append(&mut symbols);
- Some(())
-}
-
-fn visit(db: &dyn Db, document: Document, node: latex::SyntaxNode) -> Vec<InternalSymbol> {
- let symbol = match node.kind() {
- latex::PART
- | latex::CHAPTER
- | latex::SECTION
- | latex::SUBSECTION
- | latex::SUBSUBSECTION
- | latex::PARAGRAPH
- | latex::SUBPARAGRAPH => visit_section(db, document, node.clone()),
- latex::ENUM_ITEM => visit_enum_item(db, document, node.clone()),
- latex::EQUATION => visit_equation(db, document, node.clone()),
- latex::ENVIRONMENT => latex::Environment::cast(node.clone())
- .and_then(|env| env.begin())
- .and_then(|begin| begin.name())
- .and_then(|name| name.key())
- .map(|name| name.to_string())
- .and_then(|name| {
- if db.config().syntax.math_environments.contains(&name) {
- visit_equation_environment(db, document, node.clone())
- } else if db.config().syntax.enum_environments.contains(&name) {
- visit_enumeration(db, document, node.clone(), &name)
- } else if let Ok(float_kind) = LabeledFloatKind::from_str(&name) {
- visit_float(db, document, node.clone(), float_kind)
- } else {
- visit_theorem(db, document, node.clone(), &name)
- }
- }),
- _ => None,
- };
-
- match symbol {
- Some(mut parent) => {
- for child in node.children() {
- parent.children.append(&mut visit(db, document, child));
- }
- vec![parent]
- }
- None => {
- let mut symbols = Vec::new();
- for child in node.children() {
- symbols.append(&mut visit(db, document, child));
- }
- symbols
- }
- }
-}
-
-fn visit_section(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
-) -> Option<InternalSymbol> {
- let section = latex::Section::cast(node)?;
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&section));
-
- let group = section.name()?;
- let group_text = group.content_text()?;
-
- let symbol = match find_label_by_parent(db, document, section.syntax()) {
- Some(NumberedLabel {
- name: label,
- range: selection_range,
- number,
- }) => {
- let name = match number {
- Some(number) => format!("{} {}", number.text(db), group_text),
- None => group_text,
- };
-
- InternalSymbol {
- name,
- label: Some(label),
- kind: InternalSymbolKind::Section,
- deprecated: false,
- full_range,
- selection_range,
- children: Vec::new(),
- }
- }
- None => InternalSymbol {
- name: group_text,
- label: None,
- kind: InternalSymbolKind::Section,
- deprecated: false,
- full_range,
- selection_range: full_range,
- children: Vec::new(),
- },
- };
-
- Some(symbol)
-}
-
-fn visit_enum_item(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
-) -> Option<InternalSymbol> {
- let enum_envs = &db.config().syntax.enum_environments;
- let enum_item = latex::EnumItem::cast(node.clone())?;
- if !enum_item
- .syntax()
- .ancestors()
- .filter_map(latex::Environment::cast)
- .filter_map(|environment| environment.begin())
- .filter_map(|begin| begin.name())
- .filter_map(|name| name.key())
- .any(|name| enum_envs.contains(&name.to_string()))
- {
- return None;
- }
-
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&enum_item));
-
- let name = enum_item
- .label()
- .and_then(|label| label.content_text())
- .unwrap_or_else(|| "Item".to_string());
-
- let symbol = match find_label_by_parent(db, document, &node) {
- Some(NumberedLabel {
- name: label,
- range: selection_range,
- number,
- }) => InternalSymbol {
- name: number
- .map(|num| num.text(db).clone())
- .unwrap_or_else(|| name.clone()),
- label: Some(label),
- kind: InternalSymbolKind::EnumerationItem,
- deprecated: false,
- full_range,
- selection_range,
- children: Vec::new(),
- },
- None => InternalSymbol {
- name,
- label: None,
- kind: InternalSymbolKind::EnumerationItem,
- deprecated: false,
- full_range,
- selection_range: full_range,
- children: Vec::new(),
- },
- };
- Some(symbol)
-}
-
-fn visit_equation(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
-) -> Option<InternalSymbol> {
- let equation = latex::Equation::cast(node)?;
-
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&equation));
-
- make_equation_symbol(db, document, equation.syntax(), full_range)
-}
-
-fn visit_equation_environment(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
-) -> Option<InternalSymbol> {
- let environment = latex::Environment::cast(node)?;
-
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&environment));
-
- make_equation_symbol(db, document, environment.syntax(), full_range)
-}
-
-fn make_equation_symbol(
- db: &dyn Db,
- document: Document,
- node: &latex::SyntaxNode,
- full_range: Range,
-) -> Option<InternalSymbol> {
- let symbol = match find_label_by_parent(db, document, node) {
- Some(NumberedLabel {
- name: label,
- range: selection_range,
- number,
- }) => {
- let name = match number {
- Some(number) => format!("Equation ({})", number.text(db)),
- None => "Equation".to_string(),
- };
-
- InternalSymbol {
- name,
- label: Some(label),
- kind: InternalSymbolKind::Equation,
- deprecated: false,
- full_range,
- selection_range,
- children: Vec::new(),
- }
- }
- None => InternalSymbol {
- name: "Equation".to_string(),
- label: None,
- kind: InternalSymbolKind::Equation,
- deprecated: false,
- full_range,
- selection_range: full_range,
- children: Vec::new(),
- },
- };
- Some(symbol)
-}
-
-fn visit_enumeration(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
- env_name: &str,
-) -> Option<InternalSymbol> {
- let environment = latex::Environment::cast(node)?;
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&environment));
-
- let name = titlecase(env_name);
- let symbol = match find_label_by_parent(db, document, environment.syntax()) {
- Some(NumberedLabel {
- name: label,
- range: selection_range,
- number,
- }) => {
- let name = match number {
- Some(number) => format!("{} {}", name, number.text(db)),
- None => name,
- };
-
- InternalSymbol {
- name,
- label: Some(label),
- kind: InternalSymbolKind::Enumeration,
- deprecated: false,
- full_range,
- selection_range,
- children: Vec::new(),
- }
- }
- None => InternalSymbol {
- name,
- label: None,
- kind: InternalSymbolKind::Enumeration,
- deprecated: false,
- full_range,
- selection_range: full_range,
- children: Vec::new(),
- },
- };
- Some(symbol)
-}
-
-fn visit_float(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
- float_kind: LabeledFloatKind,
-) -> Option<InternalSymbol> {
- let environment = latex::Environment::cast(node)?;
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&environment));
-
- let (float_kind, symbol_kind) = match float_kind {
- LabeledFloatKind::Algorithm => ("Algorithm", InternalSymbolKind::Algorithm),
- LabeledFloatKind::Figure => ("Figure", InternalSymbolKind::Figure),
- LabeledFloatKind::Listing => ("Listing", InternalSymbolKind::Listing),
- LabeledFloatKind::Table => ("Table", InternalSymbolKind::Table),
- };
-
- let caption = find_caption_by_parent(environment.syntax())?;
- let symbol = match find_label_by_parent(db, document, environment.syntax()) {
- Some(NumberedLabel {
- name: label,
- range: selection_range,
- number,
- }) => {
- let name = match number {
- Some(number) => format!("{} {}: {}", float_kind, number.text(db), caption),
- None => format!("{}: {}", float_kind, caption),
- };
-
- InternalSymbol {
- name,
- label: Some(label),
- kind: symbol_kind,
- deprecated: false,
- full_range,
- selection_range,
- children: Vec::new(),
- }
- }
- None => InternalSymbol {
- name: format!("{}: {}", float_kind, caption),
- label: None,
- kind: symbol_kind,
- deprecated: false,
- full_range,
- selection_range: full_range,
- children: Vec::new(),
- },
- };
-
- Some(symbol)
-}
-
-fn visit_theorem(
- db: &dyn Db,
- document: Document,
- node: latex::SyntaxNode,
- environment_name: &str,
-) -> Option<InternalSymbol> {
- let definition = Workspace::get(db)
- .related(db, document)
- .iter()
- .filter_map(|document| document.parse(db).as_tex())
- .flat_map(|data| data.analyze(db).theorem_environments(db))
- .find(|env| env.name(db).text(db) == environment_name)?;
-
- let node = latex::Environment::cast(node)?;
- let theorem_description = node
- .begin()?
- .options()
- .and_then(|option| option.content_text());
-
- let full_range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&node));
-
- let symbol = match find_label_by_parent(db, document, node.syntax()) {
- Some(NumberedLabel {
- name: label,
- range: selection_range,
- number,
- }) => {
- let name = match (number, theorem_description) {
- (Some(number), Some(desc)) => {
- format!(
- "{} {} ({})",
- definition.description(db).text(db),
- number.text(db),
- desc
- )
- }
- (Some(number), None) => format!(
- "{} {}",
- definition.description(db).text(db),
- number.text(db)
- ),
- (None, Some(desc)) => format!("{} ({})", definition.description(db).text(db), desc),
- (None, None) => definition.description(db).text(db).clone(),
- };
-
- InternalSymbol {
- name,
- label: Some(label),
- kind: InternalSymbolKind::Theorem,
- deprecated: false,
- full_range,
- selection_range,
- children: Vec::new(),
- }
- }
- None => {
- let name = match theorem_description {
- Some(desc) => format!("{} ({})", definition.description(db).text(db), desc),
- None => definition.description(db).text(db).clone(),
- };
- InternalSymbol {
- name,
- label: None,
- kind: InternalSymbolKind::Theorem,
- deprecated: false,
- full_range,
- selection_range: full_range,
- children: Vec::new(),
- }
- }
- };
- Some(symbol)
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-struct NumberedLabel {
- name: Word,
- range: Range,
- number: Option<Word>,
-}
-
-fn find_label_by_parent(
- db: &dyn Db,
- document: Document,
- parent: &latex::SyntaxNode,
-) -> Option<NumberedLabel> {
- let node = parent.children().find_map(latex::LabelDefinition::cast)?;
- let name = Word::new(db, node.name()?.key()?.to_string());
- let range = document
- .line_index(db)
- .line_col_lsp_range(latex::small_range(&node));
-
- let number = Workspace::get(db).number_of_label(db, document, name);
- Some(NumberedLabel {
- name,
- range,
- number,
- })
-}
diff --git a/support/texlab/src/features/symbol/project_order.rs b/support/texlab/src/features/symbol/project_order.rs
deleted file mode 100644
index f4aeb63bca..0000000000
--- a/support/texlab/src/features/symbol/project_order.rs
+++ /dev/null
@@ -1,202 +0,0 @@
-use itertools::Itertools;
-use lsp_types::Url;
-
-use crate::{
- db::{dependency_graph, Document, Workspace},
- Db,
-};
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct ProjectOrdering {
- ordering: Vec<Document>,
-}
-
-impl ProjectOrdering {
- pub fn new(db: &dyn Db) -> Self {
- let workspace = Workspace::get(db);
-
- let ordering: Vec<_> = workspace
- .index_files(db)
- .chain(workspace.documents(db).iter().copied())
- .flat_map(|document| {
- dependency_graph(db, document)
- .preorder()
- .rev()
- .collect_vec()
- })
- .unique()
- .collect();
-
- Self { ordering }
- }
-
- pub fn get(&self, db: &dyn Db, uri: &Url) -> usize {
- self.ordering
- .iter()
- .position(|doc| doc.location(db).uri(db) == uri)
- .unwrap_or(std::usize::MAX)
- }
-}
-
-#[cfg(test)]
-mod tests {
- use crate::{
- db::{Language, Owner},
- Database,
- };
-
- use super::*;
-
- #[test]
- fn test_no_cycles() {
- let mut db = Database::default();
- let workspace = Workspace::get(&db);
-
- let a = workspace.open(
- &mut db,
- Url::parse("http://example.com/a.tex").unwrap(),
- String::new(),
- Language::Tex,
- Owner::Client,
- );
-
- let b = workspace.open(
- &mut db,
- Url::parse("http://example.com/b.tex").unwrap(),
- String::new(),
- Language::Tex,
- Owner::Client,
- );
-
- let c = workspace.open(
- &mut db,
- Url::parse("http://example.com/c.tex").unwrap(),
- r#"\documentclass{article}\include{b}\include{a}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- let ordering = ProjectOrdering::new(&db);
-
- assert_eq!(ordering.get(&db, a.location(&db).uri(&db)), 0);
- assert_eq!(ordering.get(&db, b.location(&db).uri(&db)), 1);
- assert_eq!(ordering.get(&db, c.location(&db).uri(&db)), 2);
- }
-
- #[test]
- fn test_two_layers() {
- let mut db = Database::default();
- let workspace = Workspace::get(&db);
-
- let a = workspace.open(
- &mut db,
- Url::parse("http://example.com/a.tex").unwrap(),
- String::new(),
- Language::Tex,
- Owner::Client,
- );
-
- let b = workspace.open(
- &mut db,
- Url::parse("http://example.com/b.tex").unwrap(),
- r#"\include{a}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- let c = workspace.open(
- &mut db,
- Url::parse("http://example.com/c.tex").unwrap(),
- r#"\documentclass{article}\include{b}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- let ordering = ProjectOrdering::new(&db);
-
- assert_eq!(ordering.get(&db, a.location(&db).uri(&db)), 0);
- assert_eq!(ordering.get(&db, b.location(&db).uri(&db)), 1);
- assert_eq!(ordering.get(&db, c.location(&db).uri(&db)), 2);
- }
-
- #[test]
- fn test_cycles() {
- let mut db = Database::default();
- let workspace = Workspace::get(&db);
-
- let a = workspace.open(
- &mut db,
- Url::parse("http://example.com/a.tex").unwrap(),
- r#"\documentclass{article}\include{b}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- workspace.open(
- &mut db,
- Url::parse("http://example.com/b.tex").unwrap(),
- r#"\include{a}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- workspace.open(
- &mut db,
- Url::parse("http://example.com/c.tex").unwrap(),
- r#"\include{a}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- let ordering = ProjectOrdering::new(&db);
- assert_ne!(ordering.get(&db, a.location(&db).uri(&db)), 0);
- }
-
- #[test]
- fn test_multiple_roots() {
- let mut db = Database::default();
- let workspace = Workspace::get(&db);
-
- let a = workspace.open(
- &mut db,
- Url::parse("http://example.com/a.tex").unwrap(),
- r#"\documentclass{article}\include{b}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- let b = workspace.open(
- &mut db,
- Url::parse("http://example.com/b.tex").unwrap(),
- String::new(),
- Language::Tex,
- Owner::Client,
- );
-
- let c = workspace.open(
- &mut db,
- Url::parse("http://example.com/c.tex").unwrap(),
- String::new(),
- Language::Tex,
- Owner::Client,
- );
-
- let d = workspace.open(
- &mut db,
- Url::parse("http://example.com/d.tex").unwrap(),
- r#"\documentclass{article}\include{c}"#.to_string(),
- Language::Tex,
- Owner::Client,
- );
-
- let ordering = ProjectOrdering::new(&db);
- assert!(
- ordering.get(&db, b.location(&db).uri(&db))
- < ordering.get(&db, a.location(&db).uri(&db))
- );
- assert!(
- ordering.get(&db, c.location(&db).uri(&db))
- < ordering.get(&db, d.location(&db).uri(&db))
- );
- }
-}
diff --git a/support/texlab/src/features/symbol/types.rs b/support/texlab/src/features/symbol/types.rs
deleted file mode 100644
index 9e8ea28792..0000000000
--- a/support/texlab/src/features/symbol/types.rs
+++ /dev/null
@@ -1,136 +0,0 @@
-use lsp_types::{DocumentSymbol, Location, Range, SymbolInformation, SymbolKind, Url};
-
-use crate::{
- db::Word,
- util::{self, lang_data::BibtexEntryTypeCategory, lsp_enums::Structure},
- Db, SymbolConfig,
-};
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum InternalSymbolKind {
- Section,
- Figure,
- Algorithm,
- Table,
- Listing,
- Enumeration,
- EnumerationItem,
- Theorem,
- Equation,
- Entry(BibtexEntryTypeCategory),
- Field,
- String,
-}
-
-impl InternalSymbolKind {
- pub fn into_symbol_kind(self) -> SymbolKind {
- match self {
- Self::Section => Structure::Section.symbol_kind(),
- Self::Figure | Self::Algorithm | Self::Table | Self::Listing => {
- Structure::Float.symbol_kind()
- }
- Self::Enumeration => Structure::Environment.symbol_kind(),
- Self::EnumerationItem => Structure::Item.symbol_kind(),
- Self::Theorem => Structure::Theorem.symbol_kind(),
- Self::Equation => Structure::Equation.symbol_kind(),
- Self::Entry(category) => Structure::Entry(category).symbol_kind(),
- Self::Field => Structure::Field.symbol_kind(),
- Self::String => Structure::Entry(BibtexEntryTypeCategory::String).symbol_kind(),
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct InternalSymbol {
- pub name: String,
- pub label: Option<Word>,
- pub kind: InternalSymbolKind,
- pub deprecated: bool,
- pub full_range: Range,
- pub selection_range: Range,
- pub children: Vec<InternalSymbol>,
-}
-
-impl InternalSymbol {
- pub fn search_text(&self) -> String {
- let kind = match self.kind {
- InternalSymbolKind::Section => "latex section",
- InternalSymbolKind::Figure => "latex float figure",
- InternalSymbolKind::Algorithm => "latex float algorithm",
- InternalSymbolKind::Table => "latex float table",
- InternalSymbolKind::Listing => "latex float listing",
- InternalSymbolKind::Enumeration => "latex enumeration",
- InternalSymbolKind::EnumerationItem => "latex enumeration item",
- InternalSymbolKind::Theorem => "latex math",
- InternalSymbolKind::Equation => "latex math equation",
- InternalSymbolKind::Entry(_) => "bibtex entry",
- InternalSymbolKind::Field => "bibtex field",
- InternalSymbolKind::String => "bibtex string",
- };
- format!("{} {}", kind, self.name).to_lowercase()
- }
-
- pub fn flatten(mut self, buffer: &mut Vec<Self>) {
- if self.kind == InternalSymbolKind::Field {
- return;
- }
- for symbol in self.children.drain(..) {
- symbol.flatten(buffer);
- }
- buffer.push(self);
- }
-
- pub fn filter(container: &mut Vec<InternalSymbol>, config: &SymbolConfig) {
- let mut i = 0;
- while i < container.len() {
- let symbol = &mut container[i];
-
- if !symbol.name.is_empty()
- && util::regex_filter::filter(
- &symbol.name,
- &config.allowed_patterns,
- &config.ignored_patterns,
- )
- {
- Self::filter(&mut symbol.children, config);
- i += 1;
- } else {
- drop(symbol);
- let mut symbol = container.remove(i);
- container.append(&mut symbol.children);
- }
- }
- }
-
- pub fn into_document_symbol(self, db: &dyn Db) -> DocumentSymbol {
- let children = self
- .children
- .into_iter()
- .map(|child| child.into_document_symbol(db))
- .collect();
-
- #[allow(deprecated)]
- DocumentSymbol {
- name: self.name,
- detail: self.label.map(|word| word.text(db).clone()),
- kind: self.kind.into_symbol_kind(),
- deprecated: Some(self.deprecated),
- range: self.full_range,
- selection_range: self.selection_range,
- children: Some(children),
- tags: None,
- }
- }
-
- pub fn into_symbol_info(self, uri: Url) -> SymbolInformation {
- #[allow(deprecated)]
- SymbolInformation {
- name: self.name,
- kind: self.kind.into_symbol_kind(),
- deprecated: Some(self.deprecated),
- location: Location::new(uri, self.full_range),
- container_name: None,
- tags: None,
- }
- }
-}
diff --git a/support/texlab/src/features/workspace_command.rs b/support/texlab/src/features/workspace_command.rs
deleted file mode 100644
index cac998125e..0000000000
--- a/support/texlab/src/features/workspace_command.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-pub mod change_environment;
-pub mod clean;
-pub mod dep_graph;
diff --git a/support/texlab/src/features/workspace_command/change_environment.rs b/support/texlab/src/features/workspace_command/change_environment.rs
deleted file mode 100644
index ef2c96eae1..0000000000
--- a/support/texlab/src/features/workspace_command/change_environment.rs
+++ /dev/null
@@ -1,108 +0,0 @@
-use std::collections::hash_map::HashMap;
-
-use anyhow::Result;
-use lsp_types::{ApplyWorkspaceEditParams, TextDocumentPositionParams, TextEdit, WorkspaceEdit};
-use rowan::ast::AstNode;
-use serde::{Deserialize, Serialize};
-use thiserror::Error;
-
-use crate::{
- normalize_uri,
- util::{cursor::CursorContext, line_index_ext::LineIndexExt},
- Db,
-};
-
-fn change_environment_context(
- db: &dyn Db,
- args: Vec<serde_json::Value>,
-) -> Result<CursorContext<Params>> {
- let params: ChangeEnvironmentParams = serde_json::from_value(
- args.into_iter()
- .next()
- .ok_or(ChangeEnvironmentError::MissingArg)?,
- )
- .map_err(ChangeEnvironmentError::InvalidArg)?;
-
- let mut uri = params.text_document_position.text_document.uri;
- normalize_uri(&mut uri);
- let position = params.text_document_position.position;
-
- CursorContext::new(
- db,
- &uri,
- position,
- Params {
- new_name: params.new_name,
- },
- )
- .ok_or(ChangeEnvironmentError::FailedCreatingContext.into())
-}
-
-pub fn change_environment(
- db: &dyn Db,
- args: Vec<serde_json::Value>,
-) -> Option<((), ApplyWorkspaceEditParams)> {
- let context = change_environment_context(db, args).ok()?;
- let (beg, end) = context.find_environment()?;
-
- let beg_name = beg.to_string();
- let end_name = end.to_string();
-
- if beg_name != end_name {
- return None;
- }
- let new_name = &context.params.new_name;
-
- let line_index = context.document.line_index(db);
- let mut changes = HashMap::default();
- changes.insert(
- context.document.location(db).uri(db).clone(),
- vec![
- TextEdit::new(
- line_index.line_col_lsp_range(beg.syntax().text_range()),
- new_name.clone(),
- ),
- TextEdit::new(
- line_index.line_col_lsp_range(end.syntax().text_range()),
- new_name.clone(),
- ),
- ],
- );
-
- Some((
- (),
- ApplyWorkspaceEditParams {
- label: Some(format!("change environment: {} -> {}", beg_name, new_name)),
- edit: WorkspaceEdit::new(changes),
- },
- ))
-}
-
-#[derive(Debug, Error)]
-pub enum ChangeEnvironmentError {
- #[error("rename parameters were not provided as an argument")]
- MissingArg,
-
- #[error("invalid argument: {0}")]
- InvalidArg(serde_json::Error),
-
- #[error("failed creating context")]
- FailedCreatingContext,
-
- #[error("could not create workspace edit")]
- CouldNotCreateWorkspaceEdit,
-}
-
-#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ChangeEnvironmentParams {
- #[serde(flatten)]
- pub text_document_position: TextDocumentPositionParams,
-
- pub new_name: String,
-}
-
-#[derive(Debug)]
-pub struct Params {
- new_name: String,
-}
diff --git a/support/texlab/src/features/workspace_command/clean.rs b/support/texlab/src/features/workspace_command/clean.rs
deleted file mode 100644
index cea22fc687..0000000000
--- a/support/texlab/src/features/workspace_command/clean.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use std::process::Stdio;
-
-use anyhow::Result;
-use lsp_types::{TextDocumentIdentifier, Url};
-use thiserror::Error;
-
-use crate::{db::Workspace, normalize_uri, Db};
-
-#[derive(Debug, Error)]
-pub enum CleanError {
- #[error("document '{0}' not found")]
- DocumentNotFound(Url),
-
- #[error("document '{0}' is not a local file")]
- NoLocalFile(Url),
-
- #[error("document was not provided as an argument")]
- MissingArg,
-
- #[error("invalid argument: {0}")]
- InvalidArg(serde_json::Error),
-
- #[error("failed to spawn process: {0}")]
- Spawn(std::io::Error),
-}
-
-#[derive(Debug)]
-pub struct CleanCommand {
- executable: String,
- args: Vec<String>,
-}
-
-impl CleanCommand {
- pub fn new(db: &dyn Db, options: CleanOptions, args: Vec<serde_json::Value>) -> Result<Self> {
- let params: TextDocumentIdentifier =
- serde_json::from_value(args.into_iter().next().ok_or(CleanError::MissingArg)?)
- .map_err(CleanError::InvalidArg)?;
-
- let mut uri = params.uri;
- normalize_uri(&mut uri);
-
- let workspace = Workspace::get(db);
-
- let document = workspace
- .lookup_uri(db, &uri)
- .ok_or_else(|| CleanError::DocumentNotFound(uri.clone()))?;
-
- let working_dir = workspace.working_dir(db, document.directory(db));
-
- let output_dir = workspace
- .output_dir(db, working_dir)
- .path(db)
- .as_deref()
- .ok_or_else(|| CleanError::NoLocalFile(uri.clone()))?;
-
- let path = document
- .location(db)
- .path(db)
- .as_deref()
- .ok_or_else(|| CleanError::NoLocalFile(uri.clone()))?;
-
- let flag = match options {
- CleanOptions::Auxiliary => "-c",
- CleanOptions::Artifacts => "-C",
- };
-
- let executable = "latexmk".to_string();
- let args = vec![
- format!("-outdir={}", output_dir.display()),
- flag.to_string(),
- path.display().to_string(),
- ];
-
- Ok(Self { executable, args })
- }
-
- pub fn run(self) -> Result<()> {
- log::debug!("Cleaning output files: {} {:?}", self.executable, self.args);
- std::process::Command::new(self.executable)
- .args(self.args)
- .stdin(Stdio::null())
- .stdout(Stdio::null())
- .stderr(Stdio::null())
- .status()
- .map_err(move |msg| anyhow::Error::new(CleanError::Spawn(msg)))?;
-
- Ok(())
- }
-}
-
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
-pub enum CleanOptions {
- Auxiliary,
- Artifacts,
-}
diff --git a/support/texlab/src/features/workspace_command/dep_graph.rs b/support/texlab/src/features/workspace_command/dep_graph.rs
deleted file mode 100644
index 84e87c4026..0000000000
--- a/support/texlab/src/features/workspace_command/dep_graph.rs
+++ /dev/null
@@ -1,57 +0,0 @@
-use anyhow::Result;
-use itertools::Itertools;
-use std::io::Write;
-
-use rustc_hash::FxHashMap;
-
-use crate::{
- db::{dependency_graph, Document, Workspace},
- Db,
-};
-
-pub fn show_dependency_graph(db: &dyn Db) -> Result<String> {
- let workspace = Workspace::get(db);
-
- let documents = workspace
- .documents(db)
- .iter()
- .enumerate()
- .map(|(i, doc)| (*doc, format!("v{i:0>5}")))
- .collect::<FxHashMap<Document, String>>();
-
- let mut writer = Vec::new();
- writeln!(&mut writer, "digraph G {{")?;
- writeln!(&mut writer, "rankdir = LR;")?;
-
- for (document, node) in &documents {
- let label = document.location(db).uri(db).as_str();
- let shape = if document.can_be_root(db) {
- "tripleoctagon"
- } else if document.can_be_built(db) {
- "doubleoctagon"
- } else {
- "octagon"
- };
-
- writeln!(&mut writer, "\t{node} [label=\"{label}\", shape={shape}];")?;
- }
-
- for edge in workspace
- .documents(db)
- .iter()
- .flat_map(|start| dependency_graph(db, *start).edges.iter())
- .unique()
- {
- let source = &documents[&edge.source];
- let target = &documents[&edge.target];
- let label = edge
- .origin
- .as_ref()
- .map_or("<artifact>", |origin| &origin.link.path(db).text(db));
-
- writeln!(&mut writer, "\t{source} -> {target} [label=\"{label}\"];")?;
- }
-
- writeln!(&mut writer, "}}")?;
- Ok(String::from_utf8(writer)?)
-}