Diffstat (limited to 'support/texlab/crates')
-rw-r--r--  support/texlab/crates/base-db/Cargo.toml | 6
-rw-r--r--  support/texlab/crates/base-db/src/deps.rs | 2
-rw-r--r--  support/texlab/crates/base-db/src/deps/discover.rs | 4
-rw-r--r--  support/texlab/crates/base-db/src/deps/graph.rs | 8
-rw-r--r--  support/texlab/crates/base-db/src/deps/root.rs | 40
-rw-r--r--  support/texlab/crates/base-db/src/document.rs | 7
-rw-r--r--  support/texlab/crates/base-db/src/semantics.rs | 9
-rw-r--r--  support/texlab/crates/base-db/src/semantics/tex.rs | 29
-rw-r--r--  support/texlab/crates/base-db/src/util.rs | 2
-rw-r--r--  support/texlab/crates/base-db/src/util/expand.rs | 31
-rw-r--r--  support/texlab/crates/base-db/src/workspace.rs | 20
-rw-r--r--  support/texlab/crates/bibfmt/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/bibtex-utils/Cargo.toml | 6
-rw-r--r--  support/texlab/crates/citeproc/Cargo.toml | 6
-rw-r--r--  support/texlab/crates/commands/Cargo.toml | 10
-rw-r--r--  support/texlab/crates/completion-data/Cargo.toml | 6
-rw-r--r--  support/texlab/crates/completion/Cargo.toml | 4
-rw-r--r--  support/texlab/crates/completion/src/providers/include.rs | 13
-rw-r--r--  support/texlab/crates/completion/src/tests.rs | 46
-rw-r--r--  support/texlab/crates/definition/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/definition/src/command.rs | 28
-rw-r--r--  support/texlab/crates/definition/src/tests.rs | 16
-rw-r--r--  support/texlab/crates/diagnostics/Cargo.toml | 8
-rw-r--r--  support/texlab/crates/diagnostics/src/citations.rs | 2
-rw-r--r--  support/texlab/crates/diagnostics/src/manager.rs | 14
-rw-r--r--  support/texlab/crates/distro/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/folding/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/highlights/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/hover/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/inlay-hints/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/ipc/Cargo.toml | 21
-rw-r--r--  support/texlab/crates/ipc/src/lib.rs | 54
-rw-r--r--  support/texlab/crates/links/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/parser/Cargo.toml | 9
-rw-r--r--  support/texlab/crates/parser/src/bibtex.rs | 4
-rw-r--r--  support/texlab/crates/parser/src/config.rs | 9
-rw-r--r--  support/texlab/crates/parser/src/latex.rs | 22
-rw-r--r--  support/texlab/crates/parser/src/latex/lexer/commands.rs | 31
-rw-r--r--  support/texlab/crates/parser/src/latex/lexer/types.rs | 3
-rw-r--r--  support/texlab/crates/parser/src/latex/tests.rs | 251
-rw-r--r--  support/texlab/crates/parser/src/latexmkrc.rs | 185
-rw-r--r--  support/texlab/crates/references/Cargo.toml | 1
-rw-r--r--  support/texlab/crates/references/src/command.rs | 52
-rw-r--r--  support/texlab/crates/references/src/lib.rs | 2
-rw-r--r--  support/texlab/crates/references/src/tests.rs | 31
-rw-r--r--  support/texlab/crates/symbols/Cargo.toml | 8
-rw-r--r--  support/texlab/crates/syntax/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/syntax/src/latex/cst.rs | 35
-rw-r--r--  support/texlab/crates/syntax/src/latex/kind.rs | 3
-rw-r--r--  support/texlab/crates/texlab/Cargo.toml | 13
-rw-r--r--  support/texlab/crates/texlab/src/main.rs | 67
-rw-r--r--  support/texlab/crates/texlab/src/server.rs | 102
-rw-r--r--  support/texlab/crates/texlab/src/server/options.rs | 136
-rw-r--r--  support/texlab/crates/texlab/src/util.rs | 2
-rw-r--r--  support/texlab/crates/texlab/src/util/client_flags.rs | 3
-rw-r--r--  support/texlab/crates/texlab/src/util/from_proto.rs | 150
56 files changed, 1104 insertions, 425 deletions
diff --git a/support/texlab/crates/base-db/Cargo.toml b/support/texlab/crates/base-db/Cargo.toml
index 010c83d0e4..ca477c496c 100644
--- a/support/texlab/crates/base-db/Cargo.toml
+++ b/support/texlab/crates/base-db/Cargo.toml
@@ -10,17 +10,19 @@ rust-version.workspace = true
bibtex-utils = { path = "../bibtex-utils" }
dirs = "5.0.1"
distro = { path = "../distro" }
-itertools = "0.12.0"
+itertools = "0.12.1"
line-index = { path = "../line-index" }
log = "0.4.21"
notify = "6.0.1"
once_cell = "1.19.0"
parser = { path = "../parser" }
percent-encoding = "2.3.0"
-regex = "1.10.2"
+regex = "1.10.4"
rowan = "0.15.15"
rustc-hash = "1.1.0"
+shellexpand = "3.1.0"
syntax = { path = "../syntax" }
+titlecase = "3.0.0"
url = "2.5.0"
[lib]
diff --git a/support/texlab/crates/base-db/src/deps.rs b/support/texlab/crates/base-db/src/deps.rs
index 06689a0df2..f501d04935 100644
--- a/support/texlab/crates/base-db/src/deps.rs
+++ b/support/texlab/crates/base-db/src/deps.rs
@@ -5,7 +5,7 @@ mod root;
pub use self::{
discover::{discover, watch},
- graph::{DirectLinkData, Edge, EdgeData, Graph},
+ graph::{DirectLinkData, Edge, EdgeData, Graph, HOME_DIR},
project::{parents, Project},
root::ProjectRoot,
};
diff --git a/support/texlab/crates/base-db/src/deps/discover.rs b/support/texlab/crates/base-db/src/deps/discover.rs
index 06905905a5..cbcab4d8fb 100644
--- a/support/texlab/crates/base-db/src/deps/discover.rs
+++ b/support/texlab/crates/base-db/src/deps/discover.rs
@@ -84,7 +84,7 @@ fn discover_parents(workspace: &mut Workspace, checked_paths: &mut FxHashSet<Pat
continue;
}
- if workspace.lookup_path(&file).is_none() && file.exists() {
+ if workspace.lookup_file(&file).is_none() && file.exists() {
changed |= workspace.load(&file, lang).is_ok();
checked_paths.insert(file);
}
@@ -107,7 +107,7 @@ fn discover_children(workspace: &mut Workspace, checked_paths: &mut FxHashSet<Pa
for file in files {
let language = Language::from_path(&file).unwrap_or(Language::Tex);
- if workspace.lookup_path(&file).is_none() && file.exists() {
+ if workspace.lookup_file(&file).is_none() && file.exists() {
changed |= workspace.load(&file, language).is_ok();
checked_paths.insert(file);
}
diff --git a/support/texlab/crates/base-db/src/deps/graph.rs b/support/texlab/crates/base-db/src/deps/graph.rs
index a90d8fa204..e07368d078 100644
--- a/support/texlab/crates/base-db/src/deps/graph.rs
+++ b/support/texlab/crates/base-db/src/deps/graph.rs
@@ -7,11 +7,11 @@ use percent_encoding::percent_decode_str;
use rustc_hash::FxHashSet;
use url::Url;
-use crate::{semantics, Document, Workspace};
+use crate::{semantics, util, Document, Workspace};
use super::ProjectRoot;
-pub(crate) static HOME_DIR: Lazy<Option<PathBuf>> = Lazy::new(dirs::home_dir);
+pub static HOME_DIR: Lazy<Option<PathBuf>> = Lazy::new(dirs::home_dir);
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct Edge {
@@ -158,7 +158,9 @@ impl Graph {
for target_uri in file_names
.iter()
- .flat_map(|file_name| start.root.src_dir.join(file_name))
+ .flat_map(|file_name| {
+ util::expand_relative_path(&file_name, &start.root.src_dir, workspace.folders())
+ })
.chain(distro_files)
{
match workspace.lookup(&target_uri) {
diff --git a/support/texlab/crates/base-db/src/deps/root.rs b/support/texlab/crates/base-db/src/deps/root.rs
index 442d6f9e35..a8cec6604a 100644
--- a/support/texlab/crates/base-db/src/deps/root.rs
+++ b/support/texlab/crates/base-db/src/deps/root.rs
@@ -1,6 +1,6 @@
use url::Url;
-use crate::{DocumentData, Workspace};
+use crate::{util, DocumentData, Workspace};
use super::graph::HOME_DIR;
@@ -74,6 +74,7 @@ impl ProjectRoot {
}
pub fn from_latexmkrc(workspace: &Workspace, dir: &Url) -> Option<Self> {
+ let config = workspace.config();
let rcfile = workspace
.iter()
.filter(|document| document.dir == *dir)
@@ -81,20 +82,30 @@ impl ProjectRoot {
let compile_dir = dir.clone();
let src_dir = dir.clone();
- let aux_dir = rcfile
+
+ let aux_dir_rc = rcfile
.aux_dir
.as_ref()
- .and_then(|path| append_dir(dir, path).ok())
- .unwrap_or_else(|| dir.clone());
+ .and_then(|path| append_dir(dir, path, workspace).ok());
- let out_dir = rcfile
+ let out_dir_rc = rcfile
.out_dir
.as_ref()
- .and_then(|path| append_dir(dir, path).ok())
+ .and_then(|path| append_dir(dir, path, workspace).ok());
+
+ let aux_dir = aux_dir_rc
+ .clone()
+ .or_else(|| append_dir(dir, &config.build.aux_dir, workspace).ok())
+ .unwrap_or_else(|| dir.clone());
+
+ let log_dir = aux_dir_rc
+ .or_else(|| append_dir(dir, &config.build.log_dir, workspace).ok())
+ .unwrap_or_else(|| dir.clone());
+
+ let pdf_dir = out_dir_rc
+ .or_else(|| append_dir(dir, &config.build.pdf_dir, workspace).ok())
.unwrap_or_else(|| dir.clone());
- let log_dir = out_dir.clone();
- let pdf_dir = out_dir;
let additional_files = vec![];
Some(Self {
@@ -124,9 +135,12 @@ impl ProjectRoot {
let compile_dir = dir.clone();
let src_dir = dir.clone();
let config = workspace.config();
- let aux_dir = append_dir(dir, &config.build.aux_dir).unwrap_or_else(|_| dir.clone());
- let log_dir = append_dir(dir, &config.build.log_dir).unwrap_or_else(|_| dir.clone());
- let pdf_dir = append_dir(dir, &config.build.pdf_dir).unwrap_or_else(|_| dir.clone());
+ let aux_dir =
+ append_dir(dir, &config.build.aux_dir, workspace).unwrap_or_else(|_| dir.clone());
+ let log_dir =
+ append_dir(dir, &config.build.log_dir, workspace).unwrap_or_else(|_| dir.clone());
+ let pdf_dir =
+ append_dir(dir, &config.build.pdf_dir, workspace).unwrap_or_else(|_| dir.clone());
let additional_files = vec![];
Self {
@@ -140,13 +154,13 @@ impl ProjectRoot {
}
}
-fn append_dir(dir: &Url, path: &str) -> Result<Url, url::ParseError> {
+fn append_dir(dir: &Url, path: &str, workspace: &Workspace) -> Result<Url, url::ParseError> {
let mut path = String::from(path);
if !path.ends_with('/') {
path.push('/');
}
- dir.join(&path)
+ util::expand_relative_path(&path, dir, workspace.folders())
}
impl std::fmt::Debug for ProjectRoot {
diff --git a/support/texlab/crates/base-db/src/document.rs b/support/texlab/crates/base-db/src/document.rs
index 25607a1bf5..7a388129fd 100644
--- a/support/texlab/crates/base-db/src/document.rs
+++ b/support/texlab/crates/base-db/src/document.rs
@@ -77,7 +77,12 @@ impl Document {
}
Language::Root => DocumentData::Root,
Language::Latexmkrc => {
- let data = parser::parse_latexmkrc(&text).unwrap_or_default();
+ let data = path
+ .as_deref()
+ .and_then(|path| path.parent())
+ .and_then(|dir| parser::parse_latexmkrc(&text, dir).ok())
+ .unwrap_or_default();
+
DocumentData::Latexmkrc(data)
}
Language::Tectonic => DocumentData::Tectonic,
diff --git a/support/texlab/crates/base-db/src/semantics.rs b/support/texlab/crates/base-db/src/semantics.rs
index f83d758cf7..a2f0de291f 100644
--- a/support/texlab/crates/base-db/src/semantics.rs
+++ b/support/texlab/crates/base-db/src/semantics.rs
@@ -1,3 +1,5 @@
+use rowan::{TextLen, TextRange};
+
pub mod auxiliary;
pub mod bib;
pub mod tex;
@@ -19,6 +21,13 @@ impl Span {
range: rowan::TextRange::empty(offset),
}
}
+
+ pub fn command<L: rowan::Language>(token: &rowan::SyntaxToken<L>) -> Self {
+ let range = token.text_range();
+ let range = TextRange::new(range.start() + "\\".text_len(), range.end());
+ let text = String::from(&token.text()[1..]);
+ Self::new(text, range)
+ }
}
impl std::fmt::Debug for Span {
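The new `Span::command` helper strips the leading backslash from a COMMAND_NAME token and shifts the range by one character. A minimal sketch of that arithmetic using only the rowan text types (the token text and offset are hypothetical):

```rust
use rowan::{TextLen, TextRange, TextSize};

fn main() {
    // Hypothetical COMMAND_NAME token: text "\foo" starting at offset 10.
    let token_text = "\\foo";
    let token_range = TextRange::at(TextSize::from(10), token_text.text_len()); // 10..14

    // Same computation as Span::command: drop the backslash from both
    // the text and the range.
    let range = TextRange::new(token_range.start() + "\\".text_len(), token_range.end());
    let text = String::from(&token_text[1..]);

    assert_eq!(text, "foo");
    assert_eq!(range, TextRange::new(TextSize::from(11), TextSize::from(14)));
}
```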
diff --git a/support/texlab/crates/base-db/src/semantics/tex.rs b/support/texlab/crates/base-db/src/semantics/tex.rs
index 659ec8c166..1ad8969a80 100644
--- a/support/texlab/crates/base-db/src/semantics/tex.rs
+++ b/support/texlab/crates/base-db/src/semantics/tex.rs
@@ -1,6 +1,7 @@
-use rowan::{ast::AstNode, TextLen, TextRange};
+use rowan::{ast::AstNode, TextRange};
use rustc_hash::FxHashSet;
use syntax::latex::{self, HasBrack, HasCurly};
+use titlecase::titlecase;
use super::Span;
@@ -26,10 +27,7 @@ impl Semantics {
}
latex::SyntaxElement::Token(token) => {
if token.kind() == latex::COMMAND_NAME {
- let range = token.text_range();
- let range = TextRange::new(range.start() + "\\".text_len(), range.end());
- let text = String::from(&token.text()[1..]);
- self.commands.push(Span { range, text });
+ self.commands.push(Span::command(&token));
}
}
};
@@ -257,18 +255,15 @@ impl Semantics {
}
fn process_theorem_definition(&mut self, theorem_def: latex::TheoremDefinition) {
- let Some(name) = theorem_def.name().and_then(|name| name.key()) else {
- return;
- };
-
- let Some(heading) = theorem_def.heading() else {
- return;
- };
-
- self.theorem_definitions.push(TheoremDefinition {
- name: Span::from(&name),
- heading,
- });
+ for name in theorem_def.names() {
+ let name = Span::from(&name);
+ let heading = theorem_def
+ .heading()
+ .unwrap_or_else(|| titlecase(&name.text));
+
+ self.theorem_definitions
+ .push(TheoremDefinition { name, heading });
+ }
}
fn process_graphics_path(&mut self, graphics_path: latex::GraphicsPath) {
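When `\declaretheorem` gives no explicit heading, `process_theorem_definition` now falls back to title-casing the environment name. A small sketch of that fallback, assuming the `titlecase` 3.0 crate added in Cargo.toml (the theorem name is made up):

```rust
fn main() {
    // Hypothetical theorem name declared without an explicit heading.
    let name = "main theorem";

    // Same fallback as process_theorem_definition: title-case the name.
    let heading = titlecase::titlecase(name);
    assert_eq!(heading, "Main Theorem");
}
```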
diff --git a/support/texlab/crates/base-db/src/util.rs b/support/texlab/crates/base-db/src/util.rs
index 068a0c9b2a..789eee9c85 100644
--- a/support/texlab/crates/base-db/src/util.rs
+++ b/support/texlab/crates/base-db/src/util.rs
@@ -1,8 +1,10 @@
+mod expand;
mod label;
pub mod queries;
mod regex_filter;
pub use self::{
+ expand::expand_relative_path,
label::{render_label, FloatKind, RenderedLabel, RenderedObject},
regex_filter::filter_regex_patterns,
};
diff --git a/support/texlab/crates/base-db/src/util/expand.rs b/support/texlab/crates/base-db/src/util/expand.rs
new file mode 100644
index 0000000000..a315737115
--- /dev/null
+++ b/support/texlab/crates/base-db/src/util/expand.rs
@@ -0,0 +1,31 @@
+use std::{borrow::Cow, path::PathBuf};
+
+use url::Url;
+
+use crate::deps::HOME_DIR;
+
+pub fn expand_relative_path(
+ path: &str,
+ current_dir: &Url,
+ workspace_folders: &[PathBuf],
+) -> Result<Url, url::ParseError> {
+ let home_dir = HOME_DIR.as_ref().and_then(|dir| dir.to_str());
+
+ let workspace_folder = workspace_folders.iter().find_map(|folder| {
+ let current_dir = current_dir.to_file_path().ok()?;
+ if current_dir.starts_with(folder) {
+ Some(folder.to_str()?)
+ } else {
+ None
+ }
+ });
+
+ let expand_var = |variable: &str| match variable {
+ "userHome" => home_dir.map(Cow::Borrowed),
+ "workspaceFolder" => Some(Cow::Borrowed(workspace_folder.unwrap_or("."))),
+ _ => std::env::var(variable).ok().map(Cow::Owned),
+ };
+
+ let path = shellexpand::full_with_context_no_errors(&path, || home_dir, expand_var);
+ current_dir.join(&path)
+}
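A rough standalone sketch of what `expand_relative_path` does with the `${workspaceFolder}` placeholder, using the same `shellexpand` and `url` calls as above (the base URL and paths are made up for illustration):

```rust
use std::borrow::Cow;
use url::Url;

fn main() -> Result<(), url::ParseError> {
    // Hypothetical source directory and configured aux-dir pattern.
    let src_dir = Url::parse("file:///home/user/thesis/")?;
    let pattern = "${workspaceFolder}/build/";

    // Expand `~`, `${workspaceFolder}`, and environment variables, ignoring
    // unknown variables instead of failing (the *_no_errors variant).
    let expanded = shellexpand::full_with_context_no_errors(
        pattern,
        || Some("/home/user"),
        |var| match var {
            "workspaceFolder" => Some(Cow::Borrowed("/home/user/thesis")),
            _ => std::env::var(var).ok().map(Cow::Owned),
        },
    );

    // Join the expanded path onto the base URL, as expand_relative_path does.
    // An absolute expansion replaces the path of the base URL entirely.
    let resolved = src_dir.join(&expanded)?;
    assert_eq!(resolved.as_str(), "file:///home/user/thesis/build/");
    Ok(())
}
```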
diff --git a/support/texlab/crates/base-db/src/workspace.rs b/support/texlab/crates/base-db/src/workspace.rs
index 1afd4712e9..75674dcf8d 100644
--- a/support/texlab/crates/base-db/src/workspace.rs
+++ b/support/texlab/crates/base-db/src/workspace.rs
@@ -29,11 +29,22 @@ impl Workspace {
self.documents.get(key)
}
- pub fn lookup_path(&self, path: &Path) -> Option<&Document> {
+ pub fn lookup_file(&self, path: &Path) -> Option<&Document> {
self.iter()
.find(|document| document.path.as_deref() == Some(path))
}
+ pub fn lookup_file_or_dir<'a>(
+ &'a self,
+ file_or_dir: &'a Path,
+ ) -> impl Iterator<Item = &'a Document> + '_ {
+ self.iter().filter(move |doc| {
+ doc.path
+ .as_deref()
+ .map_or(false, |p| p.starts_with(file_or_dir))
+ })
+ }
+
pub fn iter(&self) -> impl Iterator<Item = &Document> + '_ {
self.documents.iter()
}
@@ -50,6 +61,10 @@ impl Workspace {
&self.graphs
}
+ pub fn folders(&self) -> &[PathBuf] {
+ &self.folders
+ }
+
pub fn open(
&mut self,
uri: Url,
@@ -90,7 +105,7 @@ impl Workspace {
Owner::Server
};
- if let Some(document) = self.lookup_path(path) {
+ if let Some(document) = self.lookup_file(path) {
if document.text == text {
return Ok(());
}
@@ -173,6 +188,7 @@ impl Workspace {
}
pub fn remove(&mut self, uri: &Url) {
+ log::info!("Removing moved or deleted document: {uri}");
self.documents.remove(uri);
}
diff --git a/support/texlab/crates/bibfmt/Cargo.toml b/support/texlab/crates/bibfmt/Cargo.toml
index 686e871d58..a712908d7d 100644
--- a/support/texlab/crates/bibfmt/Cargo.toml
+++ b/support/texlab/crates/bibfmt/Cargo.toml
@@ -15,5 +15,5 @@ line-index = { path = "../line-index" }
doctest = false
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
parser = { path = "../parser" }
diff --git a/support/texlab/crates/bibtex-utils/Cargo.toml b/support/texlab/crates/bibtex-utils/Cargo.toml
index f0158c18a6..97e13517f5 100644
--- a/support/texlab/crates/bibtex-utils/Cargo.toml
+++ b/support/texlab/crates/bibtex-utils/Cargo.toml
@@ -8,8 +8,8 @@ rust-version.workspace = true
[dependencies]
chrono = { version = "0.4.26", default-features = false, features = ["std"] }
-human_name = "2.0.2"
-itertools = "0.12.0"
+human_name = "2.0.3"
+itertools = "0.12.1"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
@@ -19,5 +19,5 @@ unicode-normalization = "0.1.23"
doctest = false
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
parser = { path = "../parser" }
diff --git a/support/texlab/crates/citeproc/Cargo.toml b/support/texlab/crates/citeproc/Cargo.toml
index bfc24fc83d..9e187f88e6 100644
--- a/support/texlab/crates/citeproc/Cargo.toml
+++ b/support/texlab/crates/citeproc/Cargo.toml
@@ -10,11 +10,11 @@ rust-version.workspace = true
bibtex-utils = { path = "../bibtex-utils" }
base-db = { path = "../base-db" }
isocountry = "0.3.2"
-itertools = "0.12.0"
+itertools = "0.12.1"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
-titlecase = "2.2.1"
+titlecase = "3.0.0"
unicode-normalization = "0.1.23"
url = "2.5.0"
@@ -22,5 +22,5 @@ url = "2.5.0"
doctest = false
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
parser = { path = "../parser" }
diff --git a/support/texlab/crates/commands/Cargo.toml b/support/texlab/crates/commands/Cargo.toml
index bebcde27af..c726e5f0d5 100644
--- a/support/texlab/crates/commands/Cargo.toml
+++ b/support/texlab/crates/commands/Cargo.toml
@@ -7,22 +7,22 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-anyhow = "1.0.72"
+anyhow = "1.0.82"
base-db = { path = "../base-db" }
-bstr = "1.9.0"
+bstr = "1.9.1"
crossbeam-channel = "0.5.12"
-itertools = "0.12.0"
+itertools = "0.12.1"
libc = "0.2.153"
log = "0.4.21"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
-thiserror = "1.0.58"
+thiserror = "1.0.59"
url = "2.5.0"
[dev-dependencies]
distro = { path = "../distro" }
-expect-test = "1.4.1"
+expect-test = "1.5.0"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/completion-data/Cargo.toml b/support/texlab/crates/completion-data/Cargo.toml
index 37ecf1b3cf..59b707e474 100644
--- a/support/texlab/crates/completion-data/Cargo.toml
+++ b/support/texlab/crates/completion-data/Cargo.toml
@@ -7,12 +7,12 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-flate2 = "1.0.28"
-itertools = "0.12.0"
+flate2 = "1.0.29"
+itertools = "0.12.1"
once_cell = "1.19.0"
rustc-hash = "1.1.0"
serde = { version = "1.0.195", features = ["derive"] }
-serde_json = "1.0.114"
+serde_json = "1.0.115"
[lib]
doctest = false
diff --git a/support/texlab/crates/completion/Cargo.toml b/support/texlab/crates/completion/Cargo.toml
index fbdd2d7201..e28e04dfc7 100644
--- a/support/texlab/crates/completion/Cargo.toml
+++ b/support/texlab/crates/completion/Cargo.toml
@@ -11,7 +11,7 @@ base-db = { path = "../base-db" }
completion-data = { path = "../completion-data" }
fuzzy-matcher = { version = "0.3.7", features = ["compact"] }
line-index = { path = "../line-index" }
-rayon = "1.8.1"
+rayon = "1.10.0"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
@@ -20,7 +20,7 @@ url = "2.5.0"
[dev-dependencies]
criterion = "0.5.1"
distro = { path = "../distro" }
-expect-test = "1.4.1"
+expect-test = "1.5.0"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/completion/src/providers/include.rs b/support/texlab/crates/completion/src/providers/include.rs
index 6246624733..60c432d097 100644
--- a/support/texlab/crates/completion/src/providers/include.rs
+++ b/support/texlab/crates/completion/src/providers/include.rs
@@ -1,11 +1,11 @@
use std::{
- fs,
+ fs::{self},
path::{Path, PathBuf},
};
use base_db::{
deps::{self, ProjectRoot},
- DocumentData, FeatureParams,
+ util, DocumentData, FeatureParams,
};
use rowan::{ast::AstNode, TextLen, TextRange};
use syntax::latex;
@@ -121,10 +121,8 @@ fn current_dir(
.map_or(params.document, Clone::clone);
let root = ProjectRoot::walk_and_find(workspace, &parent.dir);
- let path = root.src_dir.to_file_path().ok()?;
-
- let mut path = PathBuf::from(path.to_str()?.replace('\\', "/"));
+ let mut path = PathBuf::new();
if let Some(graphics_path) = graphics_path {
path.push(graphics_path);
}
@@ -136,7 +134,10 @@ fn current_dir(
}
}
- Some(path)
+ let current_dir =
+ util::expand_relative_path(path.to_str()?, &root.src_dir, workspace.folders()).ok()?;
+
+ current_dir.to_file_path().ok()
}
fn is_included(file: &Path, allowed_extensions: &[&str]) -> bool {
diff --git a/support/texlab/crates/completion/src/tests.rs b/support/texlab/crates/completion/src/tests.rs
index f41c9f5e6b..c6bed6e08e 100644
--- a/support/texlab/crates/completion/src/tests.rs
+++ b/support/texlab/crates/completion/src/tests.rs
@@ -1677,6 +1677,52 @@ fn theorem_end() {
}
#[test]
+fn theorem_begin_multiple() {
+ check(
+ r#"
+%! main.tex
+\declaretheorem[sibling=table, style=thmbox]{def1, def2, def3, def4, def5}
+\begin{def
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "def1",
+ package: "<user>",
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "def2",
+ package: "<user>",
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "def3",
+ package: "<user>",
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "def4",
+ package: "<user>",
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "def5",
+ package: "<user>",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
fn tikz_library_open_brace() {
check(
r#"
diff --git a/support/texlab/crates/definition/Cargo.toml b/support/texlab/crates/definition/Cargo.toml
index d8c4aa3a49..63ba15c308 100644
--- a/support/texlab/crates/definition/Cargo.toml
+++ b/support/texlab/crates/definition/Cargo.toml
@@ -13,7 +13,7 @@ rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
[dev-dependencies]
-itertools = "0.12.0"
+itertools = "0.12.1"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/definition/src/command.rs b/support/texlab/crates/definition/src/command.rs
index d618ca9ddf..3681cdbaa0 100644
--- a/support/texlab/crates/definition/src/command.rs
+++ b/support/texlab/crates/definition/src/command.rs
@@ -1,5 +1,5 @@
use base_db::DocumentData;
-use rowan::ast::AstNode;
+use rowan::{ast::AstNode, TextRange};
use syntax::latex;
use crate::DefinitionContext;
@@ -24,18 +24,16 @@ pub(super) fn goto_definition(context: &mut DefinitionContext) -> Option<()> {
let results = data
.root_node()
.descendants()
- .filter_map(latex::CommandDefinition::cast)
- .filter(|def| {
- def.name()
- .and_then(|name| name.command())
- .map_or(false, |node| node.text() == name.text())
+ .filter_map(|node| {
+ process_old_definition(node.clone()).or_else(|| process_new_definition(node))
})
- .filter_map(|def| {
+ .filter(|(_, command)| command.text() == name.text())
+ .filter_map(|(target_range, command)| {
Some(DefinitionResult {
origin_selection_range,
target: document,
- target_range: latex::small_range(&def),
- target_selection_range: def.name()?.command()?.text_range(),
+ target_range,
+ target_selection_range: command.text_range(),
})
});
@@ -44,3 +42,15 @@ pub(super) fn goto_definition(context: &mut DefinitionContext) -> Option<()> {
Some(())
}
+
+fn process_old_definition(node: latex::SyntaxNode) -> Option<(TextRange, latex::SyntaxToken)> {
+ let node = latex::OldCommandDefinition::cast(node)?;
+ let name = node.name()?;
+ Some((latex::small_range(&node), name))
+}
+
+fn process_new_definition(node: latex::SyntaxNode) -> Option<(TextRange, latex::SyntaxToken)> {
+ let node = latex::NewCommandDefinition::cast(node)?;
+ let name = node.name()?.command()?;
+ Some((latex::small_range(&node), name))
+}
diff --git a/support/texlab/crates/definition/src/tests.rs b/support/texlab/crates/definition/src/tests.rs
index 54b0e38f30..26f7c0dc3a 100644
--- a/support/texlab/crates/definition/src/tests.rs
+++ b/support/texlab/crates/definition/src/tests.rs
@@ -36,7 +36,21 @@ fn check(input: &str) {
}
#[test]
-fn test_command_definition() {
+fn test_old_command_definition() {
+ check(
+ r#"
+%! main.tex
+\def\foo{foo}
+ ^^^^
+^^^^^^^^
+\foo
+ |
+^^^^"#,
+ )
+}
+
+#[test]
+fn test_new_command_definition() {
check(
r#"
%! main.tex
diff --git a/support/texlab/crates/diagnostics/Cargo.toml b/support/texlab/crates/diagnostics/Cargo.toml
index 5a2a7a7e28..1cb09fdec9 100644
--- a/support/texlab/crates/diagnostics/Cargo.toml
+++ b/support/texlab/crates/diagnostics/Cargo.toml
@@ -8,21 +8,21 @@ rust-version.workspace = true
[dependencies]
base-db = { path = "../base-db" }
-encoding_rs = "0.8.33"
+encoding_rs = "0.8.34"
encoding_rs_io = "0.1.7"
-itertools = "0.12.0"
+itertools = "0.12.1"
line-index = { path = "../line-index" }
log = "0.4.21"
multimap = "0.10.0"
once_cell = "1.19.0"
-regex = "1.10.2"
+regex = "1.10.4"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
url = "2.5.0"
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/diagnostics/src/citations.rs b/support/texlab/crates/diagnostics/src/citations.rs
index 84b9a10b1c..0f09f589be 100644
--- a/support/texlab/crates/diagnostics/src/citations.rs
+++ b/support/texlab/crates/diagnostics/src/citations.rs
@@ -24,7 +24,7 @@ pub fn detect_undefined_citations<'a>(
for citation in &data.semantics.citations {
let name = citation.name_text();
- if name != "*" && !entries.contains(name) {
+ if name != "*" && !entries.contains(name) && !name.contains("#") {
let diagnostic = Diagnostic::Tex(citation.name.range, TexError::UndefinedCitation);
results
.entry(document.uri.clone())
diff --git a/support/texlab/crates/diagnostics/src/manager.rs b/support/texlab/crates/diagnostics/src/manager.rs
index 81347410c1..3573bc40f2 100644
--- a/support/texlab/crates/diagnostics/src/manager.rs
+++ b/support/texlab/crates/diagnostics/src/manager.rs
@@ -1,6 +1,6 @@
use base_db::{deps::Project, util::filter_regex_patterns, Document, Owner, Workspace};
use multimap::MultiMap;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
use url::Url;
use crate::types::Diagnostic;
@@ -33,6 +33,18 @@ impl Manager {
self.chktex.insert(uri, diagnostics);
}
+ /// Removes stale diagnostics for documents that are no longer part of the workspace.
+ pub fn cleanup(&mut self, workspace: &Workspace) {
+ let uris = workspace
+ .iter()
+ .map(|doc| &doc.uri)
+ .collect::<FxHashSet<_>>();
+
+ self.grammar.retain(|uri, _| uris.contains(uri));
+ self.chktex.retain(|uri, _| uris.contains(uri));
+ self.build_log.retain(|uri, _| uris.contains(uri));
+ }
+
/// Returns all filtered diagnostics for the given workspace.
pub fn get(&self, workspace: &Workspace) -> FxHashMap<Url, Vec<Diagnostic>> {
let mut results: FxHashMap<Url, Vec<Diagnostic>> = FxHashMap::default();
diff --git a/support/texlab/crates/distro/Cargo.toml b/support/texlab/crates/distro/Cargo.toml
index c3a03105c7..eefcea2b49 100644
--- a/support/texlab/crates/distro/Cargo.toml
+++ b/support/texlab/crates/distro/Cargo.toml
@@ -7,7 +7,7 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-anyhow = "1.0.72"
+anyhow = "1.0.82"
rustc-hash = "1.1.0"
[lib]
diff --git a/support/texlab/crates/folding/Cargo.toml b/support/texlab/crates/folding/Cargo.toml
index 500e80f5b2..684ddeb1b9 100644
--- a/support/texlab/crates/folding/Cargo.toml
+++ b/support/texlab/crates/folding/Cargo.toml
@@ -12,7 +12,7 @@ rowan = "0.15.15"
syntax = { path = "../syntax" }
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/highlights/Cargo.toml b/support/texlab/crates/highlights/Cargo.toml
index a7f58d1d19..785c282e3a 100644
--- a/support/texlab/crates/highlights/Cargo.toml
+++ b/support/texlab/crates/highlights/Cargo.toml
@@ -11,7 +11,7 @@ base-db = { path = "../base-db" }
rowan = "0.15.15"
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/hover/Cargo.toml b/support/texlab/crates/hover/Cargo.toml
index ebc2522c08..946ed734fe 100644
--- a/support/texlab/crates/hover/Cargo.toml
+++ b/support/texlab/crates/hover/Cargo.toml
@@ -15,7 +15,7 @@ rowan = "0.15.15"
syntax = { path = "../syntax" }
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/inlay-hints/Cargo.toml b/support/texlab/crates/inlay-hints/Cargo.toml
index 5547f5649a..ede4ca6188 100644
--- a/support/texlab/crates/inlay-hints/Cargo.toml
+++ b/support/texlab/crates/inlay-hints/Cargo.toml
@@ -13,7 +13,7 @@ rustc-hash = "1.1.0"
[dev-dependencies]
test-utils = { path = "../test-utils" }
-expect-test = "1.4.1"
+expect-test = "1.5.0"
[lib]
doctest = false
diff --git a/support/texlab/crates/ipc/Cargo.toml b/support/texlab/crates/ipc/Cargo.toml
new file mode 100644
index 0000000000..ae31dcaf55
--- /dev/null
+++ b/support/texlab/crates/ipc/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "ipc"
+version = "0.0.0"
+license.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+crossbeam-channel = "0.5.12"
+serde = "1.0.199"
+serde_json = "1.0.116"
+log = "0.4.21"
+uds_windows = "1.1.0"
+
+[lib]
+doctest = false
+
+[dev-dependencies]
+expect-test = "1.5.0"
+parser = { path = "../parser" }
diff --git a/support/texlab/crates/ipc/src/lib.rs b/support/texlab/crates/ipc/src/lib.rs
new file mode 100644
index 0000000000..3da3239064
--- /dev/null
+++ b/support/texlab/crates/ipc/src/lib.rs
@@ -0,0 +1,54 @@
+use std::{
+ io::{self, BufRead, BufReader, BufWriter, Read},
+ path::PathBuf,
+};
+
+use serde::{de::DeserializeOwned, Serialize};
+
+#[cfg(unix)]
+use std::os::unix::net::{UnixListener, UnixStream};
+
+#[cfg(windows)]
+use uds_windows::{UnixListener, UnixStream};
+
+fn socket_path() -> PathBuf {
+ std::env::temp_dir().join("texlab.sock")
+}
+
+pub fn send_request<T: Serialize>(msg: T) -> io::Result<()> {
+ let stream = UnixStream::connect(socket_path())?;
+ let mut conn = BufWriter::new(stream);
+ serde_json::to_writer(&mut conn, &msg)?;
+ Ok(())
+}
+
+pub fn spawn_server<T, F>(mut event_handler: F) -> io::Result<()>
+where
+ T: DeserializeOwned,
+ F: FnMut(T) + Send + 'static,
+{
+ let socket_path = socket_path();
+ let _ = std::fs::remove_file(&socket_path);
+ let listener = UnixListener::bind(socket_path)?;
+
+ std::thread::spawn(move || {
+ for conn in listener.incoming().flatten() {
+ let _ = handle_request(conn, &mut event_handler);
+ }
+ });
+
+ Ok(())
+}
+
+fn handle_request<T, F>(conn: impl Read, event_handler: &mut F) -> io::Result<()>
+where
+ T: DeserializeOwned,
+ F: FnMut(T),
+{
+ let mut conn = BufReader::new(conn);
+ let mut line = String::new();
+ conn.read_line(&mut line)?;
+ let msg = serde_json::from_str(&line)?;
+ event_handler(msg);
+ Ok(())
+}
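A quick sketch of how the new `ipc` crate is meant to be used: one side registers a handler with `spawn_server`, the other sends a JSON message with `send_request` over the temp-dir socket. The message type is hypothetical and the example assumes the `ipc` crate from this diff plus `serde` with the `derive` feature as dependencies:

```rust
use serde::{Deserialize, Serialize};

// Hypothetical message type; anything serde-serializable works, since the
// payload travels as a single line of JSON over the Unix socket.
#[derive(Debug, Serialize, Deserialize)]
struct BuildRequest {
    uri: String,
}

fn main() -> std::io::Result<()> {
    // Server side: handle incoming requests on a background thread.
    ipc::spawn_server(|msg: BuildRequest| {
        println!("received build request for {}", msg.uri);
    })?;

    // Client side (typically another process): connect and send one message.
    ipc::send_request(BuildRequest {
        uri: "file:///tmp/main.tex".into(),
    })?;

    // Give the listener thread a moment to pick up the message before exiting.
    std::thread::sleep(std::time::Duration::from_millis(100));
    Ok(())
}
```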
diff --git a/support/texlab/crates/links/Cargo.toml b/support/texlab/crates/links/Cargo.toml
index d3df0c95f8..74378483a9 100644
--- a/support/texlab/crates/links/Cargo.toml
+++ b/support/texlab/crates/links/Cargo.toml
@@ -11,7 +11,7 @@ base-db = { path = "../base-db" }
[dev-dependencies]
test-utils = { path = "../test-utils" }
-expect-test = "1.4.1"
+expect-test = "1.5.0"
[lib]
doctest = false
diff --git a/support/texlab/crates/parser/Cargo.toml b/support/texlab/crates/parser/Cargo.toml
index 0fd8889899..39fa34ef63 100644
--- a/support/texlab/crates/parser/Cargo.toml
+++ b/support/texlab/crates/parser/Cargo.toml
@@ -7,16 +7,19 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-logos = "0.13.0"
+log = "0.4.21"
+logos = "0.14.0"
once_cell = "1.19.0"
-regex = "1.10.2"
+pathdiff = "0.2.1"
+regex = "1.10.4"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
tempfile = "3.10.1"
+versions = "6.2.0"
[dev-dependencies]
-expect-test = "1.4.1"
+expect-test = "1.5.0"
[lib]
doctest = false
diff --git a/support/texlab/crates/parser/src/bibtex.rs b/support/texlab/crates/parser/src/bibtex.rs
index fda0d4a0ad..e2cde27dec 100644
--- a/support/texlab/crates/parser/src/bibtex.rs
+++ b/support/texlab/crates/parser/src/bibtex.rs
@@ -325,7 +325,7 @@ enum ValueToken {
#[token("\"")]
Quote,
- #[regex(r"\d+", priority = 2)]
+ #[regex(r"\d+", priority = 3)]
Integer,
#[regex(r#"[^\s"\{\},#]+"#)]
@@ -349,7 +349,7 @@ enum ContentToken {
#[token("\"")]
Quote,
- #[regex(r"\d+", priority = 2)]
+ #[regex(r"\d+", priority = 3)]
Integer,
#[token(r#"~"#)]
diff --git a/support/texlab/crates/parser/src/config.rs b/support/texlab/crates/parser/src/config.rs
index 6537d21b0d..f81083f132 100644
--- a/support/texlab/crates/parser/src/config.rs
+++ b/support/texlab/crates/parser/src/config.rs
@@ -7,6 +7,7 @@ pub struct SyntaxConfig {
pub enum_environments: FxHashSet<String>,
pub verbatim_environments: FxHashSet<String>,
pub citation_commands: FxHashSet<String>,
+ pub label_definition_commands: FxHashSet<String>,
pub label_reference_commands: FxHashSet<String>,
}
@@ -32,6 +33,11 @@ impl Default for SyntaxConfig {
.map(ToString::to_string)
.collect();
+ let label_definition_commands = DEFAULT_LABEL_DEFINITION_COMMANDS
+ .iter()
+ .map(ToString::to_string)
+ .collect();
+
let label_reference_commands = DEFAULT_LABEL_REFERENCE_COMMANDS
.iter()
.map(ToString::to_string)
@@ -43,6 +49,7 @@ impl Default for SyntaxConfig {
enum_environments,
verbatim_environments,
citation_commands,
+ label_definition_commands,
label_reference_commands,
}
}
@@ -163,6 +170,8 @@ static DEFAULT_CITATION_COMMANDS: &[&str] = &[
"citeA*",
];
+static DEFAULT_LABEL_DEFINITION_COMMANDS: &[&str] = &["label"];
+
static DEFAULT_LABEL_REFERENCE_COMMANDS: &[&str] = &[
"ref",
"vref",
diff --git a/support/texlab/crates/parser/src/latex.rs b/support/texlab/crates/parser/src/latex.rs
index a114d3ecba..669ba82f99 100644
--- a/support/texlab/crates/parser/src/latex.rs
+++ b/support/texlab/crates/parser/src/latex.rs
@@ -139,7 +139,8 @@ impl<'a> Parser<'a> {
CommandName::LabelReference => self.label_reference(),
CommandName::LabelReferenceRange => self.label_reference_range(),
CommandName::LabelNumber => self.label_number(),
- CommandName::CommandDefinition => self.command_definition(),
+ CommandName::OldCommandDefinition => self.old_command_definition(),
+ CommandName::NewCommandDefinition => self.new_command_definition(),
CommandName::MathOperator => self.math_operator(),
CommandName::GlossaryEntryDefinition => self.glossary_entry_definition(),
CommandName::GlossaryEntryReference => self.glossary_entry_reference(),
@@ -907,8 +908,21 @@ impl<'a> Parser<'a> {
self.builder.finish_node();
}
- fn command_definition(&mut self) {
- self.builder.start_node(COMMAND_DEFINITION.into());
+ fn old_command_definition(&mut self) {
+ self.builder.start_node(OLD_COMMAND_DEFINITION.into());
+ self.eat();
+ self.trivia();
+
+ if let Some(Token::CommandName(_)) = self.lexer.peek() {
+ self.eat();
+ self.trivia();
+ }
+
+ self.builder.finish_node();
+ }
+
+ fn new_command_definition(&mut self) {
+ self.builder.start_node(NEW_COMMAND_DEFINITION.into());
self.eat();
self.trivia();
@@ -1071,7 +1085,7 @@ impl<'a> Parser<'a> {
}
if self.lexer.peek() == Some(Token::LCurly) {
- self.curly_group_word();
+ self.curly_group_word_list();
}
self.builder.finish_node();
diff --git a/support/texlab/crates/parser/src/latex/lexer/commands.rs b/support/texlab/crates/parser/src/latex/lexer/commands.rs
index 95eef4becd..4d1a7ca320 100644
--- a/support/texlab/crates/parser/src/latex/lexer/commands.rs
+++ b/support/texlab/crates/parser/src/latex/lexer/commands.rs
@@ -29,15 +29,26 @@ pub fn classify(name: &str, config: &SyntaxConfig) -> CommandName {
"import" | "subimport" | "inputfrom" | "subinputfrom" | "subincludefrom" => {
CommandName::Import
}
- "label" => CommandName::LabelDefinition,
"crefrange" | "crefrange*" | "Crefrange" | "Crefrange*" => CommandName::LabelReferenceRange,
"newlabel" => CommandName::LabelNumber,
+ "def" | "let" => CommandName::OldCommandDefinition,
"newcommand"
| "newcommand*"
| "renewcommand"
| "renewcommand*"
| "DeclareRobustCommand"
- | "DeclareRobustCommand*" => CommandName::CommandDefinition,
+ | "DeclareRobustCommand*"
+ | "NewDocumentCommand"
+ | "RenewDocumentCommand"
+ | "ProvideDocumentCommand"
+ | "DeclareDocumentCommand"
+ | "NewExpandableDocumentCommand"
+ | "RenewExpandableDocumentCommand"
+ | "ProvideExpandableDocumentCommand"
+ | "DeclareExpandableDocumentCommand"
+ | "NewCommandCopy"
+ | "RenewCommandCopy"
+ | "DeclareCommandCopy" => CommandName::NewCommandDefinition,
"DeclareMathOperator" | "DeclareMathOperator*" => CommandName::MathOperator,
"newglossaryentry" => CommandName::GlossaryEntryDefinition,
"gls" | "Gls" | "GLS" | "glspl" | "Glspl" | "GLSpl" | "glsdisp" | "glslink" | "glstext"
@@ -70,14 +81,24 @@ pub fn classify(name: &str, config: &SyntaxConfig) -> CommandName {
"definecolor" => CommandName::ColorDefinition,
"definecolorset" => CommandName::ColorSetDefinition,
"usepgflibrary" | "usetikzlibrary" => CommandName::TikzLibraryImport,
- "newenvironment" | "newenvironment*" | "renewenvironment" | "renewenvironment*" => {
- CommandName::EnvironmentDefinition
- }
+ "newenvironment"
+ | "newenvironment*"
+ | "renewenvironment"
+ | "renewenvironment*"
+ | "NewDocumentEnvironment"
+ | "RenewDocumentEnvironment"
+ | "ProvideDocumentEnvironment"
+ | "DeclareDocumentEnvironment"
+ | "NewEnvironmentCopy"
+ | "RenewEnvironmentCopy"
+ | "DeclareEnvironmentCopy" => CommandName::EnvironmentDefinition,
"graphicspath" => CommandName::GraphicsPath,
"iffalse" => CommandName::BeginBlockComment,
"fi" => CommandName::EndBlockComment,
"verb" => CommandName::VerbatimBlock,
+
_ if config.citation_commands.contains(name) => CommandName::Citation,
+ _ if config.label_definition_commands.contains(name) => CommandName::LabelDefinition,
_ if config.label_reference_commands.contains(name) => CommandName::LabelReference,
_ => CommandName::Generic,
}
diff --git a/support/texlab/crates/parser/src/latex/lexer/types.rs b/support/texlab/crates/parser/src/latex/lexer/types.rs
index 27ae874d9d..896ce01c05 100644
--- a/support/texlab/crates/parser/src/latex/lexer/types.rs
+++ b/support/texlab/crates/parser/src/latex/lexer/types.rs
@@ -119,7 +119,8 @@ pub enum CommandName {
LabelReference,
LabelReferenceRange,
LabelNumber,
- CommandDefinition,
+ OldCommandDefinition,
+ NewCommandDefinition,
MathOperator,
GlossaryEntryDefinition,
GlossaryEntryReference,
diff --git a/support/texlab/crates/parser/src/latex/tests.rs b/support/texlab/crates/parser/src/latex/tests.rs
index 9c086c43fd..2c9dc9335f 100644
--- a/support/texlab/crates/parser/src/latex/tests.rs
+++ b/support/texlab/crates/parser/src/latex/tests.rs
@@ -560,21 +560,21 @@ fn test_command_definition_no_argc() {
check(
r#"\newcommand{\foo}{foo}"#,
expect![[r#"
- ROOT@0..22
- PREAMBLE@0..22
- COMMAND_DEFINITION@0..22
- COMMAND_NAME@0..11 "\\newcommand"
- CURLY_GROUP_COMMAND@11..17
- L_CURLY@11..12 "{"
- COMMAND_NAME@12..16 "\\foo"
- R_CURLY@16..17 "}"
- CURLY_GROUP@17..22
- L_CURLY@17..18 "{"
- TEXT@18..21
- WORD@18..21 "foo"
- R_CURLY@21..22 "}"
+ ROOT@0..22
+ PREAMBLE@0..22
+ NEW_COMMAND_DEFINITION@0..22
+ COMMAND_NAME@0..11 "\\newcommand"
+ CURLY_GROUP_COMMAND@11..17
+ L_CURLY@11..12 "{"
+ COMMAND_NAME@12..16 "\\foo"
+ R_CURLY@16..17 "}"
+ CURLY_GROUP@17..22
+ L_CURLY@17..18 "{"
+ TEXT@18..21
+ WORD@18..21 "foo"
+ R_CURLY@21..22 "}"
- "#]],
+ "#]],
);
}
@@ -583,16 +583,16 @@ fn test_command_definition_no_impl() {
check(
r#"\newcommand{\foo}"#,
expect![[r#"
- ROOT@0..17
- PREAMBLE@0..17
- COMMAND_DEFINITION@0..17
- COMMAND_NAME@0..11 "\\newcommand"
- CURLY_GROUP_COMMAND@11..17
- L_CURLY@11..12 "{"
- COMMAND_NAME@12..16 "\\foo"
- R_CURLY@16..17 "}"
+ ROOT@0..17
+ PREAMBLE@0..17
+ NEW_COMMAND_DEFINITION@0..17
+ COMMAND_NAME@0..11 "\\newcommand"
+ CURLY_GROUP_COMMAND@11..17
+ L_CURLY@11..12 "{"
+ COMMAND_NAME@12..16 "\\foo"
+ R_CURLY@16..17 "}"
- "#]],
+ "#]],
);
}
@@ -601,15 +601,15 @@ fn test_command_definition_no_impl_error() {
check(
r#"\newcommand{\foo"#,
expect![[r#"
- ROOT@0..16
- PREAMBLE@0..16
- COMMAND_DEFINITION@0..16
- COMMAND_NAME@0..11 "\\newcommand"
- CURLY_GROUP_COMMAND@11..16
- L_CURLY@11..12 "{"
- COMMAND_NAME@12..16 "\\foo"
+ ROOT@0..16
+ PREAMBLE@0..16
+ NEW_COMMAND_DEFINITION@0..16
+ COMMAND_NAME@0..11 "\\newcommand"
+ CURLY_GROUP_COMMAND@11..16
+ L_CURLY@11..12 "{"
+ COMMAND_NAME@12..16 "\\foo"
- "#]],
+ "#]],
);
}
@@ -618,31 +618,31 @@ fn test_command_definition_optional() {
check(
r#"\newcommand{\foo}[1][def]{#1}"#,
expect![[r##"
- ROOT@0..29
- PREAMBLE@0..29
- COMMAND_DEFINITION@0..29
- COMMAND_NAME@0..11 "\\newcommand"
- CURLY_GROUP_COMMAND@11..17
- L_CURLY@11..12 "{"
- COMMAND_NAME@12..16 "\\foo"
- R_CURLY@16..17 "}"
- BRACK_GROUP_WORD@17..20
- L_BRACK@17..18 "["
- KEY@18..19
- WORD@18..19 "1"
- R_BRACK@19..20 "]"
- BRACK_GROUP@20..25
- L_BRACK@20..21 "["
- TEXT@21..24
- WORD@21..24 "def"
- R_BRACK@24..25 "]"
- CURLY_GROUP@25..29
- L_CURLY@25..26 "{"
- TEXT@26..28
- WORD@26..28 "#1"
- R_CURLY@28..29 "}"
+ ROOT@0..29
+ PREAMBLE@0..29
+ NEW_COMMAND_DEFINITION@0..29
+ COMMAND_NAME@0..11 "\\newcommand"
+ CURLY_GROUP_COMMAND@11..17
+ L_CURLY@11..12 "{"
+ COMMAND_NAME@12..16 "\\foo"
+ R_CURLY@16..17 "}"
+ BRACK_GROUP_WORD@17..20
+ L_BRACK@17..18 "["
+ KEY@18..19
+ WORD@18..19 "1"
+ R_BRACK@19..20 "]"
+ BRACK_GROUP@20..25
+ L_BRACK@20..21 "["
+ TEXT@21..24
+ WORD@21..24 "def"
+ R_BRACK@24..25 "]"
+ CURLY_GROUP@25..29
+ L_CURLY@25..26 "{"
+ TEXT@26..28
+ WORD@26..28 "#1"
+ R_CURLY@28..29 "}"
- "##]],
+ "##]],
);
}
@@ -651,27 +651,27 @@ fn test_command_definition_simple() {
check(
r#"\newcommand[1]{\id}{#1}"#,
expect![[r##"
- ROOT@0..23
- PREAMBLE@0..23
- COMMAND_DEFINITION@0..19
- COMMAND_NAME@0..11 "\\newcommand"
- BRACK_GROUP_WORD@11..14
- L_BRACK@11..12 "["
- KEY@12..13
- WORD@12..13 "1"
- R_BRACK@13..14 "]"
- CURLY_GROUP@14..19
- L_CURLY@14..15 "{"
- GENERIC_COMMAND@15..18
- COMMAND_NAME@15..18 "\\id"
- R_CURLY@18..19 "}"
- CURLY_GROUP@19..23
- L_CURLY@19..20 "{"
- TEXT@20..22
- WORD@20..22 "#1"
- R_CURLY@22..23 "}"
+ ROOT@0..23
+ PREAMBLE@0..23
+ NEW_COMMAND_DEFINITION@0..19
+ COMMAND_NAME@0..11 "\\newcommand"
+ BRACK_GROUP_WORD@11..14
+ L_BRACK@11..12 "["
+ KEY@12..13
+ WORD@12..13 "1"
+ R_BRACK@13..14 "]"
+ CURLY_GROUP@14..19
+ L_CURLY@14..15 "{"
+ GENERIC_COMMAND@15..18
+ COMMAND_NAME@15..18 "\\id"
+ R_CURLY@18..19 "}"
+ CURLY_GROUP@19..23
+ L_CURLY@19..20 "{"
+ TEXT@20..22
+ WORD@20..22 "#1"
+ R_CURLY@22..23 "}"
- "##]],
+ "##]],
);
}
@@ -682,7 +682,7 @@ fn test_command_definition_with_begin() {
expect![[r#"
ROOT@0..80
PREAMBLE@0..80
- COMMAND_DEFINITION@0..80
+ NEW_COMMAND_DEFINITION@0..80
COMMAND_NAME@0..11 "\\newcommand"
CURLY_GROUP_COMMAND@11..35
L_CURLY@11..12 "{"
@@ -2446,11 +2446,11 @@ fn test_issue_745() {
GENERIC_COMMAND@50..64
COMMAND_NAME@50..63 "\\ExplSyntaxOn"
LINE_BREAK@63..64 "\n"
- GENERIC_COMMAND@64..223
+ ENVIRONMENT_DEFINITION@64..200
COMMAND_NAME@64..87 "\\NewDocumentEnvironment"
- CURLY_GROUP@87..96
+ CURLY_GROUP_WORD@87..96
L_CURLY@87..88 "{"
- TEXT@88..95
+ KEY@88..95
WORD@88..95 "exptblr"
R_CURLY@95..96 "}"
CURLY_GROUP@96..107
@@ -2515,21 +2515,21 @@ fn test_issue_745() {
R_CURLY@194..195 "}"
LINE_BREAK@195..196 "\n"
WHITESPACE@196..200 " "
- CURLY_GROUP@200..223
- L_CURLY@200..201 "{"
- LINE_BREAK@201..202 "\n"
- WHITESPACE@202..206 " "
- GENERIC_COMMAND@206..221
- COMMAND_NAME@206..210 "\\end"
- CURLY_GROUP@210..221
- L_CURLY@210..211 "{"
- TEXT@211..215
- WORD@211..215 "tblr"
- R_CURLY@215..216 "}"
- LINE_BREAK@216..217 "\n"
- WHITESPACE@217..221 " "
- R_CURLY@221..222 "}"
- LINE_BREAK@222..223 "\n"
+ CURLY_GROUP@200..223
+ L_CURLY@200..201 "{"
+ LINE_BREAK@201..202 "\n"
+ WHITESPACE@202..206 " "
+ GENERIC_COMMAND@206..221
+ COMMAND_NAME@206..210 "\\end"
+ CURLY_GROUP@210..221
+ L_CURLY@210..211 "{"
+ TEXT@211..215
+ WORD@211..215 "tblr"
+ R_CURLY@215..216 "}"
+ LINE_BREAK@216..217 "\n"
+ WHITESPACE@217..221 " "
+ R_CURLY@221..222 "}"
+ LINE_BREAK@222..223 "\n"
GENERIC_COMMAND@223..239
COMMAND_NAME@223..237 "\\ExplSyntaxOff"
LINE_BREAK@237..239 "\n\n"
@@ -2681,7 +2681,7 @@ fn test_issue_857() {
expect![[r#"
ROOT@0..55
PREAMBLE@0..55
- COMMAND_DEFINITION@0..11
+ NEW_COMMAND_DEFINITION@0..11
COMMAND_NAME@0..11 "\\newcommand"
GENERIC_COMMAND@11..17
COMMAND_NAME@11..14 "\\ö"
@@ -2689,7 +2689,7 @@ fn test_issue_857() {
L_CURLY@14..15 "{"
R_CURLY@15..16 "}"
LINE_BREAK@16..17 "\n"
- COMMAND_DEFINITION@17..38
+ NEW_COMMAND_DEFINITION@17..38
COMMAND_NAME@17..28 "\\newcommand"
CURLY_GROUP_COMMAND@28..35
L_CURLY@28..29 "{"
@@ -2699,7 +2699,7 @@ fn test_issue_857() {
L_CURLY@35..36 "{"
R_CURLY@36..37 "}"
LINE_BREAK@37..38 "\n"
- COMMAND_DEFINITION@38..49
+ NEW_COMMAND_DEFINITION@38..49
COMMAND_NAME@38..49 "\\newcommand"
GENERIC_COMMAND@49..55
COMMAND_NAME@49..53 "\\123"
@@ -3536,7 +3536,7 @@ fn test_theorem_definition_thmtools() {
TEXT@32..35
WORD@32..35 "bar"
R_BRACK@35..36 "]"
- CURLY_GROUP_WORD@36..41
+ CURLY_GROUP_WORD_LIST@36..41
L_CURLY@36..37 "{"
KEY@37..40
WORD@37..40 "baz"
@@ -3547,6 +3547,61 @@ fn test_theorem_definition_thmtools() {
}
#[test]
+fn test_theorem_definition_thmtools_multiple() {
+ check(
+ r#"\declaretheorem[sibling=table, style=thmbox]{definition, theorem, lemma, corollary, proposition}"#,
+ expect![[r#"
+ ROOT@0..96
+ PREAMBLE@0..96
+ THEOREM_DEFINITION_THMTOOLS@0..96
+ COMMAND_NAME@0..15 "\\declaretheorem"
+ BRACK_GROUP_KEY_VALUE@15..44
+ L_BRACK@15..16 "["
+ KEY_VALUE_BODY@16..43
+ KEY_VALUE_PAIR@16..29
+ KEY@16..23
+ WORD@16..23 "sibling"
+ EQUALITY_SIGN@23..24 "="
+ VALUE@24..29
+ TEXT@24..29
+ WORD@24..29 "table"
+ COMMA@29..30 ","
+ WHITESPACE@30..31 " "
+ KEY_VALUE_PAIR@31..43
+ KEY@31..36
+ WORD@31..36 "style"
+ EQUALITY_SIGN@36..37 "="
+ VALUE@37..43
+ TEXT@37..43
+ WORD@37..43 "thmbox"
+ R_BRACK@43..44 "]"
+ CURLY_GROUP_WORD_LIST@44..96
+ L_CURLY@44..45 "{"
+ KEY@45..55
+ WORD@45..55 "definition"
+ COMMA@55..56 ","
+ WHITESPACE@56..57 " "
+ KEY@57..64
+ WORD@57..64 "theorem"
+ COMMA@64..65 ","
+ WHITESPACE@65..66 " "
+ KEY@66..71
+ WORD@66..71 "lemma"
+ COMMA@71..72 ","
+ WHITESPACE@72..73 " "
+ KEY@73..82
+ WORD@73..82 "corollary"
+ COMMA@82..83 ","
+ WHITESPACE@83..84 " "
+ KEY@84..95
+ WORD@84..95 "proposition"
+ R_CURLY@95..96 "}"
+
+ "#]],
+ );
+}
+
+#[test]
fn test_command_subscript() {
check(
r#"\foo_bar \foo_\bar"#,
diff --git a/support/texlab/crates/parser/src/latexmkrc.rs b/support/texlab/crates/parser/src/latexmkrc.rs
index 63c92bd353..6013a5f646 100644
--- a/support/texlab/crates/parser/src/latexmkrc.rs
+++ b/support/texlab/crates/parser/src/latexmkrc.rs
@@ -1,52 +1,157 @@
+use std::path::{Path, PathBuf};
+
use syntax::latexmkrc::LatexmkrcData;
-use tempfile::tempdir;
-
-pub fn parse_latexmkrc(_input: &str) -> std::io::Result<LatexmkrcData> {
- let temp_dir = tempdir()?;
- let non_existent_tex = temp_dir.path().join("NONEXISTENT.tex");
-
- // Run `latexmk -dir-report $TMPDIR/NONEXISTENT.tex` to obtain out_dir
- // and aux_dir values. We pass nonexistent file to prevent latexmk from
- // building anything, since we need this invocation only to extract the
- // -dir-report variables.
- //
- // In the future, latexmk plans to implement -dir-report-only option and we
- // won't have to resort to this hack with NONEXISTENT.tex.
- let output = std::process::Command::new("latexmk")
- .arg("-dir-report")
- .arg(non_existent_tex)
- .output()?;
- let stderr = String::from_utf8_lossy(&output.stderr);
+mod v483 {
+ use std::path::Path;
+
+ use syntax::latexmkrc::LatexmkrcData;
+ use tempfile::tempdir;
+
+ use crate::latexmkrc::change_root;
+
+ pub fn parse_latexmkrc(input: &str, src_dir: &Path) -> std::io::Result<LatexmkrcData> {
+ let temp_dir = tempdir()?;
+ let non_existent_tex = temp_dir.path().join("NONEXISTENT.tex");
+ std::fs::write(temp_dir.path().join(".latexmkrc"), input)?;
+
+ // Run `latexmk -dir-report $TMPDIR/NONEXISTENT.tex` to obtain out_dir
+ // and aux_dir values. We pass nonexistent file to prevent latexmk from
+ // building anything, since we need this invocation only to extract the
+ // -dir-report variables.
+ //
+ // In later versions, latexmk provides the -dir-report-only option and we
+ // won't have to resort to this hack with NONEXISTENT.tex.
+ let output = std::process::Command::new("latexmk")
+ .arg("-dir-report")
+ .arg(non_existent_tex)
+ .current_dir(temp_dir.path())
+ .output()?;
+
+ let stderr = String::from_utf8_lossy(&output.stderr);
+
+ let (aux_dir, out_dir) = stderr.lines().find_map(extract_dirs).ok_or_else(|| {
+ std::io::Error::new(
+ std::io::ErrorKind::InvalidData,
+ "Normalized aux and out dir were not found in latexmk output",
+ )
+ })?;
+
+ let aux_dir = change_root(src_dir, temp_dir.path(), &aux_dir);
+ let out_dir = change_root(src_dir, temp_dir.path(), &out_dir);
+ Ok(LatexmkrcData { aux_dir, out_dir })
+ }
+
+ /// Extracts $aux_dir and $out_dir from lines of the form
+ ///
+ /// Latexmk: Normalized aux dir and out dir: '$aux_dir', '$out_dir'
+ fn extract_dirs(line: &str) -> Option<(String, String)> {
+ let mut it = line
+ .strip_prefix("Latexmk: Normalized aux dir and out dir: ")?
+ .split(", ");
+
+ let aux_dir = it.next()?.strip_prefix('\'')?.strip_suffix('\'')?;
+ let out_dir = it.next()?.strip_prefix('\'')?.strip_suffix('\'')?;
+
+ // Ensure there's no more data
+ if it.next().is_some() {
+ return None;
+ }
+
+ Some((String::from(aux_dir), String::from(out_dir)))
+ }
+}
+
+mod v484 {
+ use std::{path::Path, str::Lines};
+
+ use syntax::latexmkrc::LatexmkrcData;
+ use tempfile::tempdir;
- let (aux_dir, out_dir) = stderr.lines().find_map(extract_dirs).ok_or_else(|| {
- std::io::Error::new(
- std::io::ErrorKind::InvalidData,
- "Normalized aux and out dir were not found in latexmk output",
- )
- })?;
+ use super::change_root;
- Ok(LatexmkrcData {
- aux_dir: Some(aux_dir),
- out_dir: Some(out_dir),
- })
+ pub fn parse_latexmkrc(input: &str, src_dir: &Path) -> std::io::Result<LatexmkrcData> {
+ let temp_dir = tempdir()?;
+ std::fs::write(temp_dir.path().join(".latexmkrc"), input)?;
+
+ // Create an empty dummy TeX file to let latexmk continue
+ std::fs::write(temp_dir.path().join("dummy.tex"), "")?;
+
+ // Run `latexmk -dir-report-only` to obtain out_dir and aux_dir values.
+ let output = std::process::Command::new("latexmk")
+ .arg("-dir-report-only")
+ .current_dir(temp_dir.path())
+ .output()?;
+
+ let stdout = String::from_utf8_lossy(&output.stdout);
+
+ let (aux_dir, out_dir) = extract_dirs(stdout.lines()).ok_or_else(|| {
+ std::io::Error::new(
+ std::io::ErrorKind::InvalidData,
+ "Normalized aux and out dir were not found in latexmk output",
+ )
+ })?;
+
+ let aux_dir = change_root(src_dir, temp_dir.path(), &aux_dir);
+ let out_dir = change_root(src_dir, temp_dir.path(), &out_dir);
+
+ Ok(LatexmkrcData { aux_dir, out_dir })
+ }
+
+ /// Extracts $aux_dir and $out_dir from lines of the form
+ ///
+ /// Latexmk: Normalized aux dir and out dirs:
+ /// '$aux_dir', '$out_dir', [...]
+ fn extract_dirs(lines: Lines) -> Option<(String, String)> {
+ let mut it = lines
+ .skip_while(|line| !line.starts_with("Latexmk: Normalized aux dir and out dirs:"))
+ .nth(1)?
+ .split(",");
+
+ let aux_dir = it.next()?.trim().strip_prefix('\'')?.strip_suffix('\'')?;
+
+ it.next(); // Skip the old 'outdir' option.
+
+ let out_dir = it.next()?.trim().strip_prefix('\'')?.strip_suffix('\'')?;
+
+ // Ensure there's no more data
+ if it.next().is_some() {
+ return None;
+ }
+
+ Some((String::from(aux_dir), String::from(out_dir)))
+ }
}
-/// Extracts $aux_dir and $out_dir from lines of the form
-///
-/// Latexmk: Normalized aux dir and out dir: '$aux_dir', '$out_dir'
-fn extract_dirs(line: &str) -> Option<(String, String)> {
- let mut it = line
- .strip_prefix("Latexmk: Normalized aux dir and out dir: ")?
- .split(", ");
+pub fn parse_latexmkrc(input: &str, src_dir: &Path) -> std::io::Result<LatexmkrcData> {
+ let output = std::process::Command::new("latexmk")
+ .arg("--version")
+ .output()?;
+
+ let version = String::from_utf8(output.stdout)
+ .ok()
+ .as_ref()
+ .and_then(|line| Some((line.find("Version")?, line)))
+ .and_then(|(i, line)| line[i..].trim_end().strip_prefix("Version "))
+ .and_then(|text| versions::Versioning::new(text));
- let aux_dir = it.next()?.strip_prefix('\'')?.strip_suffix('\'')?;
- let out_dir = it.next()?.strip_prefix('\'')?.strip_suffix('\'')?;
+ let result = if version.map_or(false, |v| v >= versions::Versioning::new("4.84").unwrap()) {
+ v484::parse_latexmkrc(input, src_dir)
+ } else {
+ v483::parse_latexmkrc(input, src_dir)
+ };
+
+ log::debug!("Latexmkrc parsing result: src_dir={src_dir:?}, output={result:?}");
+ result
+}
- // Ensure there's no more data
- if it.next().is_some() {
+fn change_root(src_dir: &Path, tmp_dir: &Path, out_dir: &str) -> Option<String> {
+ let out_dir = tmp_dir.join(out_dir);
+ let relative_to_tmp = pathdiff::diff_paths(out_dir, tmp_dir)?;
+ let relative_to_src = pathdiff::diff_paths(src_dir.join(relative_to_tmp), src_dir)?;
+ if relative_to_src == PathBuf::new() {
return None;
}
- Some((String::from(aux_dir), String::from(out_dir)))
+ Some(relative_to_src.to_str()?.to_string())
}
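The dispatch in `parse_latexmkrc` picks a strategy by comparing the reported latexmk version against 4.84, the first release with `-dir-report-only`. A small sketch of that version check using the `versions` crate added to Cargo.toml (the `--version` output line is made up):

```rust
use versions::Versioning;

fn main() {
    // Hypothetical `latexmk --version` output.
    let line = "Latexmk, John Collins. Version 4.85";

    // Same extraction as parse_latexmkrc: find "Version", strip the prefix,
    // and parse the remainder.
    let version = line
        .find("Version")
        .and_then(|i| line[i..].trim_end().strip_prefix("Version "))
        .and_then(|text| Versioning::new(text));

    // latexmk >= 4.84 understands -dir-report-only; older releases need the
    // -dir-report workaround with a nonexistent input file.
    let use_dir_report_only =
        version.map_or(false, |v| v >= Versioning::new("4.84").unwrap());
    assert!(use_dir_report_only);
}
```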
diff --git a/support/texlab/crates/references/Cargo.toml b/support/texlab/crates/references/Cargo.toml
index b2d80d49ee..e78e84ea4c 100644
--- a/support/texlab/crates/references/Cargo.toml
+++ b/support/texlab/crates/references/Cargo.toml
@@ -9,6 +9,7 @@ rust-version.workspace = true
[dependencies]
base-db = { path = "../base-db" }
rowan = "0.15.15"
+rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
[dev-dependencies]
diff --git a/support/texlab/crates/references/src/command.rs b/support/texlab/crates/references/src/command.rs
new file mode 100644
index 0000000000..712da20b73
--- /dev/null
+++ b/support/texlab/crates/references/src/command.rs
@@ -0,0 +1,52 @@
+use base_db::{semantics::Span, DocumentLocation};
+use rowan::ast::AstNode;
+use rustc_hash::FxHashSet;
+use syntax::latex;
+
+use crate::{Reference, ReferenceContext, ReferenceKind};
+
+pub(super) fn find_all(context: &mut ReferenceContext) -> Option<()> {
+ let data = context.params.feature.document.data.as_tex()?;
+ let token = data
+ .root_node()
+ .token_at_offset(context.params.offset)
+ .find(|token| token.kind() == latex::COMMAND_NAME)?;
+
+ let project = &context.params.feature.project;
+
+ for document in &project.documents {
+ if let Some(data) = document.data.as_tex() {
+ let defs: FxHashSet<Span> = data
+ .root_node()
+ .descendants()
+ .filter_map(|node| {
+ latex::OldCommandDefinition::cast(node.clone())
+ .and_then(|node| node.name())
+ .or_else(|| {
+ latex::NewCommandDefinition::cast(node)
+ .and_then(|node| node.name())
+ .and_then(|group| group.command())
+ })
+ .map(|name| Span::command(&name))
+ })
+ .collect();
+
+ for command in &data.semantics.commands {
+ if command.text == &token.text()[1..] {
+ let kind = if defs.contains(command) {
+ ReferenceKind::Definition
+ } else {
+ ReferenceKind::Reference
+ };
+
+ context.results.push(Reference {
+ location: DocumentLocation::new(document, command.range),
+ kind,
+ });
+ }
+ }
+ }
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/references/src/lib.rs b/support/texlab/crates/references/src/lib.rs
index 23dbd7e0ae..7d14a20d25 100644
--- a/support/texlab/crates/references/src/lib.rs
+++ b/support/texlab/crates/references/src/lib.rs
@@ -1,3 +1,4 @@
+mod command;
mod entry;
mod label;
mod string_def;
@@ -39,6 +40,7 @@ pub fn find_all<'a>(params: &ReferenceParams<'a>) -> Vec<DocumentLocation<'a>> {
entry::find_all(&mut context);
label::find_all(&mut context);
string_def::find_all(&mut context);
+ command::find_all(&mut context);
context
.results
diff --git a/support/texlab/crates/references/src/tests.rs b/support/texlab/crates/references/src/tests.rs
index a21a3ce551..845282afb0 100644
--- a/support/texlab/crates/references/src/tests.rs
+++ b/support/texlab/crates/references/src/tests.rs
@@ -229,3 +229,34 @@ fn test_string_definition_include_decl() {
true,
);
}
+
+#[test]
+fn test_new_command_definition() {
+ check(
+ r#"
+%! main.tex
+\foo
+ |
+ ^^^
+
+\newcommand{\foo}{foo}
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_new_command_definition_include_decl() {
+ check(
+ r#"
+%! main.tex
+\foo
+ |
+ ^^^
+
+\newcommand{\foo}{foo}
+ ^^^
+"#,
+ true,
+ );
+}
diff --git a/support/texlab/crates/symbols/Cargo.toml b/support/texlab/crates/symbols/Cargo.toml
index d297703d3b..65db0fc0bd 100644
--- a/support/texlab/crates/symbols/Cargo.toml
+++ b/support/texlab/crates/symbols/Cargo.toml
@@ -12,14 +12,14 @@ doctest = false
[dependencies]
base-db = { path = "../base-db" }
distro = { path = "../distro" }
-itertools = "0.12.0"
+itertools = "0.12.1"
line-index = { path = "../line-index" }
rowan = "0.15.15"
syntax = { path = "../syntax" }
-titlecase = "2.2.1"
+titlecase = "3.0.0"
url = "2.5.0"
[dev-dependencies]
-regex = "1.10.2"
+regex = "1.10.4"
test-utils = { path = "../test-utils" }
-expect-test = "1.4.1"
+expect-test = "1.5.0"
diff --git a/support/texlab/crates/syntax/Cargo.toml b/support/texlab/crates/syntax/Cargo.toml
index aaad7e973e..a2d3d866b6 100644
--- a/support/texlab/crates/syntax/Cargo.toml
+++ b/support/texlab/crates/syntax/Cargo.toml
@@ -7,7 +7,7 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-itertools = "0.12.0"
+itertools = "0.12.1"
rowan = "0.15.15"
[lib]
diff --git a/support/texlab/crates/syntax/src/latex/cst.rs b/support/texlab/crates/syntax/src/latex/cst.rs
index 95aa1e4667..f16d703162 100644
--- a/support/texlab/crates/syntax/src/latex/cst.rs
+++ b/support/texlab/crates/syntax/src/latex/cst.rs
@@ -555,8 +555,19 @@ impl TheoremDefinition {
self.syntax().first_token()
}
- pub fn name(&self) -> Option<CurlyGroupWord> {
- self.syntax().children().find_map(CurlyGroupWord::cast)
+ pub fn names(&self) -> impl Iterator<Item = Key> {
+ self.syntax()
+ .children()
+ .find_map(CurlyGroupWordList::cast)
+ .into_iter()
+ .flat_map(|group| group.keys())
+ .chain(
+ self.syntax()
+ .children()
+ .find_map(CurlyGroupWord::cast)
+ .into_iter()
+ .filter_map(|group| group.key()),
+ )
}
pub fn heading(&self) -> Option<String> {
@@ -581,9 +592,25 @@ impl TheoremDefinition {
}
}
-cst_node!(CommandDefinition, COMMAND_DEFINITION, MATH_OPERATOR);
+cst_node!(OldCommandDefinition, OLD_COMMAND_DEFINITION);
+
+impl OldCommandDefinition {
+ pub fn command(&self) -> Option<SyntaxToken> {
+ self.syntax().first_token()
+ }
+
+ pub fn name(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .children_with_tokens()
+ .skip(1)
+ .filter_map(|elem| elem.into_token())
+ .find(|token| token.kind() == COMMAND_NAME)
+ }
+}
+
+cst_node!(NewCommandDefinition, NEW_COMMAND_DEFINITION, MATH_OPERATOR);
-impl CommandDefinition {
+impl NewCommandDefinition {
pub fn command(&self) -> Option<SyntaxToken> {
self.syntax().first_token()
}
diff --git a/support/texlab/crates/syntax/src/latex/kind.rs b/support/texlab/crates/syntax/src/latex/kind.rs
index 39e2837932..8b649b1f31 100644
--- a/support/texlab/crates/syntax/src/latex/kind.rs
+++ b/support/texlab/crates/syntax/src/latex/kind.rs
@@ -66,7 +66,8 @@ pub enum SyntaxKind {
LABEL_REFERENCE,
LABEL_REFERENCE_RANGE,
LABEL_NUMBER,
- COMMAND_DEFINITION,
+ OLD_COMMAND_DEFINITION,
+ NEW_COMMAND_DEFINITION,
MATH_OPERATOR,
GLOSSARY_ENTRY_DEFINITION,
GLOSSARY_ENTRY_REFERENCE,
diff --git a/support/texlab/crates/texlab/Cargo.toml b/support/texlab/crates/texlab/Cargo.toml
index 8fe5d4fbbe..4c142a2229 100644
--- a/support/texlab/crates/texlab/Cargo.toml
+++ b/support/texlab/crates/texlab/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "texlab"
description = "LaTeX Language Server"
-version = "5.14.1"
+version = "5.16.0"
license.workspace = true
readme = "README.md"
authors.workspace = true
@@ -22,11 +22,11 @@ test = false
doctest = false
[dependencies]
-anyhow = "1.0.75"
+anyhow = "1.0.82"
base-db = { path = "../base-db" }
bibfmt = { path = "../bibfmt" }
citeproc = { path = "../citeproc" }
-clap = { version = "4.5.3", features = ["derive"] }
+clap = { version = "4.5.4", features = ["derive"] }
commands = { path = "../commands" }
completion = { path = "../completion" }
completion-data = { path = "../completion-data" }
@@ -39,22 +39,23 @@ folding = { path = "../folding" }
highlights = { path = "../highlights" }
hover = { path = "../hover" }
inlay-hints = { path = "../inlay-hints" }
+ipc = { path = "../ipc" }
line-index = { path = "../line-index" }
links = { path = "../links" }
log = "0.4.21"
lsp-server = "0.7.6"
-lsp-types = "0.95.0"
+lsp-types = "0.95.1"
notify = "6.1.1"
notify-debouncer-full = "0.3.1"
parking_lot = "0.12.1"
parser = { path = "../parser" }
references = { path = "../references" }
-regex = "1.10.2"
+regex = "1.10.4"
rename = { path = "../rename" }
rowan = "0.15.15"
rustc-hash = "1.1.0"
serde = "1.0.195"
-serde_json = "1.0.114"
+serde_json = "1.0.115"
serde_regex = "1.1.0"
serde_repr = "0.1.18"
symbols = { path = "../symbols" }
diff --git a/support/texlab/crates/texlab/src/main.rs b/support/texlab/crates/texlab/src/main.rs
index 08977ae96a..ceca02b7f7 100644
--- a/support/texlab/crates/texlab/src/main.rs
+++ b/support/texlab/crates/texlab/src/main.rs
@@ -1,9 +1,10 @@
use std::{fs::OpenOptions, io, path::PathBuf};
use anyhow::Result;
-use clap::{ArgAction, Parser};
+use clap::{ArgAction, Parser, Subcommand};
use log::LevelFilter;
use lsp_server::Connection;
+use lsp_types::Url;
use texlab::Server;
/// An implementation of the Language Server Protocol for LaTeX
@@ -21,20 +22,72 @@ struct Opts {
/// Write the logging output to FILE
#[clap(long, name = "FILE", value_parser)]
log_file: Option<PathBuf>,
+
+ #[clap(subcommand)]
+ command: Option<Command>,
+}
+
+#[derive(Debug, Subcommand)]
+enum Command {
+    /// Runs the language server in an editor context using STDIN and STDOUT.
+ Run,
+
+ /// Opens a document at a specific line.
+ ///
+ /// This command can be used to implement inverse search in an editor-agnostic way.
+ InverseSearch(InverseSearchOpts),
+}
+
+/// Options for the inverse search subcommand.
+#[derive(Debug, Parser)]
+struct InverseSearchOpts {
+ /// The path to the document to open.
+ #[clap(short, long, name = "FILE", value_parser)]
+ input: PathBuf,
+
+ /// The zero-based line number of the document to jump to.
+ #[clap(short, long)]
+ line: u32,
}
fn main() -> Result<()> {
let opts = Opts::parse();
- setup_logger(opts);
+ setup_logger(&opts);
+
+ match opts.command.unwrap_or(Command::Run) {
+ Command::Run => {
+ let (connection, threads) = Connection::stdio();
+ Server::exec(connection)?;
+ threads.join()?;
+ }
+ Command::InverseSearch(opts) => {
+ let Some(uri) = opts
+ .input
+ .canonicalize()
+ .ok()
+ .and_then(|path| Url::from_file_path(path).ok())
+ else {
+ eprintln!("Failed to convert input path to a URI.");
+ std::process::exit(-1);
+ };
+
+ let params = lsp_types::TextDocumentPositionParams::new(
+ lsp_types::TextDocumentIdentifier::new(uri),
+ lsp_types::Position::new(opts.line, 0),
+ );
- let (connection, threads) = Connection::stdio();
- Server::exec(connection)?;
- threads.join()?;
+ if let Err(why) = ipc::send_request(params) {
+ eprintln!("Failed to send inverse search request to the main instance. Is the server running?");
+ eprintln!("Details: {why:?}");
+ std::process::exit(-1);
+ }
+ }
+ }
Ok(())
}
-fn setup_logger(opts: Opts) {
+fn setup_logger(opts: &Opts) {
let verbosity_level = if !opts.quiet {
match opts.verbosity {
0 => LevelFilter::Error,
@@ -52,7 +105,7 @@ fn setup_logger(opts: Opts) {
.level(verbosity_level)
.chain(io::stderr());
- let logger = match opts.log_file {
+ let logger = match &opts.log_file {
Some(log_file) => logger.chain(
OpenOptions::new()
.write(true)
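
For orientation, a minimal sketch of how an external tool (for example a PDF viewer's inverse-search hook) might invoke the new subcommand. The `inverse-search` name and the `--input`/`--line` flags are inferred from the clap derive above; treat them as assumptions and confirm with `texlab inverse-search --help`. The line number is zero-based, as the doc comment states.

    // Hypothetical caller-side helper; not part of this patch.
    use std::process::Command;

    fn jump_to_source(tex_file: &str, zero_based_line: u32) -> std::io::Result<()> {
        // Ask the running texlab instance to open `tex_file` at the given line.
        let status = Command::new("texlab")
            .arg("inverse-search")
            .args(["--input", tex_file])
            .args(["--line", &zero_based_line.to_string()])
            .status()?;
        if !status.success() {
            eprintln!("inverse search failed; is the language server running?");
        }
        Ok(())
    }
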
diff --git a/support/texlab/crates/texlab/src/server.rs b/support/texlab/crates/texlab/src/server.rs
index 70b9a03022..ef70c42c71 100644
--- a/support/texlab/crates/texlab/src/server.rs
+++ b/support/texlab/crates/texlab/src/server.rs
@@ -11,7 +11,7 @@ use std::{
};
use anyhow::Result;
-use base_db::{deps, Config, Owner, Workspace};
+use base_db::{deps, Owner, Workspace};
use commands::{BuildCommand, CleanCommand, CleanTarget, ForwardSearch};
use crossbeam_channel::{Receiver, Sender};
use distro::{Distro, Language};
@@ -51,6 +51,7 @@ enum InternalMessage {
Diagnostics,
ChktexFinished(Url, Vec<diagnostics::Diagnostic>),
ForwardSearch(Url, Option<Position>),
+ InverseSearch(TextDocumentPositionParams),
}
pub struct Server {
@@ -242,7 +243,7 @@ impl Server {
for document in checked_paths
.iter()
- .filter_map(|path| workspace.lookup_path(path))
+ .filter_map(|path| workspace.lookup_file(path))
{
self.diagnostic_manager.update_syntax(&workspace, document);
}
@@ -319,7 +320,7 @@ impl Server {
fn update_options(&mut self, options: Options) {
let mut workspace = self.workspace.write();
- workspace.set_config(Config::from(options));
+ workspace.set_config(from_proto::config(options));
self.watcher.watch(&mut workspace);
}
@@ -794,30 +795,50 @@ impl Server {
let mut changed = false;
let mut workspace = self.workspace.write();
+
match event.kind {
notify::EventKind::Remove(_) | notify::EventKind::Modify(ModifyKind::Name(_)) => {
- for path in event.paths {
- if let Some(document) = workspace.lookup_path(&path) {
- if document.owner == Owner::Server {
- let uri = document.uri.clone();
- workspace.remove(&uri);
- changed = true;
- }
- }
+ let affected_uris = event
+ .paths
+ .iter()
+ .flat_map(|file_or_dir| workspace.lookup_file_or_dir(file_or_dir))
+ .filter(|doc| doc.owner == Owner::Server)
+ .map(|doc| doc.uri.clone())
+ .collect::<Vec<_>>();
+
+ for uri in affected_uris {
+ workspace.remove(&uri);
+ changed = true;
}
}
notify::EventKind::Create(_) | notify::EventKind::Modify(_) => {
- for path in event.paths {
- if workspace
- .lookup_path(&path)
- .map_or(true, |document| document.owner == Owner::Server)
- {
- if let Some(language) = Language::from_path(&path) {
- changed |= workspace.load(&path, language).is_ok();
-
- if let Some(document) = workspace.lookup_path(&path) {
- self.diagnostic_manager.update_syntax(&workspace, document);
- }
+ for file_or_dir in event.paths {
+ let affected_paths = if file_or_dir.is_dir() {
+ changed = true;
+ workspace
+ .lookup_file_or_dir(&file_or_dir)
+ .filter_map(|doc| doc.path.clone())
+ .collect::<Vec<_>>()
+ } else {
+ vec![file_or_dir]
+ };
+
+ for path in affected_paths {
+ if !workspace
+ .lookup_file(&path)
+ .map_or(true, |doc| doc.owner == Owner::Server)
+ {
+ continue;
+ }
+
+ let Some(language) = Language::from_path(&path) else {
+ continue;
+ };
+
+ changed |= workspace.load(&path, language).is_ok();
+
+ if let Some(document) = workspace.lookup_file(&path) {
+ self.diagnostic_manager.update_syntax(&workspace, document);
}
}
}
@@ -825,9 +846,10 @@ impl Server {
notify::EventKind::Any | notify::EventKind::Access(_) | notify::EventKind::Other => {}
};
- drop(workspace);
if changed {
- self.publish_diagnostics_with_delay();
+ self.diagnostic_manager.cleanup(&workspace);
+ drop(workspace);
+ self.update_workspace();
}
}
@@ -921,6 +943,27 @@ impl Server {
Ok(results)
}
+ fn inverse_search(&self, params: TextDocumentPositionParams) -> Result<()> {
+ if !self.client_flags.show_document {
+ log::warn!("Inverse search request received although the client does not support window/showDocument: {params:?}");
+ }
+
+ let position = lsp_types::Position::new(params.position.line, 0);
+ let params = lsp_types::ShowDocumentParams {
+ uri: params.text_document.uri,
+ take_focus: Some(true),
+ external: Some(false),
+ selection: Some(lsp_types::Range::new(position, position)),
+ };
+
+ let client = self.client.clone();
+ self.pool.execute(move || {
+ let _ = client.send_request::<ShowDocument>(params);
+ });
+
+ Ok(())
+ }
+
fn parse_command_params<T: DeserializeOwned>(
&self,
params: Vec<serde_json::Value>,
@@ -934,6 +977,13 @@ impl Server {
Ok(value)
}
+ fn setup_ipc_server(&mut self) {
+ let sender = self.internal_tx.clone();
+ let _ = ipc::spawn_server(move |params: TextDocumentPositionParams| {
+ let _ = sender.send(InternalMessage::InverseSearch(params));
+ });
+ }
+
fn process_messages(&mut self) -> Result<()> {
loop {
crossbeam_channel::select! {
@@ -1038,6 +1088,9 @@ impl Server {
InternalMessage::ForwardSearch(uri, position) => {
self.forward_search(None, uri, position)?;
}
+ InternalMessage::InverseSearch(params) => {
+ self.inverse_search(params)?;
+ }
};
}
};
@@ -1060,6 +1113,7 @@ impl Server {
self.register_configuration();
self.pull_options();
+ self.setup_ipc_server();
self.process_messages()?;
self.pool.join();
Ok(())
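
Taken together with the `main.rs` change above, the flow is: the short-lived `texlab inverse-search` process serializes a `TextDocumentPositionParams` and hands it to the long-running server over the new `ipc` channel, which forwards it through `InternalMessage::InverseSearch` into a `window/showDocument` request. A minimal sketch of the two IPC entry points as they are used in this diff; the exact signatures live in the new `ipc` crate and are assumed here.

    use lsp_types::{Position, TextDocumentIdentifier, TextDocumentPositionParams, Url};

    fn ipc_round_trip() {
        // Receiving side (what `setup_ipc_server` does): register a callback
        // that forwards incoming positions to the server's event loop.
        let _ = ipc::spawn_server(|params: TextDocumentPositionParams| {
            println!(
                "inverse search: {} line {}",
                params.text_document.uri, params.position.line
            );
        });

        // Sending side (what the `inverse-search` subcommand does): build the
        // position parameters and send them to the main instance.
        let uri = Url::from_file_path("/tmp/main.tex").unwrap();
        let params = TextDocumentPositionParams::new(
            TextDocumentIdentifier::new(uri),
            Position::new(41, 0),
        );
        if let Err(why) = ipc::send_request(params) {
            eprintln!("no running texlab instance reachable: {why:?}");
        }
    }
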
diff --git a/support/texlab/crates/texlab/src/server/options.rs b/support/texlab/crates/texlab/src/server/options.rs
index 60c0bf90f7..02200392f6 100644
--- a/support/texlab/crates/texlab/src/server/options.rs
+++ b/support/texlab/crates/texlab/src/server/options.rs
@@ -1,6 +1,3 @@
-use std::time::Duration;
-
-use base_db::{Config, Formatter, SynctexConfig};
use regex::Regex;
use serde::{Deserialize, Serialize};
@@ -128,6 +125,7 @@ pub struct ExperimentalOptions {
pub enum_environments: Vec<String>,
pub verbatim_environments: Vec<String>,
pub citation_commands: Vec<String>,
+ pub label_definition_commands: Vec<String>,
pub label_reference_commands: Vec<String>,
}
@@ -159,135 +157,3 @@ impl Default for CompletionMatcher {
Self::FuzzyIgnoreCase
}
}
-
-impl From<Options> for Config {
- fn from(value: Options) -> Self {
- let mut config = Config::default();
- config.root_dir = value.root_directory;
-
- config.build.program = value.build.executable.unwrap_or(config.build.program);
- config.build.args = value.build.args.unwrap_or(config.build.args);
- config.build.on_save = value.build.on_save;
- config.build.forward_search_after = value.build.forward_search_after;
-
- config.build.aux_dir = value
- .build
- .aux_directory
- .or_else(|| value.aux_directory.clone())
- .unwrap_or_else(|| String::from("."));
-
- config.build.pdf_dir = value
- .build
- .pdf_directory
- .or(value.aux_directory)
- .unwrap_or_else(|| String::from("."));
-
- config.build.log_dir = value
- .build
- .log_directory
- .unwrap_or_else(|| config.build.pdf_dir.clone());
-
- config.build.output_filename = value.build.filename;
-
- config.diagnostics.allowed_patterns = value
- .diagnostics
- .allowed_patterns
- .into_iter()
- .map(|pattern| pattern.0)
- .collect();
-
- config.diagnostics.ignored_patterns = value
- .diagnostics
- .ignored_patterns
- .into_iter()
- .map(|pattern| pattern.0)
- .collect();
-
- config.diagnostics.delay = value
- .diagnostics_delay
- .map_or(config.diagnostics.delay, Duration::from_millis);
-
- config.diagnostics.chktex.on_open = value.chktex.on_open_and_save;
- config.diagnostics.chktex.on_save = value.chktex.on_open_and_save;
- config.diagnostics.chktex.on_edit = value.chktex.on_edit;
- config.diagnostics.chktex.additional_args =
- value.chktex.additional_args.unwrap_or_default();
-
- config.formatting.tex_formatter = match value.latex_formatter {
- LatexFormatter::None => Formatter::Null,
- LatexFormatter::Texlab => Formatter::Server,
- LatexFormatter::Latexindent => Formatter::LatexIndent,
- };
-
- config.formatting.bib_formatter = match value.bibtex_formatter {
- BibtexFormatter::None => Formatter::Null,
- BibtexFormatter::Texlab => Formatter::Server,
- BibtexFormatter::Latexindent => Formatter::LatexIndent,
- };
-
- config.formatting.line_length =
- value
- .formatter_line_length
- .map_or(80, |len| if len < 0 { usize::MAX } else { len as usize });
-
- config.formatting.latex_indent.local = value.latexindent.local;
- config.formatting.latex_indent.modify_line_breaks = value.latexindent.modify_line_breaks;
-
- config.synctex = value
- .forward_search
- .executable
- .zip(value.forward_search.args)
- .map(|(program, args)| SynctexConfig { program, args });
-
- config.symbols.allowed_patterns = value
- .symbols
- .allowed_patterns
- .into_iter()
- .map(|pattern| pattern.0)
- .collect();
-
- config.symbols.ignored_patterns = value
- .symbols
- .ignored_patterns
- .into_iter()
- .map(|pattern| pattern.0)
- .collect();
-
- config.inlay_hints.label_definitions = value.inlay_hints.label_definitions.unwrap_or(true);
- config.inlay_hints.label_references = value.inlay_hints.label_references.unwrap_or(true);
-
- config.completion.matcher = match value.completion.matcher {
- CompletionMatcher::Fuzzy => base_db::MatchingAlgo::Skim,
- CompletionMatcher::FuzzyIgnoreCase => base_db::MatchingAlgo::SkimIgnoreCase,
- CompletionMatcher::Prefix => base_db::MatchingAlgo::Prefix,
- CompletionMatcher::PrefixIgnoreCase => base_db::MatchingAlgo::PrefixIgnoreCase,
- };
-
- config
- .syntax
- .math_environments
- .extend(value.experimental.math_environments);
-
- config
- .syntax
- .enum_environments
- .extend(value.experimental.enum_environments);
-
- config
- .syntax
- .verbatim_environments
- .extend(value.experimental.verbatim_environments);
-
- config
- .syntax
- .citation_commands
- .extend(value.experimental.citation_commands);
-
- config
- .syntax
- .label_reference_commands
- .extend(value.experimental.label_reference_commands);
-
- config
- }
-}
diff --git a/support/texlab/crates/texlab/src/util.rs b/support/texlab/crates/texlab/src/util.rs
index c40ba8b51a..d9ec5a2fb1 100644
--- a/support/texlab/crates/texlab/src/util.rs
+++ b/support/texlab/crates/texlab/src/util.rs
@@ -22,7 +22,7 @@ pub fn normalize_uri(uri: &mut lsp_types::Url) {
}
fn fix_drive_letter(text: &str) -> Option<String> {
- if !text.is_ascii() {
+    if !text.is_ascii() || text.is_empty() {
return None;
}
diff --git a/support/texlab/crates/texlab/src/util/client_flags.rs b/support/texlab/crates/texlab/src/util/client_flags.rs
index e5813e4f02..842dbb1eae 100644
--- a/support/texlab/crates/texlab/src/util/client_flags.rs
+++ b/support/texlab/crates/texlab/src/util/client_flags.rs
@@ -37,4 +37,7 @@ pub struct ClientFlags {
/// If `true`, the server can report progress using `WorkDoneProgress`.
pub progress: bool,
+
+    /// If `true`, the server can ask the client to open a document using `window/showDocument`.
+ pub show_document: bool,
}
diff --git a/support/texlab/crates/texlab/src/util/from_proto.rs b/support/texlab/crates/texlab/src/util/from_proto.rs
index e05d317dfe..db4f3a5b39 100644
--- a/support/texlab/crates/texlab/src/util/from_proto.rs
+++ b/support/texlab/crates/texlab/src/util/from_proto.rs
@@ -1,4 +1,6 @@
-use base_db::{FeatureParams, Workspace};
+use std::time::Duration;
+
+use base_db::{Config, FeatureParams, Formatter, SynctexConfig, Workspace};
use completion::CompletionParams;
use definition::DefinitionParams;
use highlights::HighlightParams;
@@ -8,7 +10,10 @@ use references::ReferenceParams;
use rename::RenameParams;
use rowan::TextSize;
-use crate::features::completion::ResolveInfo;
+use crate::{
+ features::completion::ResolveInfo,
+ server::options::{BibtexFormatter, CompletionMatcher, LatexFormatter, Options},
+};
use super::{line_index_ext::LineIndexExt, ClientFlags};
@@ -94,6 +99,12 @@ pub fn client_flags(
.and_then(|cap| cap.work_done_progress)
.unwrap_or(false);
+ let show_document = capabilities
+ .window
+ .as_ref()
+ .and_then(|cap| cap.show_document.as_ref())
+ .map_or(false, |cap| cap.support);
+
ClientFlags {
hierarchical_document_symbols,
completion_markdown,
@@ -106,6 +117,7 @@ pub fn client_flags(
definition_link,
folding_custom_kinds,
progress,
+ show_document,
}
}
@@ -221,3 +233,137 @@ pub fn completion_resolve_info(item: &mut lsp_types::CompletionItem) -> Option<R
.take()
.and_then(|data| serde_json::from_value(data).ok())
}
+
+pub fn config(value: Options) -> Config {
+ let mut config = Config::default();
+ config.root_dir = value.root_directory;
+
+ config.build.program = value.build.executable.unwrap_or(config.build.program);
+ config.build.args = value.build.args.unwrap_or(config.build.args);
+ config.build.on_save = value.build.on_save;
+ config.build.forward_search_after = value.build.forward_search_after;
+
+ config.build.aux_dir = value
+ .build
+ .aux_directory
+ .or_else(|| value.aux_directory.clone())
+ .unwrap_or_else(|| String::from("."));
+
+ config.build.pdf_dir = value
+ .build
+ .pdf_directory
+ .or(value.aux_directory)
+ .unwrap_or_else(|| String::from("."));
+
+ config.build.log_dir = value
+ .build
+ .log_directory
+ .unwrap_or_else(|| config.build.pdf_dir.clone());
+
+ config.build.output_filename = value.build.filename;
+
+ config.diagnostics.allowed_patterns = value
+ .diagnostics
+ .allowed_patterns
+ .into_iter()
+ .map(|pattern| pattern.0)
+ .collect();
+
+ config.diagnostics.ignored_patterns = value
+ .diagnostics
+ .ignored_patterns
+ .into_iter()
+ .map(|pattern| pattern.0)
+ .collect();
+
+ config.diagnostics.delay = value
+ .diagnostics_delay
+ .map_or(config.diagnostics.delay, Duration::from_millis);
+
+ config.diagnostics.chktex.on_open = value.chktex.on_open_and_save;
+ config.diagnostics.chktex.on_save = value.chktex.on_open_and_save;
+ config.diagnostics.chktex.on_edit = value.chktex.on_edit;
+ config.diagnostics.chktex.additional_args = value.chktex.additional_args.unwrap_or_default();
+
+ config.formatting.tex_formatter = match value.latex_formatter {
+ LatexFormatter::None => Formatter::Null,
+ LatexFormatter::Texlab => Formatter::Server,
+ LatexFormatter::Latexindent => Formatter::LatexIndent,
+ };
+
+ config.formatting.bib_formatter = match value.bibtex_formatter {
+ BibtexFormatter::None => Formatter::Null,
+ BibtexFormatter::Texlab => Formatter::Server,
+ BibtexFormatter::Latexindent => Formatter::LatexIndent,
+ };
+
+ config.formatting.line_length =
+ value
+ .formatter_line_length
+ .map_or(80, |len| if len < 0 { usize::MAX } else { len as usize });
+
+ config.formatting.latex_indent.local = value.latexindent.local;
+ config.formatting.latex_indent.modify_line_breaks = value.latexindent.modify_line_breaks;
+
+ config.synctex = value
+ .forward_search
+ .executable
+ .zip(value.forward_search.args)
+ .map(|(program, args)| SynctexConfig { program, args });
+
+ config.symbols.allowed_patterns = value
+ .symbols
+ .allowed_patterns
+ .into_iter()
+ .map(|pattern| pattern.0)
+ .collect();
+
+ config.symbols.ignored_patterns = value
+ .symbols
+ .ignored_patterns
+ .into_iter()
+ .map(|pattern| pattern.0)
+ .collect();
+
+ config.inlay_hints.label_definitions = value.inlay_hints.label_definitions.unwrap_or(true);
+ config.inlay_hints.label_references = value.inlay_hints.label_references.unwrap_or(true);
+
+ config.completion.matcher = match value.completion.matcher {
+ CompletionMatcher::Fuzzy => base_db::MatchingAlgo::Skim,
+ CompletionMatcher::FuzzyIgnoreCase => base_db::MatchingAlgo::SkimIgnoreCase,
+ CompletionMatcher::Prefix => base_db::MatchingAlgo::Prefix,
+ CompletionMatcher::PrefixIgnoreCase => base_db::MatchingAlgo::PrefixIgnoreCase,
+ };
+
+ config
+ .syntax
+ .math_environments
+ .extend(value.experimental.math_environments);
+
+ config
+ .syntax
+ .enum_environments
+ .extend(value.experimental.enum_environments);
+
+ config
+ .syntax
+ .verbatim_environments
+ .extend(value.experimental.verbatim_environments);
+
+ config
+ .syntax
+ .citation_commands
+ .extend(value.experimental.citation_commands);
+
+ config
+ .syntax
+ .label_definition_commands
+ .extend(value.experimental.label_definition_commands);
+
+ config
+ .syntax
+ .label_reference_commands
+ .extend(value.experimental.label_reference_commands);
+
+ config
+}
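
A minimal sketch of exercising the relocated conversion, including the new `labelDefinitionCommands` experimental option added in this release. It assumes `Options` derives `Default` (as these serde-backed option structs usually do); the assertions only restate the mapping visible above: a negative `formatterLineLength` disables wrapping, and the experimental command lists extend the built-in syntax configuration.

    // Hypothetical in-crate check; the Default derive and module paths are assumptions.
    use crate::{server::options::Options, util::from_proto};

    fn check_config_mapping() {
        let mut options = Options::default();
        options.formatter_line_length = Some(-1);
        options
            .experimental
            .label_definition_commands
            .push(String::from("asm"));

        let config = from_proto::config(options);

        // A negative line length maps to "no limit".
        assert_eq!(config.formatting.line_length, usize::MAX);

        // The experimental list is appended to the default label definition commands.
        assert!(config
            .syntax
            .label_definition_commands
            .iter()
            .any(|cmd| cmd == "asm"));
    }
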