Diffstat (limited to 'support/texlab/crates')
-rw-r--r--  support/texlab/crates/base-db/src/data.rs | 23
-rw-r--r--  support/texlab/crates/base-db/src/deps.rs | 11
-rw-r--r--  support/texlab/crates/base-db/src/deps/discover.rs | 117
-rw-r--r--  support/texlab/crates/base-db/src/deps/graph.rs | 256
-rw-r--r--  support/texlab/crates/base-db/src/deps/project.rs | 39
-rw-r--r--  support/texlab/crates/base-db/src/deps/root.rs | 157
-rw-r--r--  support/texlab/crates/base-db/src/graph.rs | 191
-rw-r--r--  support/texlab/crates/base-db/src/lib.rs | 6
-rw-r--r--  support/texlab/crates/base-db/src/semantics/auxiliary.rs | 26
-rw-r--r--  support/texlab/crates/base-db/src/semantics/bib.rs | 8
-rw-r--r--  support/texlab/crates/base-db/src/util/label.rs | 3
-rw-r--r--  support/texlab/crates/base-db/src/util/queries.rs | 5
-rw-r--r--  support/texlab/crates/base-db/src/workspace.rs | 212
-rw-r--r--  support/texlab/crates/bibtex-utils/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/bibtex-utils/src/field.rs | 2
-rw-r--r--  support/texlab/crates/bibtex-utils/src/field/text.rs | 4
-rw-r--r--  support/texlab/crates/citeproc/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/citeproc/src/entry.rs | 30
-rw-r--r--  support/texlab/crates/commands/Cargo.toml | 4
-rw-r--r--  support/texlab/crates/commands/src/build.rs | 12
-rw-r--r--  support/texlab/crates/commands/src/clean.rs | 8
-rw-r--r--  support/texlab/crates/commands/src/dep_graph.rs | 31
-rw-r--r--  support/texlab/crates/commands/src/fwd_search.rs | 18
-rw-r--r--  support/texlab/crates/completion/src/lib.rs | 29
-rw-r--r--  support/texlab/crates/completion/src/providers.rs | 6
-rw-r--r--  support/texlab/crates/completion/src/providers/command.rs | 17
-rw-r--r--  support/texlab/crates/completion/src/providers/include.rs | 11
-rw-r--r--  support/texlab/crates/completion/src/providers/label_def.rs | 60
-rw-r--r--  support/texlab/crates/completion/src/providers/label_ref.rs (renamed from support/texlab/crates/completion/src/providers/label.rs) | 2
-rw-r--r--  support/texlab/crates/completion/src/tests.rs | 57
-rw-r--r--  support/texlab/crates/completion/src/util/builder.rs | 1
-rw-r--r--  support/texlab/crates/definition/src/include.rs | 15
-rw-r--r--  support/texlab/crates/diagnostics/Cargo.toml | 3
-rw-r--r--  support/texlab/crates/diagnostics/src/build_log.rs | 4
-rw-r--r--  support/texlab/crates/diagnostics/src/chktex.rs | 12
-rw-r--r--  support/texlab/crates/diagnostics/src/citations.rs | 3
-rw-r--r--  support/texlab/crates/diagnostics/src/labels.rs | 14
-rw-r--r--  support/texlab/crates/diagnostics/src/manager.rs | 4
-rw-r--r--  support/texlab/crates/hover/src/tests.rs | 34
-rw-r--r--  support/texlab/crates/inlay-hints/src/tests.rs | 2
-rw-r--r--  support/texlab/crates/links/src/include.rs | 18
-rw-r--r--  support/texlab/crates/symbols/src/document.rs | 4
-rw-r--r--  support/texlab/crates/symbols/src/document/tex.rs | 3
-rw-r--r--  support/texlab/crates/symbols/src/workspace/sort.rs | 6
-rw-r--r--  support/texlab/crates/texlab/Cargo.toml | 9
-rw-r--r--  support/texlab/crates/texlab/src/features/completion.rs | 24
-rw-r--r--  support/texlab/crates/texlab/src/features/formatting/latexindent.rs | 5
-rw-r--r--  support/texlab/crates/texlab/src/server.rs | 6
48 files changed, 975 insertions, 541 deletions
diff --git a/support/texlab/crates/base-db/src/data.rs b/support/texlab/crates/base-db/src/data.rs
index b24c2bb77a..8dbb8cdfe7 100644
--- a/support/texlab/crates/base-db/src/data.rs
+++ b/support/texlab/crates/base-db/src/data.rs
@@ -1,8 +1,8 @@
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct BibtexEntryType<'a> {
- pub name: &'a str,
- pub category: BibtexEntryTypeCategory,
- pub documentation: Option<&'a str>,
+ pub name: &'a str,
+ pub category: BibtexEntryTypeCategory,
+ pub documentation: Option<&'a str>,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
@@ -18,23 +18,28 @@ pub enum BibtexEntryTypeCategory {
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct BibtexFieldType<'a> {
- pub name: &'a str,
- pub documentation: &'a str,
+ pub name: &'a str,
+ pub documentation: &'a str,
}
impl<'a> BibtexEntryType<'a> {
pub fn find(name: &str) -> Option<Self> {
- BIBTEX_ENTRY_TYPES.iter().find(|ty| ty.name.eq_ignore_ascii_case(name)).copied()
+ BIBTEX_ENTRY_TYPES
+ .iter()
+ .find(|ty| ty.name.eq_ignore_ascii_case(name))
+ .copied()
}
}
impl<'a> BibtexFieldType<'a> {
pub fn find(name: &str) -> Option<Self> {
- BIBTEX_FIELD_TYPES.iter().find(|ty| ty.name.eq_ignore_ascii_case(name)).copied()
+ BIBTEX_FIELD_TYPES
+ .iter()
+ .find(|ty| ty.name.eq_ignore_ascii_case(name))
+ .copied()
}
}
-
pub static BIBTEX_ENTRY_TYPES: &[BibtexEntryType<'static>] = &[
BibtexEntryType {
name: "@preamble",
@@ -887,5 +892,5 @@ pub static BIBTEX_FIELD_TYPES: &[BibtexFieldType<'static>] = &[
BibtexFieldType {
name: "school",
documentation: "An alias for `institution`, provided for BibTeX compatibility. The `institution` field is used by traditional BibTeX for technical reports whereas the `school` field holds the institution associated with theses. The `biblatex` package employs the generic field name `institution` in both cases.",
- }
+ }
];
diff --git a/support/texlab/crates/base-db/src/deps.rs b/support/texlab/crates/base-db/src/deps.rs
new file mode 100644
index 0000000000..06689a0df2
--- /dev/null
+++ b/support/texlab/crates/base-db/src/deps.rs
@@ -0,0 +1,11 @@
+mod discover;
+mod graph;
+mod project;
+mod root;
+
+pub use self::{
+ discover::{discover, watch},
+ graph::{DirectLinkData, Edge, EdgeData, Graph},
+ project::{parents, Project},
+ root::ProjectRoot,
+};
diff --git a/support/texlab/crates/base-db/src/deps/discover.rs b/support/texlab/crates/base-db/src/deps/discover.rs
new file mode 100644
index 0000000000..06905905a5
--- /dev/null
+++ b/support/texlab/crates/base-db/src/deps/discover.rs
@@ -0,0 +1,117 @@
+use std::path::PathBuf;
+
+use distro::Language;
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+
+use crate::Workspace;
+
+use super::ProjectRoot;
+
+pub fn watch(
+ workspace: &mut Workspace,
+ watcher: &mut dyn notify::Watcher,
+ watched_dirs: &mut FxHashSet<PathBuf>,
+) {
+ let roots = workspace
+ .iter()
+ .map(|document| &document.dir)
+ .filter(|dir| dir.scheme() == "file")
+ .unique()
+ .map(|dir| ProjectRoot::walk_and_find(workspace, dir));
+
+ for root in roots {
+ for uri in [&root.src_dir, &root.aux_dir, &root.log_dir, &root.pdf_dir] {
+ if let Ok(path) = uri.to_file_path() {
+ if watched_dirs.insert(path.clone()) {
+ let _ = watcher.watch(&path, notify::RecursiveMode::NonRecursive);
+ }
+ }
+ }
+ }
+}
+
+pub fn discover(workspace: &mut Workspace, checked_paths: &mut FxHashSet<PathBuf>) {
+ loop {
+ let mut changed = false;
+ changed |= discover_parents(workspace, checked_paths);
+ changed |= discover_children(workspace, checked_paths);
+ if !changed {
+ break;
+ }
+ }
+}
+
+fn discover_parents(workspace: &mut Workspace, checked_paths: &mut FxHashSet<PathBuf>) -> bool {
+ let dirs = workspace
+ .iter()
+ .filter(|document| document.language != Language::Bib)
+ .filter_map(|document| document.path.as_deref())
+ .flat_map(|path| path.ancestors().skip(1))
+ .filter(|path| workspace.contains(path))
+ .map(|path| path.to_path_buf())
+ .collect::<FxHashSet<_>>();
+
+ let mut changed = false;
+ for dir in dirs {
+ if workspace
+ .iter()
+ .filter(|document| matches!(document.language, Language::Root | Language::Tectonic))
+ .filter_map(|document| document.path.as_deref())
+ .filter_map(|path| path.parent())
+ .any(|marker| dir.starts_with(marker))
+ {
+ continue;
+ }
+
+ let Ok(entries) = std::fs::read_dir(dir) else {
+ continue;
+ };
+
+ for file in entries
+ .flatten()
+ .filter(|entry| entry.file_type().map_or(false, |type_| type_.is_file()))
+ .map(|entry| entry.path())
+ {
+ let Some(lang) = Language::from_path(&file) else {
+ continue;
+ };
+
+ if !matches!(
+ lang,
+ Language::Tex | Language::Root | Language::Tectonic | Language::Latexmkrc
+ ) {
+ continue;
+ }
+
+ if workspace.lookup_path(&file).is_none() && file.exists() {
+ changed |= workspace.load(&file, lang).is_ok();
+ checked_paths.insert(file);
+ }
+ }
+ }
+
+ changed
+}
+
+fn discover_children(workspace: &mut Workspace, checked_paths: &mut FxHashSet<PathBuf>) -> bool {
+ let files = workspace
+ .graphs()
+ .values()
+ .flat_map(|graph| graph.missing.iter())
+ .filter(|uri| uri.scheme() == "file")
+ .flat_map(|uri| uri.to_file_path())
+ .collect::<FxHashSet<_>>();
+
+ let mut changed = false;
+ for file in files {
+ let language = Language::from_path(&file).unwrap_or(Language::Tex);
+
+ if workspace.lookup_path(&file).is_none() && file.exists() {
+ changed |= workspace.load(&file, language).is_ok();
+ checked_paths.insert(file);
+ }
+ }
+
+ changed
+}
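
The two functions above take over the `discover`/`watch` duties removed from `Workspace` further down. A minimal sketch of how a caller might drive them after a workspace change; the surrounding function and its error handling are illustrative, only `deps::discover` and `deps::watch` come from this commit:

    use std::path::PathBuf;

    use base_db::{deps, Workspace};
    use rustc_hash::FxHashSet;

    fn refresh_workspace(
        workspace: &mut Workspace,
        watcher: &mut dyn notify::Watcher,
        watched_dirs: &mut FxHashSet<PathBuf>,
    ) {
        // Scan ancestor directories for parent candidates and load
        // referenced-but-missing files until no new documents appear.
        let mut checked_paths = FxHashSet::default();
        deps::discover(workspace, &mut checked_paths);

        // Register the src/aux/log/pdf directories of every project root with
        // the file watcher; each directory is only registered once.
        deps::watch(workspace, watcher, watched_dirs);
    }
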
diff --git a/support/texlab/crates/base-db/src/deps/graph.rs b/support/texlab/crates/base-db/src/deps/graph.rs
new file mode 100644
index 0000000000..a90d8fa204
--- /dev/null
+++ b/support/texlab/crates/base-db/src/deps/graph.rs
@@ -0,0 +1,256 @@
+use std::{ffi::OsStr, path::PathBuf, rc::Rc};
+
+use distro::Language;
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use percent_encoding::percent_decode_str;
+use rustc_hash::FxHashSet;
+use url::Url;
+
+use crate::{semantics, Document, Workspace};
+
+use super::ProjectRoot;
+
+pub(crate) static HOME_DIR: Lazy<Option<PathBuf>> = Lazy::new(dirs::home_dir);
+
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct Edge {
+ pub source: Url,
+ pub target: Url,
+ pub data: EdgeData,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub enum EdgeData {
+ DirectLink(DirectLinkData),
+ AdditionalFiles,
+ Artifact,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct DirectLinkData {
+ pub link: semantics::tex::Link,
+ pub new_root: Option<ProjectRoot>,
+}
+
+#[derive(Debug, Clone, Copy)]
+struct Start<'a, 'b> {
+ source: &'a Document,
+ root: &'b ProjectRoot,
+}
+
+#[derive(Debug)]
+pub struct Graph {
+ pub missing: Vec<Url>,
+ pub edges: Vec<Edge>,
+ pub start: Url,
+}
+
+impl Graph {
+ pub fn new(workspace: &Workspace, start: &Document) -> Self {
+ let mut graph = Self {
+ missing: Vec::new(),
+ edges: Vec::new(),
+ start: start.uri.clone(),
+ };
+
+ let root = ProjectRoot::walk_and_find(workspace, &start.dir);
+
+ let mut stack = vec![(start, Rc::new(root))];
+ let mut visited = FxHashSet::default();
+
+ while let Some((source, root)) = stack.pop() {
+ let index = graph.edges.len();
+
+ graph.process(
+ workspace,
+ Start {
+ source,
+ root: &root,
+ },
+ );
+
+ for edge in &graph.edges[index..] {
+ if visited.insert(edge.target.clone()) {
+ let new_root = match &edge.data {
+ EdgeData::DirectLink(data) => data.new_root.clone(),
+ _ => None,
+ };
+
+ let new_root = new_root.map_or_else(|| Rc::clone(&root), Rc::new);
+
+ stack.push((workspace.lookup(&edge.target).unwrap(), new_root));
+ }
+ }
+ }
+
+ graph
+ }
+
+ pub fn preorder<'a: 'b, 'b>(
+ &'b self,
+ workspace: &'a Workspace,
+ ) -> impl DoubleEndedIterator<Item = &'a Document> + '_ {
+ std::iter::once(&self.start)
+ .chain(self.edges.iter().map(|group| &group.target))
+ .unique()
+ .filter_map(|uri| workspace.lookup(uri))
+ }
+
+ fn process(&mut self, workspace: &Workspace, start: Start) {
+ self.add_direct_links(workspace, start);
+ self.add_artifacts(workspace, start);
+ self.add_additional_files(workspace, start);
+ }
+
+ fn add_additional_files(&mut self, workspace: &Workspace, start: Start) {
+ for uri in &start.root.additional_files {
+ match workspace.lookup(uri) {
+ Some(target) => {
+ self.edges.push(Edge {
+ source: start.source.uri.clone(),
+ target: target.uri.clone(),
+ data: EdgeData::AdditionalFiles,
+ });
+ }
+ None => {
+ self.missing.push(uri.clone());
+ }
+ }
+ }
+ }
+
+ fn add_direct_links(&mut self, workspace: &Workspace, start: Start) -> Option<()> {
+ let data = start.source.data.as_tex()?;
+
+ for link in &data.semantics.links {
+ self.add_direct_link(workspace, start, link);
+ }
+
+ Some(())
+ }
+
+ fn add_direct_link(
+ &mut self,
+ workspace: &Workspace,
+ start: Start,
+ link: &semantics::tex::Link,
+ ) {
+ let home_dir = HOME_DIR.as_deref();
+
+ let stem = &link.path.text;
+ let mut file_names = vec![stem.clone()];
+ link.kind
+ .extensions()
+ .iter()
+ .map(|ext| format!("{stem}.{ext}"))
+ .for_each(|name| file_names.push(name));
+
+ let file_name_db = &workspace.distro().file_name_db;
+ let distro_files = file_names
+ .iter()
+ .filter_map(|name| file_name_db.get(name))
+ .filter(|path| {
+ home_dir.map_or(false, |dir| path.starts_with(dir))
+ || Language::from_path(path) == Some(Language::Bib)
+ })
+ .flat_map(Url::from_file_path);
+
+ for target_uri in file_names
+ .iter()
+ .flat_map(|file_name| start.root.src_dir.join(file_name))
+ .chain(distro_files)
+ {
+ match workspace.lookup(&target_uri) {
+ Some(target) => {
+ let new_root = link
+ .base_dir
+ .as_deref()
+ .and_then(|path| start.root.src_dir.join(path).ok())
+ .map(|dir| ProjectRoot::walk_and_find(workspace, &dir));
+
+ let link_data = DirectLinkData {
+ link: link.clone(),
+ new_root,
+ };
+
+ self.edges.push(Edge {
+ source: start.source.uri.clone(),
+ target: target.uri.clone(),
+ data: EdgeData::DirectLink(link_data),
+ });
+
+ break;
+ }
+ None => {
+ self.missing.push(target_uri);
+ }
+ };
+ }
+ }
+
+ fn add_artifacts(&mut self, workspace: &Workspace, start: Start) {
+ if start.source.language != Language::Tex {
+ return;
+ }
+
+ let root = start.root;
+ let relative_path = root.compile_dir.make_relative(&start.source.uri).unwrap();
+
+ self.add_artifact(
+ workspace,
+ start.source,
+ &root.aux_dir.join(&relative_path).unwrap(),
+ "aux",
+ );
+
+ self.add_artifact(workspace, start.source, &root.aux_dir, "aux");
+ self.add_artifact(workspace, start.source, &root.compile_dir, "aux");
+
+ self.add_artifact(
+ workspace,
+ start.source,
+ &root.log_dir.join(&relative_path).unwrap(),
+ "log",
+ );
+
+ self.add_artifact(workspace, start.source, &root.log_dir, "log");
+ self.add_artifact(workspace, start.source, &root.compile_dir, "log");
+ }
+
+ fn add_artifact(
+ &mut self,
+ workspace: &Workspace,
+ source: &Document,
+ dir: &Url,
+ extension: &str,
+ ) {
+ let mut path = PathBuf::from(
+ percent_decode_str(source.uri.path())
+ .decode_utf8_lossy()
+ .as_ref(),
+ );
+
+ path.set_extension(extension);
+ let Some(target_uri) = path
+ .file_name()
+ .and_then(OsStr::to_str)
+ .and_then(|name| dir.join(name).ok())
+ else {
+ return;
+ };
+
+ match workspace.lookup(&target_uri) {
+ Some(target) => {
+ self.edges.push(Edge {
+ source: source.uri.clone(),
+ target: target.uri.clone(),
+ data: EdgeData::Artifact,
+ });
+ }
+ None => {
+ self.missing.push(target_uri);
+ }
+ }
+ }
+}
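
A sketch of building and traversing the rewritten graph for a single start document; `workspace` and `uri` are assumed inputs, everything else is the API added in this file:

    use base_db::{deps, Workspace};
    use url::Url;

    fn dump_project(workspace: &Workspace, uri: &Url) -> Option<()> {
        let start = workspace.lookup(uri)?;
        let graph = deps::Graph::new(workspace, start);

        // Pre-order traversal over the unique reachable documents,
        // starting with `start` itself.
        for document in graph.preorder(workspace) {
            eprintln!("member: {}", document.uri);
        }

        // Link targets that could not be resolved to a workspace document;
        // `discover` uses this list to load additional files from disk.
        for target in &graph.missing {
            eprintln!("missing: {target}");
        }

        Some(())
    }
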
diff --git a/support/texlab/crates/base-db/src/deps/project.rs b/support/texlab/crates/base-db/src/deps/project.rs
new file mode 100644
index 0000000000..e1b941e67d
--- /dev/null
+++ b/support/texlab/crates/base-db/src/deps/project.rs
@@ -0,0 +1,39 @@
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+
+use crate::{Document, Workspace};
+
+#[derive(Debug, Clone)]
+pub struct Project<'a> {
+ pub documents: FxHashSet<&'a Document>,
+}
+
+impl<'a> Project<'a> {
+ pub fn from_child(workspace: &'a Workspace, child: &'a Document) -> Self {
+ let mut documents = FxHashSet::default();
+ for graph in workspace.graphs().values() {
+ if graph.preorder(workspace).contains(&child) {
+ documents.extend(graph.preorder(workspace));
+ }
+ }
+
+ Self { documents }
+ }
+}
+
+pub fn parents<'a>(workspace: &'a Workspace, child: &'a Document) -> FxHashSet<&'a Document> {
+ workspace
+ .iter()
+ .filter(|document| {
+ document
+ .data
+ .as_tex()
+ .map_or(false, |data| data.semantics.can_be_root)
+ })
+ .filter(|parent| {
+ let graph = &workspace.graphs()[&parent.uri];
+ let mut nodes = graph.preorder(workspace);
+ nodes.contains(&child)
+ })
+ .collect()
+}
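
`Project::from_child` and `parents` read the graphs cached on the workspace rather than rebuilding one per call. A usage sketch (the enclosing function is illustrative):

    use base_db::{deps, Document, Workspace};

    fn inspect(workspace: &Workspace, child: &Document) {
        // Every document that shares a dependency graph with `child`.
        let project = deps::Project::from_child(workspace, child);
        eprintln!("{} documents in project", project.documents.len());

        // Documents flagged `can_be_root` whose graph reaches `child`.
        for parent in deps::parents(workspace, child) {
            eprintln!("candidate root: {}", parent.uri);
        }
    }
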
diff --git a/support/texlab/crates/base-db/src/deps/root.rs b/support/texlab/crates/base-db/src/deps/root.rs
new file mode 100644
index 0000000000..7e2266e68c
--- /dev/null
+++ b/support/texlab/crates/base-db/src/deps/root.rs
@@ -0,0 +1,157 @@
+use url::Url;
+
+use crate::{DocumentData, Workspace};
+
+#[derive(PartialEq, Eq, Clone, Hash)]
+pub struct ProjectRoot {
+ pub compile_dir: Url,
+ pub src_dir: Url,
+ pub aux_dir: Url,
+ pub log_dir: Url,
+ pub pdf_dir: Url,
+ pub additional_files: Vec<Url>,
+}
+
+impl ProjectRoot {
+ pub fn walk_and_find(workspace: &Workspace, dir: &Url) -> Self {
+ let mut current = dir.clone();
+ loop {
+ let root = Self::from_rootfile(workspace, &current)
+ .or_else(|| Self::from_tectonic(workspace, &current))
+ .or_else(|| Self::from_latexmkrc(workspace, &current));
+
+ if let Some(root) = root {
+ break root;
+ }
+
+ let Ok(parent) = current.join("..") else {
+ break Self::from_config(workspace, &dir);
+ };
+
+ if current == parent {
+ break Self::from_config(workspace, &dir);
+ }
+
+ current = parent;
+ }
+ }
+
+ pub fn from_tectonic(workspace: &Workspace, dir: &Url) -> Option<Self> {
+ let exists = workspace
+ .iter()
+ .filter(|document| document.dir == *dir)
+ .any(|document| matches!(document.data, DocumentData::Tectonic));
+
+ if !exists {
+ return None;
+ }
+
+ let compile_dir = dir.clone();
+ let src_dir = dir.join("src/").unwrap();
+ let out_dir = dir.join("build/").unwrap();
+ let aux_dir = out_dir.clone();
+ let log_dir = out_dir.clone();
+ let pdf_dir = out_dir;
+ let additional_files = vec![
+ src_dir.join("_preamble.tex").unwrap(),
+ src_dir.join("_postamble.tex").unwrap(),
+ ];
+
+ Some(Self {
+ compile_dir,
+ src_dir,
+ aux_dir,
+ log_dir,
+ pdf_dir,
+ additional_files,
+ })
+ }
+
+ pub fn from_latexmkrc(workspace: &Workspace, dir: &Url) -> Option<Self> {
+ let rcfile = workspace
+ .iter()
+ .filter(|document| document.dir == *dir)
+ .find_map(|document| document.data.as_latexmkrc())?;
+
+ let compile_dir = dir.clone();
+ let src_dir = dir.clone();
+ let aux_dir = rcfile
+ .aux_dir
+ .as_ref()
+ .and_then(|path| append_dir(dir, path).ok())
+ .unwrap_or_else(|| dir.clone());
+
+ let out_dir = rcfile
+ .out_dir
+ .as_ref()
+ .and_then(|path| append_dir(dir, path).ok())
+ .unwrap_or_else(|| dir.clone());
+
+ let log_dir = out_dir.clone();
+ let pdf_dir = out_dir;
+ let additional_files = vec![];
+
+ Some(Self {
+ compile_dir,
+ src_dir,
+ aux_dir,
+ log_dir,
+ pdf_dir,
+ additional_files,
+ })
+ }
+
+ pub fn from_rootfile(workspace: &Workspace, dir: &Url) -> Option<Self> {
+ let exists = workspace
+ .iter()
+ .filter(|document| document.dir == *dir)
+ .any(|document| matches!(document.data, DocumentData::Root));
+
+ if !exists {
+ return None;
+ }
+
+ Some(Self::from_config(workspace, dir))
+ }
+
+ pub fn from_config(workspace: &Workspace, dir: &Url) -> Self {
+ let compile_dir = dir.clone();
+ let src_dir = dir.clone();
+ let config = workspace.config();
+ let aux_dir = append_dir(dir, &config.build.aux_dir).unwrap_or_else(|_| dir.clone());
+ let log_dir = append_dir(dir, &config.build.log_dir).unwrap_or_else(|_| dir.clone());
+ let pdf_dir = append_dir(dir, &config.build.pdf_dir).unwrap_or_else(|_| dir.clone());
+ let additional_files = vec![];
+
+ Self {
+ compile_dir,
+ src_dir,
+ aux_dir,
+ log_dir,
+ pdf_dir,
+ additional_files,
+ }
+ }
+}
+
+fn append_dir(dir: &Url, path: &str) -> Result<Url, url::ParseError> {
+ let mut path = String::from(path);
+ if !path.ends_with('/') {
+ path.push('/');
+ }
+
+ dir.join(&path)
+}
+
+impl std::fmt::Debug for ProjectRoot {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("ProjectRoot")
+ .field("compile_dir", &self.compile_dir.as_str())
+ .field("src_dir", &self.src_dir.as_str())
+ .field("aux_dir", &self.aux_dir.as_str())
+ .field("log_dir", &self.log_dir.as_str())
+ .field("pdf_dir", &self.pdf_dir.as_str())
+ .field("additional_files", &self.additional_files)
+ .finish()
+ }
+}
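
`ProjectRoot` bundles what used to be separate `current_dir`/`aux_dir`/`log_dir`/`pdf_dir` lookups on `Workspace`. A sketch of resolving it for a document's directory; apart from the assumed inputs, only the APIs from this file are used:

    use base_db::{deps::ProjectRoot, Workspace};
    use url::Url;

    fn show_root(workspace: &Workspace, dir: &Url) {
        // Walks up from `dir` looking for a root file, a Tectonic marker or a
        // latexmkrc; falls back to the configured build directories otherwise.
        let root = ProjectRoot::walk_and_find(workspace, dir);

        eprintln!("compile dir: {}", root.compile_dir);
        eprintln!("src dir:     {}", root.src_dir);
        eprintln!("aux dir:     {}", root.aux_dir);
        eprintln!("log dir:     {}", root.log_dir);
        eprintln!("pdf dir:     {}", root.pdf_dir);
    }
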
diff --git a/support/texlab/crates/base-db/src/graph.rs b/support/texlab/crates/base-db/src/graph.rs
deleted file mode 100644
index 2148c7e269..0000000000
--- a/support/texlab/crates/base-db/src/graph.rs
+++ /dev/null
@@ -1,191 +0,0 @@
-use std::{ffi::OsStr, path::PathBuf};
-
-use distro::Language;
-use itertools::Itertools;
-use once_cell::sync::Lazy;
-use percent_encoding::percent_decode_str;
-use rustc_hash::FxHashSet;
-use url::Url;
-
-use crate::{semantics, Document, DocumentData, Workspace};
-
-pub static HOME_DIR: Lazy<Option<PathBuf>> = Lazy::new(dirs::home_dir);
-
-#[derive(Debug, PartialEq, Eq, Clone, Hash)]
-pub struct Edge<'a> {
- pub source: &'a Document,
- pub target: &'a Document,
- pub weight: Option<EdgeWeight<'a>>,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Hash)]
-pub struct EdgeWeight<'a> {
- pub link: &'a semantics::tex::Link,
- pub old_base_dir: Url,
- pub new_base_dir: Url,
-}
-
-#[derive(Debug)]
-pub struct Graph<'a> {
- pub workspace: &'a Workspace,
- pub start: &'a Document,
- pub edges: Vec<Edge<'a>>,
- pub missing: Vec<Url>,
-}
-
-impl<'a> Graph<'a> {
- pub fn new(workspace: &'a Workspace, start: &'a Document) -> Self {
- let mut graph = Self {
- workspace,
- start,
- edges: Vec::new(),
- missing: Vec::new(),
- };
-
- let base_dir = workspace.current_dir(&start.dir);
- let mut stack = vec![(start, base_dir)];
- let mut visited = FxHashSet::default();
-
- while let Some((source, base_dir)) = stack.pop() {
- let index = graph.edges.len();
- graph.add_explicit_edges(source, &base_dir);
- for edge in &graph.edges[index..] {
- let Some(weight) = edge.weight.as_ref() else {
- continue;
- };
-
- if visited.insert(&edge.target.uri) {
- stack.push((edge.target, weight.new_base_dir.clone()));
- }
- }
-
- graph.add_implicit_edges(source, &base_dir);
- }
-
- graph
- }
-
- pub fn preorder(&self) -> impl DoubleEndedIterator<Item = &'a Document> + '_ {
- std::iter::once(self.start)
- .chain(self.edges.iter().map(|group| group.target))
- .unique_by(|document| &document.uri)
- }
-
- fn add_explicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
- let DocumentData::Tex(data) = &source.data else {
- return;
- };
-
- let uri = source.uri.as_str();
- let is_pkg = uri.ends_with(".sty") || uri.ends_with(".cls");
- if is_pkg && !self.workspace.config().syntax.follow_package_links {
- return;
- }
-
- for link in &data.semantics.links {
- self.add_link(source, base_dir, link);
- }
- }
-
- fn add_link(&mut self, source: &'a Document, base_dir: &Url, link: &'a semantics::tex::Link) {
- let home_dir = HOME_DIR.as_deref();
-
- let stem = &link.path.text;
- let mut file_names = vec![stem.clone()];
- link.kind
- .extensions()
- .iter()
- .map(|ext| format!("{stem}.{ext}"))
- .for_each(|name| file_names.push(name));
-
- let file_name_db = &self.workspace.distro().file_name_db;
- let distro_files = file_names
- .iter()
- .filter_map(|name| file_name_db.get(name))
- .filter(|path| {
- home_dir.map_or(false, |dir| path.starts_with(dir))
- || Language::from_path(path) == Some(Language::Bib)
- })
- .flat_map(Url::from_file_path);
-
- for target_uri in file_names
- .iter()
- .flat_map(|file_name| base_dir.join(file_name))
- .chain(distro_files)
- {
- match self.workspace.lookup(&target_uri) {
- Some(target) => {
- let new_base_dir = link
- .base_dir
- .as_deref()
- .and_then(|path| base_dir.join(path).ok())
- .unwrap_or_else(|| base_dir.clone());
-
- let weight = Some(EdgeWeight {
- link,
- old_base_dir: base_dir.clone(),
- new_base_dir,
- });
-
- self.edges.push(Edge {
- source,
- target,
- weight,
- });
-
- break;
- }
- None => {
- self.missing.push(target_uri);
- }
- };
- }
- }
-
- fn add_implicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
- if source.language == Language::Tex {
- let aux_dir = self.workspace.aux_dir(base_dir);
- let log_dir = self.workspace.log_dir(base_dir);
-
- let relative_path = base_dir.make_relative(&source.uri).unwrap();
-
- self.add_artifact(source, &aux_dir.join(&relative_path).unwrap(), "aux");
- self.add_artifact(source, &aux_dir, "aux");
- self.add_artifact(source, base_dir, "aux");
-
- self.add_artifact(source, &log_dir.join(&relative_path).unwrap(), "log");
- self.add_artifact(source, &log_dir, "log");
- self.add_artifact(source, base_dir, "log");
- }
- }
-
- fn add_artifact(&mut self, source: &'a Document, base_dir: &Url, extension: &str) {
- let mut path = PathBuf::from(
- percent_decode_str(source.uri.path())
- .decode_utf8_lossy()
- .as_ref(),
- );
-
- path.set_extension(extension);
- let Some(target_uri) = path
- .file_name()
- .and_then(OsStr::to_str)
- .and_then(|name| base_dir.join(name).ok())
- else {
- return;
- };
-
- match self.workspace.lookup(&target_uri) {
- Some(target) => {
- self.edges.push(Edge {
- source,
- target,
- weight: None,
- });
- }
- None => {
- self.missing.push(target_uri);
- }
- }
- }
-}
diff --git a/support/texlab/crates/base-db/src/lib.rs b/support/texlab/crates/base-db/src/lib.rs
index d2db7d4391..7e0233b2c3 100644
--- a/support/texlab/crates/base-db/src/lib.rs
+++ b/support/texlab/crates/base-db/src/lib.rs
@@ -1,7 +1,7 @@
mod config;
pub mod data;
+pub mod deps;
mod document;
-pub mod graph;
pub mod semantics;
pub mod util;
mod workspace;
@@ -11,13 +11,13 @@ pub use self::{config::*, document::*, workspace::*};
#[derive(Debug)]
pub struct FeatureParams<'a> {
pub document: &'a Document,
- pub project: Project<'a>,
+ pub project: deps::Project<'a>,
pub workspace: &'a Workspace,
}
impl<'a> FeatureParams<'a> {
pub fn new(workspace: &'a Workspace, document: &'a Document) -> Self {
- let project = workspace.project(document);
+ let project = deps::Project::from_child(workspace, document);
Self {
document,
project,
diff --git a/support/texlab/crates/base-db/src/semantics/auxiliary.rs b/support/texlab/crates/base-db/src/semantics/auxiliary.rs
index 719953974c..489a7d1ac6 100644
--- a/support/texlab/crates/base-db/src/semantics/auxiliary.rs
+++ b/support/texlab/crates/base-db/src/semantics/auxiliary.rs
@@ -1,6 +1,6 @@
use rowan::ast::AstNode;
use rustc_hash::FxHashMap;
-use syntax::latex;
+use syntax::latex::{self, HasCurly};
#[derive(Debug, Clone, Default)]
pub struct Semantics {
@@ -20,20 +20,24 @@ impl Semantics {
}
}
- fn process_label_number(&mut self, label_number: &latex::LabelNumber) {
- let Some(name) = label_number
+ fn process_label_number(&mut self, label_number: &latex::LabelNumber) -> Option<()> {
+ let name = label_number
.name()
.and_then(|group| group.key())
- .map(|key| key.to_string()) else { return };
+ .map(|key| key.to_string())?;
- let Some(text) = label_number
- .text()
- .map(|node| node.syntax().descendants())
- .into_iter()
- .flatten()
- .find(|node| node.kind() == latex::TEXT || node.kind() == latex::MIXED_GROUP)
- .map(|node| node.text().to_string()) else { return };
+ let group = label_number.text()?;
+ let group = group
+ .syntax()
+ .children()
+ .filter_map(latex::CurlyGroup::cast)
+ .find_map(|group| {
+ latex::Text::cast(group.syntax().first_child()?)?;
+ Some(group)
+ })?;
+ let text = group.content_text()?.replace('{', "").replace('}', "");
self.label_numbers.insert(name, text);
+ Some(())
}
}
diff --git a/support/texlab/crates/base-db/src/semantics/bib.rs b/support/texlab/crates/base-db/src/semantics/bib.rs
index 1a03fbb2c2..f89280658c 100644
--- a/support/texlab/crates/base-db/src/semantics/bib.rs
+++ b/support/texlab/crates/base-db/src/semantics/bib.rs
@@ -1,8 +1,8 @@
use bibtex_utils::field::text::TextFieldData;
use itertools::Itertools;
use rowan::{ast::AstNode, TextRange};
-use syntax::bibtex::{self, HasName, HasType, HasValue};
use rustc_hash::FxHashMap;
+use syntax::bibtex::{self, HasName, HasType, HasValue};
use crate::data::{BibtexEntryType, BibtexEntryTypeCategory};
@@ -33,9 +33,9 @@ impl Semantics {
let category = BibtexEntryType::find(type_token.text())
.map_or(BibtexEntryTypeCategory::Misc, |ty| ty.category);
- let field_values = entry
- .fields()
- .filter_map(|field| Some(TextFieldData::parse(&field.value()?, &self.expanded_defs)?.text));
+ let field_values = entry.fields().filter_map(|field| {
+ Some(TextFieldData::parse(&field.value()?, &self.expanded_defs)?.text)
+ });
let keywords = [name.text().into(), type_token.text().into()]
.into_iter()
diff --git a/support/texlab/crates/base-db/src/util/label.rs b/support/texlab/crates/base-db/src/util/label.rs
index af0889bab0..b813035acc 100644
--- a/support/texlab/crates/base-db/src/util/label.rs
+++ b/support/texlab/crates/base-db/src/util/label.rs
@@ -5,8 +5,9 @@ use rowan::TextRange;
use self::RenderedObject::*;
use crate::{
+ deps::Project,
semantics::tex::{Label, LabelObject},
- Project, Workspace,
+ Workspace,
};
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
diff --git a/support/texlab/crates/base-db/src/util/queries.rs b/support/texlab/crates/base-db/src/util/queries.rs
index 8c20589172..6ddf20104f 100644
--- a/support/texlab/crates/base-db/src/util/queries.rs
+++ b/support/texlab/crates/base-db/src/util/queries.rs
@@ -4,8 +4,9 @@ use rustc_hash::FxHashMap;
use url::Url;
use crate::{
+ deps::Project,
semantics::{bib, tex},
- Document, DocumentLocation, Project, Workspace,
+ Document, DocumentLocation, Workspace,
};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
@@ -184,7 +185,7 @@ impl<'a> Conflict<'a> {
let projects: FxHashMap<&Url, Project> = workspace
.iter()
- .map(|document| (&document.uri, workspace.project(document)))
+ .map(|document| (&document.uri, Project::from_child(workspace, document)))
.collect();
let mut conflicts = Vec::new();
diff --git a/support/texlab/crates/base-db/src/workspace.rs b/support/texlab/crates/base-db/src/workspace.rs
index 0fe347f1b3..1afd4712e9 100644
--- a/support/texlab/crates/base-db/src/workspace.rs
+++ b/support/texlab/crates/base-db/src/workspace.rs
@@ -4,14 +4,12 @@ use std::{
};
use distro::{Distro, Language};
-use itertools::Itertools;
use line_index::LineCol;
use rowan::{TextLen, TextRange};
-use rustc_hash::FxHashSet;
-use syntax::latexmkrc::LatexmkrcData;
+use rustc_hash::{FxHashMap, FxHashSet};
use url::Url;
-use crate::{graph, Config, Document, DocumentData, DocumentParams, Owner};
+use crate::{deps, Config, Document, DocumentParams, Owner};
#[derive(Debug, Default)]
pub struct Workspace {
@@ -19,6 +17,7 @@ pub struct Workspace {
config: Config,
distro: Distro,
folders: Vec<PathBuf>,
+ graphs: FxHashMap<Url, deps::Graph>,
}
impl Workspace {
@@ -47,6 +46,10 @@ impl Workspace {
&self.distro
}
+ pub fn graphs(&self) -> &FxHashMap<Url, deps::Graph> {
+ &self.graphs
+ }
+
pub fn open(
&mut self,
uri: Url,
@@ -65,6 +68,11 @@ impl Workspace {
cursor,
config: &self.config,
}));
+
+ self.graphs = self
+ .iter()
+ .map(|start| (start.uri.clone(), deps::Graph::new(self, start)))
+ .collect();
}
pub fn load(&mut self, path: &Path, language: Language) -> std::io::Result<()> {
@@ -114,83 +122,6 @@ impl Workspace {
Some(())
}
- pub fn watch(
- &mut self,
- watcher: &mut dyn notify::Watcher,
- watched_dirs: &mut FxHashSet<PathBuf>,
- ) {
- self.iter()
- .filter(|document| document.uri.scheme() == "file")
- .flat_map(|document| {
- let current_dir = &self.current_dir(&document.dir);
- let doc_dir = document.dir.to_file_path();
- let aux_dir = self.aux_dir(current_dir).to_file_path();
- let log_dir = self.log_dir(current_dir).to_file_path();
- [aux_dir, log_dir, doc_dir]
- })
- .flatten()
- .for_each(|path| {
- if !watched_dirs.contains(&path) {
- let _ = watcher.watch(&path, notify::RecursiveMode::NonRecursive);
- watched_dirs.insert(path);
- }
- });
- }
-
- pub fn current_dir(&self, base_dir: &Url) -> Url {
- let root_dir = self.config.root_dir.as_deref();
- if let Some(dir) = root_dir.and_then(|path| base_dir.join(path).ok()) {
- return dir;
- }
-
- self.iter()
- .filter(|document| matches!(document.data, DocumentData::Root | DocumentData::Tectonic))
- .flat_map(|document| document.uri.join("."))
- .find(|root_dir| base_dir.as_str().starts_with(root_dir.as_str()))
- .unwrap_or_else(|| base_dir.clone())
- }
-
- pub fn aux_dir(&self, base_dir: &Url) -> Url {
- self.output_dir(base_dir, &self.config.build.aux_dir, |data| {
- data.aux_dir.as_deref()
- })
- }
-
- pub fn log_dir(&self, base_dir: &Url) -> Url {
- self.output_dir(base_dir, &self.config.build.log_dir, |_| None)
- }
-
- pub fn pdf_dir(&self, base_dir: &Url) -> Url {
- self.output_dir(base_dir, &self.config.build.pdf_dir, |_| None)
- }
-
- fn current_latexmkrc(&self, base_dir: &Url) -> Option<&LatexmkrcData> {
- self.documents
- .iter()
- .filter(|document| document.language == Language::Latexmkrc)
- .find(|document| document.uri.join(".").as_ref() == Ok(base_dir))
- .and_then(|document| document.data.as_latexmkrc())
- }
-
- fn output_dir(
- &self,
- base_dir: &Url,
- config: &str,
- extract_latexmkrc: impl FnOnce(&LatexmkrcData) -> Option<&str>,
- ) -> Url {
- let mut dir: String = self
- .current_latexmkrc(base_dir)
- .and_then(|data| extract_latexmkrc(data).or_else(|| data.out_dir.as_deref()))
- .unwrap_or(config)
- .into();
-
- if !dir.ends_with('/') {
- dir.push('/');
- }
-
- base_dir.join(&dir).unwrap_or_else(|_| base_dir.clone())
- }
-
pub fn contains(&self, path: &Path) -> bool {
if self.folders.is_empty() {
return true;
@@ -199,35 +130,6 @@ impl Workspace {
self.folders.iter().any(|dir| path.starts_with(dir))
}
- pub fn project(&self, child: &Document) -> Project {
- let mut documents = FxHashSet::default();
- for graph in self
- .iter()
- .map(|start| graph::Graph::new(self, start))
- .filter(|graph| graph.preorder().contains(&child))
- {
- documents.extend(graph.preorder());
- }
-
- Project { documents }
- }
-
- pub fn parents(&self, child: &Document) -> FxHashSet<&Document> {
- self.iter()
- .filter(|document| {
- let DocumentData::Tex(data) = &document.data else {
- return false;
- };
- data.semantics.can_be_root
- })
- .filter(|parent| {
- let graph = graph::Graph::new(self, parent);
- let mut nodes = graph.preorder();
- nodes.contains(&child)
- })
- .collect()
- }
-
pub fn set_config(&mut self, config: Config) {
self.config = config;
self.reload();
@@ -280,94 +182,4 @@ impl Workspace {
self.documents.insert(document);
Some(())
}
-
- pub fn discover(&mut self, checked_paths: &mut FxHashSet<PathBuf>) {
- loop {
- let mut changed = false;
- changed |= self.discover_parents(checked_paths);
- changed |= self.discover_children(checked_paths);
- if !changed {
- break;
- }
- }
- }
-
- fn discover_parents(&mut self, checked_paths: &mut FxHashSet<PathBuf>) -> bool {
- let dirs = self
- .iter()
- .filter(|document| document.language != Language::Bib)
- .filter_map(|document| document.path.as_deref())
- .flat_map(|path| path.ancestors().skip(1))
- .filter(|path| self.contains(path))
- .map(|path| path.to_path_buf())
- .collect::<FxHashSet<_>>();
-
- let mut changed = false;
- for dir in dirs {
- if self
- .iter()
- .filter(|document| matches!(document.language, Language::Root | Language::Tectonic))
- .filter_map(|document| document.path.as_deref())
- .filter_map(|path| path.parent())
- .any(|marker| dir.starts_with(marker))
- {
- continue;
- }
-
- let Ok(entries) = std::fs::read_dir(dir) else {
- continue;
- };
-
- for file in entries
- .flatten()
- .filter(|entry| entry.file_type().map_or(false, |type_| type_.is_file()))
- .map(|entry| entry.path())
- {
- let Some(lang) = Language::from_path(&file) else {
- continue;
- };
-
- if !matches!(
- lang,
- Language::Tex | Language::Root | Language::Tectonic | Language::Latexmkrc
- ) {
- continue;
- }
-
- if self.lookup_path(&file).is_none() && file.exists() {
- changed |= self.load(&file, lang).is_ok();
- checked_paths.insert(file);
- }
- }
- }
-
- changed
- }
-
- fn discover_children(&mut self, checked_paths: &mut FxHashSet<PathBuf>) -> bool {
- let files = self
- .iter()
- .map(|start| graph::Graph::new(self, start))
- .flat_map(|graph| graph.missing)
- .filter(|uri| uri.scheme() == "file")
- .flat_map(|uri| uri.to_file_path())
- .collect::<FxHashSet<_>>();
-
- let mut changed = false;
- for file in files {
- let language = Language::from_path(&file).unwrap_or(Language::Tex);
-
- if self.lookup_path(&file).is_none() && file.exists() {
- changed |= self.load(&file, language).is_ok();
- checked_paths.insert(file);
- }
- }
-
- changed
- }
-}
-
-#[derive(Debug)]
-pub struct Project<'a> {
- pub documents: FxHashSet<&'a Document>,
}
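
With the dependency graphs now cached on the workspace and rebuilt at the end of `open`, consumers read them through the new `graphs()` accessor instead of constructing graphs ad hoc. A small sketch, assuming a populated workspace:

    use base_db::Workspace;

    /// Counts link targets that are referenced somewhere in the workspace but
    /// not resolved to a loaded document.
    fn count_missing_targets(workspace: &Workspace) -> usize {
        workspace
            .graphs()
            .values()
            .flat_map(|graph| graph.missing.iter())
            .count()
    }
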
diff --git a/support/texlab/crates/bibtex-utils/Cargo.toml b/support/texlab/crates/bibtex-utils/Cargo.toml
index 8b115bf207..f0158c18a6 100644
--- a/support/texlab/crates/bibtex-utils/Cargo.toml
+++ b/support/texlab/crates/bibtex-utils/Cargo.toml
@@ -13,7 +13,7 @@ itertools = "0.12.0"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
-unicode-normalization = "0.1.22"
+unicode-normalization = "0.1.23"
[lib]
doctest = false
diff --git a/support/texlab/crates/bibtex-utils/src/field.rs b/support/texlab/crates/bibtex-utils/src/field.rs
index 71bd25f6d0..b10278cf76 100644
--- a/support/texlab/crates/bibtex-utils/src/field.rs
+++ b/support/texlab/crates/bibtex-utils/src/field.rs
@@ -6,4 +6,4 @@ pub mod number;
pub mod text;
/// A cache used to accelerate related field parses.
-pub type FieldParseCache = FxHashMap<String, String>;
\ No newline at end of file
+pub type FieldParseCache = FxHashMap<String, String>;
diff --git a/support/texlab/crates/bibtex-utils/src/field/text.rs b/support/texlab/crates/bibtex-utils/src/field/text.rs
index b3eabaff0e..89f7b569ca 100644
--- a/support/texlab/crates/bibtex-utils/src/field/text.rs
+++ b/support/texlab/crates/bibtex-utils/src/field/text.rs
@@ -1,8 +1,8 @@
use rowan::{ast::AstNode, NodeOrToken};
use syntax::bibtex::{
- Accent, Command, CurlyGroup, HasAccentName, HasCommandName, HasName, HasWord, Join,
- Literal, QuoteGroup, SyntaxKind::*, SyntaxToken, Value,
+ Accent, Command, CurlyGroup, HasAccentName, HasCommandName, HasName, HasWord, Join, Literal,
+ QuoteGroup, SyntaxKind::*, SyntaxToken, Value,
};
use super::FieldParseCache;
diff --git a/support/texlab/crates/citeproc/Cargo.toml b/support/texlab/crates/citeproc/Cargo.toml
index 9d356b3778..bfc24fc83d 100644
--- a/support/texlab/crates/citeproc/Cargo.toml
+++ b/support/texlab/crates/citeproc/Cargo.toml
@@ -15,7 +15,7 @@ rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
titlecase = "2.2.1"
-unicode-normalization = "0.1.22"
+unicode-normalization = "0.1.23"
url = "2.5.0"
[lib]
diff --git a/support/texlab/crates/citeproc/src/entry.rs b/support/texlab/crates/citeproc/src/entry.rs
index f544858efc..b44c48b6ce 100644
--- a/support/texlab/crates/citeproc/src/entry.rs
+++ b/support/texlab/crates/citeproc/src/entry.rs
@@ -1,3 +1,4 @@
+use base_db::semantics::bib::Semantics;
use bibtex_utils::field::{
author::{AuthorField, AuthorFieldData},
date::{DateField, DateFieldData},
@@ -5,7 +6,6 @@ use bibtex_utils::field::{
text::{TextField, TextFieldData},
FieldParseCache,
};
-use base_db::semantics::bib::Semantics;
use rustc_hash::FxHashMap;
use syntax::bibtex::{Entry, Field, HasName, HasType, HasValue, Value};
@@ -133,28 +133,48 @@ impl EntryData {
.or_else(|| self.parse_text_field(name, &value, expanded_defs))
}
- fn parse_author_field(&mut self, name: &str, value: &Value, expanded_defs: &FieldParseCache) -> Option<()> {
+ fn parse_author_field(
+ &mut self,
+ name: &str,
+ value: &Value,
+ expanded_defs: &FieldParseCache,
+ ) -> Option<()> {
let name = AuthorField::parse(name)?;
let data = AuthorFieldData::parse(value, expanded_defs)?;
self.author.insert(name, data);
Some(())
}
- fn parse_date_field(&mut self, name: &str, value: &Value, expanded_defs: &FieldParseCache) -> Option<()> {
+ fn parse_date_field(
+ &mut self,
+ name: &str,
+ value: &Value,
+ expanded_defs: &FieldParseCache,
+ ) -> Option<()> {
let name = DateField::parse(name)?;
let data = DateFieldData::parse(value, expanded_defs)?;
self.date.insert(name, data);
Some(())
}
- fn parse_number_field(&mut self, name: &str, value: &Value, expanded_defs: &FieldParseCache) -> Option<()> {
+ fn parse_number_field(
+ &mut self,
+ name: &str,
+ value: &Value,
+ expanded_defs: &FieldParseCache,
+ ) -> Option<()> {
let name = NumberField::parse(name)?;
let data = NumberFieldData::parse(value, expanded_defs)?;
self.number.insert(name, data);
Some(())
}
- fn parse_text_field(&mut self, name: &str, value: &Value, expanded_defs: &FieldParseCache) -> Option<()> {
+ fn parse_text_field(
+ &mut self,
+ name: &str,
+ value: &Value,
+ expanded_defs: &FieldParseCache,
+ ) -> Option<()> {
let name = TextField::parse(name).unwrap_or(TextField::Unknown);
let data = TextFieldData::parse(value, expanded_defs)?;
self.text.insert(name, data);
diff --git a/support/texlab/crates/commands/Cargo.toml b/support/texlab/crates/commands/Cargo.toml
index 512a05ee61..bebcde27af 100644
--- a/support/texlab/crates/commands/Cargo.toml
+++ b/support/texlab/crates/commands/Cargo.toml
@@ -10,14 +10,14 @@ rust-version.workspace = true
anyhow = "1.0.72"
base-db = { path = "../base-db" }
bstr = "1.9.0"
-crossbeam-channel = "0.5.11"
+crossbeam-channel = "0.5.12"
itertools = "0.12.0"
libc = "0.2.153"
log = "0.4.21"
rowan = "0.15.15"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
-thiserror = "1.0.57"
+thiserror = "1.0.58"
url = "2.5.0"
[dev-dependencies]
diff --git a/support/texlab/crates/commands/src/build.rs b/support/texlab/crates/commands/src/build.rs
index 47e156ef72..34d39f26e4 100644
--- a/support/texlab/crates/commands/src/build.rs
+++ b/support/texlab/crates/commands/src/build.rs
@@ -6,7 +6,10 @@ use std::{
};
use anyhow::Result;
-use base_db::Workspace;
+use base_db::{
+ deps::{self, ProjectRoot},
+ Workspace,
+};
use bstr::io::BufReadExt;
use crossbeam_channel::Sender;
use thiserror::Error;
@@ -39,8 +42,7 @@ impl BuildCommand {
return Err(BuildError::NotFound(uri.clone()));
};
- let document = workspace
- .parents(document)
+ let document = deps::parents(workspace, document)
.into_iter()
.next()
.unwrap_or(document);
@@ -53,7 +55,9 @@ impl BuildCommand {
let program = config.program.clone();
let args = replace_placeholders(&config.args, &[('f', path)]);
- let Ok(working_dir) = workspace.current_dir(&document.dir).to_file_path() else {
+ let root = ProjectRoot::walk_and_find(workspace, &document.dir);
+
+ let Ok(working_dir) = root.compile_dir.to_file_path() else {
return Err(BuildError::NotLocal(document.uri.clone()));
};
diff --git a/support/texlab/crates/commands/src/clean.rs b/support/texlab/crates/commands/src/clean.rs
index 706e7dfaf1..d4ec6b24f4 100644
--- a/support/texlab/crates/commands/src/clean.rs
+++ b/support/texlab/crates/commands/src/clean.rs
@@ -1,7 +1,7 @@
use std::process::Stdio;
use anyhow::Result;
-use base_db::{Document, Workspace};
+use base_db::{deps::ProjectRoot, Document, Workspace};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum CleanTarget {
@@ -21,7 +21,7 @@ impl CleanCommand {
anyhow::bail!("document '{}' is not a local file", document.uri)
};
- let base_dir = workspace.current_dir(&document.dir);
+ let root = ProjectRoot::walk_and_find(workspace, &document.dir);
let flag = match target {
CleanTarget::Auxiliary => "-c",
@@ -29,8 +29,8 @@ impl CleanCommand {
};
let out_dir = match target {
- CleanTarget::Auxiliary => workspace.aux_dir(&base_dir),
- CleanTarget::Artifacts => workspace.pdf_dir(&base_dir),
+ CleanTarget::Auxiliary => root.aux_dir,
+ CleanTarget::Artifacts => root.pdf_dir,
};
let out_dir_path = out_dir.to_file_path().unwrap();
diff --git a/support/texlab/crates/commands/src/dep_graph.rs b/support/texlab/crates/commands/src/dep_graph.rs
index 1388756ddf..fe9ff3829c 100644
--- a/support/texlab/crates/commands/src/dep_graph.rs
+++ b/support/texlab/crates/commands/src/dep_graph.rs
@@ -1,22 +1,20 @@
use std::io::Write;
use anyhow::Result;
-use base_db::{graph, Document, Workspace};
+use base_db::Workspace;
use itertools::Itertools;
use rustc_hash::FxHashMap;
pub fn show_dependency_graph(workspace: &Workspace) -> Result<String> {
- let documents = workspace
- .iter()
- .enumerate()
- .map(|(i, doc)| (doc, format!("v{i:0>5}")))
- .collect::<FxHashMap<&Document, String>>();
+ let mut documents = FxHashMap::default();
let mut writer = Vec::new();
writeln!(&mut writer, "digraph G {{")?;
writeln!(&mut writer, "rankdir = LR;")?;
- for (document, node) in &documents {
+ for (i, document) in workspace.iter().enumerate() {
+ let node = format!("v{i:0>5}");
+
let label = document.uri.as_str();
let shape = if document
.data
@@ -35,19 +33,22 @@ pub fn show_dependency_graph(workspace: &Workspace) -> Result<String> {
};
writeln!(&mut writer, "\t{node} [label=\"{label}\", shape={shape}];")?;
+ documents.insert(&document.uri, node);
}
for edge in workspace
- .iter()
- .flat_map(|start| graph::Graph::new(workspace, start).edges)
+ .graphs()
+ .values()
+ .flat_map(|graph| &graph.edges)
.unique()
{
- let source = &documents[edge.source];
- let target = &documents[edge.target];
- let label = edge
- .weight
- .as_ref()
- .map_or("<artifact>", |weight| &weight.link.path.text);
+ let source = &documents[&edge.source];
+ let target = &documents[&edge.target];
+ let label = match &edge.data {
+ base_db::deps::EdgeData::DirectLink(data) => &data.link.path.text,
+ base_db::deps::EdgeData::AdditionalFiles => "<project>",
+ base_db::deps::EdgeData::Artifact => "<artifact>",
+ };
writeln!(&mut writer, "\t{source} -> {target} [label=\"{label}\"];")?;
}
diff --git a/support/texlab/crates/commands/src/fwd_search.rs b/support/texlab/crates/commands/src/fwd_search.rs
index 4e3450681c..f1991cd48e 100644
--- a/support/texlab/crates/commands/src/fwd_search.rs
+++ b/support/texlab/crates/commands/src/fwd_search.rs
@@ -1,7 +1,10 @@
use std::{path::PathBuf, process::Stdio};
use anyhow::Result;
-use base_db::{Document, Workspace};
+use base_db::{
+ deps::{self, ProjectRoot},
+ Document, Workspace,
+};
use thiserror::Error;
use url::Url;
@@ -53,7 +56,10 @@ impl ForwardSearch {
.lookup(uri)
.ok_or_else(|| ForwardSearchError::TexNotFound(uri.clone()))?;
- let parent = workspace.parents(child).into_iter().next().unwrap_or(child);
+ let parent = deps::parents(workspace, child)
+ .into_iter()
+ .next()
+ .unwrap_or(child);
log::debug!("[FwdSearch] root_document={}", parent.uri,);
@@ -78,12 +84,12 @@ impl ForwardSearch {
}
fn find_pdf(workspace: &Workspace, document: &Document) -> Result<PathBuf, ForwardSearchError> {
- let base_dir = workspace.current_dir(&document.dir);
- let pdf_dir = workspace.pdf_dir(&base_dir);
+ let root = ProjectRoot::walk_and_find(workspace, &document.dir);
- log::debug!("[FwdSearch] base_dir={base_dir}, pdf_dir={pdf_dir}");
+ log::debug!("[FwdSearch] root={root:#?}");
- let pdf_dir = pdf_dir
+ let pdf_dir = root
+ .pdf_dir
.to_file_path()
.map_err(|()| ForwardSearchError::InvalidPath(document.uri.clone()))?;
diff --git a/support/texlab/crates/completion/src/lib.rs b/support/texlab/crates/completion/src/lib.rs
index 6acec28d49..7fc5bba174 100644
--- a/support/texlab/crates/completion/src/lib.rs
+++ b/support/texlab/crates/completion/src/lib.rs
@@ -45,6 +45,7 @@ impl<'a> CompletionItem<'a> {
#[derive(Debug, PartialEq, Eq)]
pub enum CompletionItemData<'a> {
Command(CommandData<'a>),
+ CommandLikeDelimiter(&'a str, &'a str),
BeginEnvironment,
Citation(CitationData<'a>),
Environment(EnvironmentData<'a>),
@@ -66,6 +67,7 @@ impl<'a> CompletionItemData<'a> {
pub fn label<'b: 'a>(&'b self) -> &'a str {
match self {
Self::Command(data) => data.name,
+ Self::CommandLikeDelimiter(left, _) => left,
Self::BeginEnvironment => "begin",
Self::Citation(data) => &data.entry.name.text,
Self::Environment(data) => data.name,
@@ -83,6 +85,30 @@ impl<'a> CompletionItemData<'a> {
Self::TikzLibrary(name) => name,
}
}
+
+ /// Returns a number that can be used to sort the completion items further before resorting to the label.
+ /// This is useful for making snippets more visible.
+ pub fn sort_index(&self) -> u8 {
+ match self {
+ Self::Command(_) => 1,
+ Self::CommandLikeDelimiter(_, _) => 0,
+ Self::BeginEnvironment => 1,
+ Self::Citation(_) => 1,
+ Self::Environment(_) => 1,
+ Self::GlossaryEntry(_) => 1,
+ Self::Label(_) => 1,
+ Self::Color(_) => 1,
+ Self::ColorModel(_) => 1,
+ Self::File(_) => 1,
+ Self::Directory(_) => 1,
+ Self::Argument(_) => 1,
+ Self::Package(_) => 1,
+ Self::DocumentClass(_) => 1,
+ Self::EntryType(_) => 1,
+ Self::Field(_) => 1,
+ Self::TikzLibrary(_) => 1,
+ }
+ }
}
#[derive(PartialEq, Eq)]
@@ -171,7 +197,8 @@ pub fn complete<'a>(params: &'a CompletionParams<'a>) -> CompletionResult<'a> {
providers::complete_citations(params, &mut builder);
providers::complete_acronyms(params, &mut builder);
providers::complete_glossaries(params, &mut builder);
- providers::complete_labels(params, &mut builder);
+ providers::complete_label_references(params, &mut builder);
+ providers::complete_label_definitions(params, &mut builder);
providers::complete_colors(params, &mut builder);
providers::complete_color_models(params, &mut builder);
providers::complete_includes(params, &mut builder);
diff --git a/support/texlab/crates/completion/src/providers.rs b/support/texlab/crates/completion/src/providers.rs
index 5160c85d85..dc60907f2e 100644
--- a/support/texlab/crates/completion/src/providers.rs
+++ b/support/texlab/crates/completion/src/providers.rs
@@ -9,7 +9,8 @@ mod field;
mod glossary;
mod import;
mod include;
-mod label;
+mod label_def;
+mod label_ref;
mod tikz_library;
pub use argument::complete_arguments;
@@ -23,5 +24,6 @@ pub use field::complete_fields;
pub use glossary::{complete_acronyms, complete_glossaries};
pub use import::complete_imports;
pub use include::complete_includes;
-pub use label::complete_labels;
+pub use label_def::complete_label_definitions;
+pub use label_ref::complete_label_references;
pub use tikz_library::complete_tikz_libraries;
diff --git a/support/texlab/crates/completion/src/providers/command.rs b/support/texlab/crates/completion/src/providers/command.rs
index ddbf8bc206..f404d42ab3 100644
--- a/support/texlab/crates/completion/src/providers/command.rs
+++ b/support/texlab/crates/completion/src/providers/command.rs
@@ -7,6 +7,8 @@ use crate::{
CommandData, CompletionItem, CompletionItemData, CompletionParams,
};
+static DELIMITERS: &[(&str, &str)] = &[("(", ")"), ("[", "]"), ("{", "\\}")];
+
pub fn complete_commands<'a>(
params: &'a CompletionParams<'a>,
builder: &mut CompletionBuilder<'a>,
@@ -20,6 +22,7 @@ pub fn complete_commands<'a>(
});
proc.add_begin_snippet();
+ proc.add_delimiters();
proc.add_library();
proc.add_user();
Some(())
@@ -39,6 +42,20 @@ impl<'a, 'b> Processor<'a, 'b> {
Some(())
}
+ pub fn add_delimiters(&mut self) {
+ for (left, right) in DELIMITERS {
+ let Some(score) = self.0.builder.matcher.score(&left, &self.0.cursor.text) else {
+ continue;
+ };
+
+ let data = CompletionItemData::CommandLikeDelimiter(left, right);
+ self.0
+ .builder
+ .items
+ .push(CompletionItem::new_simple(score, self.0.cursor.range, data));
+ }
+ }
+
pub fn add_library(&mut self) -> Option<()> {
for package in included_packages(&self.0.params.feature) {
let commands_with_score = package.commands.iter().filter_map(|command| {
diff --git a/support/texlab/crates/completion/src/providers/include.rs b/support/texlab/crates/completion/src/providers/include.rs
index f8bb55dfeb..6246624733 100644
--- a/support/texlab/crates/completion/src/providers/include.rs
+++ b/support/texlab/crates/completion/src/providers/include.rs
@@ -3,7 +3,10 @@ use std::{
path::{Path, PathBuf},
};
-use base_db::{DocumentData, FeatureParams};
+use base_db::{
+ deps::{self, ProjectRoot},
+ DocumentData, FeatureParams,
+};
use rowan::{ast::AstNode, TextLen, TextRange};
use syntax::latex;
@@ -112,13 +115,13 @@ fn current_dir(
graphics_path: Option<&str>,
) -> Option<PathBuf> {
let workspace = &params.workspace;
- let parent = workspace
- .parents(params.document)
+ let parent = deps::parents(&workspace, params.document)
.iter()
.next()
.map_or(params.document, Clone::clone);
- let path = workspace.current_dir(&parent.dir).to_file_path().ok()?;
+ let root = ProjectRoot::walk_and_find(workspace, &parent.dir);
+ let path = root.src_dir.to_file_path().ok()?;
let mut path = PathBuf::from(path.to_str()?.replace('\\', "/"));
diff --git a/support/texlab/crates/completion/src/providers/label_def.rs b/support/texlab/crates/completion/src/providers/label_def.rs
new file mode 100644
index 0000000000..a8966ed62f
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/label_def.rs
@@ -0,0 +1,60 @@
+use base_db::{
+ semantics::{
+ tex::{Label, LabelKind},
+ Span,
+ },
+ util::queries::Object,
+};
+use rowan::ast::AstNode;
+use rustc_hash::FxHashSet;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word, CompletionBuilder},
+ CompletionParams,
+};
+
+pub fn complete_label_definitions<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_definition(params)?;
+
+ let label_defs: FxHashSet<&str> = Label::find_all(&params.feature.project)
+ .filter(|(_, label)| label.kind == LabelKind::Definition)
+ .map(|(_, label)| label.name_text())
+ .collect();
+
+ let label_refs: FxHashSet<&str> = Label::find_all(&params.feature.project)
+ .filter(|(_, label)| label.kind == LabelKind::Reference)
+ .map(|(_, label)| label.name_text())
+ .collect();
+
+ for label in label_refs.difference(&label_defs) {
+ let Some(score) = builder.matcher.score(label, &cursor.text) else {
+ continue;
+ };
+
+ let data = crate::LabelData {
+ name: label,
+ header: None,
+ footer: None,
+ object: None,
+ keywords: label.to_string(),
+ };
+
+ builder.items.push(crate::CompletionItem::new_simple(
+ score,
+ cursor.range,
+ crate::CompletionItemData::Label(data),
+ ));
+ }
+
+ Some(())
+}
+
+fn find_definition(params: &CompletionParams) -> Option<Span> {
+ let (cursor, group) = find_curly_group_word(params)?;
+ latex::LabelDefinition::cast(group.syntax().parent()?)?;
+ Some(cursor)
+}
diff --git a/support/texlab/crates/completion/src/providers/label.rs b/support/texlab/crates/completion/src/providers/label_ref.rs
index 8af2873457..6f71ae3241 100644
--- a/support/texlab/crates/completion/src/providers/label.rs
+++ b/support/texlab/crates/completion/src/providers/label_ref.rs
@@ -11,7 +11,7 @@ use crate::{
CompletionItem, CompletionItemData, CompletionParams,
};
-pub fn complete_labels<'a>(
+pub fn complete_label_references<'a>(
params: &'a CompletionParams<'a>,
builder: &mut CompletionBuilder<'a>,
) -> Option<()> {
diff --git a/support/texlab/crates/completion/src/tests.rs b/support/texlab/crates/completion/src/tests.rs
index 84cd06447b..f41c9f5e6b 100644
--- a/support/texlab/crates/completion/src/tests.rs
+++ b/support/texlab/crates/completion/src/tests.rs
@@ -726,40 +726,34 @@ fn component_command_simple() {
%! main.tex
\
|"#,
- expect![[r##"
+ expect![[r#"
[
- Command(
- CommandData {
- name: "!",
- package: [],
- },
+ CommandLikeDelimiter(
+ "(",
+ ")",
),
- Command(
- CommandData {
- name: "\"",
- package: [],
- },
+ CommandLikeDelimiter(
+ "[",
+ "]",
),
- Command(
- CommandData {
- name: "#",
- package: [],
- },
+ CommandLikeDelimiter(
+ "{",
+ "\\}",
),
Command(
CommandData {
- name: "$",
+ name: "!",
package: [],
},
),
Command(
CommandData {
- name: "%",
+ name: "\"",
package: [],
},
),
]
- "##]],
+ "#]],
);
}
@@ -1613,6 +1607,31 @@ Lorem ipsum dolor sit amet.
}
#[test]
+fn label_undefined() {
+ check(
+ r#"
+%! foo.tex
+\label{f}
+ |
+ ^
+\ref{foo}"#,
+ expect![[r#"
+ [
+ Label(
+ LabelData {
+ name: "foo",
+ header: None,
+ footer: None,
+ object: None,
+ keywords: "foo",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
fn theorem_begin() {
check(
r#"
diff --git a/support/texlab/crates/completion/src/util/builder.rs b/support/texlab/crates/completion/src/util/builder.rs
index 4ae0818e58..d219a5f2b1 100644
--- a/support/texlab/crates/completion/src/util/builder.rs
+++ b/support/texlab/crates/completion/src/util/builder.rs
@@ -32,6 +32,7 @@ impl<'a> CompletionBuilder<'a> {
b.preselect
.cmp(&a.preselect)
.then_with(|| b.score.cmp(&a.score))
+ .then_with(|| a.data.sort_index().cmp(&b.data.sort_index()))
.then_with(|| a.data.label().cmp(b.data.label()))
});
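
The extra `then_with` slots the new `sort_index` between the fuzzy score and the label, so the delimiter snippets (index 0) surface ahead of ordinary commands (index 1) when scores tie. A self-contained illustration of the same ordering on a hypothetical item type (names are not texlab's):

    use std::cmp::Ordering;

    struct Item {
        preselect: bool,
        score: i64,
        sort_index: u8,
        label: &'static str,
    }

    fn compare(a: &Item, b: &Item) -> Ordering {
        b.preselect
            .cmp(&a.preselect)                             // preselected first
            .then_with(|| b.score.cmp(&a.score))           // higher score first
            .then_with(|| a.sort_index.cmp(&b.sort_index)) // snippets before commands
            .then_with(|| a.label.cmp(b.label))            // alphabetical fallback
    }
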
diff --git a/support/texlab/crates/definition/src/include.rs b/support/texlab/crates/definition/src/include.rs
index bc9311ba01..c9a5a69899 100644
--- a/support/texlab/crates/definition/src/include.rs
+++ b/support/texlab/crates/definition/src/include.rs
@@ -1,3 +1,4 @@
+use base_db::deps;
use rowan::TextRange;
use crate::DefinitionContext;
@@ -7,18 +8,22 @@ use super::DefinitionResult;
pub(super) fn goto_definition(context: &mut DefinitionContext) -> Option<()> {
let feature = &context.params.feature;
let start = feature.document;
- let parents = feature.workspace.parents(start);
+ let parents = deps::parents(feature.workspace, start);
let results = parents
.into_iter()
.chain(std::iter::once(start))
- .flat_map(|parent| base_db::graph::Graph::new(feature.workspace, parent).edges)
- .filter(|edge| edge.source == start)
+ .flat_map(|parent| &feature.workspace.graphs()[&parent.uri].edges)
+ .filter(|edge| edge.source == start.uri)
.flat_map(|edge| {
- let origin_selection_range = edge.weight?.link.path.range;
+ let deps::EdgeData::DirectLink(data) = &edge.data else {
+ return None;
+ };
+
+ let origin_selection_range = data.link.path.range;
if origin_selection_range.contains_inclusive(context.params.offset) {
Some(DefinitionResult {
origin_selection_range,
- target: edge.target,
+ target: feature.workspace.lookup(&edge.target).unwrap(),
target_range: TextRange::default(),
target_selection_range: TextRange::default(),
})
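Instead of rebuilding a dependency graph per request, the definition provider now reads the cached graph of each parent, keeps only DirectLink edges whose source is the current document's URI, and resolves each target URI back to a document via workspace.lookup. A minimal sketch of that let-else filtering pattern, assuming a simplified edge type (the real deps::EdgeData lives in base-db and may carry more variants and fields):

    // Illustrative sketch: keep direct-link edges that start at `start_uri`
    // and map them to their target URIs.
    enum EdgeData {
        DirectLink { path_range: (u32, u32) },
        Artifact,
    }

    struct Edge {
        source: String,
        target: String,
        data: EdgeData,
    }

    fn link_targets<'a>(edges: &'a [Edge], start_uri: &str) -> Vec<&'a str> {
        edges
            .iter()
            .filter(|edge| edge.source == start_uri)
            .filter_map(|edge| {
                let EdgeData::DirectLink { .. } = &edge.data else {
                    return None;
                };
                Some(edge.target.as_str())
            })
            .collect()
    }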
diff --git a/support/texlab/crates/diagnostics/Cargo.toml b/support/texlab/crates/diagnostics/Cargo.toml
index 91c2ff9dbd..5a2a7a7e28 100644
--- a/support/texlab/crates/diagnostics/Cargo.toml
+++ b/support/texlab/crates/diagnostics/Cargo.toml
@@ -7,9 +7,7 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-anyhow = "1.0.75"
base-db = { path = "../base-db" }
-dirs = "5.0.1"
encoding_rs = "0.8.33"
encoding_rs_io = "0.1.7"
itertools = "0.12.0"
@@ -17,7 +15,6 @@ line-index = { path = "../line-index" }
log = "0.4.21"
multimap = "0.10.0"
once_cell = "1.19.0"
-parking_lot = "0.12.1"
regex = "1.10.2"
rowan = "0.15.15"
rustc-hash = "1.1.0"
diff --git a/support/texlab/crates/diagnostics/src/build_log.rs b/support/texlab/crates/diagnostics/src/build_log.rs
index 352e18d0d1..f4180f3978 100644
--- a/support/texlab/crates/diagnostics/src/build_log.rs
+++ b/support/texlab/crates/diagnostics/src/build_log.rs
@@ -1,4 +1,4 @@
-use base_db::{Document, Workspace};
+use base_db::{deps, Document, Workspace};
use line_index::LineCol;
use multimap::MultiMap;
use rowan::{TextLen, TextRange, TextSize};
@@ -17,7 +17,7 @@ pub fn update(
let data = log_document.data.as_log()?;
- let parents = workspace.parents(log_document);
+ let parents = deps::parents(workspace, log_document);
let root_document = parents.iter().next()?;
let base_path = root_document
diff --git a/support/texlab/crates/diagnostics/src/chktex.rs b/support/texlab/crates/diagnostics/src/chktex.rs
index a342f78349..195ea968e5 100644
--- a/support/texlab/crates/diagnostics/src/chktex.rs
+++ b/support/texlab/crates/diagnostics/src/chktex.rs
@@ -4,7 +4,10 @@ use std::{
process::Stdio,
};
-use base_db::{Document, Workspace};
+use base_db::{
+ deps::{self, ProjectRoot},
+ Document, Workspace,
+};
use encoding_rs_io::DecodeReaderBytesBuilder;
use line_index::LineCol;
use once_cell::sync::Lazy;
@@ -23,8 +26,7 @@ impl Command {
pub fn new(workspace: &Workspace, document: &Document) -> Option<Self> {
document.data.as_tex()?;
- let parent = workspace
- .parents(document)
+ let parent = deps::parents(workspace, document)
.into_iter()
.next()
.unwrap_or(document);
@@ -34,7 +36,9 @@ impl Command {
return None;
}
- let working_dir = workspace.current_dir(&parent.dir).to_file_path().ok()?;
+ let root = ProjectRoot::walk_and_find(workspace, &parent.dir);
+
+ let working_dir = root.src_dir.to_file_path().ok()?;
log::debug!("Calling ChkTeX from directory: {}", working_dir.display());
let text = document.text.clone();
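ChkTeX is now launched from the project root's source directory rather than from the workspace's notion of a current directory. For reference, a minimal sketch of running the chktex binary from a chosen working directory and piping the document text through stdin, assuming chktex is on PATH (no flags are shown; the server's actual arguments are not reproduced here):

    // Illustrative sketch: invoke chktex in `working_dir`, feed the document
    // text on stdin, and capture the report from stdout.
    use std::io::Write;
    use std::path::Path;
    use std::process::{Command, Stdio};

    fn run_chktex(working_dir: &Path, text: &str) -> std::io::Result<String> {
        let mut child = Command::new("chktex")
            .current_dir(working_dir)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::null())
            .spawn()?;

        child
            .stdin
            .as_mut()
            .expect("stdin was requested above")
            .write_all(text.as_bytes())?;

        let output = child.wait_with_output()?;
        Ok(String::from_utf8_lossy(&output.stdout).into_owned())
    }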
diff --git a/support/texlab/crates/diagnostics/src/citations.rs b/support/texlab/crates/diagnostics/src/citations.rs
index ea4ef03d58..84b9a10b1c 100644
--- a/support/texlab/crates/diagnostics/src/citations.rs
+++ b/support/texlab/crates/diagnostics/src/citations.rs
@@ -1,7 +1,8 @@
use base_db::{
+ deps::Project,
semantics::{bib::Entry, tex::Citation},
util::queries::{self, Object},
- Document, Project, Workspace,
+ Document, Workspace,
};
use rustc_hash::{FxHashMap, FxHashSet};
use url::Url;
diff --git a/support/texlab/crates/diagnostics/src/labels.rs b/support/texlab/crates/diagnostics/src/labels.rs
index 9a39f3f24b..15b5aaad00 100644
--- a/support/texlab/crates/diagnostics/src/labels.rs
+++ b/support/texlab/crates/diagnostics/src/labels.rs
@@ -13,11 +13,6 @@ pub fn detect_undefined_and_unused_labels(
workspace: &Workspace,
results: &mut FxHashMap<Url, Vec<Diagnostic>>,
) {
- let graphs: Vec<_> = workspace
- .iter()
- .map(|start| base_db::graph::Graph::new(workspace, start))
- .collect();
-
for document in workspace.iter() {
let DocumentData::Tex(data) = &document.data else {
continue;
@@ -25,10 +20,11 @@ pub fn detect_undefined_and_unused_labels(
let mut label_refs = FxHashSet::default();
let mut label_defs = FxHashSet::default();
- let project = graphs
- .iter()
- .filter(|graph| graph.preorder().contains(&document))
- .flat_map(|graph| graph.preorder());
+ let project = workspace
+ .graphs()
+ .values()
+ .filter(|graph| graph.preorder(workspace).contains(&document))
+ .flat_map(|graph| graph.preorder(workspace));
for label in project
.filter_map(|child| child.data.as_tex())
diff --git a/support/texlab/crates/diagnostics/src/manager.rs b/support/texlab/crates/diagnostics/src/manager.rs
index 38c0a514f4..81347410c1 100644
--- a/support/texlab/crates/diagnostics/src/manager.rs
+++ b/support/texlab/crates/diagnostics/src/manager.rs
@@ -1,4 +1,4 @@
-use base_db::{util::filter_regex_patterns, Document, Owner, Workspace};
+use base_db::{deps::Project, util::filter_regex_patterns, Document, Owner, Workspace};
use multimap::MultiMap;
use rustc_hash::FxHashMap;
use url::Url;
@@ -66,7 +66,7 @@ impl Manager {
.iter()
.filter(|document| Self::is_relevant(document))
{
- let project = workspace.project(document);
+ let project = Project::from_child(workspace, document);
super::citations::detect_undefined_citations(&project, document, &mut results);
super::citations::detect_unused_entries(&project, document, &mut results);
}
diff --git a/support/texlab/crates/hover/src/tests.rs b/support/texlab/crates/hover/src/tests.rs
index e6377bb034..4473982645 100644
--- a/support/texlab/crates/hover/src/tests.rs
+++ b/support/texlab/crates/hover/src/tests.rs
@@ -318,3 +318,37 @@ fn test_latex_label_theorem_child_file_mumber() {
"#]],
);
}
+
+#[test]
+fn test_latex_label_ntheorem() {
+ check(
+ r#"
+%! main.tex
+\newtheorem{theorem}[theoremcounter]{Theorem}
+\begin{theorem}%
+\label{thm:test}
+\end{theorem}
+\ref{thm:test}
+ |
+ ^^^^^^^^
+
+%! main.aux
+\newlabel{thm:test}{{1.{1}}{1}}"#,
+ expect![[r#"
+ Some(
+ Label(
+ RenderedLabel {
+ range: 46..93,
+ number: Some(
+ "1.1",
+ ),
+ object: Theorem {
+ kind: "Theorem",
+ description: None,
+ },
+ },
+ ),
+ )
+ "#]],
+ );
+}
diff --git a/support/texlab/crates/inlay-hints/src/tests.rs b/support/texlab/crates/inlay-hints/src/tests.rs
index f8c4d619b1..cbffb84ed9 100644
--- a/support/texlab/crates/inlay-hints/src/tests.rs
+++ b/support/texlab/crates/inlay-hints/src/tests.rs
@@ -94,7 +94,7 @@ fn test_label_definition() {
RenderedLabel {
range: 152..226,
number: Some(
- "fig:qux",
+ "1",
),
object: Float {
kind: Figure,
diff --git a/support/texlab/crates/links/src/include.rs b/support/texlab/crates/links/src/include.rs
index 1392b16eaa..2a049d0b14 100644
--- a/support/texlab/crates/links/src/include.rs
+++ b/support/texlab/crates/links/src/include.rs
@@ -1,23 +1,25 @@
-use base_db::{DocumentLocation, FeatureParams};
+use base_db::{
+ deps::{self, EdgeData},
+ DocumentLocation, FeatureParams,
+};
pub(super) fn find_links<'a>(
params: &FeatureParams<'a>,
results: &mut Vec<DocumentLocation<'a>>,
) -> Option<()> {
let document = params.document;
- let parent = *params
- .workspace
- .parents(document)
+ let parent = *deps::parents(params.workspace, document)
.iter()
.next()
.unwrap_or(&document);
- let graph = base_db::graph::Graph::new(params.workspace, parent);
+ let graph = &params.workspace.graphs()[&parent.uri];
for edge in &graph.edges {
- if edge.source == document {
- if let Some(weight) = &edge.weight {
- results.push(DocumentLocation::new(edge.target, weight.link.path.range));
+ if edge.source == document.uri {
+ if let EdgeData::DirectLink(data) = &edge.data {
+ let target = params.workspace.lookup(&edge.target).unwrap();
+ results.push(DocumentLocation::new(target, data.link.path.range));
}
}
}
diff --git a/support/texlab/crates/symbols/src/document.rs b/support/texlab/crates/symbols/src/document.rs
index e8f1087051..791f119919 100644
--- a/support/texlab/crates/symbols/src/document.rs
+++ b/support/texlab/crates/symbols/src/document.rs
@@ -1,12 +1,12 @@
mod bib;
mod tex;
-use base_db::{util, Document, DocumentData, SymbolConfig, Workspace};
+use base_db::{deps::Project, util, Document, DocumentData, SymbolConfig, Workspace};
use crate::Symbol;
pub fn document_symbols(workspace: &Workspace, document: &Document) -> Vec<Symbol> {
- let project = workspace.project(document);
+ let project = Project::from_child(workspace, document);
let mut symbols = match &document.data {
DocumentData::Tex(data) => {
let builder = tex::SymbolBuilder::new(&project, workspace.config());
diff --git a/support/texlab/crates/symbols/src/document/tex.rs b/support/texlab/crates/symbols/src/document/tex.rs
index 55612ee11d..e0df9c85c1 100644
--- a/support/texlab/crates/symbols/src/document/tex.rs
+++ b/support/texlab/crates/symbols/src/document/tex.rs
@@ -1,6 +1,6 @@
use std::str::FromStr;
-use base_db::{semantics::Span, util::FloatKind, Config, Project};
+use base_db::{deps::Project, semantics::Span, util::FloatKind, Config};
use rowan::ast::AstNode;
use syntax::latex::{self, HasBrack, HasCurly, LatexLanguage};
use titlecase::titlecase;
@@ -101,6 +101,7 @@ impl<'a> SymbolBuilder<'a> {
let name = enum_item
.label()
.and_then(|label| label.content_text())
+ .filter(|text| !text.is_empty())
.unwrap_or_else(|| "Item".into());
let symbol = match self.find_label(enum_item.syntax()) {
diff --git a/support/texlab/crates/symbols/src/workspace/sort.rs b/support/texlab/crates/symbols/src/workspace/sort.rs
index 75d80a02eb..adf455aa81 100644
--- a/support/texlab/crates/symbols/src/workspace/sort.rs
+++ b/support/texlab/crates/symbols/src/workspace/sort.rs
@@ -1,4 +1,4 @@
-use base_db::{graph, Document, Workspace};
+use base_db::{Document, Workspace};
use itertools::Itertools;
use url::Url;
@@ -26,8 +26,8 @@ impl<'a> From<&'a Workspace> for ProjectOrdering<'a> {
})
.chain(workspace.iter())
.flat_map(|document| {
- let graph = graph::Graph::new(workspace, document);
- graph.preorder().rev().collect_vec()
+ let graph = &workspace.graphs()[&document.uri];
+ graph.preorder(workspace).rev().collect_vec()
})
.unique()
.collect_vec();
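Workspace symbol ordering now reuses the graphs cached on the workspace and deduplicates the reversed preorder traversals. A minimal sketch of that flatten-then-unique pattern with itertools, simplified to plain strings instead of Document references (assumption: only the order of first occurrence matters):

    // Illustrative sketch: concatenate reversed preorders and keep the first
    // occurrence of every element.
    use itertools::Itertools;

    fn ordering<'a>(preorders: Vec<Vec<&'a str>>) -> Vec<&'a str> {
        preorders
            .into_iter()
            .flat_map(|order| order.into_iter().rev().collect_vec())
            .unique()
            .collect_vec()
    }

    fn main() {
        let orders = vec![vec!["main", "a", "b"], vec!["a", "c"]];
        assert_eq!(ordering(orders), vec!["b", "a", "main", "c"]);
    }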
diff --git a/support/texlab/crates/texlab/Cargo.toml b/support/texlab/crates/texlab/Cargo.toml
index ccc02bff01..a78059051e 100644
--- a/support/texlab/crates/texlab/Cargo.toml
+++ b/support/texlab/crates/texlab/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "texlab"
description = "LaTeX Language Server"
-version = "5.13.1"
+version = "5.14.0"
license.workspace = true
readme = "README.md"
authors.workspace = true
@@ -26,16 +26,14 @@ anyhow = "1.0.75"
base-db = { path = "../base-db" }
bibfmt = { path = "../bibfmt" }
citeproc = { path = "../citeproc" }
-clap = { version = "4.4.18", features = ["derive"] }
+clap = { version = "4.5.3", features = ["derive"] }
commands = { path = "../commands" }
completion = { path = "../completion" }
completion-data = { path = "../completion-data" }
-crossbeam-channel = "0.5.11"
+crossbeam-channel = "0.5.12"
definition = { path = "../definition" }
diagnostics = { path = "../diagnostics" }
distro = { path = "../distro" }
-encoding_rs = "0.8.33"
-encoding_rs_io = "0.1.7"
fern = "0.6.2"
folding = { path = "../folding" }
highlights = { path = "../highlights" }
@@ -48,7 +46,6 @@ lsp-server = "0.7.6"
lsp-types = "0.95.0"
notify = "6.1.1"
notify-debouncer-full = "0.3.1"
-once_cell = "1.19.0"
parking_lot = "0.12.1"
parser = { path = "../parser" }
references = { path = "../references" }
diff --git a/support/texlab/crates/texlab/src/features/completion.rs b/support/texlab/crates/texlab/src/features/completion.rs
index 5bca0070ef..09326f8e76 100644
--- a/support/texlab/crates/texlab/src/features/completion.rs
+++ b/support/texlab/crates/texlab/src/features/completion.rs
@@ -88,6 +88,9 @@ impl<'a> ItemBuilder<'a> {
CompletionItemData::Command(data) => {
self.convert_command(&mut result, range, data);
}
+ CompletionItemData::CommandLikeDelimiter(left, right) => {
+ self.convert_command_like_delimiter(&mut result, range, left, right);
+ }
CompletionItemData::BeginEnvironment => {
self.convert_begin_environment(&mut result, range);
}
@@ -173,6 +176,27 @@ impl<'a> ItemBuilder<'a> {
result.text_edit = Some(lsp_types::TextEdit::new(range, data.name.into()).into());
}
+ fn convert_command_like_delimiter(
+ &self,
+ result: &mut lsp_types::CompletionItem,
+ range: lsp_types::Range,
+ left: &str,
+ right: &str,
+ ) {
+ if self.client_flags.completion_snippets {
+ result.kind = Some(Structure::Snippet.completion_kind());
+ let snippet = format!("{left}$0\\{right}");
+ result.text_edit = Some(lsp_types::TextEdit::new(range, snippet).into());
+ result.insert_text_format = Some(lsp_types::InsertTextFormat::SNIPPET);
+ } else {
+ result.kind = Some(Structure::Command.completion_kind());
+ result.text_edit = Some(lsp_types::TextEdit::new(range, left.into()).into());
+ }
+
+ result.label = left.into();
+ result.detail = Some(format_package_files(&[]));
+ }
+
fn convert_begin_environment(
&self,
result: &mut lsp_types::CompletionItem,
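When the client supports snippets, a delimiter completion inserts the opening token, places the cursor at the $0 tab stop, and appends the escaped closing token; assuming the replacement range starts right after the already-typed backslash (that range is computed elsewhere), accepting the "(" item ends up as \($0\) with the caret in the middle. A quick standalone check of the snippet string itself:

    // Illustrative sketch: the snippet body built for a delimiter pair.
    fn delimiter_snippet(left: &str, right: &str) -> String {
        format!("{left}$0\\{right}")
    }

    fn main() {
        assert_eq!(delimiter_snippet("(", ")"), "($0\\)");
        assert_eq!(delimiter_snippet("[", "]"), "[$0\\]");
    }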
diff --git a/support/texlab/crates/texlab/src/features/formatting/latexindent.rs b/support/texlab/crates/texlab/src/features/formatting/latexindent.rs
index 061c360cf5..d3fffc2050 100644
--- a/support/texlab/crates/texlab/src/features/formatting/latexindent.rs
+++ b/support/texlab/crates/texlab/src/features/formatting/latexindent.rs
@@ -3,7 +3,7 @@ use std::{
process::{Command, Stdio},
};
-use base_db::{Document, LatexIndentConfig, Workspace};
+use base_db::{deps::ProjectRoot, Document, LatexIndentConfig, Workspace};
use distro::Language;
use rowan::TextLen;
use tempfile::tempdir;
@@ -16,7 +16,8 @@ pub fn format_with_latexindent(
) -> Option<Vec<lsp_types::TextEdit>> {
let config = workspace.config();
let target_dir = tempdir().ok()?;
- let source_dir = workspace.current_dir(&document.dir).to_file_path().ok()?;
+ let root = ProjectRoot::walk_and_find(workspace, &document.dir);
+ let source_dir = root.src_dir.to_file_path().ok()?;
let target_file = target_dir
.path()
diff --git a/support/texlab/crates/texlab/src/server.rs b/support/texlab/crates/texlab/src/server.rs
index fc16775bc9..70b9a03022 100644
--- a/support/texlab/crates/texlab/src/server.rs
+++ b/support/texlab/crates/texlab/src/server.rs
@@ -11,7 +11,7 @@ use std::{
};
use anyhow::Result;
-use base_db::{Config, Owner, Workspace};
+use base_db::{deps, Config, Owner, Workspace};
use commands::{BuildCommand, CleanCommand, CleanTarget, ForwardSearch};
use crossbeam_channel::{Receiver, Sender};
use distro::{Distro, Language};
@@ -237,7 +237,7 @@ impl Server {
fn update_workspace(&mut self) {
let mut checked_paths = FxHashSet::default();
let mut workspace = self.workspace.write();
- workspace.discover(&mut checked_paths);
+ base_db::deps::discover(&mut workspace, &mut checked_paths);
self.watcher.watch(&mut workspace);
for document in checked_paths
@@ -1086,6 +1086,6 @@ impl FileWatcher {
}
pub fn watch(&mut self, workspace: &mut Workspace) {
- workspace.watch(self.watcher.watcher(), &mut self.watched_dirs);
+ deps::watch(workspace, self.watcher.watcher(), &mut self.watched_dirs);
}
}
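Directory watching is likewise delegated to the deps module. A minimal sketch of registering a set of directories with a notify 6.x watcher while remembering what is already watched (the real deps::watch derives the directory set from the workspace; this version simply takes it as a parameter):

    // Illustrative sketch: watch each directory once, non-recursively,
    // ignoring directories that can no longer be found.
    use std::collections::HashSet;
    use std::path::PathBuf;

    use notify::{RecursiveMode, Watcher};

    fn watch_dirs<W: Watcher>(
        watcher: &mut W,
        dirs: &[PathBuf],
        watched: &mut HashSet<PathBuf>,
    ) {
        for dir in dirs {
            if watched.insert(dir.clone()) {
                let _ = watcher.watch(dir, RecursiveMode::NonRecursive);
            }
        }
    }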