author     Norbert Preining <norbert@preining.info>    2023-07-31 03:02:46 +0000
committer  Norbert Preining <norbert@preining.info>    2023-07-31 03:02:46 +0000
commit     17d547a1effe2cafcdfbf704bdf8cb0484790ef2 (patch)
tree       30556f1ebbb411da2d3e7297c0c4a2ffbd5af4ee /support
parent     595d37aac232836c0519c45f2078c5272122eb32 (diff)
CTAN sync 202307310302
Diffstat (limited to 'support')
-rw-r--r--  support/pdfjam/README.md | 49
-rw-r--r--  support/pdfjam/VERSION | 2
-rwxr-xr-x  support/pdfjam/bin/pdfjam | 22
-rw-r--r--  support/pdfjam/man1/pdfjam.1 | 2
-rw-r--r--  support/texlab/CHANGELOG.md | 20
-rw-r--r--  support/texlab/Cargo.lock | 270
-rw-r--r--  support/texlab/crates/base-db/Cargo.toml | 16
-rw-r--r--  support/texlab/crates/base-db/src/config.rs | 9
-rw-r--r--  support/texlab/crates/base-db/src/data.rs | 12
-rw-r--r--  support/texlab/crates/base-db/src/diagnostics.rs | 25
-rw-r--r--  support/texlab/crates/base-db/src/diagnostics/bib.rs | 65
-rw-r--r--  support/texlab/crates/base-db/src/diagnostics/log.rs | 68
-rw-r--r--  support/texlab/crates/base-db/src/diagnostics/tex.rs | 116
-rw-r--r--  support/texlab/crates/base-db/src/document.rs | 75
-rw-r--r--  support/texlab/crates/base-db/src/graph.rs | 50
-rw-r--r--  support/texlab/crates/base-db/src/lib.rs | 1
-rw-r--r--  support/texlab/crates/base-db/src/semantics.rs | 13
-rw-r--r--  support/texlab/crates/base-db/src/semantics/bib.rs | 59
-rw-r--r--  support/texlab/crates/base-db/src/semantics/tex.rs | 77
-rw-r--r--  support/texlab/crates/base-db/src/util.rs | 1
-rw-r--r--  support/texlab/crates/base-db/src/util/line_index.rs | 2
-rw-r--r--  support/texlab/crates/base-db/src/util/queries.rs | 215
-rw-r--r--  support/texlab/crates/base-db/src/workspace.rs | 65
-rw-r--r--  support/texlab/crates/citeproc/Cargo.toml | 8
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_aksin_2006.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_betram_1996.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_blom_2021.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_jain_1999.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_kastenholz_2006.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_rivest_1978.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_aho_2006.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_averroes_1998.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_knuth_1984.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__collection_matuz_1990.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_combi_2004.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_erwin_2007.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__mvbook_nietzsche_1988.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/snapshots/citeproc__tests__patent_almendro_1998.snap | 5
-rw-r--r--  support/texlab/crates/citeproc/src/tests.rs | 161
-rw-r--r--  support/texlab/crates/commands/Cargo.toml | 14
-rw-r--r--  support/texlab/crates/commands/src/clean.rs | 5
-rw-r--r--  support/texlab/crates/commands/src/find_envs.rs | 95
-rw-r--r--  support/texlab/crates/commands/src/fwd_search.rs | 15
-rw-r--r--  support/texlab/crates/commands/src/snapshots/commands__find_envs__tests__test.snap | 27
-rw-r--r--  support/texlab/crates/completion-data/Cargo.toml | 18
-rw-r--r--  support/texlab/crates/completion-data/data/completion.json.gz (renamed from support/texlab/crates/texlab/data/components.json.gz) | bin 6970534 -> 6970534 bytes
-rw-r--r--  support/texlab/crates/completion-data/src/lib.rs | 135
-rw-r--r--  support/texlab/crates/definition/Cargo.toml | 19
-rw-r--r--  support/texlab/crates/definition/src/citation.rs | 30
-rw-r--r--  support/texlab/crates/definition/src/command.rs (renamed from support/texlab/crates/texlab/src/features/definition/command.rs) | 34
-rw-r--r--  support/texlab/crates/definition/src/include.rs | 31
-rw-r--r--  support/texlab/crates/definition/src/label.rs | 37
-rw-r--r--  support/texlab/crates/definition/src/lib.rs | 49
-rw-r--r--  support/texlab/crates/definition/src/string_ref.rs | 35
-rw-r--r--  support/texlab/crates/definition/src/tests.rs | 154
-rw-r--r--  support/texlab/crates/diagnostics/Cargo.toml | 21
-rw-r--r--  support/texlab/crates/diagnostics/src/build_log.rs | 101
-rw-r--r--  support/texlab/crates/diagnostics/src/citations.rs | 97
-rw-r--r--  support/texlab/crates/diagnostics/src/grammar.rs | 4
-rw-r--r--  support/texlab/crates/diagnostics/src/grammar/bib.rs | 103
-rw-r--r--  support/texlab/crates/diagnostics/src/grammar/tex.rs | 132
-rw-r--r--  support/texlab/crates/diagnostics/src/labels.rs | 97
-rw-r--r--  support/texlab/crates/diagnostics/src/lib.rs | 86
-rw-r--r--  support/texlab/crates/diagnostics/src/tests.rs | 191
-rw-r--r--  support/texlab/crates/diagnostics/src/types.rs | 38
-rw-r--r--  support/texlab/crates/diagnostics/src/util.rs | 28
-rw-r--r--  support/texlab/crates/distro/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/hover/Cargo.toml | 21
-rw-r--r--  support/texlab/crates/hover/src/citation.rs | 39
-rw-r--r--  support/texlab/crates/hover/src/entry_type.rs | 18
-rw-r--r--  support/texlab/crates/hover/src/field_type.rs | 21
-rw-r--r--  support/texlab/crates/hover/src/label.rs | 28
-rw-r--r--  support/texlab/crates/hover/src/lib.rs | 61
-rw-r--r--  support/texlab/crates/hover/src/package.rs | 20
-rw-r--r--  support/texlab/crates/hover/src/string_ref.rs | 35
-rw-r--r--  support/texlab/crates/hover/src/tests.rs | 323
-rw-r--r--  support/texlab/crates/parser/Cargo.toml | 6
-rw-r--r--  support/texlab/crates/parser/src/latex.rs | 27
-rw-r--r--  support/texlab/crates/parser/src/snapshots/parser__latex__tests__parse@issue_568.txt.snap | 28
-rw-r--r--  support/texlab/crates/parser/src/test_data/latex/issue_568.txt | 2
-rw-r--r--  support/texlab/crates/references/Cargo.toml | 18
-rw-r--r--  support/texlab/crates/references/src/entry.rs | 49
-rw-r--r--  support/texlab/crates/references/src/label.rs | 29
-rw-r--r--  support/texlab/crates/references/src/lib.rs | 50
-rw-r--r--  support/texlab/crates/references/src/string_def.rs | 42
-rw-r--r--  support/texlab/crates/references/src/tests.rs | 246
-rw-r--r--  support/texlab/crates/symbols/Cargo.toml | 8
-rw-r--r--  support/texlab/crates/syntax/Cargo.toml | 2
-rw-r--r--  support/texlab/crates/syntax/src/bibtex.rs | 2
-rw-r--r--  support/texlab/crates/syntax/src/latex/cst.rs | 15
-rw-r--r--  support/texlab/crates/test-utils/Cargo.toml | 3
-rw-r--r--  support/texlab/crates/test-utils/src/fixture.rs | 10
-rw-r--r--  support/texlab/crates/texlab/Cargo.toml | 38
-rw-r--r--  support/texlab/crates/texlab/src/features/completion/argument.rs | 8
-rw-r--r--  support/texlab/crates/texlab/src/features/completion/builder.rs | 11
-rw-r--r--  support/texlab/crates/texlab/src/features/completion/component_command.rs | 8
-rw-r--r--  support/texlab/crates/texlab/src/features/completion/component_environment.rs | 8
-rw-r--r--  support/texlab/crates/texlab/src/features/completion/import.rs | 9
-rw-r--r--  support/texlab/crates/texlab/src/features/definition.rs | 82
-rw-r--r--  support/texlab/crates/texlab/src/features/definition/document.rs | 31
-rw-r--r--  support/texlab/crates/texlab/src/features/definition/entry.rs | 42
-rw-r--r--  support/texlab/crates/texlab/src/features/definition/label.rs | 37
-rw-r--r--  support/texlab/crates/texlab/src/features/definition/string.rs | 35
-rw-r--r--  support/texlab/crates/texlab/src/features/hover.rs | 72
-rw-r--r--  support/texlab/crates/texlab/src/features/hover/citation.rs | 27
-rw-r--r--  support/texlab/crates/texlab/src/features/hover/component.rs | 23
-rw-r--r--  support/texlab/crates/texlab/src/features/hover/entry_type.rs | 21
-rw-r--r--  support/texlab/crates/texlab/src/features/hover/field.rs | 24
-rw-r--r--  support/texlab/crates/texlab/src/features/hover/label.rs | 26
-rw-r--r--  support/texlab/crates/texlab/src/features/hover/string_ref.rs | 42
-rw-r--r--  support/texlab/crates/texlab/src/features/reference.rs | 56
-rw-r--r--  support/texlab/crates/texlab/src/features/reference/entry.rs | 56
-rw-r--r--  support/texlab/crates/texlab/src/features/reference/label.rs | 36
-rw-r--r--  support/texlab/crates/texlab/src/features/reference/string.rs | 44
-rw-r--r--  support/texlab/crates/texlab/src/features/rename/entry.rs | 75
-rw-r--r--  support/texlab/crates/texlab/src/server.rs | 70
-rw-r--r--  support/texlab/crates/texlab/src/server/extensions.rs | 2
-rw-r--r--  support/texlab/crates/texlab/src/server/options.rs | 19
-rw-r--r--  support/texlab/crates/texlab/src/util.rs | 1
-rw-r--r--  support/texlab/crates/texlab/src/util/components.rs | 116
-rw-r--r--  support/texlab/crates/texlab/src/util/cursor.rs | 18
-rw-r--r--  support/texlab/crates/texlab/src/util/diagnostics.rs | 229
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document.rs | 3
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/definition.rs | 156
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/hover.rs | 246
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/references.rs | 270
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_cite.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_entry.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__component_known_package.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__entry_type_known_type.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__field_known.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file_mumber.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__section.snap | 8
-rw-r--r--  support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__string_inside_reference.snap | 8
-rw-r--r--  support/texlab/texlab.1 | 4
-rw-r--r--  support/texlab/texlab.pdf | bin 26404 -> 26416 bytes
137 files changed, 3820 insertions, 2371 deletions
diff --git a/support/pdfjam/README.md b/support/pdfjam/README.md
index f73a854348..c2fa576c9f 100644
--- a/support/pdfjam/README.md
+++ b/support/pdfjam/README.md
@@ -429,50 +429,5 @@ And maybe even someone else will
want to take on the task of improving and maintaining some of them,
who knows? The wrapper scripts (**no longer maintained**) can now be found at
<https://github.com/rrthomas/pdfjam-extras>.
-
-### Version release notes
-
-[**3.04**](https://github.com/rrthomas/pdfjam/releases/tag/v3.04) [2023-05-07]:
-
-- Many documentation improvements.
-- Various minor code fixes.
-- No longer depend on hyperref package.
-- Support TMPDIR.
-
-[**3.03**](https://github.com/rrthomas/pdfjam/releases/tag/v3.03) [2019-11-18]:
-
-- Built package now (again) has tests in a zip archive. (needed for CTAN)
-
-[**3.02**](https://github.com/rrthomas/pdfjam/releases/tag/v3.02) [2019-11-14]:
-
-- Re-styled the package name to **pdfjam**. (Previously the package --- which then
- contained several scripts --- was named PDFjam).
-- Moved everything to a new home on the web at
- <a href="https://github.com/rrthomas">github.com/rrthomas</a>.
-- Simplified the package by removal of all the 'wrapper' scripts
- (`pdfnup`, `pdfjoin`, etc.).
-- Settings specified via `--preamble` are now protected from the normal
- tidying of the TeX input file to remove redundant packages.
-- The possibility to use `lualatex` or `xelatex` in place of `pdflatex` is
- now described explicitly in the README. The specification of which LaTeX
- engine to use can be made either in a configuration file, or on the command
- line via option (for example) `--latex /usr/bin/xelatex`. Thanks to
- Mircea for suggesting this.
-- Added new `--runs` option, so as to allow for example `--runs 2` in a situation
- where two runs of `pdflatex` (or `lualatex` or `xelatex`) are needed (typically
- where the result document is being indexed in some way, so more than one run is
- needed). Thanks to Ferdinand for this suggestion (and patch).
-- Default paper size is now guessed from the locale where possible, with fallback
- default size being ISO A4 ('a4paper' in LaTeX). This can still be over-ridden
- in a configuration file, or on the command line. Thanks to Jonathan for this
- suggestion.
-- Innocuous edits made to make pdfjam work better in Cygwin. Thanks to Lucas for
- sending a helpful patch for this.
-- Included a note in the FAQ about the (sometimes reported) 'Too many open files'
- error. Thanks to George for information about this.
-- Tidying of the `pdfjam` script, and better organisation of the `tests` folder.
- Thanks to Lucas for help with this.
-
-----------
-
-_Older releases are still available at <https://davidfirth.github.io/pdfjam>._
+
+_Releases up to version 2.08 are still available at <https://davidfirth.github.io/pdfjam>._
diff --git a/support/pdfjam/VERSION b/support/pdfjam/VERSION
index 9ab54fe350..1de2ea467f 100644
--- a/support/pdfjam/VERSION
+++ b/support/pdfjam/VERSION
@@ -1 +1 @@
-This is pdfjam 3.06
\ No newline at end of file
+This is pdfjam 3.07
\ No newline at end of file
diff --git a/support/pdfjam/bin/pdfjam b/support/pdfjam/bin/pdfjam
index bc4a8be7d7..455d2cbeca 100755
--- a/support/pdfjam/bin/pdfjam
+++ b/support/pdfjam/bin/pdfjam
@@ -1,5 +1,5 @@
#!/bin/sh
-version=3.06
+version=3.07
#########################################################################
## ##
## pdfjam: A shell-script interface to the "pdfpages" LaTeX package ##
@@ -881,17 +881,15 @@ then
PATH="$modifyPath:$PATH"
export PATH
fi
- (kpsewhich pdfpages.sty >/dev/null) ||
- error_exit \
- "LaTeX package pdfpages.sty is not installed" \
- $E_UNAVAILABLE
-
- for pack in geometry pdflscape eso-pic everyshi atbegshi ; do
- (kpsewhich $pack.sty >/dev/null) ||
- error_exit \
- "LaTeX package $pack.sty is not installed (see the pdfpages manual)" \
- $E_UNAVAILABLE
- done
+ case "$latex" in
+ *tectonic*) ;;
+ *)
+ (kpsewhich pdfpages.sty >/dev/null) ||
+ error_exit \
+ "LaTeX package pdfpages.sty is not installed" \
+ $E_UNAVAILABLE
+ ;;
+ esac
fi
if test "$keepinfo" = true
then
diff --git a/support/pdfjam/man1/pdfjam.1 b/support/pdfjam/man1/pdfjam.1
index 4d56970dba..16b9c0af0d 100644
--- a/support/pdfjam/man1/pdfjam.1
+++ b/support/pdfjam/man1/pdfjam.1
@@ -1,4 +1,4 @@
-.TH "pdfjam" "1" "10 March 2010" "" ""
+.TH "pdfjam" "1" "18 November 2020" "" ""
.SH "NAME"
pdfjam \- A shell script for manipulating PDF files
.SH "SYNOPSIS"
diff --git a/support/texlab/CHANGELOG.md b/support/texlab/CHANGELOG.md
index cca502b4d3..a97c7cb44a 100644
--- a/support/texlab/CHANGELOG.md
+++ b/support/texlab/CHANGELOG.md
@@ -5,6 +5,26 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [5.8.0] - 2023-07-30
+
+### Added
+
+- Report diagnostics for unused and undefined labels
+- Report diagnostics for unused BibTeX entries and undefined citations
+- Report diagnostics for duplicate BibTeX entries
+- Report diagnostics for duplicate labels
+- Add `texlab.build.auxDirectory` and `texlab.build.logDirectory` settings ([#906](https://github.com/latex-lsp/texlab/issues/906))
+
+### Deprecated
+
+- Deprecate `texlab.auxDirectory` in favor of `texlab.build.auxDirectory`
+
+### Fixed
+
+- Fix parsing paths with `|` ([#568](https://github.com/latex-lsp/texlab/issues/568))
+- Fix parsing LaTeX identifiers with `=` ([#568](https://github.com/latex-lsp/texlab/issues/568))
+- Fix search path for aux files when using `\include` instead of `\input` ([#906](https://github.com/latex-lsp/texlab/issues/906))
+
## [5.7.0] - 2023-06-07
### Added
diff --git a/support/texlab/Cargo.lock b/support/texlab/Cargo.lock
index b69b6e82e8..98dff1e35c 100644
--- a/support/texlab/Cargo.lock
+++ b/support/texlab/Cargo.lock
@@ -27,6 +27,12 @@ dependencies = [
]
[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
+[[package]]
name = "anes"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -92,9 +98,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.71"
+version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
+checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
[[package]]
name = "assert_unordered"
@@ -117,7 +123,7 @@ version = "0.0.0"
dependencies = [
"dirs",
"distro",
- "itertools",
+ "itertools 0.11.0",
"log",
"notify",
"once_cell",
@@ -154,12 +160,11 @@ dependencies = [
[[package]]
name = "bstr"
-version = "1.4.0"
+version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09"
+checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
dependencies = [
"memchr",
- "once_cell",
"regex-automata",
"serde",
]
@@ -190,11 +195,11 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
-version = "0.4.24"
+version = "0.4.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b"
+checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5"
dependencies = [
- "num-integer",
+ "android-tzdata",
"num-traits",
]
@@ -230,10 +235,10 @@ name = "citeproc"
version = "0.0.0"
dependencies = [
"chrono",
+ "expect-test",
"human_name",
- "insta",
"isocountry",
- "itertools",
+ "itertools 0.11.0",
"parser",
"rowan",
"rustc-hash",
@@ -245,9 +250,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "4.3.0"
+version = "4.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93aae7a4192245f70fe75dd9157fc7b4a5bf53e88d30bd4396f7d8f9284d5acc"
+checksum = "8f644d0dac522c8b05ddc39aaaccc5b136d5dc4ff216610c5641e3be5becf56c"
dependencies = [
"clap_builder",
"clap_derive",
@@ -256,22 +261,21 @@ dependencies = [
[[package]]
name = "clap_builder"
-version = "4.3.0"
+version = "4.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f423e341edefb78c9caba2d9c7f7687d0e72e89df3ce3394554754393ac3990"
+checksum = "af410122b9778e024f9e0fb35682cc09cc3f85cad5e8d3ba8f47a9702df6e73d"
dependencies = [
"anstream",
"anstyle",
- "bitflags",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
-version = "4.3.0"
+version = "4.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "191d9573962933b4027f932c600cd252ce27a8ad5979418fe78e43c07996f27b"
+checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050"
dependencies = [
"heck",
"proc-macro2",
@@ -300,8 +304,8 @@ dependencies = [
"bstr",
"crossbeam-channel",
"distro",
- "insta",
- "itertools",
+ "expect-test",
+ "itertools 0.11.0",
"libc",
"log",
"rowan",
@@ -313,6 +317,18 @@ dependencies = [
]
[[package]]
+name = "completion-data"
+version = "0.0.0"
+dependencies = [
+ "flate2",
+ "itertools 0.11.0",
+ "once_cell",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
name = "console"
version = "0.15.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -360,7 +376,7 @@ dependencies = [
"clap",
"criterion-plot",
"is-terminal",
- "itertools",
+ "itertools 0.10.5",
"num-traits",
"once_cell",
"oorandom",
@@ -381,7 +397,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"cast",
- "itertools",
+ "itertools 0.10.5",
]
[[package]]
@@ -438,6 +454,30 @@ dependencies = [
]
[[package]]
+name = "definition"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "itertools 0.11.0",
+ "rowan",
+ "syntax",
+ "test-utils",
+]
+
+[[package]]
+name = "diagnostics"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "itertools 0.11.0",
+ "rowan",
+ "rustc-hash",
+ "syntax",
+ "test-utils",
+ "url",
+]
+
+[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -469,6 +509,12 @@ dependencies = [
]
[[package]]
+name = "dissimilar"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e"
+
+[[package]]
name = "distro"
version = "0.0.0"
dependencies = [
@@ -528,6 +574,16 @@ dependencies = [
]
[[package]]
+name = "expect-test"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30d9eafeadd538e68fb28016364c9732d78e420b9ff8853fa5e4058861e9f8d3"
+dependencies = [
+ "dissimilar",
+ "once_cell",
+]
+
+[[package]]
name = "fastrand"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -575,9 +631,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "form_urlencoded"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
@@ -668,6 +724,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
[[package]]
+name = "hover"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "citeproc",
+ "completion-data",
+ "expect-test",
+ "rowan",
+ "syntax",
+ "test-utils",
+]
+
+[[package]]
name = "human_name"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -718,9 +787,9 @@ dependencies = [
[[package]]
name = "insta"
-version = "1.29.0"
+version = "1.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a28d25139df397cbca21408bb742cf6837e04cdbebf1b07b760caf971d6a972"
+checksum = "a0770b0a3d4c70567f0d58331f3088b0e4c4f56c9b8d764efe654b4a5d46de3a"
dependencies = [
"console",
"globset",
@@ -786,6 +855,15 @@ dependencies = [
]
[[package]]
+name = "itertools"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
+dependencies = [
+ "either",
+]
+
+[[package]]
name = "itoa"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -834,9 +912,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.144"
+version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1"
+checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "linked-hash-map"
@@ -862,12 +940,9 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.17"
+version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
-dependencies = [
- "cfg-if",
-]
+checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "logos"
@@ -903,9 +978,9 @@ dependencies = [
[[package]]
name = "lsp-server"
-version = "0.7.0"
+version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68a9b4c78d1c3f35c5864c90e9633377b5f374a4a4983ac64c30b8ae898f9305"
+checksum = "37ea9ae5a5082ca3b6ae824fc7666cd206b99168a4d4c769ad8fe9cc740df6a6"
dependencies = [
"crossbeam-channel",
"log",
@@ -964,9 +1039,9 @@ dependencies = [
[[package]]
name = "notify"
-version = "6.0.0"
+version = "6.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d9ba6c734de18ca27c8cef5cd7058aa4ac9f63596131e4c7e41e579319032a2"
+checksum = "5738a2795d57ea20abec2d6d76c6081186709c0024187cd5977265eda6598b51"
dependencies = [
"bitflags",
"crossbeam-channel",
@@ -981,16 +1056,6 @@ dependencies = [
]
[[package]]
-name = "num-integer"
-version = "0.1.45"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
-dependencies = [
- "autocfg",
- "num-traits",
-]
-
-[[package]]
name = "num-traits"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1011,9 +1076,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.17.1"
+version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "oorandom"
@@ -1065,9 +1130,9 @@ dependencies = [
[[package]]
name = "percent-encoding"
-version = "2.2.0"
+version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "pest"
@@ -1181,18 +1246,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.58"
+version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa1fb82fc0c281dd9671101b66b771ebbe1eaf967b96ac8740dcba4b70005ca8"
+checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.27"
+version = "1.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500"
+checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
dependencies = [
"proc-macro2",
]
@@ -1264,21 +1329,37 @@ dependencies = [
]
[[package]]
+name = "references"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "rowan",
+ "syntax",
+ "test-utils",
+]
+
+[[package]]
name = "regex"
-version = "1.8.1"
+version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af83e617f331cc6ae2da5443c602dfa5af81e517212d9d611a5b3ba1777b5370"
+checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575"
dependencies = [
"aho-corasick 1.0.1",
"memchr",
- "regex-syntax 0.7.1",
+ "regex-automata",
+ "regex-syntax 0.7.4",
]
[[package]]
name = "regex-automata"
-version = "0.1.10"
+version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310"
+dependencies = [
+ "aho-corasick 1.0.1",
+ "memchr",
+ "regex-syntax 0.7.4",
+]
[[package]]
name = "regex-syntax"
@@ -1288,9 +1369,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
-version = "0.7.1"
+version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5996294f19bd3aae0453a862ad728f60e6600695733dd5df01da90c54363a3c"
+checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2"
[[package]]
name = "rowan"
@@ -1348,18 +1429,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
-version = "1.0.163"
+version = "1.0.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
+checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.163"
+version = "1.0.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
+checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682"
dependencies = [
"proc-macro2",
"quote",
@@ -1368,9 +1449,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.96"
+version = "1.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b"
dependencies = [
"itoa",
"ryu",
@@ -1389,9 +1470,9 @@ dependencies = [
[[package]]
name = "serde_repr"
-version = "0.1.12"
+version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
+checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731"
dependencies = [
"proc-macro2",
"quote",
@@ -1437,15 +1518,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
-name = "smol_str"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74212e6bbe9a4352329b2f68ba3130c15a3f26fe88ff22dbdc6cdd58fa85e99c"
-dependencies = [
- "serde",
-]
-
-[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1458,7 +1530,7 @@ dependencies = [
"base-db",
"distro",
"insta",
- "itertools",
+ "itertools 0.11.0",
"regex",
"rowan",
"syntax",
@@ -1469,9 +1541,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.16"
+version = "2.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6f671d4b5ffdb8eadec19c0ae67fe2639df8684bd7bc4b83d986b8db549cf01"
+checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970"
dependencies = [
"proc-macro2",
"quote",
@@ -1482,21 +1554,22 @@ dependencies = [
name = "syntax"
version = "0.0.0"
dependencies = [
- "itertools",
+ "itertools 0.11.0",
"rowan",
]
[[package]]
name = "tempfile"
-version = "3.5.0"
+version = "3.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
+checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6"
dependencies = [
+ "autocfg",
"cfg-if",
"fastrand",
"redox_syscall 0.3.5",
"rustix",
- "windows-sys 0.45.0",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -1506,13 +1579,12 @@ dependencies = [
"base-db",
"distro",
"rowan",
- "syntax",
"url",
]
[[package]]
name = "texlab"
-version = "5.7.0"
+version = "5.8.0"
dependencies = [
"anyhow",
"assert_unordered",
@@ -1520,17 +1592,20 @@ dependencies = [
"citeproc",
"clap",
"commands",
+ "completion-data",
"criterion",
"crossbeam-channel",
+ "definition",
+ "diagnostics",
"dirs",
"distro",
"encoding_rs",
"encoding_rs_io",
"fern",
- "flate2",
"fuzzy-matcher",
+ "hover",
"insta",
- "itertools",
+ "itertools 0.11.0",
"log",
"lsp-server",
"lsp-types",
@@ -1538,6 +1613,7 @@ dependencies = [
"once_cell",
"parking_lot",
"parser",
+ "references",
"regex",
"rowan",
"rustc-hash",
@@ -1545,34 +1621,32 @@ dependencies = [
"serde_json",
"serde_regex",
"serde_repr",
- "smol_str",
"symbols",
"syntax",
"tempfile",
"threadpool",
- "titlecase",
]
[[package]]
name = "text-size"
-version = "1.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
+checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
[[package]]
name = "thiserror"
-version = "1.0.40"
+version = "1.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
+checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.40"
+version = "1.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
+checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f"
dependencies = [
"proc-macro2",
"quote",
diff --git a/support/texlab/crates/base-db/Cargo.toml b/support/texlab/crates/base-db/Cargo.toml
index 2b44acaf3c..fa015b24cc 100644
--- a/support/texlab/crates/base-db/Cargo.toml
+++ b/support/texlab/crates/base-db/Cargo.toml
@@ -9,18 +9,18 @@ rust-version.workspace = true
[dependencies]
dirs = "5.0.1"
distro = { path = "../distro" }
-itertools = "0.10.5"
-log = "0.4.17"
-notify = "6.0.0"
-once_cell = "1.17.1"
+itertools = "0.11.0"
+log = "0.4.19"
+notify = "6.0.1"
+once_cell = "1.18.0"
parser = { path = "../parser" }
-percent-encoding = "2.2.0"
-regex = "1.8.1"
+percent-encoding = "2.3.0"
+regex = "1.9.1"
rowan = "0.15.11"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
-text-size = "1.1.0"
-url = "2.3.1"
+text-size = "1.1.1"
+url = "=2.3.1"
[lib]
doctest = false
diff --git a/support/texlab/crates/base-db/src/config.rs b/support/texlab/crates/base-db/src/config.rs
index cf747d4ecd..36063b470c 100644
--- a/support/texlab/crates/base-db/src/config.rs
+++ b/support/texlab/crates/base-db/src/config.rs
@@ -1,3 +1,4 @@
+use std::path::PathBuf;
use std::time::Duration;
use parser::SyntaxConfig;
@@ -21,7 +22,9 @@ pub struct BuildConfig {
pub args: Vec<String>,
pub on_save: bool,
pub forward_search_after: bool,
- pub output_dir: String,
+ pub aux_dir: String,
+ pub log_dir: String,
+ pub output_filename: Option<PathBuf>,
}
#[derive(Debug)]
@@ -110,7 +113,9 @@ impl Default for BuildConfig {
.collect(),
on_save: false,
forward_search_after: false,
- output_dir: String::from("."),
+ aux_dir: String::from("."),
+ log_dir: String::from("."),
+ output_filename: None,
}
}
}
diff --git a/support/texlab/crates/base-db/src/data.rs b/support/texlab/crates/base-db/src/data.rs
index 2ebb35cae2..64313c0ec7 100644
--- a/support/texlab/crates/base-db/src/data.rs
+++ b/support/texlab/crates/base-db/src/data.rs
@@ -23,15 +23,15 @@ pub struct BibtexFieldType<'a> {
}
impl<'a> BibtexEntryType<'a> {
- pub fn find(name: &str) -> Option<&Self> {
- BIBTEX_ENTRY_TYPES.iter().find(|ty| ty.name.eq_ignore_ascii_case(name))
- }
+ pub fn find(name: &str) -> Option<Self> {
+ BIBTEX_ENTRY_TYPES.iter().find(|ty| ty.name.eq_ignore_ascii_case(name)).copied()
+ }
}
impl<'a> BibtexFieldType<'a> {
- pub fn find(name: &str) -> Option<&Self> {
- BIBTEX_FIELD_TYPES.iter().find(|ty| ty.name.eq_ignore_ascii_case(name))
- }
+ pub fn find(name: &str) -> Option<Self> {
+ BIBTEX_FIELD_TYPES.iter().find(|ty| ty.name.eq_ignore_ascii_case(name)).copied()
+ }
}
diff --git a/support/texlab/crates/base-db/src/diagnostics.rs b/support/texlab/crates/base-db/src/diagnostics.rs
deleted file mode 100644
index f5d13400a6..0000000000
--- a/support/texlab/crates/base-db/src/diagnostics.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-pub mod bib;
-pub mod log;
-pub mod tex;
-
-use rowan::TextRange;
-use syntax::BuildError;
-
-#[derive(Debug, Clone)]
-pub struct Diagnostic {
- pub range: TextRange,
- pub code: ErrorCode,
-}
-
-#[derive(Debug, Clone)]
-pub enum ErrorCode {
- UnexpectedRCurly,
- RCurlyInserted,
- MismatchedEnvironment,
- ExpectingLCurly,
- ExpectingKey,
- ExpectingRCurly,
- ExpectingEq,
- ExpectingFieldValue,
- Build(BuildError),
-}
diff --git a/support/texlab/crates/base-db/src/diagnostics/bib.rs b/support/texlab/crates/base-db/src/diagnostics/bib.rs
deleted file mode 100644
index 67fcf412ad..0000000000
--- a/support/texlab/crates/base-db/src/diagnostics/bib.rs
+++ /dev/null
@@ -1,65 +0,0 @@
-use rowan::{ast::AstNode, TextRange};
-use syntax::bibtex::{self, HasDelims, HasEq, HasName, HasType, HasValue};
-
-use crate::{Document, DocumentData};
-
-use super::{Diagnostic, ErrorCode};
-
-pub fn analyze(document: &mut Document) {
- let DocumentData::Bib(data) = &document.data else { return };
-
- for node in bibtex::SyntaxNode::new_root(data.green.clone()).descendants() {
- if let Some(entry) = bibtex::Entry::cast(node.clone()) {
- analyze_entry(document, entry);
- } else if let Some(field) = bibtex::Field::cast(node.clone()) {
- analyze_field(document, field);
- }
- }
-}
-
-fn analyze_entry(document: &mut Document, entry: bibtex::Entry) {
- if entry.left_delim_token().is_none() {
- document.diagnostics.push(Diagnostic {
- range: entry.type_token().unwrap().text_range(),
- code: ErrorCode::ExpectingLCurly,
- });
-
- return;
- }
-
- if entry.name_token().is_none() {
- document.diagnostics.push(Diagnostic {
- range: entry.left_delim_token().unwrap().text_range(),
- code: ErrorCode::ExpectingKey,
- });
-
- return;
- }
-
- if entry.right_delim_token().is_none() {
- document.diagnostics.push(Diagnostic {
- range: TextRange::empty(entry.syntax().text_range().end()),
- code: ErrorCode::ExpectingRCurly,
- });
- }
-}
-
-fn analyze_field(document: &mut Document, field: bibtex::Field) {
- if field.eq_token().is_none() {
- let code = ErrorCode::ExpectingEq;
- document.diagnostics.push(Diagnostic {
- range: field.name_token().unwrap().text_range(),
- code,
- });
-
- return;
- }
-
- if field.value().is_none() {
- let code = ErrorCode::ExpectingFieldValue;
- document.diagnostics.push(Diagnostic {
- range: field.name_token().unwrap().text_range(),
- code,
- });
- }
-}
diff --git a/support/texlab/crates/base-db/src/diagnostics/log.rs b/support/texlab/crates/base-db/src/diagnostics/log.rs
deleted file mode 100644
index 9e97a4e788..0000000000
--- a/support/texlab/crates/base-db/src/diagnostics/log.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-use rowan::{TextLen, TextRange, TextSize};
-use rustc_hash::FxHashMap;
-use syntax::BuildError;
-use url::Url;
-
-use crate::{Document, DocumentData, Workspace};
-
-use super::{Diagnostic, ErrorCode};
-
-pub fn analyze<'a>(
- workspace: &'a Workspace,
- log_document: &'a Document,
-) -> FxHashMap<&'a Document, Vec<Diagnostic>> {
- let mut results = FxHashMap::default();
-
- let DocumentData::Log(data) = &log_document.data else { return results };
-
- let parents = workspace.parents(log_document);
- let Some(root_document) = parents.iter().next() else { return results };
-
- let Some(base_path) = root_document.path
- .as_deref()
- .and_then(|path| path.parent()) else { return results };
-
- for error in &data.errors {
- let full_path = base_path.join(&error.relative_path);
- let Ok(full_path_uri) = Url::from_file_path(&full_path) else { continue };
- let tex_document = workspace.lookup(&full_path_uri).unwrap_or(root_document);
-
- let range = find_range_of_hint(tex_document, error).unwrap_or_else(|| {
- let line = error.line.unwrap_or(0);
- let offset = *tex_document
- .line_index
- .newlines
- .get(line as usize)
- .unwrap_or(&TextSize::from(0));
-
- TextRange::empty(offset)
- });
-
- let diagnostic = Diagnostic {
- range,
- code: ErrorCode::Build(error.clone()),
- };
-
- results.entry(tex_document).or_default().push(diagnostic);
- }
-
- results
-}
-
-fn find_range_of_hint(document: &Document, error: &BuildError) -> Option<TextRange> {
- let line = error.line? as usize;
- let hint = error.hint.as_deref()?;
- let line_index = &document.line_index;
-
- let line_start = line_index.newlines.get(line).copied()?;
- let line_end = line_index
- .newlines
- .get(line + 1)
- .copied()
- .unwrap_or((&document.text).text_len());
-
- let line_text = &document.text[line_start.into()..line_end.into()];
- let hint_start = line_start + TextSize::try_from(line_text.find(hint)?).unwrap();
- let hint_end = hint_start + hint.text_len();
- Some(TextRange::new(hint_start, hint_end))
-}
diff --git a/support/texlab/crates/base-db/src/diagnostics/tex.rs b/support/texlab/crates/base-db/src/diagnostics/tex.rs
deleted file mode 100644
index 81b6e4401a..0000000000
--- a/support/texlab/crates/base-db/src/diagnostics/tex.rs
+++ /dev/null
@@ -1,116 +0,0 @@
-use rowan::{ast::AstNode, NodeOrToken, TextRange};
-use syntax::latex;
-
-use crate::{Config, Document, DocumentData};
-
-use super::{Diagnostic, ErrorCode};
-
-pub fn analyze(document: &mut Document, config: &Config) {
- if !document.uri.as_str().ends_with(".tex") {
- return;
- }
-
- let DocumentData::Tex(data) = &document.data else { return };
-
- let mut traversal = latex::SyntaxNode::new_root(data.green.clone()).preorder();
- while let Some(event) = traversal.next() {
- match event {
- rowan::WalkEvent::Enter(node) => {
- if let Some(environment) = latex::Environment::cast(node.clone()) {
- if environment
- .begin()
- .and_then(|begin| begin.name())
- .and_then(|name| name.key())
- .map_or(false, |name| {
- config
- .syntax
- .verbatim_environments
- .contains(&name.to_string())
- })
- {
- traversal.skip_subtree();
- continue;
- }
- }
-
- analyze_environment(document, node.clone())
- .or_else(|| analyze_curly_group(document, node.clone(), config))
- .or_else(|| analyze_curly_braces(document, node));
- }
- rowan::WalkEvent::Leave(_) => {
- continue;
- }
- };
- }
-}
-
-fn analyze_environment(document: &mut Document, node: latex::SyntaxNode) -> Option<()> {
- let environment = latex::Environment::cast(node)?;
- let begin = environment.begin()?.name()?.key()?;
- let end = environment.end()?.name()?.key()?;
- if begin != end {
- document.diagnostics.push(Diagnostic {
- range: latex::small_range(&begin),
- code: ErrorCode::MismatchedEnvironment,
- });
- }
-
- Some(())
-}
-
-fn analyze_curly_group(
- document: &mut Document,
- node: latex::SyntaxNode,
- config: &Config,
-) -> Option<()> {
- if !matches!(
- node.kind(),
- latex::CURLY_GROUP
- | latex::CURLY_GROUP_COMMAND
- | latex::CURLY_GROUP_KEY_VALUE
- | latex::CURLY_GROUP_WORD
- | latex::CURLY_GROUP_WORD_LIST
- ) {
- return None;
- }
-
- let is_inside_verbatim_environment = node
- .ancestors()
- .filter_map(latex::Environment::cast)
- .filter_map(|env| env.begin())
- .filter_map(|begin| begin.name())
- .filter_map(|name| name.key())
- .any(|name| {
- config
- .syntax
- .verbatim_environments
- .contains(&name.to_string())
- });
-
- if !is_inside_verbatim_environment
- && !node
- .children_with_tokens()
- .filter_map(NodeOrToken::into_token)
- .any(|token| token.kind() == latex::R_CURLY)
- {
- document.diagnostics.push(Diagnostic {
- range: TextRange::empty(node.text_range().end()),
- code: ErrorCode::RCurlyInserted,
- });
- }
-
- Some(())
-}
-
-fn analyze_curly_braces(document: &mut Document, node: latex::SyntaxNode) -> Option<()> {
- if node.kind() == latex::ERROR && node.first_token()?.text() == "}" {
- document.diagnostics.push(Diagnostic {
- range: node.text_range(),
- code: ErrorCode::UnexpectedRCurly,
- });
-
- Some(())
- } else {
- None
- }
-}
diff --git a/support/texlab/crates/base-db/src/document.rs b/support/texlab/crates/base-db/src/document.rs
index 8b6e07fb0f..4b84adf7de 100644
--- a/support/texlab/crates/base-db/src/document.rs
+++ b/support/texlab/crates/base-db/src/document.rs
@@ -2,10 +2,10 @@ use std::path::PathBuf;
use distro::Language;
use syntax::{bibtex, latex, BuildError};
+use text_size::TextRange;
use url::Url;
use crate::{
- diagnostics::{self, Diagnostic},
semantics,
util::{LineCol, LineIndex},
Config,
@@ -17,6 +17,16 @@ pub enum Owner {
Server,
}
+#[derive(Debug)]
+pub struct DocumentParams<'a> {
+ pub uri: Url,
+ pub text: String,
+ pub language: Language,
+ pub owner: Owner,
+ pub cursor: LineCol,
+ pub config: &'a Config,
+}
+
#[derive(Clone)]
pub struct Document {
pub uri: Url,
@@ -28,18 +38,12 @@ pub struct Document {
pub cursor: LineCol,
pub language: Language,
pub data: DocumentData,
- pub diagnostics: Vec<Diagnostic>,
}
impl Document {
- pub fn parse(
- uri: Url,
- text: String,
- language: Language,
- owner: Owner,
- cursor: LineCol,
- config: &Config,
- ) -> Self {
+ pub fn parse(params: DocumentParams) -> Self {
+ let DocumentParams { uri, text, .. } = params;
+
let dir = uri.join(".").unwrap();
let path = if uri.scheme() == "file" {
@@ -50,20 +54,21 @@ impl Document {
let line_index = LineIndex::new(&text);
- let diagnostics = Vec::new();
- let data = match language {
+ let data = match params.language {
Language::Tex => {
- let green = parser::parse_latex(&text, &config.syntax);
+ let green = parser::parse_latex(&text, &params.config.syntax);
let mut semantics = semantics::tex::Semantics::default();
semantics.process_root(&latex::SyntaxNode::new_root(green.clone()));
DocumentData::Tex(TexDocumentData { green, semantics })
}
Language::Bib => {
let green = parser::parse_bibtex(&text);
- DocumentData::Bib(BibDocumentData { green })
+ let mut semantics = semantics::bib::Semantics::default();
+ semantics.process_root(&bibtex::SyntaxNode::new_root(green.clone()));
+ DocumentData::Bib(BibDocumentData { green, semantics })
}
Language::Aux => {
- let green = parser::parse_latex(&text, &config.syntax);
+ let green = parser::parse_latex(&text, &params.config.syntax);
let mut semantics = semantics::auxiliary::Semantics::default();
semantics.process_root(&latex::SyntaxNode::new_root(green.clone()));
DocumentData::Aux(AuxDocumentData { green, semantics })
@@ -76,26 +81,17 @@ impl Document {
Language::Tectonic => DocumentData::Tectonic,
};
- let mut document = Self {
+ Self {
uri,
dir,
path,
text,
line_index,
- owner,
- cursor,
- language,
+ owner: params.owner,
+ cursor: params.cursor,
+ language: params.language,
data,
- diagnostics,
- };
-
- match language {
- Language::Tex => diagnostics::tex::analyze(&mut document, config),
- Language::Bib => diagnostics::bib::analyze(&mut document),
- Language::Aux | Language::Log | Language::Root | Language::Tectonic => (),
- };
-
- document
+ }
}
}
@@ -165,6 +161,14 @@ impl DocumentData {
None
}
}
+
+ pub fn as_log(&self) -> Option<&LogDocumentData> {
+ if let DocumentData::Log(data) = self {
+ Some(data)
+ } else {
+ None
+ }
+ }
}
#[derive(Debug, Clone)]
@@ -182,6 +186,7 @@ impl TexDocumentData {
#[derive(Debug, Clone)]
pub struct BibDocumentData {
pub green: rowan::GreenNode,
+ pub semantics: semantics::bib::Semantics,
}
impl BibDocumentData {
@@ -200,3 +205,15 @@ pub struct AuxDocumentData {
pub green: rowan::GreenNode,
pub semantics: semantics::auxiliary::Semantics,
}
+
+#[derive(Debug, Clone)]
+pub struct DocumentLocation<'a> {
+ pub document: &'a Document,
+ pub range: TextRange,
+}
+
+impl<'a> DocumentLocation<'a> {
+ pub fn new(document: &'a Document, range: TextRange) -> Self {
+ Self { document, range }
+ }
+}
diff --git a/support/texlab/crates/base-db/src/graph.rs b/support/texlab/crates/base-db/src/graph.rs
index 5e727d2bd2..3e718afd48 100644
--- a/support/texlab/crates/base-db/src/graph.rs
+++ b/support/texlab/crates/base-db/src/graph.rs
@@ -48,15 +48,18 @@ impl<'a> Graph<'a> {
while let Some((source, base_dir)) = stack.pop() {
let index = graph.edges.len();
- graph.explicit_edges(source, &base_dir);
+ graph.add_explicit_edges(source, &base_dir);
for edge in &graph.edges[index..] {
- let Some(weight) = edge.weight.as_ref() else { continue };
+ let Some(weight) = edge.weight.as_ref() else {
+ continue;
+ };
+
if visited.insert(&edge.target.uri) {
stack.push((edge.target, weight.new_base_dir.clone()));
}
}
- graph.implicit_edges(source, &base_dir);
+ graph.add_implicit_edges(source, &base_dir);
}
graph
@@ -68,19 +71,17 @@ impl<'a> Graph<'a> {
.unique_by(|document| &document.uri)
}
- fn explicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
- let DocumentData::Tex(data) = &source.data else { return };
+ fn add_explicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
+ let DocumentData::Tex(data) = &source.data else {
+ return;
+ };
+
for link in &data.semantics.links {
- self.explicit_edge(source, base_dir, link);
+ self.add_link(source, base_dir, link);
}
}
- fn explicit_edge(
- &mut self,
- source: &'a Document,
- base_dir: &Url,
- link: &'a semantics::tex::Link,
- ) {
+ fn add_link(&mut self, source: &'a Document, base_dir: &Url, link: &'a semantics::tex::Link) {
let home_dir = HOME_DIR.as_deref();
let stem = &link.path.text;
@@ -130,15 +131,20 @@ impl<'a> Graph<'a> {
}
}
- fn implicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
- let uri = source.uri.as_str();
- if source.language == Language::Tex && !uri.ends_with(".aux") {
- self.implicit_edge(source, base_dir, "log");
- self.implicit_edge(source, base_dir, "aux");
+ fn add_implicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
+ if source.language == Language::Tex {
+ let config = &self.workspace.config().build;
+ let aux_dir = self.workspace.output_dir(base_dir, config.aux_dir.clone());
+ let log_dir = self.workspace.output_dir(base_dir, config.log_dir.clone());
+
+ self.add_artifact(source, &aux_dir, "aux");
+ self.add_artifact(source, base_dir, "aux");
+ self.add_artifact(source, &log_dir, "log");
+ self.add_artifact(source, base_dir, "log");
}
}
- fn implicit_edge(&mut self, source: &'a Document, base_dir: &Url, extension: &str) {
+ fn add_artifact(&mut self, source: &'a Document, base_dir: &Url, extension: &str) {
let mut path = PathBuf::from(
percent_decode_str(source.uri.path())
.decode_utf8_lossy()
@@ -146,9 +152,13 @@ impl<'a> Graph<'a> {
);
path.set_extension(extension);
- let Some(target_uri) = path.file_name()
+ let Some(target_uri) = path
+ .file_name()
.and_then(OsStr::to_str)
- .and_then(|name| self.workspace.output_dir(base_dir).join(name).ok()) else { return };
+ .and_then(|name| base_dir.join(name).ok())
+ else {
+ return;
+ };
match self.workspace.lookup(&target_uri) {
Some(target) => {
diff --git a/support/texlab/crates/base-db/src/lib.rs b/support/texlab/crates/base-db/src/lib.rs
index 304888a42a..3ad5f74836 100644
--- a/support/texlab/crates/base-db/src/lib.rs
+++ b/support/texlab/crates/base-db/src/lib.rs
@@ -1,6 +1,5 @@
mod config;
pub mod data;
-pub mod diagnostics;
mod document;
pub mod graph;
pub mod semantics;
diff --git a/support/texlab/crates/base-db/src/semantics.rs b/support/texlab/crates/base-db/src/semantics.rs
index 1096eb2125..d05c9ffb87 100644
--- a/support/texlab/crates/base-db/src/semantics.rs
+++ b/support/texlab/crates/base-db/src/semantics.rs
@@ -1,4 +1,5 @@
pub mod auxiliary;
+pub mod bib;
pub mod tex;
#[derive(PartialEq, Eq, Clone, Hash)]
@@ -6,6 +7,7 @@ pub struct Span {
pub text: String,
pub range: rowan::TextRange,
}
+
impl std::fmt::Debug for Span {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Span")
@@ -17,9 +19,18 @@ impl std::fmt::Debug for Span {
impl From<&syntax::latex::Key> for Span {
fn from(key: &syntax::latex::Key) -> Self {
- Span {
+ Self {
text: key.to_string(),
range: syntax::latex::small_range(key),
}
}
}
+
+impl From<&syntax::bibtex::SyntaxToken> for Span {
+ fn from(token: &syntax::bibtex::SyntaxToken) -> Self {
+ Self {
+ text: token.text().into(),
+ range: token.text_range(),
+ }
+ }
+}
diff --git a/support/texlab/crates/base-db/src/semantics/bib.rs b/support/texlab/crates/base-db/src/semantics/bib.rs
new file mode 100644
index 0000000000..22017d3c03
--- /dev/null
+++ b/support/texlab/crates/base-db/src/semantics/bib.rs
@@ -0,0 +1,59 @@
+use rowan::ast::AstNode;
+use syntax::bibtex::{self, HasName};
+use text_size::TextRange;
+
+use super::Span;
+
+#[derive(Debug, Clone, Default)]
+pub struct Semantics {
+ pub entries: Vec<Entry>,
+ pub strings: Vec<StringDef>,
+}
+
+impl Semantics {
+ pub fn process_root(&mut self, root: &bibtex::SyntaxNode) {
+ for node in root.children() {
+ if let Some(entry) = bibtex::Entry::cast(node.clone()) {
+ self.process_entry(entry);
+ } else if let Some(string) = bibtex::StringDef::cast(node) {
+ self.process_string_def(string);
+ }
+ }
+ }
+
+ fn process_entry(&mut self, entry: bibtex::Entry) {
+ if let Some(name) = entry.name_token() {
+ self.entries.push(Entry {
+ name: Span {
+ range: name.text_range(),
+ text: name.text().into(),
+ },
+ full_range: entry.syntax().text_range(),
+ });
+ }
+ }
+
+ fn process_string_def(&mut self, string: bibtex::StringDef) {
+ if let Some(name) = string.name_token() {
+ self.strings.push(StringDef {
+ name: Span {
+ range: name.text_range(),
+ text: name.text().into(),
+ },
+ full_range: string.syntax().text_range(),
+ });
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct Entry {
+ pub name: Span,
+ pub full_range: TextRange,
+}
+
+#[derive(Debug, Clone)]
+pub struct StringDef {
+ pub name: Span,
+ pub full_range: TextRange,
+}
diff --git a/support/texlab/crates/base-db/src/semantics/tex.rs b/support/texlab/crates/base-db/src/semantics/tex.rs
index 1445ae12e7..9954098b36 100644
--- a/support/texlab/crates/base-db/src/semantics/tex.rs
+++ b/support/texlab/crates/base-db/src/semantics/tex.rs
@@ -9,6 +9,7 @@ use super::Span;
pub struct Semantics {
pub links: Vec<Link>,
pub labels: Vec<Label>,
+ pub citations: Vec<Citation>,
pub commands: Vec<Span>,
pub environments: Vec<Span>,
pub theorem_definitions: Vec<TheoremDefinition>,
@@ -53,6 +54,8 @@ impl Semantics {
self.process_label_reference(label);
} else if let Some(label) = latex::LabelReferenceRange::cast(node.clone()) {
self.process_label_reference_range(label);
+ } else if let Some(citation) = latex::Citation::cast(node.clone()) {
+ self.process_citation(citation);
} else if let Some(environment) = latex::Environment::cast(node.clone()) {
self.process_environment(environment);
} else if let Some(theorem_def) = latex::TheoremDefinition::cast(node.clone()) {
@@ -61,7 +64,9 @@ impl Semantics {
}
fn process_include(&mut self, include: latex::Include) {
- let Some(list) = include.path_list() else { return };
+ let Some(list) = include.path_list() else {
+ return;
+ };
for path in list.keys() {
let kind = match include.syntax().kind() {
@@ -85,13 +90,18 @@ impl Semantics {
let Some(mut base_dir) = import
.directory()
.and_then(|dir| dir.key())
- .map(|key| key.to_string()) else { return };
+ .map(|key| key.to_string())
+ else {
+ return;
+ };
if !base_dir.ends_with('/') {
base_dir.push('/');
}
- let Some(path) = import.file().and_then(|path| path.key()) else { return };
+ let Some(path) = import.file().and_then(|path| path.key()) else {
+ return;
+ };
let text = format!("{base_dir}{}", path.to_string());
let range = latex::small_range(&path);
@@ -103,13 +113,17 @@ impl Semantics {
}
fn process_label_definition(&mut self, label: latex::LabelDefinition) {
- let Some(name) = label.name().and_then(|group| group.key()) else { return };
+ let Some(name) = label.name().and_then(|group| group.key()) else {
+ return;
+ };
let full_range = latex::small_range(&label);
let mut objects = Vec::new();
for node in label.syntax().ancestors() {
if let Some(section) = latex::Section::cast(node.clone()) {
- let Some(text) = section.name().and_then(|group| group.content_text()) else { continue };
+ let Some(text) = section.name().and_then(|group| group.content_text()) else {
+ continue;
+ };
let range = latex::small_range(&section);
let prefix = String::from(match section.syntax().kind() {
latex::PART => "Part",
@@ -128,10 +142,14 @@ impl Semantics {
range,
});
} else if let Some(environment) = latex::Environment::cast(node.clone()) {
- let Some(name) = environment.begin()
+ let Some(name) = environment
+ .begin()
.and_then(|begin| begin.name())
.and_then(|group| group.key())
- .map(|key| key.to_string()) else { continue };
+ .map(|key| key.to_string())
+ else {
+ continue;
+ };
let caption = environment
.syntax()
@@ -175,7 +193,9 @@ impl Semantics {
}
fn process_label_reference(&mut self, label: latex::LabelReference) {
- let Some(name_list) = label.name_list() else { return };
+ let Some(name_list) = label.name_list() else {
+ return;
+ };
let full_range = latex::small_range(&label);
for name in name_list.keys() {
@@ -209,11 +229,26 @@ impl Semantics {
}
}
+ fn process_citation(&mut self, citation: latex::Citation) {
+ let full_range = latex::small_range(&citation);
+ if let Some(list) = citation.key_list() {
+ for key in list.keys() {
+ self.citations.push(Citation {
+ name: Span::from(&key),
+ full_range,
+ });
+ }
+ }
+ }
+
fn process_environment(&mut self, environment: latex::Environment) {
let Some(name) = environment
.begin()
.and_then(|begin| begin.name())
- .and_then(|group| group.key()) else { return };
+ .and_then(|group| group.key())
+ else {
+ return;
+ };
let name = Span::from(&name);
self.can_be_compiled = self.can_be_compiled || name.text == "document";
@@ -221,9 +256,13 @@ impl Semantics {
}
fn process_theorem_definition(&mut self, theorem_def: latex::TheoremDefinition) {
- let Some(name) = theorem_def.name().and_then(|name| name.key()) else { return };
+ let Some(name) = theorem_def.name().and_then(|name| name.key()) else {
+ return;
+ };
- let Some(heading) = theorem_def.heading() else { return };
+ let Some(heading) = theorem_def.heading() else {
+ return;
+ };
self.theorem_definitions.push(TheoremDefinition {
name: Span::from(&name),
@@ -258,6 +297,16 @@ pub struct Link {
pub base_dir: Option<String>,
}
+impl Link {
+ pub fn package_name(&self) -> Option<String> {
+ match self.kind {
+ LinkKind::Sty => Some(format!("{}.sty", self.path.text)),
+ LinkKind::Cls => Some(format!("{}.cls", self.path.text)),
+ _ => None,
+ }
+ }
+}
+
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum LabelKind {
Definition,
@@ -298,3 +347,9 @@ pub struct TheoremDefinition {
pub name: Span,
pub heading: String,
}
+
+#[derive(Debug, Clone)]
+pub struct Citation {
+ pub name: Span,
+ pub full_range: TextRange,
+}
diff --git a/support/texlab/crates/base-db/src/util.rs b/support/texlab/crates/base-db/src/util.rs
index 22b9309209..6d6d6897a6 100644
--- a/support/texlab/crates/base-db/src/util.rs
+++ b/support/texlab/crates/base-db/src/util.rs
@@ -1,5 +1,6 @@
mod label;
mod line_index;
+pub mod queries;
mod regex_filter;
pub use self::{
diff --git a/support/texlab/crates/base-db/src/util/line_index.rs b/support/texlab/crates/base-db/src/util/line_index.rs
index 70e8f8128b..3c5907782a 100644
--- a/support/texlab/crates/base-db/src/util/line_index.rs
+++ b/support/texlab/crates/base-db/src/util/line_index.rs
@@ -10,7 +10,7 @@ use rustc_hash::FxHashMap;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
/// Offset the the beginning of each line, zero-based
- pub(crate) newlines: Vec<TextSize>,
+ pub newlines: Vec<TextSize>,
/// List of non-ASCII characters on each line
pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
}
diff --git a/support/texlab/crates/base-db/src/util/queries.rs b/support/texlab/crates/base-db/src/util/queries.rs
new file mode 100644
index 0000000000..9d4418c328
--- /dev/null
+++ b/support/texlab/crates/base-db/src/util/queries.rs
@@ -0,0 +1,215 @@
+use itertools::Itertools;
+use rustc_hash::FxHashMap;
+use text_size::{TextRange, TextSize};
+use url::Url;
+
+use crate::{
+ semantics::{bib, tex},
+ Document, DocumentLocation, Project, Workspace,
+};
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
+pub enum SearchMode {
+ Name,
+ Full,
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
+pub enum ObjectKind {
+ Definition,
+ Reference,
+}
+
+pub trait Object {
+ fn name_text(&self) -> &str;
+
+ fn name_range(&self) -> TextRange;
+
+ fn full_range(&self) -> TextRange;
+
+ fn kind(&self) -> ObjectKind;
+
+ fn find<'db>(document: &'db Document) -> Box<dyn Iterator<Item = &'db Self> + 'db>;
+
+ fn find_all<'a, 'db>(
+ project: &'a Project<'db>,
+ ) -> Box<dyn Iterator<Item = (&'db Document, &'db Self)> + 'a> {
+ let iter = project
+ .documents
+ .iter()
+ .flat_map(|document| Self::find(document).map(|obj| (*document, obj)));
+
+ Box::new(iter)
+ }
+}
+
+impl Object for tex::Label {
+ fn name_text(&self) -> &str {
+ &self.name.text
+ }
+
+ fn name_range(&self) -> TextRange {
+ self.name.range
+ }
+
+ fn full_range(&self) -> TextRange {
+ self.full_range
+ }
+
+ fn kind(&self) -> ObjectKind {
+ match self.kind {
+ tex::LabelKind::Definition => ObjectKind::Definition,
+ tex::LabelKind::Reference => ObjectKind::Reference,
+ tex::LabelKind::ReferenceRange => ObjectKind::Reference,
+ }
+ }
+
+ fn find<'db>(document: &'db Document) -> Box<dyn Iterator<Item = &'db Self> + 'db> {
+ let data = document.data.as_tex();
+ let iter = data
+ .into_iter()
+ .flat_map(|data| data.semantics.labels.iter());
+
+ Box::new(iter)
+ }
+}
+
+impl Object for tex::Citation {
+ fn name_text(&self) -> &str {
+ &self.name.text
+ }
+
+ fn name_range(&self) -> TextRange {
+ self.name.range
+ }
+
+ fn full_range(&self) -> TextRange {
+ self.full_range
+ }
+
+ fn find<'db>(document: &'db Document) -> Box<dyn Iterator<Item = &'db Self> + 'db> {
+ let data = document.data.as_tex();
+ let iter = data
+ .into_iter()
+ .flat_map(|data| data.semantics.citations.iter());
+
+ Box::new(iter)
+ }
+
+ fn kind(&self) -> ObjectKind {
+ ObjectKind::Reference
+ }
+}
+
+impl Object for bib::Entry {
+ fn name_text(&self) -> &str {
+ &self.name.text
+ }
+
+ fn name_range(&self) -> TextRange {
+ self.name.range
+ }
+
+ fn full_range(&self) -> TextRange {
+ self.full_range
+ }
+
+ fn find<'db>(document: &'db Document) -> Box<dyn Iterator<Item = &'db Self> + 'db> {
+ let data = document.data.as_bib();
+ let iter = data
+ .into_iter()
+ .flat_map(|data| data.semantics.entries.iter());
+
+ Box::new(iter)
+ }
+
+ fn kind(&self) -> ObjectKind {
+ ObjectKind::Definition
+ }
+}
+
+#[derive(Debug)]
+pub struct ObjectWithRange<T> {
+ pub object: T,
+ pub range: TextRange,
+}
+
+impl<T> ObjectWithRange<T> {
+ pub fn new(object: T, range: TextRange) -> Self {
+ Self { object, range }
+ }
+}
+
+pub fn object_at_cursor<T: Object>(
+ objs: &[T],
+ offset: TextSize,
+ mode: SearchMode,
+) -> Option<ObjectWithRange<&T>> {
+ let mut result = objs
+ .iter()
+ .find(|obj| obj.name_range().contains_inclusive(offset))
+ .map(|obj| ObjectWithRange::new(obj, obj.name_range()));
+
+ if mode == SearchMode::Full {
+ result = result.or_else(|| {
+ objs.iter()
+ .find(|obj| obj.full_range().contains_inclusive(offset))
+ .map(|obj| ObjectWithRange::new(obj, obj.full_range()))
+ });
+ }
+
+ result
+}
+
+pub fn objects_with_name<'a, 'db, T: Object + 'static>(
+ project: &'a Project<'db>,
+ name: &'a str,
+) -> impl Iterator<Item = (&'db Document, &'db T)> + 'a {
+ T::find_all(project).filter(move |(_, obj)| obj.name_text() == name)
+}
+
+#[derive(Debug)]
+pub struct Conflict<'a> {
+ pub main: DocumentLocation<'a>,
+ pub rest: Vec<DocumentLocation<'a>>,
+}
+
+impl<'a> Conflict<'a> {
+ pub fn find_all<T: Object + std::fmt::Debug>(workspace: &'a Workspace) -> Vec<Self> {
+ let groups = workspace
+ .iter()
+ .flat_map(|document| T::find(document).map(move |obj| (document, obj)))
+ .filter(|(_, obj)| obj.kind() == ObjectKind::Definition)
+ .into_group_map_by(|(_, obj)| obj.name_text());
+
+ let projects: FxHashMap<&Url, Project> = workspace
+ .iter()
+ .map(|document| (&document.uri, workspace.project(document)))
+ .collect();
+
+ let mut conflicts = Vec::new();
+ for group in groups.into_values().filter(|group| group.len() > 1) {
+ for (i, main) in group
+ .iter()
+ .enumerate()
+ .map(|(i, (document, obj))| (i, DocumentLocation::new(document, obj.name_range())))
+ {
+ let mut rest = Vec::new();
+
+ let project = &projects[&main.document.uri];
+
+ for (_, (other, obj)) in group.iter().enumerate().filter(|(j, _)| *j != i) {
+ if project.documents.contains(other) {
+ rest.push(DocumentLocation::new(other, obj.name_range()));
+ }
+ }
+
+ if !rest.is_empty() {
+ conflicts.push(Conflict { main, rest });
+ }
+ }
+ }
+
+ conflicts
+ }
+}
diff --git a/support/texlab/crates/base-db/src/workspace.rs b/support/texlab/crates/base-db/src/workspace.rs
index 58c7567f9d..9308c2401d 100644
--- a/support/texlab/crates/base-db/src/workspace.rs
+++ b/support/texlab/crates/base-db/src/workspace.rs
@@ -10,7 +10,7 @@ use rustc_hash::FxHashSet;
use text_size::TextLen;
use url::Url;
-use crate::{graph, util::LineCol, Config, Document, DocumentData, Owner};
+use crate::{graph, util::LineCol, Config, Document, DocumentData, DocumentParams, Owner};
#[derive(Debug, Default)]
pub struct Workspace {
@@ -56,14 +56,14 @@ impl Workspace {
) {
log::debug!("Opening document {uri}...");
self.documents.remove(&uri);
- self.documents.insert(Document::parse(
+ self.documents.insert(Document::parse(DocumentParams {
uri,
text,
language,
owner,
cursor,
- &self.config,
- ));
+ config: &self.config,
+ }));
}
pub fn load(&mut self, path: &Path, language: Language, owner: Owner) -> std::io::Result<()> {
@@ -109,9 +109,22 @@ impl Workspace {
self.iter()
.filter(|document| document.uri.scheme() == "file")
.flat_map(|document| {
- let dir1 = self.output_dir(&self.current_dir(&document.dir));
- let dir2 = &document.dir;
- [dir1.to_file_path(), dir2.to_file_path()]
+ let dir1 = self.output_dir(
+ &self.current_dir(&document.dir),
+ self.config.build.aux_dir.clone(),
+ );
+
+ let dir2 = self.output_dir(
+ &self.current_dir(&document.dir),
+ self.config.build.log_dir.clone(),
+ );
+
+ let dir3 = &document.dir;
+ [
+ dir1.to_file_path(),
+ dir2.to_file_path(),
+ dir3.to_file_path(),
+ ]
})
.flatten()
.for_each(|path| {
@@ -135,8 +148,8 @@ impl Workspace {
.unwrap_or_else(|| base_dir.clone())
}
- pub fn output_dir(&self, base_dir: &Url) -> Url {
- let mut path = self.config.build.output_dir.clone();
+ pub fn output_dir(&self, base_dir: &Url, relative_path: String) -> Url {
+ let mut path = relative_path;
if !path.ends_with('/') {
path.push('/');
}
@@ -168,7 +181,9 @@ impl Workspace {
pub fn parents(&self, child: &Document) -> FxHashSet<&Document> {
self.iter()
.filter(|document| {
- let DocumentData::Tex(data) = &document.data else { return false };
+ let DocumentData::Tex(data) = &document.data else {
+ return false;
+ };
data.semantics.can_be_root
})
.filter(|parent| {
@@ -232,18 +247,18 @@ impl Workspace {
Some(())
}
- pub fn discover(&mut self) {
+ pub fn discover(&mut self, checked_paths: &mut FxHashSet<PathBuf>) {
loop {
let mut changed = false;
- changed |= self.discover_parents();
- changed |= self.discover_children();
+ changed |= self.discover_parents(checked_paths);
+ changed |= self.discover_children(checked_paths);
if !changed {
break;
}
}
}
- fn discover_parents(&mut self) -> bool {
+ fn discover_parents(&mut self, checked_paths: &mut FxHashSet<PathBuf>) -> bool {
let dirs = self
.iter()
.filter_map(|document| document.path.as_deref())
@@ -264,20 +279,25 @@ impl Workspace {
continue;
}
- let Ok(entries) = std::fs::read_dir(dir) else { continue };
+ let Ok(entries) = std::fs::read_dir(dir) else {
+ continue;
+ };
for file in entries
.flatten()
.filter(|entry| entry.file_type().map_or(false, |type_| type_.is_file()))
.map(|entry| entry.path())
{
- let Some(lang) = Language::from_path(&file) else { continue };
+ let Some(lang) = Language::from_path(&file) else {
+ continue;
+ };
if !matches!(lang, Language::Tex | Language::Root | Language::Tectonic) {
continue;
}
if self.lookup_path(&file).is_none() {
changed |= self.load(&file, lang, Owner::Server).is_ok();
+ checked_paths.insert(file);
}
}
}
@@ -285,8 +305,8 @@ impl Workspace {
changed
}
- fn discover_children(&mut self) -> bool {
- let paths = self
+ fn discover_children(&mut self, checked_paths: &mut FxHashSet<PathBuf>) -> bool {
+ let files = self
.iter()
.map(|start| graph::Graph::new(self, start))
.flat_map(|graph| graph.missing)
@@ -295,10 +315,11 @@ impl Workspace {
.collect::<FxHashSet<_>>();
let mut changed = false;
- for path in paths {
- let language = Language::from_path(&path).unwrap_or(Language::Tex);
- if self.lookup_path(&path).is_none() {
- changed |= self.load(&path, language, Owner::Server).is_ok();
+ for file in files {
+ let language = Language::from_path(&file).unwrap_or(Language::Tex);
+ if self.lookup_path(&file).is_none() {
+ changed |= self.load(&file, language, Owner::Server).is_ok();
+ checked_paths.insert(file);
}
}
diff --git a/support/texlab/crates/citeproc/Cargo.toml b/support/texlab/crates/citeproc/Cargo.toml
index fb230323b0..ff1af00c97 100644
--- a/support/texlab/crates/citeproc/Cargo.toml
+++ b/support/texlab/crates/citeproc/Cargo.toml
@@ -7,20 +7,20 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-chrono = { version = "0.4.24", default-features = false, features = ["std"] }
+chrono = { version = "0.4.26", default-features = false, features = ["std"] }
human_name = "2.0.2"
isocountry = "0.3.2"
-itertools = "0.10.5"
+itertools = "0.11.0"
rowan = "0.15.11"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
titlecase = "2.2.1"
unicode-normalization = "0.1.22"
-url = "2.3.1"
+url = "=2.3.1"
[lib]
doctest = false
[dev-dependencies]
-insta = "1.29.0"
+expect-test = "1.4.1"
parser = { path = "../parser" }
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_aksin_2006.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_aksin_2006.snap
deleted file mode 100644
index 72c0c5e41a..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_aksin_2006.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@string{jomch = {J.~Organomet. Chem.}}\n\n@article{aksin,\n author = {Aks{\\i}n, {\\\"O}zge and T{\\\"u}rkmen, Hayati and Artok, Levent\n and {\\c{C}}etinkaya, Bekir and Ni, Chaoying and\n B{\\\"u}y{\\\"u}kg{\\\"u}ng{\\\"o}r, Orhan and {\\\"O}zkal, Erhan},\n title = {Effect of immobilization on catalytic characteristics of\n saturated {Pd-N}-heterocyclic carbenes in {Mizoroki-Heck}\n reactions},\n journaltitle = jomch,\n date = 2006,\n volume = 691,\n number = 13,\n pages = {3027-3036},\n indextitle = {Effect of immobilization on catalytic characteristics},\n}\"#)"
----
-O. Aksın, H. Türkmen, L. Artok, B. Çetinkaya, C. Ni, O. Büyükgüngör, E. Özkal: "Effect of immobilization on catalytic characteristics of saturated Pd-N-heterocyclic carbenes in Mizoroki-Heck reactions". *J. Organomet. Chem.* 691.13 (2006): 3027-3036.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_betram_1996.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_betram_1996.snap
deleted file mode 100644
index 1ae89dd711..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_betram_1996.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@string{jams = {J.~Amer. Math. Soc.}}\n\n@article{bertram,\n author = {Bertram, Aaron and Wentworth, Richard},\n title = {Gromov invariants for holomorphic maps on {Riemann} surfaces},\n journaltitle = jams,\n date = 1996,\n volume = 9,\n number = 2,\n pages = {529-571},\n langid = {english},\n langidopts = {variant=american},\n shorttitle = {Gromov invariants},\n annotation = {An \\texttt{article} entry with a \\texttt{volume} and a\n \\texttt{number} field},\n}\"#)"
----
-A. Bertram, R. Wentworth: "Gromov invariants for holomorphic maps on Riemann surfaces". *J. Amer. Math. Soc.* 9.2 (1996): 529-571.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_blom_2021.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_blom_2021.snap
deleted file mode 100644
index 0e841a20a9..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_blom_2021.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@article{DBLP:journals/corr/abs-2107-11903,\n author = {Michelle L. Blom and\n Jurlind Budurushi and\n Ronald L. Rivest and\n Philip B. Stark and\n Peter J. Stuckey and\n Vanessa Teague and\n Damjan Vukcevic},\n title = {Assertion-based Approaches to Auditing Complex Elections, with application\n to party-list proportional elections},\n journal = {CoRR},\n volume = {abs/2107.11903},\n year = {2021},\n url = {https://arxiv.org/abs/2107.11903},\n eprinttype = {arXiv},\n eprint = {2107.11903},\n timestamp = {Thu, 29 Jul 2021 16:14:15 +0200},\n biburl = {https://dblp.org/rec/journals/corr/abs-2107-11903.bib},\n bibsource = {dblp computer science bibliography, https://dblp.org}\n}\"#)"
----
-M. Blom, J. Budurushi, R. Rivest, P. Stark, P. Stuckey, V. Teague, D. Vukcevic: "Assertion-based Approaches to Auditing Complex Elections, with application to party-list proportional elections". *CoRR* abs/2107.11903 (2021): arXiv: [2107.11903](https://arxiv.org/abs/2107.11903). URL: [https://arxiv.org/abs/2107.11903](https://arxiv.org/abs/2107.11903).
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_jain_1999.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_jain_1999.snap
deleted file mode 100644
index 6ae38a25e5..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_jain_1999.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@article{10.1145/331499.331504,\n author = {Jain, A. K. and Murty, M. N. and Flynn, P. J.},\n title = {Data Clustering: A Review},\n year = {1999},\n issue_date = {Sept. 1999},\n publisher = {Association for Computing Machinery},\n address = {New York, NY, USA},\n volume = {31},\n number = {3},\n issn = {0360-0300},\n url = {https://doi.org/10.1145/331499.331504},\n doi = {10.1145/331499.331504},\n journal = {ACM Comput. Surv.},\n month = {sep},\n pages = {264-323},\n numpages = {60},\n keywords = {incremental clustering, clustering applications, exploratory data analysis, cluster analysis, similarity indices, unsupervised learning}\n}\"#)"
----
-A. Jain, M. Murty, P. Flynn: "Data Clustering: A Review". *ACM Comput. Surv.* 31.3 (Sep. 1999): 264-323. ISSN: 0360-0300. DOI: [10.1145/331499.331504](https://doi.org/10.1145/331499.331504). URL: [https://doi.org/10.1145/331499.331504](https://doi.org/10.1145/331499.331504).
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_kastenholz_2006.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_kastenholz_2006.snap
deleted file mode 100644
index 8abd770d8f..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_kastenholz_2006.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@string{jchph = {J.~Chem. Phys.}}\n\n@article{kastenholz,\n author = {Kastenholz, M. A. and H{\\\"u}nenberger, Philippe H.},\n title = {Computation of methodology\\hyphen independent ionic solvation\n free energies from molecular simulations},\n journaltitle = jchph,\n date = 2006,\n subtitle = {{I}. {The} electrostatic potential in molecular liquids},\n volume = 124,\n eid = 124106,\n doi = {10.1063/1.2172593},\n langid = {english},\n langidopts = {variant=american},\n indextitle = {Computation of ionic solvation free energies},\n annotation = {An \\texttt{article} entry with an \\texttt{eid} and a\n \\texttt{doi} field. Note that the \\textsc{doi} is transformed\n into a clickable link if \\texttt{hyperref} support has been\n enabled},\n}\n \"#)"
----
-M. Kastenholz, P. Hünenberger: "Computation of methodology- independent ionic solvation free energies from molecular simulations. I. The electrostatic potential in molecular liquids". *J. Chem. Phys.* 124, 124106 (2006): DOI: [10.1063/1.2172593](https://doi.org/10.1063/1.2172593).
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_rivest_1978.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_rivest_1978.snap
deleted file mode 100644
index 64b9784479..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__article_rivest_1978.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@article{10.1145/359340.359342,\n author = {Rivest, R. L. and Shamir, A. and Adleman, L.},\n title = {A Method for Obtaining Digital Signatures and Public-Key Cryptosystems},\n year = {1978},\n issue_date = {Feb. 1978},\n publisher = {Association for Computing Machinery},\n address = {New York, NY, USA},\n volume = {21},\n number = {2},\n issn = {0001-0782},\n url = {https://doi.org/10.1145/359340.359342},\n doi = {10.1145/359340.359342},\n journal = {Commun. ACM},\n month = {feb},\n pages = {120-126},\n numpages = {7},\n}\"#)"
----
-R. Rivest, A. Shamir, L. Adleman: "A Method for Obtaining Digital Signatures and Public-Key Cryptosystems". *Commun. ACM* 21.2 (Feb. 1978): 120-126. ISSN: 0001-0782. DOI: [10.1145/359340.359342](https://doi.org/10.1145/359340.359342). URL: [https://doi.org/10.1145/359340.359342](https://doi.org/10.1145/359340.359342).
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_aho_2006.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_aho_2006.snap
deleted file mode 100644
index b168edc665..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_aho_2006.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@book{10.5555/1177220,\n author = {Aho, Alfred V. and Lam, Monica S. and Sethi, Ravi and Ullman, Jeffrey D.},\n title = {Compilers: Principles, Techniques, and Tools (2nd Edition)},\n year = {2006},\n isbn = {0321486811},\n publisher = {Addison-Wesley Longman Publishing Co., Inc.},\n address = {USA}\n}\"#)"
----
-A. Aho, M. Lam, R. Sethi, J. Ullman: "Compilers: Principles, Techniques, and Tools (2nd Edition)". Addison-Wesley Longman Publishing Co., Inc., 2006. ISBN: 0321486811.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_averroes_1998.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_averroes_1998.snap
deleted file mode 100644
index e03caa723e..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_averroes_1998.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@book{averroes/bland,\n author = {Averroes},\n title = {The Epistle on the Possibility of Conjunction with the Active\n Intellect by {Ibn Rushd} with the Commentary of {Moses Narboni}},\n date = 1982,\n editor = {Bland, Kalman P.},\n translator = {Bland, Kalman P.},\n series = {Moreshet: Studies in {Jewish} History, Literature and Thought},\n number = 7,\n publisher = {Jewish Theological Seminary of America},\n location = {New York},\n keywords = {primary},\n langid = {english},\n langidopts = {variant=american},\n indextitle = {Epistle on the Possibility of Conjunction, The},\n shorttitle = {Possibility of Conjunction},\n annotation = {A \\texttt{book} entry with a \\texttt{series} and a\n \\texttt{number}. Note the concatenation of the \\texttt{editor}\n and \\texttt{translator} fields as well as the\n \\texttt{indextitle} field},\n}\"#)"
----
-"The Epistle on the Possibility of Conjunction with the Active Intellect by Ibn Rushd with the Commentary of Moses Narboni". Ed. by K. Bland. Trans. by K. Bland. Moreshet: Studies in Jewish History, Literature and Thought 7. New York: Jewish Theological Seminary of America, 1982.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_knuth_1984.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_knuth_1984.snap
deleted file mode 100644
index a15a63167d..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__book_knuth_1984.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@book{knuth:ct:a,\n author = {Knuth, Donald E.},\n title = {The {\\TeX book}},\n date = 1984,\n maintitle = {Computers \\& Typesetting},\n volume = {A},\n publisher = {Addison-Wesley},\n location = {Reading, Mass.},\n langid = {english},\n langidopts = {variant=american},\n sorttitle = {Computers & Typesetting A},\n indexsorttitle= {The TeXbook},\n indextitle = {\\protect\\TeX book, The},\n shorttitle = {\\TeX book},\n annotation = {The first volume of a five-volume book. Note the\n \\texttt{sorttitle} field. We want this\n volume to be listed after the entry referring to the entire\n five-volume set. Also note the \\texttt{indextitle} and\n \\texttt{indexsorttitle} fields. Indexing packages that don't\n generate robust index entries require some control sequences\n to be protected from expansion},\n}\n \"#)"
----
-D. Knuth: "The TeX book". *Computers & Typesetting*. Vol. A. Reading, Mass.: Addison-Wesley, 1984.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__collection_matuz_1990.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__collection_matuz_1990.snap
deleted file mode 100644
index 0112a28139..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__collection_matuz_1990.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@collection{matuz:doody,\n editor = {Matuz, Roger},\n title = {Contemporary Literary Criticism},\n year = 1990,\n volume = 61,\n publisher = {Gale},\n location = {Detroit},\n pages = {204-208},\n langid = {english},\n langidopts = {variant=american},\n annotation = {A \\texttt{collection} entry providing the excerpt information\n for the \\texttt{doody} entry. Note the format of the\n \\texttt{pages} field},\n}\"#)"
----
-"Contemporary Literary Criticism". Ed. by R. Matuz. Vol. 61. Detroit: Gale, 1990, 204-208.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_combi_2004.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_combi_2004.snap
deleted file mode 100644
index e4d3959d92..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_combi_2004.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@inproceedings{10.1145/967900.968040,\n author = {Combi, Carlo and Pozzi, Giuseppe},\n title = {Architectures for a Temporal Workflow Management System},\n year = {2004},\n isbn = {1581138121},\n publisher = {Association for Computing Machinery},\n address = {New York, NY, USA},\n url = {https://doi.org/10.1145/967900.968040},\n doi = {10.1145/967900.968040},\n booktitle = {Proceedings of the 2004 ACM Symposium on Applied Computing},\n pages = {659-666},\n numpages = {8},\n keywords = {active DBMS, temporal DBMS, workflow management system - WfMS, temporal workflow management system},\n location = {Nicosia, Cyprus},\n series = {SAC '04}\n}\"#)"
----
-C. Combi, G. Pozzi: "Architectures for a Temporal Workflow Management System". *Proceedings of the 2004 ACM Symposium on Applied Computing*. SAC '04. Nicosia, Cyprus: Association for Computing Machinery, 2004, 659-666. ISBN: 1581138121. DOI: [10.1145/967900.968040](https://doi.org/10.1145/967900.968040). URL: [https://doi.org/10.1145/967900.968040](https://doi.org/10.1145/967900.968040).
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_erwin_2007.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_erwin_2007.snap
deleted file mode 100644
index f544f35573..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__inproceedings_erwin_2007.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@inproceedings{10.5555/1386993.1386994,\n author = {Erwin, Alva and Gopalan, Raj P. and Achuthan, N. R.},\n title = {A Bottom-up Projection Based Algorithm for Mining High Utility Itemsets},\n year = {2007},\n isbn = {9781920682651},\n publisher = {Australian Computer Society, Inc.},\n address = {AUS},\n booktitle = {Proceedings of the 2nd International Workshop on Integrating Artificial Intelligence and Data Mining - Volume 84},\n pages = {3-11},\n numpages = {9},\n keywords = {pattern growth, high utility itemset mining},\n location = {Gold Coast, Australia},\n series = {AIDM '07}\n}\"#)"
----
-A. Erwin, R. Gopalan, N. Achuthan: "A Bottom-up Projection Based Algorithm for Mining High Utility Itemsets". *Proceedings of the 2nd International Workshop on Integrating Artificial Intelligence and Data Mining - Volume 84*. AIDM '07. Gold Coast, Australia: Australian Computer Society, Inc., 2007, 3-11. ISBN: 9781920682651.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__mvbook_nietzsche_1988.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__mvbook_nietzsche_1988.snap
deleted file mode 100644
index d86268cd90..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__mvbook_nietzsche_1988.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@string{dtv = {Deutscher Taschenbuch-Verlag}}\n\n@mvbook{nietzsche:ksa,\n author = {Nietzsche, Friedrich},\n title = {S{\\\"a}mtliche Werke},\n date = 1988,\n editor = {Colli, Giorgio and Montinari, Mazzino},\n edition = 2,\n volumes = 15,\n publisher = dtv # { and Walter de Gruyter},\n location = {M{\\\"u}nchen and Berlin and New York},\n langid = {german},\n sorttitle = {Werke-00-000},\n indexsorttitle= {Samtliche Werke},\n subtitle = {Kritische Studienausgabe},\n annotation = {The critical edition of Nietzsche's works. This is a\n \\texttt{mvbook} entry referring to a 15-volume work as a\n whole. Note the \\texttt{volumes} field and the format of the\n \\texttt{publisher} and \\texttt{location} fields in the\n database file. Also note the \\texttt{sorttitle} and\n field which is used to fine-tune the\n sorting order of the bibliography. We want this item listed\n first in the bibliography},\n}\"#)"
----
-F. Nietzsche: "Sämtliche Werke. Kritische Studienausgabe". Ed. by G. Colli, M. Montinari. 2nd. München and Berlin and New York: Deutscher Taschenbuch-Verlag and Walter de Gruyter, 1988.
diff --git a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__patent_almendro_1998.snap b/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__patent_almendro_1998.snap
deleted file mode 100644
index 6cf8b39462..0000000000
--- a/support/texlab/crates/citeproc/src/snapshots/citeproc__tests__patent_almendro_1998.snap
+++ /dev/null
@@ -1,5 +0,0 @@
----
-source: crates/citeproc/src/tests.rs
-expression: "render_entry(r#\"\n@patent{almendro,\n author = {Almendro, Jos{\\'e} L. and Mart{\\'i}n, Jacinto and S{\\'a}nchez,\n Alberto and Nozal, Fernando},\n title = {Elektromagnetisches Signalhorn},\n number = {EU-29702195U},\n date = 1998,\n location = {countryfr and countryuk and countryde},\n langid = {german},\n annotation = {This is a \\texttt{patent} entry with a \\texttt{location}\n field. The number is given in the \\texttt{number} field. Note\n the format of the \\texttt{location} field in the database\n file. Compare \\texttt{laufenberg}, \\texttt{sorace}, and\n \\texttt{kowalik}},\n}\"#)"
----
-J. Almendro, J. Martín, A. Sánchez, F. Nozal: "Elektromagnetisches Signalhorn". EU-29702195U (France and United Kingdom and Germany). 1998.
diff --git a/support/texlab/crates/citeproc/src/tests.rs b/support/texlab/crates/citeproc/src/tests.rs
index a125afd695..1c91580f39 100644
--- a/support/texlab/crates/citeproc/src/tests.rs
+++ b/support/texlab/crates/citeproc/src/tests.rs
@@ -1,18 +1,19 @@
-use insta::assert_snapshot;
+use expect_test::{expect, Expect};
use parser::parse_bibtex;
use rowan::ast::AstNode;
use syntax::bibtex;
-fn render_entry(input: &str) -> String {
+fn check(input: &str, expect: Expect) {
let green = parse_bibtex(input);
let root = bibtex::Root::cast(bibtex::SyntaxNode::new_root(green)).unwrap();
let entry = root.entries().next().unwrap();
- super::render(&entry).unwrap()
+ let output = super::render(&entry).unwrap();
+ expect.assert_eq(&output);
}
#[test]
-fn article_rivest_1978() {
- assert_snapshot!(render_entry(
+fn test_article_rivest_1978() {
+ check(
r#"
@article{10.1145/359340.359342,
author = {Rivest, R. L. and Shamir, A. and Adleman, L.},
@@ -30,13 +31,16 @@ fn article_rivest_1978() {
month = {feb},
pages = {120-126},
numpages = {7},
-}"#
- ));
+}"#,
+ expect![[
+ r#"R. Rivest, A. Shamir, L. Adleman: "A Method for Obtaining Digital Signatures and Public-Key Cryptosystems". *Commun. ACM* 21.2 (Feb. 1978): 120-126. ISSN: 0001-0782. DOI: [10.1145/359340.359342](https://doi.org/10.1145/359340.359342). URL: [https://doi.org/10.1145/359340.359342](https://doi.org/10.1145/359340.359342)."#
+ ]],
+ );
}
#[test]
-fn article_jain_1999() {
- assert_snapshot!(render_entry(
+fn test_article_jain_1999() {
+ check(
r#"
@article{10.1145/331499.331504,
author = {Jain, A. K. and Murty, M. N. and Flynn, P. J.},
@@ -55,13 +59,16 @@ fn article_jain_1999() {
pages = {264-323},
numpages = {60},
keywords = {incremental clustering, clustering applications, exploratory data analysis, cluster analysis, similarity indices, unsupervised learning}
-}"#
- ));
+}"#,
+ expect![[
+ r#"A. Jain, M. Murty, P. Flynn: "Data Clustering: A Review". *ACM Comput. Surv.* 31.3 (Sep. 1999): 264-323. ISSN: 0360-0300. DOI: [10.1145/331499.331504](https://doi.org/10.1145/331499.331504). URL: [https://doi.org/10.1145/331499.331504](https://doi.org/10.1145/331499.331504)."#
+ ]],
+ );
}
#[test]
-fn article_aksin_2006() {
- assert_snapshot!(render_entry(
+fn test_article_aksin_2006() {
+ check(
r#"
@string{jomch = {J.~Organomet. Chem.}}
@@ -78,13 +85,16 @@ fn article_aksin_2006() {
number = 13,
pages = {3027-3036},
indextitle = {Effect of immobilization on catalytic characteristics},
-}"#
- ));
+}"#,
+ expect![[
+ r#"O. Aksın, H. Türkmen, L. Artok, B. Çetinkaya, C. Ni, O. Büyükgüngör, E. Özkal: "Effect of immobilization on catalytic characteristics of saturated Pd-N-heterocyclic carbenes in Mizoroki-Heck reactions". *J. Organomet. Chem.* 691.13 (2006): 3027-3036."#
+ ]],
+ );
}
#[test]
-fn article_betram_1996() {
- assert_snapshot!(render_entry(
+fn test_article_betram_1996() {
+ check(
r#"
@string{jams = {J.~Amer. Math. Soc.}}
@@ -101,13 +111,16 @@ fn article_betram_1996() {
shorttitle = {Gromov invariants},
annotation = {An \texttt{article} entry with a \texttt{volume} and a
\texttt{number} field},
-}"#
- ));
+}"#,
+ expect![[
+ r#"A. Bertram, R. Wentworth: "Gromov invariants for holomorphic maps on Riemann surfaces". *J. Amer. Math. Soc.* 9.2 (1996): 529-571."#
+ ]],
+ );
}
#[test]
-fn article_kastenholz_2006() {
- assert_snapshot!(render_entry(
+fn test_article_kastenholz_2006() {
+ check(
r#"
@string{jchph = {J.~Chem. Phys.}}
@@ -129,13 +142,16 @@ fn article_kastenholz_2006() {
into a clickable link if \texttt{hyperref} support has been
enabled},
}
- "#
- ));
+ "#,
+ expect![[
+ r#"M. Kastenholz, P. Hünenberger: "Computation of methodology- independent ionic solvation free energies from molecular simulations. I. The electrostatic potential in molecular liquids". *J. Chem. Phys.* 124, 124106 (2006): DOI: [10.1063/1.2172593](https://doi.org/10.1063/1.2172593)."#
+ ]],
+ );
}
#[test]
-fn article_blom_2021() {
- assert_snapshot!(render_entry(
+fn test_article_blom_2021() {
+ check(
r#"
@article{DBLP:journals/corr/abs-2107-11903,
author = {Michelle L. Blom and
@@ -156,13 +172,16 @@ fn article_blom_2021() {
timestamp = {Thu, 29 Jul 2021 16:14:15 +0200},
biburl = {https://dblp.org/rec/journals/corr/abs-2107-11903.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
-}"#
- ));
+}"#,
+ expect![[
+ r#"M. Blom, J. Budurushi, R. Rivest, P. Stark, P. Stuckey, V. Teague, D. Vukcevic: "Assertion-based Approaches to Auditing Complex Elections, with application to party-list proportional elections". *CoRR* abs/2107.11903 (2021): arXiv: [2107.11903](https://arxiv.org/abs/2107.11903). URL: [https://arxiv.org/abs/2107.11903](https://arxiv.org/abs/2107.11903)."#
+ ]],
+ );
}
#[test]
-fn book_aho_2006() {
- assert_snapshot!(render_entry(
+fn test_book_aho_2006() {
+ check(
r#"
@book{10.5555/1177220,
author = {Aho, Alfred V. and Lam, Monica S. and Sethi, Ravi and Ullman, Jeffrey D.},
@@ -171,13 +190,16 @@ fn book_aho_2006() {
isbn = {0321486811},
publisher = {Addison-Wesley Longman Publishing Co., Inc.},
address = {USA}
-}"#
- ));
+}"#,
+ expect![[
+ r#"A. Aho, M. Lam, R. Sethi, J. Ullman: "Compilers: Principles, Techniques, and Tools (2nd Edition)". Addison-Wesley Longman Publishing Co., Inc., 2006. ISBN: 0321486811."#
+ ]],
+ );
}
#[test]
-fn book_averroes_1998() {
- assert_snapshot!(render_entry(
+fn test_book_averroes_1998() {
+ check(
r#"
@book{averroes/bland,
author = {Averroes},
@@ -199,13 +221,16 @@ fn book_averroes_1998() {
\texttt{number}. Note the concatenation of the \texttt{editor}
and \texttt{translator} fields as well as the
\texttt{indextitle} field},
-}"#
- ));
+}"#,
+ expect![[
+ r#""The Epistle on the Possibility of Conjunction with the Active Intellect by Ibn Rushd with the Commentary of Moses Narboni". Ed. by K. Bland. Trans. by K. Bland. Moreshet: Studies in Jewish History, Literature and Thought 7. New York: Jewish Theological Seminary of America, 1982."#
+ ]],
+ );
}
#[test]
-fn book_knuth_1984() {
- assert_snapshot!(render_entry(
+fn test_book_knuth_1984() {
+ check(
r#"
@book{knuth:ct:a,
author = {Knuth, Donald E.},
@@ -229,13 +254,16 @@ fn book_knuth_1984() {
generate robust index entries require some control sequences
to be protected from expansion},
}
- "#
- ));
+ "#,
+ expect![[
+ r#"D. Knuth: "The TeX book". *Computers & Typesetting*. Vol. A. Reading, Mass.: Addison-Wesley, 1984."#
+ ]],
+ );
}
#[test]
-fn mvbook_nietzsche_1988() {
- assert_snapshot!(render_entry(
+fn test_mvbook_nietzsche_1988() {
+ check(
r#"
@string{dtv = {Deutscher Taschenbuch-Verlag}}
@@ -260,13 +288,16 @@ fn mvbook_nietzsche_1988() {
field which is used to fine-tune the
sorting order of the bibliography. We want this item listed
first in the bibliography},
-}"#
- ));
+}"#,
+ expect![[
+ r#"F. Nietzsche: "Sämtliche Werke. Kritische Studienausgabe". Ed. by G. Colli, M. Montinari. 2nd. München and Berlin and New York: Deutscher Taschenbuch-Verlag and Walter de Gruyter, 1988."#
+ ]],
+ );
}
#[test]
-fn inproceedings_erwin_2007() {
- assert_snapshot!(render_entry(
+fn test_inproceedings_erwin_2007() {
+ check(
r#"
@inproceedings{10.5555/1386993.1386994,
author = {Erwin, Alva and Gopalan, Raj P. and Achuthan, N. R.},
@@ -281,13 +312,16 @@ fn inproceedings_erwin_2007() {
keywords = {pattern growth, high utility itemset mining},
location = {Gold Coast, Australia},
series = {AIDM '07}
-}"#
- ));
+}"#,
+ expect![[
+ r#"A. Erwin, R. Gopalan, N. Achuthan: "A Bottom-up Projection Based Algorithm for Mining High Utility Itemsets". *Proceedings of the 2nd International Workshop on Integrating Artificial Intelligence and Data Mining - Volume 84*. AIDM '07. Gold Coast, Australia: Australian Computer Society, Inc., 2007, 3-11. ISBN: 9781920682651."#
+ ]],
+ );
}
#[test]
-fn inproceedings_combi_2004() {
- assert_snapshot!(render_entry(
+fn test_inproceedings_combi_2004() {
+ check(
r#"
@inproceedings{10.1145/967900.968040,
author = {Combi, Carlo and Pozzi, Giuseppe},
@@ -304,13 +338,16 @@ fn inproceedings_combi_2004() {
keywords = {active DBMS, temporal DBMS, workflow management system - WfMS, temporal workflow management system},
location = {Nicosia, Cyprus},
series = {SAC '04}
-}"#
- ));
+}"#,
+ expect![[
+ r#"C. Combi, G. Pozzi: "Architectures for a Temporal Workflow Management System". *Proceedings of the 2004 ACM Symposium on Applied Computing*. SAC '04. Nicosia, Cyprus: Association for Computing Machinery, 2004, 659-666. ISBN: 1581138121. DOI: [10.1145/967900.968040](https://doi.org/10.1145/967900.968040). URL: [https://doi.org/10.1145/967900.968040](https://doi.org/10.1145/967900.968040)."#
+ ]],
+ );
}
#[test]
-fn collection_matuz_1990() {
- assert_snapshot!(render_entry(
+fn test_collection_matuz_1990() {
+ check(
r#"
@collection{matuz:doody,
editor = {Matuz, Roger},
@@ -325,13 +362,16 @@ fn collection_matuz_1990() {
annotation = {A \texttt{collection} entry providing the excerpt information
for the \texttt{doody} entry. Note the format of the
\texttt{pages} field},
-}"#
- ));
+}"#,
+ expect![[
+ r#""Contemporary Literary Criticism". Ed. by R. Matuz. Vol. 61. Detroit: Gale, 1990, 204-208."#
+ ]],
+ );
}
#[test]
-fn patent_almendro_1998() {
- assert_snapshot!(render_entry(
+fn test_patent_almendro_1998() {
+ check(
r#"
@patent{almendro,
author = {Almendro, Jos{\'e} L. and Mart{\'i}n, Jacinto and S{\'a}nchez,
@@ -346,6 +386,9 @@ fn patent_almendro_1998() {
the format of the \texttt{location} field in the database
file. Compare \texttt{laufenberg}, \texttt{sorace}, and
\texttt{kowalik}},
-}"#
- ));
+}"#,
+ expect![[
+ r#"J. Almendro, J. Martín, A. Sánchez, F. Nozal: "Elektromagnetisches Signalhorn". EU-29702195U (France and United Kingdom and Germany). 1998."#
+ ]],
+ );
}
diff --git a/support/texlab/crates/commands/Cargo.toml b/support/texlab/crates/commands/Cargo.toml
index 1f3a1bddd9..cf395e5bf8 100644
--- a/support/texlab/crates/commands/Cargo.toml
+++ b/support/texlab/crates/commands/Cargo.toml
@@ -7,22 +7,22 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-anyhow = "1.0.71"
+anyhow = "1.0.72"
base-db = { path = "../base-db" }
-bstr = "1.4.0"
+bstr = "1.6.0"
crossbeam-channel = "0.5.8"
-itertools = "0.10.5"
-libc = "0.2.144"
-log = "0.4.17"
+itertools = "0.11.0"
+libc = "0.2.147"
+log = "0.4.19"
rowan = "0.15.11"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
-thiserror = "1.0.40"
+thiserror = "1.0.43"
url = "2.3.1"
[dev-dependencies]
distro = { path = "../distro" }
-insta = { version = "1.29.0", features = ["json"] }
+expect-test = "1.4.1"
test-utils = { path = "../test-utils" }
[lib]
diff --git a/support/texlab/crates/commands/src/clean.rs b/support/texlab/crates/commands/src/clean.rs
index 391be79c81..d6c3977fb2 100644
--- a/support/texlab/crates/commands/src/clean.rs
+++ b/support/texlab/crates/commands/src/clean.rs
@@ -22,7 +22,10 @@ impl CleanCommand {
};
let dir = workspace.current_dir(&document.dir);
- let dir = workspace.output_dir(&dir).to_file_path().unwrap();
+ let dir = workspace
+ .output_dir(&dir, workspace.config().build.log_dir.clone())
+ .to_file_path()
+ .unwrap();
let flag = match target {
CleanTarget::Auxiliary => "-c",
diff --git a/support/texlab/crates/commands/src/find_envs.rs b/support/texlab/crates/commands/src/find_envs.rs
index 20b11c9e37..7a17345d80 100644
--- a/support/texlab/crates/commands/src/find_envs.rs
+++ b/support/texlab/crates/commands/src/find_envs.rs
@@ -9,11 +9,15 @@ pub struct EnvironmentMatch {
}
pub fn find_environments(document: &Document, offset: TextSize) -> Vec<EnvironmentMatch> {
- let DocumentData::Tex(data) = &document.data else { return Vec::new() };
+ let DocumentData::Tex(data) = &document.data else {
+ return Vec::new();
+ };
let root = latex::SyntaxNode::new_root(data.green.clone());
- let Some(token) = root.token_at_offset(offset).right_biased() else { return Vec::new() };
+ let Some(token) = root.token_at_offset(offset).right_biased() else {
+ return Vec::new();
+ };
let mut results = Vec::new();
for environment in token
@@ -24,7 +28,10 @@ pub fn find_environments(document: &Document, offset: TextSize) -> Vec<Environme
.begin()
.and_then(|begin| begin.name())
.and_then(|group| group.key())
- .map(|name| Span::from(&name)) else { continue };
+ .map(|name| Span::from(&name))
+ else {
+ continue;
+ };
let full_range = latex::small_range(&environment);
results.push(EnvironmentMatch { name, full_range });
@@ -36,30 +43,82 @@ pub fn find_environments(document: &Document, offset: TextSize) -> Vec<Environme
#[cfg(test)]
mod tests {
- use insta::assert_debug_snapshot;
+ use expect_test::{expect, Expect};
use test_utils::fixture::Fixture;
use crate::find_environments;
+ fn check(fixture: &str, expect: Expect) {
+ let fixture = Fixture::parse(fixture);
+ let workspace = fixture.workspace;
+ let document = workspace.iter().next().unwrap();
+ let offset = fixture.documents[0].cursor.unwrap();
+ let results = find_environments(&document, offset);
+ expect.assert_debug_eq(&results);
+ }
+
#[test]
- fn test() {
- let fixture = Fixture::parse(
+ fn test_simple() {
+ check(
r#"
%! main.tex
\begin{a}
- \begin{b}
- \begin{c}
- |
- \end{c}
- \end{b}
- \begin{d}
- \end{d}
-\end{a}"#,
+ |
+\end{a}
+"#,
+ expect![[r#"
+ [
+ EnvironmentMatch {
+ name: Span(
+ "a",
+ 7..8,
+ ),
+ full_range: 0..17,
+ },
+ ]
+ "#]],
);
+ }
- let workspace = fixture.workspace;
- let document = workspace.iter().next().unwrap();
- let results = find_environments(&document, fixture.documents[0].cursor.unwrap());
- assert_debug_snapshot!(results);
+ #[test]
+ fn test_nested() {
+ check(
+ r#"
+%! main.tex
+\begin{a}
+ \begin{b}
+ \begin{c}
+ |
+ \end{c}
+ \end{b}
+ \begin{d}
+ \end{d}
+\end{a}"#,
+ expect![[r#"
+ [
+ EnvironmentMatch {
+ name: Span(
+ "a",
+ 7..8,
+ ),
+ full_range: 0..103,
+ },
+ EnvironmentMatch {
+ name: Span(
+ "b",
+ 21..22,
+ ),
+ full_range: 14..69,
+ },
+ EnvironmentMatch {
+ name: Span(
+ "c",
+ 39..40,
+ ),
+ full_range: 32..57,
+ },
+ ]
+ "#]],
+ );
}
}
diff --git a/support/texlab/crates/commands/src/fwd_search.rs b/support/texlab/crates/commands/src/fwd_search.rs
index 55c2df59a0..153e1245d5 100644
--- a/support/texlab/crates/commands/src/fwd_search.rs
+++ b/support/texlab/crates/commands/src/fwd_search.rs
@@ -59,18 +59,23 @@ impl ForwardSearch {
}
let dir = workspace.current_dir(&parent.dir);
- let dir = workspace.output_dir(&dir).to_file_path().unwrap();
+ let dir = workspace
+ .output_dir(&dir, workspace.config().build.log_dir.clone())
+ .to_file_path()
+ .unwrap();
let Some(tex_path) = &child.path else {
return Err(ForwardSearchError::InvalidPath(child.uri.clone()));
};
- let Some(pdf_path) = parent.path
- .as_deref()
+ let override_path = workspace.config().build.output_filename.as_deref();
+
+ let Some(pdf_path) = override_path
+ .or(parent.path.as_deref())
.and_then(Path::file_stem)
.and_then(OsStr::to_str)
- .map(|stem| dir.join(format!("{stem}.pdf"))) else
- {
+ .map(|stem| dir.join(format!("{stem}.pdf")))
+ else {
return Err(ForwardSearchError::InvalidPath(parent.uri.clone()));
};
diff --git a/support/texlab/crates/commands/src/snapshots/commands__find_envs__tests__test.snap b/support/texlab/crates/commands/src/snapshots/commands__find_envs__tests__test.snap
deleted file mode 100644
index fa532f125a..0000000000
--- a/support/texlab/crates/commands/src/snapshots/commands__find_envs__tests__test.snap
+++ /dev/null
@@ -1,27 +0,0 @@
----
-source: crates/commands/src/find_envs.rs
-expression: results
----
-[
- EnvironmentMatch {
- name: Span(
- "a",
- 7..8,
- ),
- full_range: 0..87,
- },
- EnvironmentMatch {
- name: Span(
- "b",
- 19..20,
- ),
- full_range: 12..57,
- },
- EnvironmentMatch {
- name: Span(
- "c",
- 33..34,
- ),
- full_range: 26..47,
- },
-]
diff --git a/support/texlab/crates/completion-data/Cargo.toml b/support/texlab/crates/completion-data/Cargo.toml
new file mode 100644
index 0000000000..16783ce87e
--- /dev/null
+++ b/support/texlab/crates/completion-data/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "completion-data"
+version = "0.0.0"
+license.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+flate2 = "1.0.26"
+itertools = "0.11.0"
+once_cell = "1.18.0"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.171", features = ["derive"] }
+serde_json = "1.0.103"
+
+[lib]
+doctest = false
diff --git a/support/texlab/crates/texlab/data/components.json.gz b/support/texlab/crates/completion-data/data/completion.json.gz
index b355f616ec..b355f616ec 100644
--- a/support/texlab/crates/texlab/data/components.json.gz
+++ b/support/texlab/crates/completion-data/data/completion.json.gz
Binary files differ
diff --git a/support/texlab/crates/completion-data/src/lib.rs b/support/texlab/crates/completion-data/src/lib.rs
new file mode 100644
index 0000000000..c63564a58f
--- /dev/null
+++ b/support/texlab/crates/completion-data/src/lib.rs
@@ -0,0 +1,135 @@
+use std::{borrow::Cow, io::Read};
+
+use flate2::read::GzDecoder;
+use itertools::Itertools;
+use once_cell::sync::Lazy;
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+
+#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Database<'a> {
+ #[serde(rename = "components", borrow)]
+ packages: Vec<Package<'a>>,
+
+ #[serde(borrow)]
+ metadata: Vec<Metadata<'a>>,
+
+ #[serde(skip)]
+ lookup_packages: FxHashMap<&'a str, usize>,
+
+ #[serde(skip)]
+ lookup_metadata: FxHashMap<&'a str, usize>,
+
+ #[serde(skip)]
+ lookup_kernel: usize,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Package<'a> {
+ #[serde(borrow)]
+ pub file_names: Vec<&'a str>,
+
+ #[serde(borrow)]
+ pub references: Vec<&'a str>,
+
+ #[serde(borrow)]
+ pub commands: Vec<Command<'a>>,
+
+ #[serde(borrow)]
+ pub environments: Vec<&'a str>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Command<'a> {
+ pub name: Cow<'a, str>,
+
+ #[serde(borrow)]
+ pub image: Option<&'a str>,
+
+ #[serde(borrow)]
+ pub glyph: Option<Cow<'a, str>>,
+
+ #[serde(borrow)]
+ pub parameters: Vec<Parameter<'a>>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Parameter<'a>(#[serde(borrow)] pub Vec<Argument<'a>>);
+
+#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Argument<'a> {
+ pub name: &'a str,
+
+ #[serde(borrow)]
+ pub image: Option<&'a str>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Metadata<'a> {
+ pub name: &'a str,
+
+ #[serde(borrow)]
+ pub caption: Option<Cow<'a, str>>,
+
+ #[serde(borrow)]
+ pub description: Option<Cow<'a, str>>,
+}
+
+impl<'a> Database<'a> {
+ pub fn iter(&self) -> impl Iterator<Item = &Package> + '_ {
+ self.packages.iter()
+ }
+
+ pub fn find(&self, name: &str) -> Option<&Package> {
+ self.lookup_packages
+ .get(name)
+ .map(|index| &self.packages[*index])
+ }
+
+ pub fn meta(&self, name: &str) -> Option<&Metadata> {
+ self.lookup_metadata
+ .get(name)
+ .map(|index| &self.metadata[*index])
+ }
+
+ pub fn kernel(&self) -> &Package {
+ &self.packages[self.lookup_kernel]
+ }
+}
+
+const JSON_GZ: &[u8] = include_bytes!("../data/completion.json.gz");
+
+pub static DATABASE: Lazy<Database<'static>> = Lazy::new(|| {
+ let mut decoder = GzDecoder::new(JSON_GZ);
+ let json = Box::leak(Box::new(String::new()));
+ decoder.read_to_string(json).unwrap();
+ let mut db: Database = serde_json::from_str(json).unwrap();
+ db.lookup_packages = db
+ .packages
+ .iter()
+ .enumerate()
+ .flat_map(|(i, pkg)| pkg.file_names.iter().map(move |name| (*name, i)))
+ .collect();
+
+ db.lookup_metadata = db
+ .metadata
+ .iter()
+ .enumerate()
+ .unique_by(|(_, meta)| meta.name)
+ .map(|(i, meta)| (meta.name, i))
+ .collect();
+
+ db.lookup_kernel = db
+ .packages
+ .iter()
+ .position(|package| package.file_names.is_empty())
+ .unwrap();
+
+ db
+});
diff --git a/support/texlab/crates/definition/Cargo.toml b/support/texlab/crates/definition/Cargo.toml
new file mode 100644
index 0000000000..788abfc118
--- /dev/null
+++ b/support/texlab/crates/definition/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "definition"
+version = "0.0.0"
+license.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+base-db = { path = "../base-db" }
+rowan = "0.15.11"
+syntax = { path = "../syntax" }
+
+[dev-dependencies]
+itertools = "0.11.0"
+test-utils = { path = "../test-utils" }
+
+[lib]
+doctest = false
diff --git a/support/texlab/crates/definition/src/citation.rs b/support/texlab/crates/definition/src/citation.rs
new file mode 100644
index 0000000000..1497711cf6
--- /dev/null
+++ b/support/texlab/crates/definition/src/citation.rs
@@ -0,0 +1,30 @@
+use base_db::{
+ semantics::bib,
+ util::queries::{self, Object},
+};
+
+use crate::DefinitionContext;
+
+use super::DefinitionResult;
+
+pub(super) fn goto_definition<'db>(context: &mut DefinitionContext<'db>) -> Option<()> {
+ let data = context.params.document.data.as_tex()?;
+
+ let citation = queries::object_at_cursor(
+ &data.semantics.citations,
+ context.params.offset,
+ queries::SearchMode::Full,
+ )?;
+
+ let name = citation.object.name_text();
+ for (document, entry) in queries::objects_with_name::<bib::Entry>(&context.project, name) {
+ context.results.push(DefinitionResult {
+ origin_selection_range: citation.object.name_range(),
+ target: document,
+ target_range: entry.full_range,
+ target_selection_range: entry.name.range,
+ });
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/texlab/src/features/definition/command.rs b/support/texlab/crates/definition/src/command.rs
index 9707247019..0a367beb14 100644
--- a/support/texlab/crates/texlab/src/features/definition/command.rs
+++ b/support/texlab/crates/definition/src/command.rs
@@ -2,25 +2,26 @@ use base_db::DocumentData;
use rowan::ast::AstNode;
use syntax::latex;
-use crate::util::cursor::CursorContext;
+use crate::DefinitionContext;
use super::DefinitionResult;
-pub(super) fn goto_definition<'a>(
- context: &CursorContext<'a>,
-) -> Option<Vec<DefinitionResult<'a>>> {
- let name = context
- .cursor
- .as_tex()
- .filter(|token| token.kind() == latex::COMMAND_NAME)?;
+pub(super) fn goto_definition<'db>(context: &mut DefinitionContext<'db>) -> Option<()> {
+ let data = context.params.document.data.as_tex()?;
+ let root = data.root_node();
+ let name = root
+ .token_at_offset(context.params.offset)
+ .find(|token| token.kind() == latex::COMMAND_NAME)?;
let origin_selection_range = name.text_range();
for document in &context.project.documents {
- let DocumentData::Tex(data) = &document.data else { continue };
+ let DocumentData::Tex(data) = &document.data else {
+ continue;
+ };
- let root = data.root_node();
- if let Some(result) = root
+ let results = data
+ .root_node()
.descendants()
.filter_map(latex::CommandDefinition::cast)
.filter(|def| {
@@ -28,18 +29,17 @@ pub(super) fn goto_definition<'a>(
.and_then(|name| name.command())
.map_or(false, |node| node.text() == name.text())
})
- .find_map(|def| {
+ .filter_map(|def| {
Some(DefinitionResult {
origin_selection_range,
target: document,
target_range: latex::small_range(&def),
target_selection_range: def.name()?.command()?.text_range(),
})
- })
- {
- return Some(vec![result]);
- }
+ });
+
+ context.results.extend(results);
}
- None
+ Some(())
}
diff --git a/support/texlab/crates/definition/src/include.rs b/support/texlab/crates/definition/src/include.rs
new file mode 100644
index 0000000000..2c24f05321
--- /dev/null
+++ b/support/texlab/crates/definition/src/include.rs
@@ -0,0 +1,31 @@
+use rowan::TextRange;
+
+use crate::DefinitionContext;
+
+use super::DefinitionResult;
+
+pub(super) fn goto_definition<'db>(context: &mut DefinitionContext<'db>) -> Option<()> {
+ let start = context.params.document;
+ let parents = context.params.workspace.parents(start);
+ let results = parents
+ .into_iter()
+ .chain(std::iter::once(start))
+ .flat_map(|parent| base_db::graph::Graph::new(context.params.workspace, parent).edges)
+ .filter(|edge| edge.source == start)
+ .flat_map(|edge| {
+ let origin_selection_range = edge.weight?.link.path.range;
+ if origin_selection_range.contains_inclusive(context.params.offset) {
+ Some(DefinitionResult {
+ origin_selection_range,
+ target: edge.target,
+ target_range: TextRange::default(),
+ target_selection_range: TextRange::default(),
+ })
+ } else {
+ None
+ }
+ });
+
+ context.results.extend(results);
+ Some(())
+}
diff --git a/support/texlab/crates/definition/src/label.rs b/support/texlab/crates/definition/src/label.rs
new file mode 100644
index 0000000000..127f6ff497
--- /dev/null
+++ b/support/texlab/crates/definition/src/label.rs
@@ -0,0 +1,37 @@
+use base_db::{
+ semantics::tex,
+ util::{
+ queries::{self, Object},
+ render_label,
+ },
+};
+
+use crate::DefinitionContext;
+
+use super::DefinitionResult;
+
+pub(super) fn goto_definition<'db>(context: &mut DefinitionContext<'db>) -> Option<()> {
+ let data = context.params.document.data.as_tex()?;
+ let reference = queries::object_at_cursor(
+ &data.semantics.labels,
+ context.params.offset,
+ queries::SearchMode::Full,
+ )?;
+
+ let name = reference.object.name_text();
+ let labels = queries::objects_with_name::<tex::Label>(&context.project, name);
+ for (document, label) in labels.filter(|(_, label)| label.kind == tex::LabelKind::Definition) {
+ let target_selection_range = label.name.range;
+ let target_range = render_label(context.params.workspace, &context.project, label)
+ .map_or(target_selection_range, |label| label.range);
+
+ context.results.push(DefinitionResult {
+ origin_selection_range: reference.object.name_range(),
+ target: document,
+ target_range,
+ target_selection_range,
+ });
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/definition/src/lib.rs b/support/texlab/crates/definition/src/lib.rs
new file mode 100644
index 0000000000..b96c48ed3f
--- /dev/null
+++ b/support/texlab/crates/definition/src/lib.rs
@@ -0,0 +1,49 @@
+mod citation;
+mod command;
+mod include;
+mod label;
+mod string_ref;
+
+use base_db::{Document, Project, Workspace};
+use rowan::{TextRange, TextSize};
+
+#[derive(Debug)]
+pub struct DefinitionParams<'db> {
+ pub workspace: &'db Workspace,
+ pub document: &'db Document,
+ pub offset: TextSize,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct DefinitionResult<'a> {
+ pub origin_selection_range: TextRange,
+ pub target: &'a Document,
+ pub target_range: TextRange,
+ pub target_selection_range: TextRange,
+}
+
+#[derive(Debug)]
+struct DefinitionContext<'db> {
+ params: DefinitionParams<'db>,
+ project: Project<'db>,
+ results: Vec<DefinitionResult<'db>>,
+}
+
+pub fn goto_definition<'db>(params: DefinitionParams<'db>) -> Vec<DefinitionResult<'db>> {
+ let project = params.workspace.project(params.document);
+ let mut context = DefinitionContext {
+ params,
+ project,
+ results: Vec::new(),
+ };
+
+ command::goto_definition(&mut context);
+ include::goto_definition(&mut context);
+ citation::goto_definition(&mut context);
+ label::goto_definition(&mut context);
+ string_ref::goto_definition(&mut context);
+ context.results
+}
+
+#[cfg(test)]
+mod tests;
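The new definition crate is driven through DefinitionParams and hands back plain DefinitionResult values instead of LSP types, leaving the conversion to the server layer. A minimal sketch of a caller, assuming the workspace, URI, and cursor offset come from the surrounding server state (the helper name is illustrative, not part of this patch):

    use definition::{goto_definition, DefinitionParams};
    use rowan::{TextRange, TextSize};

    // Hypothetical helper: collect (target URI, selection range) pairs for a cursor position.
    fn find_definition_targets(
        workspace: &base_db::Workspace,
        uri: &url::Url,
        offset: TextSize,
    ) -> Vec<(url::Url, TextRange)> {
        let Some(document) = workspace.lookup(uri) else {
            return Vec::new();
        };

        goto_definition(DefinitionParams { workspace, document, offset })
            .into_iter()
            .map(|result| (result.target.uri.clone(), result.target_selection_range))
            .collect()
    }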
diff --git a/support/texlab/crates/definition/src/string_ref.rs b/support/texlab/crates/definition/src/string_ref.rs
new file mode 100644
index 0000000000..124cc33c7c
--- /dev/null
+++ b/support/texlab/crates/definition/src/string_ref.rs
@@ -0,0 +1,35 @@
+use rowan::ast::AstNode;
+use syntax::bibtex;
+
+use crate::DefinitionContext;
+
+use super::DefinitionResult;
+
+pub(super) fn goto_definition<'db>(context: &mut DefinitionContext<'db>) -> Option<()> {
+ let data = context.params.document.data.as_bib()?;
+ let root = data.root_node();
+ let name = root
+ .token_at_offset(context.params.offset)
+ .filter(|token| token.kind() == bibtex::NAME)
+ .find(|token| {
+ let parent = token.parent().unwrap();
+ bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
+ })?;
+
+ let origin_selection_range = name.text_range();
+
+ let strings = &data.semantics.strings;
+ for string in strings
+ .iter()
+ .filter(|string| string.name.text == name.text())
+ {
+ context.results.push(DefinitionResult {
+ origin_selection_range,
+ target: context.params.document,
+ target_range: string.full_range,
+ target_selection_range: string.name.range,
+ });
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/definition/src/tests.rs b/support/texlab/crates/definition/src/tests.rs
new file mode 100644
index 0000000000..e1bf68ec2f
--- /dev/null
+++ b/support/texlab/crates/definition/src/tests.rs
@@ -0,0 +1,154 @@
+use crate::{DefinitionParams, DefinitionResult};
+
+fn check(input: &str) {
+ let fixture = test_utils::fixture::Fixture::parse(input);
+ let workspace = &fixture.workspace;
+
+ let mut origin_selection_range = None;
+ let mut origin_document = None;
+ let mut origin_cursor = None;
+ for document in &fixture.documents {
+ if let Some(cursor) = document.cursor {
+ origin_document = Some(document);
+ origin_cursor = Some(cursor);
+ origin_selection_range = document
+ .ranges
+ .iter()
+ .find(|range| range.contains_inclusive(cursor))
+ .copied();
+
+ break;
+ }
+ }
+
+ let origin_document = origin_document.unwrap();
+
+ let mut expected = Vec::new();
+ for document in &fixture.documents {
+ let mut ranges = document.ranges.iter();
+ while let Some(target_selection_range) = ranges.next().copied() {
+ let origin_selection_range = origin_selection_range.unwrap();
+ if (&origin_document.uri, origin_selection_range)
+ != (&document.uri, target_selection_range)
+ {
+ expected.push(DefinitionResult {
+ origin_selection_range,
+ target: fixture.workspace.lookup(&document.uri).unwrap(),
+ target_range: *ranges.next().unwrap(),
+ target_selection_range,
+ });
+ }
+ }
+ }
+
+ let mut actual = crate::goto_definition(DefinitionParams {
+ workspace,
+ document: workspace.lookup(&origin_document.uri).unwrap(),
+ offset: origin_cursor.unwrap(),
+ });
+
+ sort_results(&mut expected);
+ sort_results(&mut actual);
+
+ assert_eq!(actual, expected);
+}
+
+fn sort_results(items: &mut Vec<DefinitionResult>) {
+ items.sort_by(|a, b| {
+ let a = (&a.target.uri, a.target_range.start());
+ let b = (&b.target.uri, b.target_range.start());
+ a.cmp(&b)
+ });
+}
+
+#[test]
+fn test_command_definition() {
+ check(
+ r#"
+%! main.tex
+\DeclareMathOperator{\foo}{foo}
+ ^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+\foo
+ |
+^^^^"#,
+ )
+}
+
+#[test]
+fn test_document() {
+ check(
+ r#"
+%! foo.tex
+\addbibresource{baz.bib}
+ |
+ ^^^^^^^
+
+%! bar.bib
+@article{foo, bar = {baz}}
+
+%! baz.bib
+@article{foo, bar = {baz}}
+!
+!"#,
+ )
+}
+
+#[test]
+fn test_entry() {
+ check(
+ r#"
+%! foo.tex
+\addbibresource{baz.bib}
+\cite{foo}
+ |
+ ^^^
+
+%! bar.bib
+@article{foo, bar = {baz}}
+
+%! baz.bib
+@article{foo, bar = {baz}}
+ ^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^"#,
+ )
+}
+
+#[test]
+fn test_string_simple() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {bar}}
+ ^^^
+^^^^^^^^^^^^^^^^^^^^
+@article{bar, author = foo}
+ |
+ ^^^"#,
+ )
+}
+
+#[test]
+fn test_string_join() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {bar}}
+ ^^^
+^^^^^^^^^^^^^^^^^^^^
+@article{bar, author = foo # "bar"}
+ |
+ ^^^"#,
+ )
+}
+
+#[test]
+fn test_string_field() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {bar}}
+@article{bar, author = foo # "bar"}
+ |"#,
+ )
+}
diff --git a/support/texlab/crates/diagnostics/Cargo.toml b/support/texlab/crates/diagnostics/Cargo.toml
new file mode 100644
index 0000000000..3f87c8c879
--- /dev/null
+++ b/support/texlab/crates/diagnostics/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "diagnostics"
+version = "0.0.0"
+license.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+base-db = { path = "../base-db" }
+itertools = "0.11.0"
+rowan = "0.15.11"
+rustc-hash = "1.1.0"
+syntax = { path = "../syntax" }
+url = "=2.3.1"
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+
+[lib]
+doctest = false
diff --git a/support/texlab/crates/diagnostics/src/build_log.rs b/support/texlab/crates/diagnostics/src/build_log.rs
new file mode 100644
index 0000000000..f5770c90fb
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/build_log.rs
@@ -0,0 +1,101 @@
+use std::borrow::Cow;
+
+use base_db::{Document, Workspace};
+use rowan::{TextLen, TextRange, TextSize};
+use rustc_hash::FxHashMap;
+use syntax::BuildError;
+use url::Url;
+
+use crate::{
+ types::{Diagnostic, DiagnosticData},
+ DiagnosticBuilder, DiagnosticSource,
+};
+
+#[derive(Debug, Default)]
+struct BuildLog {
+ errors: FxHashMap<Url, Vec<Diagnostic>>,
+}
+
+#[derive(Debug, Default)]
+pub struct BuildErrors {
+ logs: FxHashMap<Url, BuildLog>,
+}
+
+impl DiagnosticSource for BuildErrors {
+ fn update(&mut self, workspace: &Workspace, log_document: &Document) {
+ let mut errors: FxHashMap<Url, Vec<Diagnostic>> = FxHashMap::default();
+
+ let Some(data) = log_document.data.as_log() else { return };
+
+ let parents = workspace.parents(log_document);
+ let Some(root_document) = parents.iter().next() else { return };
+
+ let Some(base_path) = root_document
+ .path
+ .as_deref()
+ .and_then(|path| path.parent()) else { return };
+
+ for error in &data.errors {
+ let full_path = base_path.join(&error.relative_path);
+ let Ok(full_path_uri) = Url::from_file_path(&full_path) else { continue };
+ let tex_document = workspace.lookup(&full_path_uri).unwrap_or(root_document);
+
+ let range = find_range_of_hint(tex_document, error).unwrap_or_else(|| {
+ let line = error.line.unwrap_or(0);
+ let offset = *tex_document
+ .line_index
+ .newlines
+ .get(line as usize)
+ .unwrap_or(&TextSize::from(0));
+
+ TextRange::empty(offset)
+ });
+
+ let diagnostic = Diagnostic {
+ range,
+ data: DiagnosticData::Build(error.clone()),
+ };
+
+ errors
+ .entry(tex_document.uri.clone())
+ .or_default()
+ .push(diagnostic);
+ }
+
+ self.logs
+ .insert(log_document.uri.clone(), BuildLog { errors });
+ }
+
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ self.logs.retain(|uri, _| workspace.lookup(uri).is_some());
+
+ for document in workspace.iter() {
+ let Some(log) = self.logs.get(&document.uri) else { continue };
+ for (uri, errors) in &log.errors {
+ builder.push_many(&uri, errors.iter().map(Cow::Borrowed));
+ }
+ }
+ }
+}
+
+fn find_range_of_hint(document: &Document, error: &BuildError) -> Option<TextRange> {
+ let line = error.line? as usize;
+ let hint = error.hint.as_deref()?;
+ let line_index = &document.line_index;
+
+ let line_start = line_index.newlines.get(line).copied()?;
+ let line_end = line_index
+ .newlines
+ .get(line + 1)
+ .copied()
+ .unwrap_or((&document.text).text_len());
+
+ let line_text = &document.text[line_start.into()..line_end.into()];
+ let hint_start = line_start + TextSize::try_from(line_text.find(hint)?).unwrap();
+ let hint_end = hint_start + hint.text_len();
+ Some(TextRange::new(hint_start, hint_end))
+}
diff --git a/support/texlab/crates/diagnostics/src/citations.rs b/support/texlab/crates/diagnostics/src/citations.rs
new file mode 100644
index 0000000000..e6b4d1022a
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/citations.rs
@@ -0,0 +1,97 @@
+use std::borrow::Cow;
+
+use base_db::{
+ semantics::{bib::Entry, tex::Citation},
+ util::queries::{self, Object},
+ BibDocumentData, Document, DocumentData, Project, TexDocumentData, Workspace,
+};
+use rustc_hash::FxHashSet;
+
+use crate::{
+ types::{BibError, Diagnostic, DiagnosticData, TexError},
+ DiagnosticBuilder, DiagnosticSource,
+};
+
+#[derive(Default)]
+pub struct CitationErrors;
+
+impl DiagnosticSource for CitationErrors {
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ for document in workspace.iter() {
+ let project = workspace.project(document);
+
+ if let DocumentData::Tex(data) = &document.data {
+ detect_undefined_citations(&project, document, data, builder);
+ } else if let DocumentData::Bib(data) = &document.data {
+ detect_unused_entries(&project, document, data, builder);
+ }
+ }
+
+ detect_duplicate_entries(workspace, builder);
+ }
+}
+
+fn detect_undefined_citations<'db>(
+ project: &Project<'db>,
+ document: &'db Document,
+ data: &TexDocumentData,
+ builder: &mut DiagnosticBuilder<'db>,
+) {
+ let entries: FxHashSet<&str> = Entry::find_all(project)
+ .map(|(_, entry)| entry.name_text())
+ .collect();
+
+ for citation in &data.semantics.citations {
+ if !entries.contains(citation.name.text.as_str()) {
+ let diagnostic = Diagnostic {
+ range: citation.name.range,
+ data: DiagnosticData::Tex(TexError::UndefinedCitation),
+ };
+
+ builder.push(&document.uri, Cow::Owned(diagnostic));
+ }
+ }
+}
+
+fn detect_unused_entries<'db>(
+ project: &Project<'db>,
+ document: &'db Document,
+ data: &BibDocumentData,
+ builder: &mut DiagnosticBuilder<'db>,
+) {
+ let citations: FxHashSet<&str> = Citation::find_all(project)
+ .map(|(_, citation)| citation.name_text())
+ .collect();
+
+ for entry in &data.semantics.entries {
+ if !citations.contains(entry.name.text.as_str()) {
+ let diagnostic = Diagnostic {
+ range: entry.name.range,
+ data: DiagnosticData::Bib(BibError::UnusedEntry),
+ };
+
+ builder.push(&document.uri, Cow::Owned(diagnostic));
+ }
+ }
+}
+
+fn detect_duplicate_entries<'db>(workspace: &'db Workspace, builder: &mut DiagnosticBuilder<'db>) {
+ for conflict in queries::Conflict::find_all::<Entry>(workspace) {
+ let others = conflict
+ .rest
+ .iter()
+ .map(|location| (location.document.uri.clone(), location.range))
+ .collect();
+
+ let diagnostic = Diagnostic {
+ range: conflict.main.range,
+ data: DiagnosticData::Bib(BibError::DuplicateEntry(others)),
+ };
+
+ builder.push(&conflict.main.document.uri, Cow::Owned(diagnostic));
+ }
+}
diff --git a/support/texlab/crates/diagnostics/src/grammar.rs b/support/texlab/crates/diagnostics/src/grammar.rs
new file mode 100644
index 0000000000..1a9e3cecda
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/grammar.rs
@@ -0,0 +1,4 @@
+mod bib;
+mod tex;
+
+pub use self::{bib::BibSyntaxErrors, tex::TexSyntaxErrors};
diff --git a/support/texlab/crates/diagnostics/src/grammar/bib.rs b/support/texlab/crates/diagnostics/src/grammar/bib.rs
new file mode 100644
index 0000000000..e101e68f2a
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/grammar/bib.rs
@@ -0,0 +1,103 @@
+use base_db::{Document, DocumentData, Workspace};
+use rowan::{ast::AstNode, TextRange};
+use syntax::bibtex::{self, HasDelims, HasEq, HasName, HasType, HasValue};
+
+use crate::{
+ types::{BibError, DiagnosticData},
+ util::SimpleDiagnosticSource,
+ Diagnostic, DiagnosticBuilder, DiagnosticSource,
+};
+
+#[derive(Default)]
+pub struct BibSyntaxErrors(SimpleDiagnosticSource);
+
+impl DiagnosticSource for BibSyntaxErrors {
+ fn update(&mut self, _workspace: &Workspace, document: &Document) {
+ let mut analyzer = Analyzer {
+ document,
+ diagnostics: Vec::new(),
+ };
+
+ analyzer.analyze_root();
+ self.0
+ .errors
+ .insert(document.uri.clone(), analyzer.diagnostics);
+ }
+
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ self.0.publish(workspace, builder);
+ }
+}
+
+struct Analyzer<'a> {
+ document: &'a Document,
+ diagnostics: Vec<Diagnostic>,
+}
+
+impl<'a> Analyzer<'a> {
+ fn analyze_root(&mut self) {
+ let DocumentData::Bib(data) = &self.document.data else { return };
+
+ for node in bibtex::SyntaxNode::new_root(data.green.clone()).descendants() {
+ if let Some(entry) = bibtex::Entry::cast(node.clone()) {
+ self.analyze_entry(entry);
+ } else if let Some(field) = bibtex::Field::cast(node.clone()) {
+ self.analyze_field(field);
+ }
+ }
+ }
+
+ fn analyze_entry(&mut self, entry: bibtex::Entry) {
+ if entry.left_delim_token().is_none() {
+ let offset = entry.type_token().unwrap().text_range().end();
+ self.diagnostics.push(Diagnostic {
+ range: TextRange::empty(offset),
+ data: DiagnosticData::Bib(BibError::ExpectingLCurly),
+ });
+
+ return;
+ }
+
+ if entry.name_token().is_none() {
+ let offset = entry.left_delim_token().unwrap().text_range().end();
+ self.diagnostics.push(Diagnostic {
+ range: TextRange::empty(offset),
+ data: DiagnosticData::Bib(BibError::ExpectingKey),
+ });
+
+ return;
+ }
+
+ if entry.right_delim_token().is_none() {
+ let offset = entry.syntax().text_range().end();
+ self.diagnostics.push(Diagnostic {
+ range: TextRange::empty(offset),
+ data: DiagnosticData::Bib(BibError::ExpectingRCurly),
+ });
+ }
+ }
+
+ fn analyze_field(&mut self, field: bibtex::Field) {
+ if field.eq_token().is_none() {
+ let offset = field.name_token().unwrap().text_range().end();
+ self.diagnostics.push(Diagnostic {
+ range: TextRange::empty(offset),
+ data: DiagnosticData::Bib(BibError::ExpectingEq),
+ });
+
+ return;
+ }
+
+ if field.value().is_none() {
+ let offset = field.eq_token().unwrap().text_range().end();
+ self.diagnostics.push(Diagnostic {
+ range: TextRange::empty(offset),
+ data: DiagnosticData::Bib(BibError::ExpectingFieldValue),
+ });
+ }
+ }
+}
diff --git a/support/texlab/crates/diagnostics/src/grammar/tex.rs b/support/texlab/crates/diagnostics/src/grammar/tex.rs
new file mode 100644
index 0000000000..b61ae4b020
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/grammar/tex.rs
@@ -0,0 +1,132 @@
+use base_db::{Config, Document, DocumentData, Workspace};
+use rowan::{ast::AstNode, NodeOrToken, TextRange};
+use syntax::latex;
+
+use crate::{
+ types::{DiagnosticData, TexError},
+ util::SimpleDiagnosticSource,
+ Diagnostic, DiagnosticBuilder, DiagnosticSource,
+};
+
+#[derive(Default)]
+pub struct TexSyntaxErrors(SimpleDiagnosticSource);
+
+impl DiagnosticSource for TexSyntaxErrors {
+ fn update(&mut self, workspace: &Workspace, document: &Document) {
+ let mut analyzer = Analyzer {
+ document,
+ config: workspace.config(),
+ diagnostics: Vec::new(),
+ };
+
+ analyzer.analyze_root();
+ self.0
+ .errors
+ .insert(document.uri.clone(), analyzer.diagnostics);
+ }
+
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ self.0.publish(workspace, builder);
+ }
+}
+
+struct Analyzer<'a> {
+ document: &'a Document,
+ config: &'a Config,
+ diagnostics: Vec<Diagnostic>,
+}
+
+impl<'a> Analyzer<'a> {
+ fn analyze_root(&mut self) {
+ if !self.document.uri.as_str().ends_with(".tex") {
+ return;
+ }
+
+ let DocumentData::Tex(data) = &self.document.data else { return };
+
+ let verbatim_envs = &self.config.syntax.verbatim_environments;
+
+ let mut traversal = latex::SyntaxNode::new_root(data.green.clone()).preorder();
+ while let Some(event) = traversal.next() {
+ match event {
+ rowan::WalkEvent::Enter(node) => {
+ if let Some(environment) = latex::Environment::cast(node.clone()) {
+ if environment
+ .begin()
+ .and_then(|begin| begin.name())
+ .and_then(|name| name.key())
+ .map_or(false, |name| verbatim_envs.contains(&name.to_string()))
+ {
+ traversal.skip_subtree();
+ continue;
+ }
+ }
+
+ self.analyze_environment(node.clone())
+ .or_else(|| self.analyze_curly_group(node.clone()))
+ .or_else(|| self.analyze_curly_braces(node));
+ }
+ rowan::WalkEvent::Leave(_) => {
+ continue;
+ }
+ };
+ }
+ }
+
+ fn analyze_environment(&mut self, node: latex::SyntaxNode) -> Option<()> {
+ let environment = latex::Environment::cast(node)?;
+ let begin = environment.begin()?.name()?.key()?;
+ let end = environment.end()?.name()?.key()?;
+ if begin != end {
+ self.diagnostics.push(Diagnostic {
+ range: latex::small_range(&begin),
+ data: DiagnosticData::Tex(TexError::MismatchedEnvironment),
+ });
+ }
+
+ Some(())
+ }
+
+ fn analyze_curly_group(&mut self, node: latex::SyntaxNode) -> Option<()> {
+ if !matches!(
+ node.kind(),
+ latex::CURLY_GROUP
+ | latex::CURLY_GROUP_COMMAND
+ | latex::CURLY_GROUP_KEY_VALUE
+ | latex::CURLY_GROUP_WORD
+ | latex::CURLY_GROUP_WORD_LIST
+ ) {
+ return None;
+ }
+
+ if !node
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .any(|token| token.kind() == latex::R_CURLY)
+ {
+ self.diagnostics.push(Diagnostic {
+ range: TextRange::empty(node.text_range().end()),
+ data: DiagnosticData::Tex(TexError::ExpectingRCurly),
+ });
+ }
+
+ Some(())
+ }
+
+ fn analyze_curly_braces(&mut self, node: latex::SyntaxNode) -> Option<()> {
+ if node.kind() == latex::ERROR && node.first_token()?.text() == "}" {
+ self.diagnostics.push(Diagnostic {
+ range: node.text_range(),
+ data: DiagnosticData::Tex(TexError::UnexpectedRCurly),
+ });
+
+ Some(())
+ } else {
+ None
+ }
+ }
+}
diff --git a/support/texlab/crates/diagnostics/src/labels.rs b/support/texlab/crates/diagnostics/src/labels.rs
new file mode 100644
index 0000000000..03df664a15
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/labels.rs
@@ -0,0 +1,97 @@
+use std::borrow::Cow;
+
+use base_db::{
+ semantics::tex::{Label, LabelKind},
+ util::queries,
+ DocumentData, Workspace,
+};
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+
+use crate::{
+ types::{DiagnosticData, TexError},
+ Diagnostic, DiagnosticBuilder, DiagnosticSource,
+};
+
+#[derive(Default)]
+pub struct LabelErrors;
+
+impl DiagnosticSource for LabelErrors {
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ detect_undefined_and_unused_labels(workspace, builder);
+ detect_duplicate_labels(workspace, builder);
+ }
+}
+
+fn detect_undefined_and_unused_labels<'db>(
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+) {
+ let graphs: Vec<_> = workspace
+ .iter()
+ .map(|start| base_db::graph::Graph::new(workspace, start))
+ .collect();
+
+ for document in workspace.iter() {
+ let DocumentData::Tex(data) = &document.data else {
+ continue;
+ };
+
+ let mut label_refs = FxHashSet::default();
+ let mut label_defs = FxHashSet::default();
+ let project = graphs
+ .iter()
+ .filter(|graph| graph.preorder().contains(&document))
+ .flat_map(|graph| graph.preorder());
+
+ for label in project
+ .filter_map(|child| child.data.as_tex())
+ .flat_map(|data| data.semantics.labels.iter())
+ {
+ if label.kind == LabelKind::Definition {
+ label_defs.insert(&label.name.text);
+ } else {
+ label_refs.insert(&label.name.text);
+ }
+ }
+
+ for label in &data.semantics.labels {
+ if label.kind != LabelKind::Definition && !label_defs.contains(&label.name.text) {
+ let diagnostic = Diagnostic {
+ range: label.name.range,
+ data: DiagnosticData::Tex(TexError::UndefinedLabel),
+ };
+ builder.push(&document.uri, Cow::Owned(diagnostic));
+ }
+
+ if label.kind == LabelKind::Definition && !label_refs.contains(&label.name.text) {
+ let diagnostic = Diagnostic {
+ range: label.name.range,
+ data: DiagnosticData::Tex(TexError::UnusedLabel),
+ };
+ builder.push(&document.uri, Cow::Owned(diagnostic));
+ }
+ }
+ }
+}
+
+fn detect_duplicate_labels<'db>(workspace: &'db Workspace, builder: &mut DiagnosticBuilder<'db>) {
+ for conflict in queries::Conflict::find_all::<Label>(workspace) {
+ let others = conflict
+ .rest
+ .iter()
+ .map(|location| (location.document.uri.clone(), location.range))
+ .collect();
+
+ let diagnostic = Diagnostic {
+ range: conflict.main.range,
+ data: DiagnosticData::Tex(TexError::DuplicateLabel(others)),
+ };
+
+ builder.push(&conflict.main.document.uri, Cow::Owned(diagnostic));
+ }
+}
diff --git a/support/texlab/crates/diagnostics/src/lib.rs b/support/texlab/crates/diagnostics/src/lib.rs
new file mode 100644
index 0000000000..e55ecb78e6
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/lib.rs
@@ -0,0 +1,86 @@
+mod build_log;
+mod citations;
+mod grammar;
+mod labels;
+pub mod types;
+pub(crate) mod util;
+
+use std::borrow::Cow;
+
+use base_db::{Document, Workspace};
+use build_log::BuildErrors;
+use citations::CitationErrors;
+use grammar::{BibSyntaxErrors, TexSyntaxErrors};
+use labels::LabelErrors;
+use rustc_hash::FxHashMap;
+use types::Diagnostic;
+use url::Url;
+
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+pub struct DiagnosticBuilder<'db> {
+ inner: FxHashMap<&'db Url, Vec<Cow<'db, Diagnostic>>>,
+}
+
+impl<'db> DiagnosticBuilder<'db> {
+ pub fn push(&mut self, uri: &'db Url, diagnostic: Cow<'db, Diagnostic>) {
+ self.inner.entry(&uri).or_default().push(diagnostic);
+ }
+
+ pub fn push_many(
+ &mut self,
+ uri: &'db Url,
+ diagnostics: impl Iterator<Item = Cow<'db, Diagnostic>>,
+ ) {
+ self.inner.entry(&uri).or_default().extend(diagnostics);
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = (&'db Url, impl Iterator<Item = &Diagnostic>)> {
+ self.inner
+ .iter()
+ .map(|(uri, diagnostics)| (*uri, diagnostics.iter().map(|diag| diag.as_ref())))
+ }
+}
+
+pub trait DiagnosticSource {
+ #[allow(unused_variables)]
+ fn update(&mut self, workspace: &Workspace, document: &Document) {}
+
+ fn publish<'db>(&'db mut self, workspace: &'db Workspace, builder: &mut DiagnosticBuilder<'db>);
+}
+
+pub struct DiagnosticManager {
+ sources: Vec<Box<dyn DiagnosticSource>>,
+}
+
+impl Default for DiagnosticManager {
+ fn default() -> Self {
+ let mut sources: Vec<Box<dyn DiagnosticSource>> = Vec::new();
+ sources.push(Box::new(TexSyntaxErrors::default()));
+ sources.push(Box::new(BibSyntaxErrors::default()));
+ sources.push(Box::new(BuildErrors::default()));
+ sources.push(Box::new(LabelErrors::default()));
+ sources.push(Box::new(CitationErrors::default()));
+ Self { sources }
+ }
+}
+
+impl DiagnosticSource for DiagnosticManager {
+ fn update(&mut self, workspace: &Workspace, document: &Document) {
+ for source in &mut self.sources {
+ source.update(workspace, document);
+ }
+ }
+
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ for source in &mut self.sources {
+ source.publish(workspace, builder);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests;
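The DiagnosticManager added here follows an update-then-publish protocol: every changed document is fed to update, and publish later aggregates all sources into a single DiagnosticBuilder keyed by URI. A sketch of that flow, modeled on the crate's own tests (the count_diagnostics helper is hypothetical):

    use diagnostics::{DiagnosticBuilder, DiagnosticManager, DiagnosticSource};

    // Hypothetical helper: run every diagnostic source over the workspace and
    // count the published diagnostics per document URI.
    fn count_diagnostics(workspace: &base_db::Workspace) -> Vec<(url::Url, usize)> {
        let mut manager = DiagnosticManager::default();
        for document in workspace.iter() {
            manager.update(workspace, document);
        }

        let mut builder = DiagnosticBuilder::default();
        manager.publish(workspace, &mut builder);

        builder
            .iter()
            .map(|(uri, diagnostics)| (uri.clone(), diagnostics.count()))
            .collect()
    }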
diff --git a/support/texlab/crates/diagnostics/src/tests.rs b/support/texlab/crates/diagnostics/src/tests.rs
new file mode 100644
index 0000000000..8b9534f337
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/tests.rs
@@ -0,0 +1,191 @@
+use std::borrow::Cow;
+
+use test_utils::fixture::Fixture;
+
+use crate::{
+ types::{BibError, Diagnostic, DiagnosticData, TexError},
+ DiagnosticBuilder, DiagnosticManager, DiagnosticSource,
+};
+
+fn check(input: &str, expected_data: &[DiagnosticData]) {
+ let fixture = Fixture::parse(input);
+ let mut manager = DiagnosticManager::default();
+
+ let mut expected = DiagnosticBuilder::default();
+ let mut expected_data = expected_data.iter();
+ for document in &fixture.documents {
+ let diagnostics = document.ranges.iter().copied().map(|range| {
+ let data = expected_data.next().unwrap().clone();
+ Cow::Owned(Diagnostic { range, data })
+ });
+
+ expected.push_many(&document.uri, diagnostics);
+ }
+
+ for document in fixture.workspace.iter() {
+ manager.update(&fixture.workspace, &document);
+ }
+
+ let mut actual = DiagnosticBuilder::default();
+ manager.publish(&fixture.workspace, &mut actual);
+
+ for diagnostics in actual.inner.values_mut() {
+ diagnostics.sort_by_key(|diag| (diag.range.start(), diag.range.len()));
+ }
+
+ assert_eq!(actual, expected);
+}
+
+#[test]
+fn test_bib_entry_missing_l_delim() {
+ check(
+ r#"
+%! main.bib
+@article
+ !
+"#,
+ &[DiagnosticData::Bib(BibError::ExpectingLCurly)],
+ )
+}
+
+#[test]
+fn test_bib_entry_missing_r_delim() {
+ check(
+ r#"
+%! main.bib
+@article{foo,
+ !
+
+%! main.tex
+\bibliography{main}
+\cite{foo}
+"#,
+ &[DiagnosticData::Bib(BibError::ExpectingRCurly)],
+ )
+}
+
+#[test]
+fn test_bib_entry_missing_name() {
+ check(
+ r#"
+%! main.bib
+@article{
+ !"#,
+ &[DiagnosticData::Bib(BibError::ExpectingKey)],
+ )
+}
+
+#[test]
+fn test_bib_field_missing_eq() {
+ check(
+ r#"
+%! main.bib
+@article{foo,
+ field
+ !
+}
+
+%! main.tex
+\bibliography{main}
+\cite{foo}
+"#,
+ &[DiagnosticData::Bib(BibError::ExpectingEq)],
+ )
+}
+
+#[test]
+fn test_bib_field_missing_value() {
+ check(
+ r#"
+%! main.bib
+@article{foo,
+ field =
+ !
+}
+
+%! main.tex
+\bibliography{main}
+\cite{foo}
+"#,
+ &[DiagnosticData::Bib(BibError::ExpectingFieldValue)],
+ )
+}
+
+#[test]
+fn test_tex_unmatched_braces() {
+ check(
+ r#"
+%! main.tex
+}
+^
+{
+ !
+"#,
+ &[
+ DiagnosticData::Tex(TexError::UnexpectedRCurly),
+ DiagnosticData::Tex(TexError::ExpectingRCurly),
+ ],
+ )
+}
+
+#[test]
+fn test_tex_environment_mismatched() {
+ check(
+ r#"
+%! main.tex
+\begin{foo}
+ ^^^
+\end{bar}
+"#,
+ &[DiagnosticData::Tex(TexError::MismatchedEnvironment)],
+ )
+}
+
+#[test]
+fn test_label_unused() {
+ check(
+ r#"
+%! main.tex
+\label{foo}
+ ^^^
+\label{bar}\ref{bar}
+"#,
+ &[DiagnosticData::Tex(TexError::UnusedLabel)],
+ )
+}
+
+#[test]
+fn test_label_undefined() {
+ check(
+ r#"
+%! main.tex
+\ref{foo}
+ ^^^
+"#,
+ &[DiagnosticData::Tex(TexError::UndefinedLabel)],
+ )
+}
+
+#[test]
+fn test_citation_undefined() {
+ check(
+ r#"
+%! main.tex
+\cite{foo}
+ ^^^
+"#,
+ &[DiagnosticData::Tex(TexError::UndefinedCitation)],
+ )
+}
+
+#[test]
+fn test_citation_unused() {
+ check(
+ r#"
+%! main.bib
+@article{foo,}
+ ^^^
+"#,
+ &[DiagnosticData::Bib(BibError::UnusedEntry)],
+ )
+}
diff --git a/support/texlab/crates/diagnostics/src/types.rs b/support/texlab/crates/diagnostics/src/types.rs
new file mode 100644
index 0000000000..a443245b6f
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/types.rs
@@ -0,0 +1,38 @@
+use rowan::TextRange;
+use syntax::BuildError;
+use url::Url;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Diagnostic {
+ pub range: TextRange,
+ pub data: DiagnosticData,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum DiagnosticData {
+ Tex(TexError),
+ Bib(BibError),
+ Build(BuildError),
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum TexError {
+ UnexpectedRCurly,
+ ExpectingRCurly,
+ MismatchedEnvironment,
+ UnusedLabel,
+ UndefinedLabel,
+ UndefinedCitation,
+ DuplicateLabel(Vec<(Url, TextRange)>),
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum BibError {
+ ExpectingLCurly,
+ ExpectingKey,
+ ExpectingRCurly,
+ ExpectingEq,
+ ExpectingFieldValue,
+ UnusedEntry,
+ DuplicateEntry(Vec<(Url, TextRange)>),
+}
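These payloads are deliberately plain data: no message strings are baked in, so the consumer decides how to render them. A hedged sketch of such a rendering step (the messages below are illustrative, not the strings texlab itself emits):

    use diagnostics::types::{BibError, DiagnosticData, TexError};

    // Illustrative mapping from diagnostic payloads to human-readable text.
    fn message_for(data: &DiagnosticData) -> String {
        match data {
            DiagnosticData::Tex(TexError::UnexpectedRCurly) => "unexpected '}'".into(),
            DiagnosticData::Tex(TexError::ExpectingRCurly) => "expecting '}'".into(),
            DiagnosticData::Tex(TexError::MismatchedEnvironment) => "mismatched environment".into(),
            DiagnosticData::Tex(TexError::UnusedLabel) => "unused label".into(),
            DiagnosticData::Tex(TexError::UndefinedLabel) => "undefined label".into(),
            DiagnosticData::Tex(TexError::UndefinedCitation) => "undefined citation".into(),
            DiagnosticData::Tex(TexError::DuplicateLabel(_)) => "duplicate label".into(),
            DiagnosticData::Bib(BibError::ExpectingLCurly) => "expecting '{'".into(),
            DiagnosticData::Bib(BibError::ExpectingKey) => "expecting a key".into(),
            DiagnosticData::Bib(BibError::ExpectingRCurly) => "expecting '}'".into(),
            DiagnosticData::Bib(BibError::ExpectingEq) => "expecting '='".into(),
            DiagnosticData::Bib(BibError::ExpectingFieldValue) => "expecting a field value".into(),
            DiagnosticData::Bib(BibError::UnusedEntry) => "unused entry".into(),
            DiagnosticData::Bib(BibError::DuplicateEntry(_)) => "duplicate entry".into(),
            // The Build variant carries the parsed BuildError from the log file.
            DiagnosticData::Build(_) => "build error (see the build log)".into(),
        }
    }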
diff --git a/support/texlab/crates/diagnostics/src/util.rs b/support/texlab/crates/diagnostics/src/util.rs
new file mode 100644
index 0000000000..fd34125dbe
--- /dev/null
+++ b/support/texlab/crates/diagnostics/src/util.rs
@@ -0,0 +1,28 @@
+use std::borrow::Cow;
+
+use base_db::Workspace;
+use rustc_hash::FxHashMap;
+use url::Url;
+
+use crate::{Diagnostic, DiagnosticBuilder, DiagnosticSource};
+
+#[derive(Default)]
+pub struct SimpleDiagnosticSource {
+ pub errors: FxHashMap<Url, Vec<Diagnostic>>,
+}
+
+impl DiagnosticSource for SimpleDiagnosticSource {
+ fn publish<'db>(
+ &'db mut self,
+ workspace: &'db Workspace,
+ builder: &mut DiagnosticBuilder<'db>,
+ ) {
+ self.errors.retain(|uri, _| workspace.lookup(uri).is_some());
+
+ for document in workspace.iter() {
+ if let Some(diagnostics) = self.errors.get(&document.uri) {
+ builder.push_many(&document.uri, diagnostics.iter().map(Cow::Borrowed));
+ }
+ }
+ }
+}
diff --git a/support/texlab/crates/distro/Cargo.toml b/support/texlab/crates/distro/Cargo.toml
index d7c336a041..c3a03105c7 100644
--- a/support/texlab/crates/distro/Cargo.toml
+++ b/support/texlab/crates/distro/Cargo.toml
@@ -7,7 +7,7 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-anyhow = "1.0.71"
+anyhow = "1.0.72"
rustc-hash = "1.1.0"
[lib]
diff --git a/support/texlab/crates/hover/Cargo.toml b/support/texlab/crates/hover/Cargo.toml
new file mode 100644
index 0000000000..f92939dd24
--- /dev/null
+++ b/support/texlab/crates/hover/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "hover"
+version = "0.0.0"
+license.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+base-db = { path = "../base-db" }
+citeproc = { path = "../citeproc" }
+completion-data = { path = "../completion-data" }
+rowan = "0.15.11"
+syntax = { path = "../syntax" }
+
+[dev-dependencies]
+expect-test = "1.4.1"
+test-utils = { path = "../test-utils" }
+
+[lib]
+doctest = false
diff --git a/support/texlab/crates/hover/src/citation.rs b/support/texlab/crates/hover/src/citation.rs
new file mode 100644
index 0000000000..d36370e709
--- /dev/null
+++ b/support/texlab/crates/hover/src/citation.rs
@@ -0,0 +1,39 @@
+use base_db::{util::queries, DocumentData};
+use rowan::ast::AstNode;
+use syntax::bibtex;
+
+use crate::{Hover, HoverData, HoverParams};
+
+pub(super) fn find_hover<'db>(params: &HoverParams<'db>) -> Option<Hover<'db>> {
+ let offset = params.offset;
+
+ let (name, range) = match &params.document.data {
+ DocumentData::Tex(data) => {
+ let result = queries::object_at_cursor(
+ &data.semantics.citations,
+ offset,
+ queries::SearchMode::Full,
+ )?;
+ (&result.object.name.text, result.range)
+ }
+ DocumentData::Bib(data) => {
+ let result = queries::object_at_cursor(
+ &data.semantics.entries,
+ offset,
+ queries::SearchMode::Name,
+ )?;
+ (&result.object.name.text, result.range)
+ }
+ _ => return None,
+ };
+
+ let text = params.project.documents.iter().find_map(|document| {
+ let data = document.data.as_bib()?;
+ let root = bibtex::Root::cast(data.root_node())?;
+ let entry = root.find_entry(&name)?;
+ citeproc::render(&entry)
+ })?;
+
+ let data = HoverData::Citation(text);
+ Some(Hover { range, data })
+}
diff --git a/support/texlab/crates/hover/src/entry_type.rs b/support/texlab/crates/hover/src/entry_type.rs
new file mode 100644
index 0000000000..c28c88b611
--- /dev/null
+++ b/support/texlab/crates/hover/src/entry_type.rs
@@ -0,0 +1,18 @@
+use base_db::data::BibtexEntryType;
+use syntax::bibtex;
+
+use crate::{Hover, HoverData, HoverParams};
+
+pub(super) fn find_hover<'db>(params: &'db HoverParams) -> Option<Hover<'db>> {
+ let data = params.document.data.as_bib()?;
+ let root = data.root_node();
+ let name = root
+ .token_at_offset(params.offset)
+ .find(|x| x.kind() == bibtex::TYPE)?;
+
+ let entry_type = BibtexEntryType::find(&name.text()[1..])?;
+ Some(Hover {
+ range: name.text_range(),
+ data: HoverData::EntryType(entry_type),
+ })
+}
diff --git a/support/texlab/crates/hover/src/field_type.rs b/support/texlab/crates/hover/src/field_type.rs
new file mode 100644
index 0000000000..18ebe6b78f
--- /dev/null
+++ b/support/texlab/crates/hover/src/field_type.rs
@@ -0,0 +1,21 @@
+use base_db::data::BibtexFieldType;
+use rowan::ast::AstNode;
+use syntax::bibtex;
+
+use crate::{Hover, HoverData, HoverParams};
+
+pub(super) fn find_hover<'db>(params: &HoverParams<'db>) -> Option<Hover<'db>> {
+ let data = params.document.data.as_bib()?;
+ let root = data.root_node();
+ let name = root
+ .token_at_offset(params.offset)
+ .find(|token| token.kind() == bibtex::NAME)?;
+
+ bibtex::Field::cast(name.parent()?)?;
+
+ let field_type = BibtexFieldType::find(name.text())?;
+ Some(Hover {
+ range: name.text_range(),
+ data: HoverData::FieldType(field_type),
+ })
+}
diff --git a/support/texlab/crates/hover/src/label.rs b/support/texlab/crates/hover/src/label.rs
new file mode 100644
index 0000000000..c5f72a287e
--- /dev/null
+++ b/support/texlab/crates/hover/src/label.rs
@@ -0,0 +1,28 @@
+use base_db::{
+ semantics::tex,
+ util::{
+ queries::{self, Object},
+ render_label,
+ },
+};
+
+use crate::{Hover, HoverData, HoverParams};
+
+pub(super) fn find_hover<'db>(params: &'db HoverParams<'db>) -> Option<Hover<'db>> {
+ let data = params.document.data.as_tex()?;
+ let cursor = queries::object_at_cursor(
+ &data.semantics.labels,
+ params.offset,
+ queries::SearchMode::Full,
+ )?;
+
+ let (_, definition) = tex::Label::find_all(&params.project)
+ .filter(|(_, label)| label.kind == tex::LabelKind::Definition)
+ .find(|(_, label)| label.name_text() == cursor.object.name_text())?;
+
+ let label = render_label(&params.workspace, &params.project, definition)?;
+ Some(Hover {
+ range: cursor.range,
+ data: HoverData::Label(label),
+ })
+}
diff --git a/support/texlab/crates/hover/src/lib.rs b/support/texlab/crates/hover/src/lib.rs
new file mode 100644
index 0000000000..4b06111603
--- /dev/null
+++ b/support/texlab/crates/hover/src/lib.rs
@@ -0,0 +1,61 @@
+mod citation;
+mod entry_type;
+mod field_type;
+mod label;
+mod package;
+mod string_ref;
+
+use base_db::{
+ data::{BibtexEntryType, BibtexFieldType},
+ util::RenderedLabel,
+ Document, Project, Workspace,
+};
+use rowan::{TextRange, TextSize};
+
+#[derive(Debug)]
+pub struct HoverParams<'db> {
+ pub document: &'db Document,
+ pub project: Project<'db>,
+ pub workspace: &'db Workspace,
+ pub offset: TextSize,
+}
+
+impl<'db> HoverParams<'db> {
+ pub fn new(workspace: &'db Workspace, document: &'db Document, offset: TextSize) -> Self {
+ let project = workspace.project(document);
+ Self {
+ document,
+ project,
+ workspace,
+ offset,
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct Hover<'db> {
+ pub range: TextRange,
+ pub data: HoverData<'db>,
+}
+
+#[derive(Debug, Clone)]
+pub enum HoverData<'db> {
+ Citation(String),
+ Package(&'db str),
+ EntryType(BibtexEntryType<'db>),
+ FieldType(BibtexFieldType<'db>),
+ Label(RenderedLabel<'db>),
+ StringRef(String),
+}
+
+pub fn find<'db>(params: &'db HoverParams<'db>) -> Option<Hover<'db>> {
+ citation::find_hover(&params)
+ .or_else(|| package::find_hover(&params))
+ .or_else(|| entry_type::find_hover(&params))
+ .or_else(|| field_type::find_hover(&params))
+ .or_else(|| label::find_hover(&params))
+ .or_else(|| string_ref::find_hover(&params))
+}
+
+#[cfg(test)]
+mod tests;
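HoverParams::new resolves the project for the given document up front, and hover::find then tries each provider in order until one produces a result. A small sketch of a caller that flattens the borrowed result into display text (the describe_hover helper is illustrative only):

    use hover::{HoverData, HoverParams};
    use rowan::TextSize;

    // Hypothetical helper: turn a hover result into plain text.
    fn describe_hover(
        workspace: &base_db::Workspace,
        uri: &url::Url,
        offset: TextSize,
    ) -> Option<String> {
        let document = workspace.lookup(uri)?;
        let params = HoverParams::new(workspace, document, offset);
        let hover = hover::find(&params)?;

        Some(match hover.data {
            HoverData::Citation(text) | HoverData::StringRef(text) => text,
            HoverData::Package(description) => description.to_string(),
            // The remaining variants carry structured data; fall back to their Debug form here.
            other => format!("{other:?}"),
        })
    }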
diff --git a/support/texlab/crates/hover/src/package.rs b/support/texlab/crates/hover/src/package.rs
new file mode 100644
index 0000000000..1d6b328860
--- /dev/null
+++ b/support/texlab/crates/hover/src/package.rs
@@ -0,0 +1,20 @@
+use base_db::semantics::tex::LinkKind;
+
+use crate::{Hover, HoverData, HoverParams};
+
+pub(super) fn find_hover<'db>(params: &HoverParams<'db>) -> Option<Hover<'db>> {
+ let data = params.document.data.as_tex()?;
+ data.semantics
+ .links
+ .iter()
+ .filter(|link| matches!(link.kind, LinkKind::Sty | LinkKind::Cls))
+ .filter(|link| link.path.range.contains_inclusive(params.offset))
+ .find_map(|link| {
+ let meta = completion_data::DATABASE.meta(&link.path.text)?;
+ let description = meta.description.as_deref()?;
+ Some(Hover {
+ range: link.path.range,
+ data: HoverData::Package(description),
+ })
+ })
+}
diff --git a/support/texlab/crates/hover/src/string_ref.rs b/support/texlab/crates/hover/src/string_ref.rs
new file mode 100644
index 0000000000..64449909d7
--- /dev/null
+++ b/support/texlab/crates/hover/src/string_ref.rs
@@ -0,0 +1,35 @@
+use citeproc::field::text::TextFieldData;
+use rowan::ast::AstNode;
+use syntax::bibtex::{self, HasName, HasValue};
+
+use crate::{Hover, HoverData, HoverParams};
+
+pub(super) fn find_hover<'db>(params: &HoverParams<'db>) -> Option<Hover<'db>> {
+ let data = params.document.data.as_bib()?;
+ let root = bibtex::Root::cast(data.root_node())?;
+ let name = root
+ .syntax()
+ .token_at_offset(params.offset)
+ .find(|token| token.kind() == bibtex::NAME)
+ .filter(|token| {
+ let parent = token.parent().unwrap();
+ bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
+ })?;
+
+ for string in root.strings() {
+ if !string
+ .name_token()
+ .map_or(false, |token| token.text() == name.text())
+ {
+ continue;
+ }
+
+ let value = TextFieldData::parse(&string.value()?)?.text;
+ return Some(Hover {
+ range: name.text_range(),
+ data: HoverData::StringRef(value),
+ });
+ }
+
+ None
+}
diff --git a/support/texlab/crates/hover/src/tests.rs b/support/texlab/crates/hover/src/tests.rs
new file mode 100644
index 0000000000..2890af938f
--- /dev/null
+++ b/support/texlab/crates/hover/src/tests.rs
@@ -0,0 +1,323 @@
+use expect_test::{expect, Expect};
+
+use crate::HoverParams;
+
+fn check(input: &str, expect: Expect) {
+ let fixture = test_utils::fixture::Fixture::parse(input);
+ let workspace = &fixture.workspace;
+ let document = workspace.lookup(&fixture.documents[0].uri).unwrap();
+ let offset = fixture.documents[0].cursor.unwrap();
+ let params = HoverParams::new(workspace, document, offset);
+
+ let data = crate::find(&params).map(|hover| {
+ assert_eq!(fixture.documents[0].ranges[0], hover.range);
+ hover.data
+ });
+
+ expect.assert_debug_eq(&data);
+}
+
+#[test]
+fn test_smoke() {
+ check(
+ r#"
+%! main.tex
+
+|"#,
+ expect![[r#"
+ None
+ "#]],
+ );
+}
+
+#[test]
+fn test_latex_citation() {
+ check(
+ r#"
+%! main.tex
+\addbibresource{main.bib}
+\cite{foo}
+ |
+ ^^^
+%! main.bib
+@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}"#,
+ expect![[r#"
+ Some(
+ Citation(
+ "F. Bar: \"Baz Qux\". (1337).",
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_entry_key() {
+ check(
+ r#"
+%! main.bib
+@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}
+ |
+ ^^^
+
+%! main.tex
+\addbibresource{main.bib}
+\cite{foo}"#,
+ expect![[r#"
+ Some(
+ Citation(
+ "F. Bar: \"Baz Qux\". (1337).",
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_entry_key_empty() {
+ check(
+ r#"
+%! main.bib
+@foo{bar,}
+ |"#,
+ expect![[r#"
+ None
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_entry_type_known() {
+ check(
+ r#"
+%! main.bib
+@article{foo,}
+ |
+^^^^^^^^"#,
+ expect![[r#"
+ Some(
+ EntryType(
+ BibtexEntryType {
+ name: "article",
+ category: Article,
+ documentation: Some(
+ "An article in a journal, magazine, newspaper, or other periodical which forms a \n self-contained unit with its own title. The title of the periodical is given in the \n journaltitle field. If the issue has its own title in addition to the main title of \n the periodical, it goes in the issuetitle field. Note that editor and related \n fields refer to the journal while translator and related fields refer to the article.\n\nRequired fields: `author`, `title`, `journaltitle`, `year/date`",
+ ),
+ },
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_entry_type_unknown() {
+ check(
+ r#"
+%! main.bib
+@foo{bar,}
+ |"#,
+ expect![[r#"
+ None
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_field_known() {
+ check(
+ r#"
+%! main.bib
+@article{foo, author = bar}
+ |
+ ^^^^^^"#,
+ expect![[r#"
+ Some(
+ FieldType(
+ BibtexFieldType {
+ name: "author",
+ documentation: "The author(s) of the `title`.",
+ },
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_field_unknown() {
+ check(
+ r#"
+%! main.bib
+@article{foo, bar = baz}
+ |"#,
+ expect![[r#"
+ None
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_string_ref() {
+ check(
+ r#"
+%! main.bib
+@string{foo = "Foo"}
+@string{bar = "Bar"}
+@article{baz, author = bar}
+ |
+ ^^^"#,
+ expect![[r#"
+ Some(
+ StringRef(
+ "Bar",
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_bibtex_value() {
+ check(
+ r#"
+%! main.bib
+@string{foo = "Foo"}
+@string{bar = "Bar"}
+@article{baz, author = bar}
+ |"#,
+ expect![[r#"
+ None
+ "#]],
+ );
+}
+
+#[test]
+fn test_latex_package_known() {
+ check(
+ r#"
+%! main.tex
+\usepackage{amsmath}
+ |
+ ^^^^^^^"#,
+ expect![[r#"
+ Some(
+ Package(
+ "The package provides the principal packages in the AMS-LaTeX distribution. It adapts for use in LaTeX most of the mathematical features found in AMS-TeX; it is highly recommended as an adjunct to serious mathematical typesetting in LaTeX. When amsmath is loaded, AMS-LaTeX packages amsbsy (for bold symbols), amsopn (for operator names) and amstext (for text embedded in mathematics) are also loaded. amsmath is part of the LaTeX required distribution; however, several contributed packages add still further to its appeal; examples are empheq, which provides functions for decorating and highlighting mathematics, and ntheorem, for specifying theorem (and similar) definitions.",
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_latex_class_unknown() {
+ check(
+ r#"
+%! main.tex
+\documentclass{abcdefghijklmnop}
+ |"#,
+ expect![[r#"
+ None
+ "#]],
+ );
+}
+
+#[test]
+fn test_latex_label_section() {
+ check(
+ r#"
+%! main.tex
+\section{Foo}
+\label{sec:foo}
+ |
+ ^^^^^^^"#,
+ expect![[r#"
+ Some(
+ Label(
+ RenderedLabel {
+ range: 0..29,
+ number: None,
+ object: Section {
+ prefix: "Section",
+ text: "Foo",
+ },
+ },
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_latex_label_theorem_child_file() {
+ check(
+ r#"
+%! main.tex
+\documentclass{article}
+\newtheorem{lemma}{Lemma}
+\include{child}
+\ref{thm:foo}
+ |
+ ^^^^^^^
+
+%! child.tex
+\begin{lemma}\label{thm:foo}
+ 1 + 1 = 2
+\end{lemma}"#,
+ expect![[r#"
+ Some(
+ Label(
+ RenderedLabel {
+ range: 0..54,
+ number: None,
+ object: Theorem {
+ kind: "Lemma",
+ description: None,
+ },
+ },
+ ),
+ )
+ "#]],
+ );
+}
+
+#[test]
+fn test_latex_label_theorem_child_file_mumber() {
+ check(
+ r#"
+%! main.tex
+\documentclass{article}
+\newtheorem{lemma}{Lemma}
+\include{child}
+\ref{thm:foo}
+ |
+ ^^^^^^^
+
+%! child.tex
+\begin{lemma}[Foo]\label{thm:foo}
+ 1 + 1 = 2
+\end{lemma}
+
+%! child.aux
+\newlabel{thm:foo}{{1}{1}{Foo}{lemma.1}{}}"#,
+ expect![[r#"
+ Some(
+ Label(
+ RenderedLabel {
+ range: 0..59,
+ number: Some(
+ "1",
+ ),
+ object: Theorem {
+ kind: "Lemma",
+ description: Some(
+ "Foo",
+ ),
+ },
+ },
+ ),
+ )
+ "#]],
+ );
+}
diff --git a/support/texlab/crates/parser/Cargo.toml b/support/texlab/crates/parser/Cargo.toml
index 1b57d5aec1..6df7596933 100644
--- a/support/texlab/crates/parser/Cargo.toml
+++ b/support/texlab/crates/parser/Cargo.toml
@@ -8,14 +8,14 @@ rust-version.workspace = true
[dependencies]
logos = "0.13.0"
-once_cell = "1.17.1"
-regex = "1.8.1"
+once_cell = "1.18.0"
+regex = "1.9.1"
rowan = "0.15.11"
rustc-hash = "1.1.0"
syntax = { path = "../syntax" }
[dev-dependencies]
-insta = { version = "1.29.0", features = ["glob", "redactions", "json"] }
+insta = { version = "1.31.0", features = ["glob", "redactions", "json"] }
[lib]
doctest = false
diff --git a/support/texlab/crates/parser/src/latex.rs b/support/texlab/crates/parser/src/latex.rs
index 63c160d4e2..9d85d5c6b7 100644
--- a/support/texlab/crates/parser/src/latex.rs
+++ b/support/texlab/crates/parser/src/latex.rs
@@ -340,19 +340,18 @@ impl<'a> Parser<'a> {
}
fn key(&mut self) {
+ self.key_with_eq(true);
+ }
+
+ fn key_with_eq(&mut self, allow_eq: bool) {
self.builder.start_node(KEY.into());
self.eat();
- while self
- .peek()
- .filter(|&kind| {
- matches!(
- kind,
- Token::Whitespace | Token::LineComment | Token::Word | Token::Pipe
- )
- })
- .is_some()
- {
- self.eat();
+ while let Some(kind) = self.peek() {
+ match kind {
+ Token::Whitespace | Token::LineComment | Token::Word | Token::Pipe => self.eat(),
+ Token::Eq if allow_eq => self.eat(),
+ _ => break,
+ }
}
self.trivia();
@@ -375,7 +374,7 @@ impl<'a> Parser<'a> {
fn key_value_pair(&mut self) {
self.builder.start_node(KEY_VALUE_PAIR.into());
- self.key();
+ self.key_with_eq(false);
if self.peek() == Some(Token::Eq) {
self.eat();
self.trivia();
@@ -717,7 +716,7 @@ impl<'a> Parser<'a> {
| Token::RBrack
| Token::CommandName(CommandName::Generic) => self.path(),
Token::LCurly => self.curly_group_path(),
- Token::Whitespace => self.eat(),
+ Token::Whitespace | Token::Pipe => self.eat(),
_ => break,
};
}
@@ -741,7 +740,7 @@ impl<'a> Parser<'a> {
| Token::LParen
| Token::RParen
| Token::CommandName(CommandName::Generic) => self.path(),
- Token::Whitespace | Token::LineBreak | Token::Comma => self.eat(),
+ Token::Whitespace | Token::LineBreak | Token::Comma | Token::Pipe => self.eat(),
Token::LCurly => self.curly_group_path(),
_ => break,
};
diff --git a/support/texlab/crates/parser/src/snapshots/parser__latex__tests__parse@issue_568.txt.snap b/support/texlab/crates/parser/src/snapshots/parser__latex__tests__parse@issue_568.txt.snap
new file mode 100644
index 0000000000..f71507c2ac
--- /dev/null
+++ b/support/texlab/crates/parser/src/snapshots/parser__latex__tests__parse@issue_568.txt.snap
@@ -0,0 +1,28 @@
+---
+source: crates/parser/src/latex.rs
+expression: root
+input_file: crates/parser/src/test_data/latex/issue_568.txt
+---
+ROOT@0..51
+ PREAMBLE@0..51
+ LATEX_INCLUDE@0..36
+ COMMAND_NAME@0..6 "\\input"
+ CURLY_GROUP_WORD_LIST@6..36
+ L_CURLY@6..7 "{"
+ WORD@7..8 "|"
+ KEY@8..34
+ WORD@8..15 "ipython"
+ WHITESPACE@15..16 " "
+ WORD@16..34 "scripts/test.ipynb"
+ R_CURLY@34..35 "}"
+ LINE_BREAK@35..36 "\n"
+ LABEL_DEFINITION@36..51
+ COMMAND_NAME@36..42 "\\label"
+ CURLY_GROUP_WORD@42..51
+ L_CURLY@42..43 "{"
+ KEY@43..50
+ WORD@43..48 "fig:x"
+ EQUALITY_SIGN@48..49 "="
+ WORD@49..50 "2"
+ R_CURLY@50..51 "}"
+
diff --git a/support/texlab/crates/parser/src/test_data/latex/issue_568.txt b/support/texlab/crates/parser/src/test_data/latex/issue_568.txt
new file mode 100644
index 0000000000..a01a16854d
--- /dev/null
+++ b/support/texlab/crates/parser/src/test_data/latex/issue_568.txt
@@ -0,0 +1,2 @@
+\input{|ipython scripts/test.ipynb}
+\label{fig:x=2} \ No newline at end of file
diff --git a/support/texlab/crates/references/Cargo.toml b/support/texlab/crates/references/Cargo.toml
new file mode 100644
index 0000000000..7aa810487d
--- /dev/null
+++ b/support/texlab/crates/references/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "references"
+version = "0.0.0"
+license.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+base-db = { path = "../base-db" }
+rowan = "0.15.11"
+syntax = { path = "../syntax" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+
+[lib]
+doctest = false
diff --git a/support/texlab/crates/references/src/entry.rs b/support/texlab/crates/references/src/entry.rs
new file mode 100644
index 0000000000..3e6b7d7a6e
--- /dev/null
+++ b/support/texlab/crates/references/src/entry.rs
@@ -0,0 +1,49 @@
+use base_db::{
+ semantics::{bib, tex},
+ util::queries::{self, Object},
+ DocumentData,
+};
+
+use crate::{Reference, ReferenceContext, ReferenceKind};
+
+pub(super) fn find_all<'db>(context: &mut ReferenceContext<'db>) -> Option<()> {
+ let offset = context.params.offset;
+
+ let name = match &context.params.document.data {
+ DocumentData::Tex(data) => {
+ let result = queries::object_at_cursor(
+ &data.semantics.citations,
+ offset,
+ queries::SearchMode::Full,
+ )?;
+ result.object.name_text()
+ }
+ DocumentData::Bib(data) => {
+ let result = queries::object_at_cursor(
+ &data.semantics.entries,
+ offset,
+ queries::SearchMode::Name,
+ )?;
+ result.object.name_text()
+ }
+ _ => return None,
+ };
+
+ for (document, obj) in queries::objects_with_name::<tex::Citation>(&context.project, name) {
+ context.results.push(Reference {
+ document,
+ range: obj.name.range,
+ kind: ReferenceKind::Reference,
+ });
+ }
+
+ for (document, obj) in queries::objects_with_name::<bib::Entry>(&context.project, name) {
+ context.results.push(Reference {
+ document,
+ range: obj.name.range,
+ kind: ReferenceKind::Definition,
+ });
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/references/src/label.rs b/support/texlab/crates/references/src/label.rs
new file mode 100644
index 0000000000..8e7c17d6a7
--- /dev/null
+++ b/support/texlab/crates/references/src/label.rs
@@ -0,0 +1,29 @@
+use base_db::{
+ semantics::tex,
+ util::queries::{self, Object},
+};
+
+use crate::{Reference, ReferenceContext, ReferenceKind};
+
+pub(super) fn find_all<'db>(context: &mut ReferenceContext<'db>) -> Option<()> {
+ let data = context.params.document.data.as_tex()?;
+ let mode = queries::SearchMode::Full;
+ let name = queries::object_at_cursor(&data.semantics.labels, context.params.offset, mode)?
+ .object
+ .name_text();
+
+ for (document, label) in queries::objects_with_name::<tex::Label>(&context.project, name) {
+ let kind = match label.kind {
+ tex::LabelKind::Definition => ReferenceKind::Definition,
+ tex::LabelKind::Reference | tex::LabelKind::ReferenceRange => ReferenceKind::Reference,
+ };
+
+ context.results.push(Reference {
+ document,
+ range: label.name.range,
+ kind,
+ });
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/references/src/lib.rs b/support/texlab/crates/references/src/lib.rs
new file mode 100644
index 0000000000..76cd8ce53c
--- /dev/null
+++ b/support/texlab/crates/references/src/lib.rs
@@ -0,0 +1,50 @@
+mod entry;
+mod label;
+mod string_def;
+
+use base_db::{Document, Project, Workspace};
+use rowan::{TextRange, TextSize};
+
+#[derive(Debug)]
+pub struct Reference<'db> {
+ pub document: &'db Document,
+ pub range: TextRange,
+ pub kind: ReferenceKind,
+}
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
+pub enum ReferenceKind {
+ Definition,
+ Reference,
+}
+
+#[derive(Debug)]
+pub struct ReferenceParams<'db> {
+ pub workspace: &'db Workspace,
+ pub document: &'db Document,
+ pub offset: TextSize,
+}
+
+#[derive(Debug)]
+struct ReferenceContext<'db> {
+ params: ReferenceParams<'db>,
+ project: Project<'db>,
+ results: Vec<Reference<'db>>,
+}
+
+pub fn find_all(params: ReferenceParams) -> Vec<Reference<'_>> {
+ let project = params.workspace.project(params.document);
+ let mut context = ReferenceContext {
+ params,
+ project,
+ results: Vec::new(),
+ };
+
+ entry::find_all(&mut context);
+ label::find_all(&mut context);
+ string_def::find_all(&mut context);
+ context.results
+}
+
+#[cfg(test)]
+mod tests;
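references::find_all returns both the definition site and all reference sites, each tagged with a ReferenceKind, so the caller decides whether the declaration itself is included. A sketch mirroring the filtering done in the crate's tests (the helper and its include_definition flag are assumptions):

    use references::{find_all, ReferenceKind, ReferenceParams};
    use rowan::{TextRange, TextSize};

    // Hypothetical helper: collect (URI, range) pairs, optionally keeping the definition.
    fn find_reference_ranges<'db>(
        workspace: &'db base_db::Workspace,
        document: &'db base_db::Document,
        offset: TextSize,
        include_definition: bool,
    ) -> Vec<(&'db url::Url, TextRange)> {
        find_all(ReferenceParams { workspace, document, offset })
            .into_iter()
            .filter(|r| include_definition || r.kind == ReferenceKind::Reference)
            .map(|r| (&r.document.uri, r.range))
            .collect()
    }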
diff --git a/support/texlab/crates/references/src/string_def.rs b/support/texlab/crates/references/src/string_def.rs
new file mode 100644
index 0000000000..8f64a71e36
--- /dev/null
+++ b/support/texlab/crates/references/src/string_def.rs
@@ -0,0 +1,42 @@
+use rowan::ast::AstNode;
+use syntax::bibtex;
+
+use crate::{Reference, ReferenceContext, ReferenceKind};
+
+pub(super) fn find_all<'db>(context: &mut ReferenceContext<'db>) -> Option<()> {
+ let document = context.params.document;
+ let data = document.data.as_bib()?;
+ let root = data.root_node();
+ let name = root
+ .token_at_offset(context.params.offset)
+ .filter(|token| token.kind() == bibtex::NAME)
+ .find(|token| {
+ let parent = token.parent().unwrap();
+ bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
+ })?;
+
+ for string in &data.semantics.strings {
+ if string.name.text == name.text() {
+ context.results.push(Reference {
+ document,
+ range: string.name.range,
+ kind: ReferenceKind::Definition,
+ });
+ }
+ }
+
+ for token in root
+ .descendants()
+ .filter_map(bibtex::Value::cast)
+ .filter_map(|token| token.syntax().first_token())
+ .filter(|token| token.text() == name.text())
+ {
+ context.results.push(Reference {
+ document,
+ range: token.text_range(),
+ kind: ReferenceKind::Reference,
+ });
+ }
+
+ Some(())
+}
diff --git a/support/texlab/crates/references/src/tests.rs b/support/texlab/crates/references/src/tests.rs
new file mode 100644
index 0000000000..03a83c5f14
--- /dev/null
+++ b/support/texlab/crates/references/src/tests.rs
@@ -0,0 +1,246 @@
+use std::collections::HashSet;
+
+use crate::{ReferenceKind, ReferenceParams};
+
+fn check(fixture: &str, include_def: bool) {
+ let fixture = test_utils::fixture::Fixture::parse(fixture);
+ let workspace = &fixture.workspace;
+
+ let expected = fixture
+ .documents
+ .iter()
+ .flat_map(|document| document.ranges.iter().map(|&range| (&document.uri, range)))
+ .collect::<HashSet<_>>();
+
+ let (document, offset) = fixture
+ .documents
+ .iter()
+ .find_map(|document| Some((workspace.lookup(&document.uri)?, document.cursor?)))
+ .unwrap();
+
+ let params = ReferenceParams {
+ workspace,
+ document,
+ offset,
+ };
+
+ let actual = crate::find_all(params)
+ .into_iter()
+ .filter(|reference| reference.kind == ReferenceKind::Reference || include_def)
+ .map(|reference| (&reference.document.uri, reference.range))
+ .collect::<HashSet<_>>();
+
+ assert_eq!(actual, expected);
+}
+
+#[test]
+fn test_entry_definition() {
+ check(
+ r#"
+%! foo.bib
+@article{foo,}
+ |
+
+%! bar.tex
+\cite{foo}
+ ^^^
+\addbibresource{foo.bib}
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_entry_definition_include_decl() {
+ check(
+ r#"
+%! foo.bib
+@article{foo,}
+ |
+ ^^^
+
+%! bar.tex
+\cite{foo}
+ ^^^
+\addbibresource{foo.bib}
+"#,
+ true,
+ );
+}
+
+#[test]
+fn test_entry_reference() {
+ check(
+ r#"
+%! foo.bib
+@article{foo,}
+
+%! bar.tex
+\cite{foo}
+ |
+ ^^^
+\addbibresource{foo.bib}
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_entry_reference_include_decl() {
+ check(
+ r#"
+%! foo.bib
+@article{foo,}
+ ^^^
+
+%! bar.tex
+\cite{foo}
+ |
+ ^^^
+\addbibresource{foo.bib}
+"#,
+ true,
+ );
+}
+
+#[test]
+fn test_label_definition() {
+ check(
+ r#"
+%! foo.tex
+\label{foo}
+ |
+
+%! bar.tex
+\ref{foo}
+ ^^^
+\input{foo.tex}
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_label_definition_include_decl() {
+ check(
+ r#"
+%! foo.tex
+\label{foo}
+ |
+ ^^^
+
+%! bar.tex
+\ref{foo}
+ ^^^
+\input{foo.tex}
+"#,
+ true,
+ );
+}
+
+#[test]
+fn test_label_reference() {
+ check(
+ r#"
+%! foo.tex
+\label{foo}
+\input{bar.tex}
+
+%! bar.tex
+\ref{foo}
+ |
+ ^^^
+
+%! baz.tex
+\ref{foo}
+ ^^^
+\input{bar.tex}
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_label_reference_include_decl() {
+ check(
+ r#"
+%! foo.tex
+\label{foo}
+ ^^^
+\input{bar.tex}
+
+%! bar.tex
+\ref{foo}
+ |
+ ^^^
+
+%! baz.tex
+\ref{foo}
+ ^^^
+\input{bar.tex}
+"#,
+ true,
+ );
+}
+
+#[test]
+fn test_string_reference() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {Foo}}
+@string{bar = {Bar}}
+@article{baz, author = foo}
+ |
+ ^^^
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_string_reference_include_decl() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {Foo}}
+ ^^^
+@string{bar = {Bar}}
+@article{baz, author = foo}
+ |
+ ^^^
+"#,
+ true,
+ );
+}
+
+#[test]
+fn test_string_definition() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {Foo}}
+ |
+@string{bar = {Bar}}
+@article{baz, author = foo}
+ ^^^
+"#,
+ false,
+ );
+}
+
+#[test]
+fn test_string_definition_include_decl() {
+ check(
+ r#"
+%! main.bib
+@string{foo = {Foo}}
+ |
+ ^^^
+@string{bar = {Bar}}
+@article{baz, author = foo}
+ ^^^
+"#,
+ true,
+ );
+}
diff --git a/support/texlab/crates/symbols/Cargo.toml b/support/texlab/crates/symbols/Cargo.toml
index b79e671597..348e28d576 100644
--- a/support/texlab/crates/symbols/Cargo.toml
+++ b/support/texlab/crates/symbols/Cargo.toml
@@ -12,13 +12,13 @@ doctest = false
[dependencies]
base-db = { path = "../base-db" }
distro = { path = "../distro" }
-itertools = "0.10.5"
+itertools = "0.11.0"
rowan = "0.15.11"
syntax = { path = "../syntax" }
titlecase = "2.2.1"
-url = "2.3.1"
+url = "=2.3.1"
[dev-dependencies]
-insta = "1.29.0"
-regex = "1.8.1"
+insta = "1.31.0"
+regex = "1.9.1"
test-utils = { path = "../test-utils" }
diff --git a/support/texlab/crates/syntax/Cargo.toml b/support/texlab/crates/syntax/Cargo.toml
index 92c334c7f2..8477e01e0d 100644
--- a/support/texlab/crates/syntax/Cargo.toml
+++ b/support/texlab/crates/syntax/Cargo.toml
@@ -7,7 +7,7 @@ edition.workspace = true
rust-version.workspace = true
[dependencies]
-itertools = "0.10.5"
+itertools = "0.11.0"
rowan = "0.15.11"
[lib]
diff --git a/support/texlab/crates/syntax/src/bibtex.rs b/support/texlab/crates/syntax/src/bibtex.rs
index 9b4b1ab77e..b568fbf8e8 100644
--- a/support/texlab/crates/syntax/src/bibtex.rs
+++ b/support/texlab/crates/syntax/src/bibtex.rs
@@ -181,7 +181,7 @@ pub trait HasEq: AstNode<Language = Lang> {
self.syntax()
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
- .find(|token| token.kind() == NAME)
+ .find(|token| token.kind() == EQ)
}
}
diff --git a/support/texlab/crates/syntax/src/latex/cst.rs b/support/texlab/crates/syntax/src/latex/cst.rs
index a2945c3e7a..95aa1e4667 100644
--- a/support/texlab/crates/syntax/src/latex/cst.rs
+++ b/support/texlab/crates/syntax/src/latex/cst.rs
@@ -253,12 +253,19 @@ impl Eq for Key {}
impl ToString for Key {
fn to_string(&self) -> String {
let mut buf = String::new();
- for word in self.words() {
- buf.push_str(word.text());
- buf.push(' ');
+ for token in self
+ .syntax()
+ .children_with_tokens()
+ .filter_map(|node| node.into_token())
+ {
+ if matches!(token.kind(), WHITESPACE | LINE_BREAK | COMMENT) {
+ buf.push(' ');
+ } else {
+ buf.push_str(token.text());
+ }
}
- buf.pop().unwrap();
+ buf = String::from(buf.trim());
buf
}
}
diff --git a/support/texlab/crates/test-utils/Cargo.toml b/support/texlab/crates/test-utils/Cargo.toml
index 4ce7b1a5ca..6426dad920 100644
--- a/support/texlab/crates/test-utils/Cargo.toml
+++ b/support/texlab/crates/test-utils/Cargo.toml
@@ -10,8 +10,7 @@ rust-version.workspace = true
base-db = { path = "../base-db" }
distro = { path = "../distro" }
rowan = "0.15.11"
-syntax = { path = "../syntax" }
-url = "2.3.1"
+url = "=2.3.1"
[lib]
doctest = false
diff --git a/support/texlab/crates/test-utils/src/fixture.rs b/support/texlab/crates/test-utils/src/fixture.rs
index 1a5071b2db..371fc5d139 100644
--- a/support/texlab/crates/test-utils/src/fixture.rs
+++ b/support/texlab/crates/test-utils/src/fixture.rs
@@ -68,26 +68,28 @@ impl DocumentSpec {
let mut cursor = None;
let mut text = String::new();
+ let mut line_start = 0;
for line in input.lines().map(|line| line.trim_end()) {
if line.chars().all(|c| matches!(c, ' ' | '^' | '|' | '!')) && !line.is_empty() {
cursor = cursor.or_else(|| {
let offset = line.find('|')?;
- Some(TextSize::from((text.len() + offset) as u32))
+ Some(TextSize::from((line_start + offset) as u32))
});
if let Some(start) = line.find('!') {
- let position = TextSize::from((text.len() + start) as u32);
+ let position = TextSize::from((line_start + start) as u32);
ranges.push(TextRange::new(position, position));
}
if let Some(start) = line.find('^') {
let end = line.rfind('^').unwrap() + 1;
ranges.push(TextRange::new(
- TextSize::from((text.len() + start) as u32),
- TextSize::from((text.len() + end) as u32),
+ TextSize::from((line_start + start) as u32),
+ TextSize::from((line_start + end) as u32),
));
}
} else {
+ line_start = text.len();
text.push_str(line);
text.push('\n');
}
diff --git a/support/texlab/crates/texlab/Cargo.toml b/support/texlab/crates/texlab/Cargo.toml
index 1f9ca98abe..9bb0654a8b 100644
--- a/support/texlab/crates/texlab/Cargo.toml
+++ b/support/texlab/crates/texlab/Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "texlab"
description = "LaTeX Language Server"
-version = "5.7.0"
+version = "5.8.0"
license.workspace = true
readme = "README.md"
authors.workspace = true
@@ -31,45 +31,47 @@ test = false
doctest = false
[dependencies]
-anyhow = "1.0.71"
+anyhow = "1.0.72"
base-db = { path = "../base-db" }
citeproc = { path = "../citeproc" }
-clap = { version = "4.3.0", features = ["derive"] }
+clap = { version = "4.3.15", features = ["derive"] }
commands = { path = "../commands" }
+completion-data = { path = "../completion-data" }
crossbeam-channel = "0.5.8"
+definition = { path = "../definition" }
+diagnostics = { path = "../diagnostics" }
dirs = "5.0.1"
distro = { path = "../distro" }
encoding_rs = "0.8.32"
encoding_rs_io = "0.1.7"
fern = "0.6.2"
-flate2 = "1.0.26"
fuzzy-matcher = { version = "0.3.7", features = ["compact"] }
-itertools = "0.10.5"
-log = "0.4.17"
-lsp-server = "0.7.0"
+hover = { path = "../hover" }
+itertools = "0.11.0"
+log = "0.4.19"
+lsp-server = "0.7.2"
lsp-types = "0.94.0"
-notify = "6.0.0"
-once_cell = "1.17.1"
+notify = "6.0.1"
+once_cell = "1.18.0"
parking_lot = "0.12.1"
parser = { path = "../parser" }
-regex = "1.8.1"
+references = { path = "../references" }
+regex = "1.9.1"
rowan = "0.15.11"
rustc-hash = "1.1.0"
-serde = "1.0.163"
-serde_json = "1.0.96"
+serde = "1.0.171"
+serde_json = "1.0.103"
serde_regex = "1.1.0"
-serde_repr = "0.1.12"
-smol_str = { version = "0.2.0", features = ["serde"] }
+serde_repr = "0.1.14"
+symbols = { path = "../symbols" }
syntax = { path = "../syntax" }
-tempfile = "3.5.0"
+tempfile = "3.6.0"
threadpool = "1.8.1"
-titlecase = "2.2.1"
-symbols = { path = "../symbols" }
[dev-dependencies]
assert_unordered = "0.3.5"
criterion = { version = "0.5.1" }
-insta = { version = "1.29.0", features = ["glob", "redactions", "json"] }
+insta = { version = "1.31.0", features = ["glob", "redactions", "json"] }
[[bench]]
name = "bench_main"
diff --git a/support/texlab/crates/texlab/src/features/completion/argument.rs b/support/texlab/crates/texlab/src/features/completion/argument.rs
index ae4cb98471..2481685ea5 100644
--- a/support/texlab/crates/texlab/src/features/completion/argument.rs
+++ b/support/texlab/crates/texlab/src/features/completion/argument.rs
@@ -1,7 +1,7 @@
use rowan::{ast::AstNode, TextRange};
use syntax::latex;
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
+use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@@ -34,13 +34,13 @@ pub fn complete<'a>(context: &'a CursorContext, builder: &mut CompletionBuilder<
let command_name = command.name()?;
let command_name = &command_name.text()[1..];
- for component in COMPONENT_DATABASE.linked_components(&context.project) {
- for component_command in component
+ for package in context.included_packages() {
+ for package_command in package
.commands
.iter()
.filter(|command| command.name == command_name)
{
- for (_, param) in component_command
+ for (_, param) in package_command
.parameters
.iter()
.enumerate()
diff --git a/support/texlab/crates/texlab/src/features/completion/builder.rs b/support/texlab/crates/texlab/src/features/completion/builder.rs
index bae7ebbfb7..fede7aeaf9 100644
--- a/support/texlab/crates/texlab/src/features/completion/builder.rs
+++ b/support/texlab/crates/texlab/src/features/completion/builder.rs
@@ -12,7 +12,6 @@ use once_cell::sync::Lazy;
use regex::Regex;
use rowan::{ast::AstNode, TextRange, TextSize};
use serde::{Deserialize, Serialize};
-use smol_str::SmolStr;
use syntax::{
bibtex::{self, HasName, HasType},
latex,
@@ -246,7 +245,7 @@ impl<'a> CompletionBuilder<'a> {
name: &'a str,
image: Option<&'a str>,
glyph: Option<&'a str>,
- file_names: &'a [SmolStr],
+ file_names: &'a [&'a str],
) -> Option<()> {
let score = self.matcher.score(name, &self.text_pattern[1..])?;
let data = Data::ComponentCommand {
@@ -270,7 +269,7 @@ impl<'a> CompletionBuilder<'a> {
&mut self,
range: TextRange,
name: &'a str,
- file_names: &'a [SmolStr],
+ file_names: &'a [&'a str],
) -> Option<()> {
let score = self.matcher.score(name, &self.text_pattern)?;
self.items.push(Item {
@@ -662,7 +661,7 @@ impl<'a> CompletionBuilder<'a> {
}
}
- fn component_detail(&self, file_names: &[SmolStr]) -> String {
+ fn component_detail(&self, file_names: &[&str]) -> String {
if file_names.is_empty() {
"built-in".into()
} else {
@@ -702,11 +701,11 @@ enum Data<'a> {
name: &'a str,
image: Option<&'a str>,
glyph: Option<&'a str>,
- file_names: &'a [SmolStr],
+ file_names: &'a [&'a str],
},
ComponentEnvironment {
name: &'a str,
- file_names: &'a [SmolStr],
+ file_names: &'a [&'a str],
},
Class {
name: &'a str,
diff --git a/support/texlab/crates/texlab/src/features/completion/component_command.rs b/support/texlab/crates/texlab/src/features/completion/component_command.rs
index ca1a1354ec..fd7ad5f9f4 100644
--- a/support/texlab/crates/texlab/src/features/completion/component_command.rs
+++ b/support/texlab/crates/texlab/src/features/completion/component_command.rs
@@ -1,4 +1,4 @@
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
+use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@@ -8,14 +8,14 @@ pub fn complete<'db>(
) -> Option<()> {
let range = context.cursor.command_range(context.offset)?;
- for component in COMPONENT_DATABASE.linked_components(&context.project) {
- for command in &component.commands {
+ for package in context.included_packages() {
+ for command in &package.commands {
builder.component_command(
range,
&command.name,
command.image.as_deref(),
command.glyph.as_deref(),
- &component.file_names,
+ &package.file_names,
);
}
}
diff --git a/support/texlab/crates/texlab/src/features/completion/component_environment.rs b/support/texlab/crates/texlab/src/features/completion/component_environment.rs
index 85c29558ca..94921958cd 100644
--- a/support/texlab/crates/texlab/src/features/completion/component_environment.rs
+++ b/support/texlab/crates/texlab/src/features/completion/component_environment.rs
@@ -1,4 +1,4 @@
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
+use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@@ -8,9 +8,9 @@ pub fn complete<'db>(
) -> Option<()> {
let range = context.find_environment_name()?;
- for component in COMPONENT_DATABASE.linked_components(&context.project) {
- for name in &component.environments {
- builder.component_environment(range, name, &component.file_names);
+ for package in context.included_packages() {
+ for name in &package.environments {
+ builder.component_environment(range, name, &package.file_names);
}
}
diff --git a/support/texlab/crates/texlab/src/features/completion/import.rs b/support/texlab/crates/texlab/src/features/completion/import.rs
index 826408368e..cb8c20a814 100644
--- a/support/texlab/crates/texlab/src/features/completion/import.rs
+++ b/support/texlab/crates/texlab/src/features/completion/import.rs
@@ -2,7 +2,7 @@ use rowan::ast::AstNode;
use rustc_hash::FxHashSet;
use syntax::latex;
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
+use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@@ -20,13 +20,12 @@ pub fn complete<'db>(
};
let mut file_names = FxHashSet::default();
- for file_name in COMPONENT_DATABASE
- .components
+ for file_name in completion_data::DATABASE
.iter()
- .flat_map(|comp| comp.file_names.iter())
+ .flat_map(|package| package.file_names.iter())
.filter(|file_name| file_name.ends_with(extension))
{
- file_names.insert(file_name.as_str());
+ file_names.insert(file_name);
let stem = &file_name[0..file_name.len() - 4];
if kind == latex::PACKAGE_INCLUDE {
builder.package(range, stem);
diff --git a/support/texlab/crates/texlab/src/features/definition.rs b/support/texlab/crates/texlab/src/features/definition.rs
index c1a5f5162b..d450831e1f 100644
--- a/support/texlab/crates/texlab/src/features/definition.rs
+++ b/support/texlab/crates/texlab/src/features/definition.rs
@@ -1,60 +1,44 @@
-mod command;
-mod document;
-mod entry;
-mod label;
-mod string;
-
-use base_db::{Document, Workspace};
+use base_db::Workspace;
+use definition::DefinitionParams;
use lsp_types::{GotoDefinitionResponse, LocationLink, Position, Url};
-use rowan::TextRange;
-use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
+use crate::util::line_index_ext::LineIndexExt;
pub fn goto_definition(
workspace: &Workspace,
uri: &Url,
position: Position,
) -> Option<GotoDefinitionResponse> {
- let context = CursorContext::new(workspace, uri, position, ())?;
- log::debug!("[Definition] Cursor: {:?}", context.cursor);
-
- let links: Vec<_> = command::goto_definition(&context)
- .or_else(|| document::goto_definition(&context))
- .or_else(|| entry::goto_definition(&context))
- .or_else(|| label::goto_definition(&context))
- .or_else(|| string::goto_definition(&context))?
- .into_iter()
- .map(|result| {
- let origin_selection_range = Some(
- context
- .document
- .line_index
- .line_col_lsp_range(result.origin_selection_range),
- );
-
- let target_line_index = &result.target.line_index;
- let target_uri = result.target.uri.clone();
- let target_range = target_line_index.line_col_lsp_range(result.target_range);
-
- let target_selection_range =
- target_line_index.line_col_lsp_range(result.target_selection_range);
-
- LocationLink {
- origin_selection_range,
- target_uri,
- target_range,
- target_selection_range,
- }
- })
- .collect();
+ let document = workspace.lookup(uri)?;
+ let offset = document.line_index.offset_lsp(position);
+ let params = DefinitionParams {
+ workspace,
+ document,
+ offset,
+ };
+
+ let mut links = Vec::new();
+ for result in definition::goto_definition(params) {
+ let origin_selection_range = Some(
+ document
+ .line_index
+ .line_col_lsp_range(result.origin_selection_range),
+ );
+
+ let target_line_index = &result.target.line_index;
+ let target_uri = result.target.uri.clone();
+ let target_range = target_line_index.line_col_lsp_range(result.target_range);
+
+ let target_selection_range =
+ target_line_index.line_col_lsp_range(result.target_selection_range);
+
+ links.push(LocationLink {
+ origin_selection_range,
+ target_uri,
+ target_range,
+ target_selection_range,
+ });
+ }
Some(GotoDefinitionResponse::Link(links))
}
-
-#[derive(Debug, Clone)]
-struct DefinitionResult<'a> {
- origin_selection_range: TextRange,
- target: &'a Document,
- target_range: TextRange,
- target_selection_range: TextRange,
-}
diff --git a/support/texlab/crates/texlab/src/features/definition/document.rs b/support/texlab/crates/texlab/src/features/definition/document.rs
deleted file mode 100644
index a035eec718..0000000000
--- a/support/texlab/crates/texlab/src/features/definition/document.rs
+++ /dev/null
@@ -1,31 +0,0 @@
-use rowan::TextRange;
-
-use crate::util::cursor::CursorContext;
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition<'a>(
- context: &CursorContext<'a>,
-) -> Option<Vec<DefinitionResult<'a>>> {
- context
- .workspace
- .parents(context.document)
- .iter()
- .copied()
- .chain(std::iter::once(context.document))
- .flat_map(|parent| base_db::graph::Graph::new(context.workspace, parent).edges)
- .filter(|edge| edge.source == context.document)
- .find_map(|edge| {
- let range = edge.weight?.link.path.range;
- if range.contains_inclusive(context.offset) {
- Some(vec![DefinitionResult {
- origin_selection_range: range,
- target: edge.target,
- target_range: TextRange::default(),
- target_selection_range: TextRange::default(),
- }])
- } else {
- None
- }
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/definition/entry.rs b/support/texlab/crates/texlab/src/features/definition/entry.rs
deleted file mode 100644
index 9cced101ff..0000000000
--- a/support/texlab/crates/texlab/src/features/definition/entry.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use base_db::DocumentData;
-use rowan::ast::AstNode;
-use syntax::{
- bibtex::{self, HasName},
- latex,
-};
-
-use crate::util::cursor::CursorContext;
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition<'a>(
- context: &CursorContext<'a>,
-) -> Option<Vec<DefinitionResult<'a>>> {
- let word = context
- .cursor
- .as_tex()
- .filter(|token| token.kind() == latex::WORD)?;
-
- let key = latex::Key::cast(word.parent()?)?;
-
- latex::Citation::cast(key.syntax().parent()?.parent()?)?;
-
- let origin_selection_range = latex::small_range(&key);
-
- for document in &context.project.documents {
- let DocumentData::Bib(data) = &document.data else { continue };
-
- for entry in data.root_node().children().filter_map(bibtex::Entry::cast) {
- if let Some(key) = entry.name_token().filter(|k| k.text() == word.text()) {
- return Some(vec![DefinitionResult {
- origin_selection_range,
- target: document,
- target_selection_range: key.text_range(),
- target_range: entry.syntax().text_range(),
- }]);
- }
- }
- }
-
- None
-}
diff --git a/support/texlab/crates/texlab/src/features/definition/label.rs b/support/texlab/crates/texlab/src/features/definition/label.rs
deleted file mode 100644
index 9a518974b7..0000000000
--- a/support/texlab/crates/texlab/src/features/definition/label.rs
+++ /dev/null
@@ -1,37 +0,0 @@
-use base_db::{semantics::tex::LabelKind, util::render_label, DocumentData};
-
-use crate::util::cursor::CursorContext;
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition<'a>(
- context: &CursorContext<'a>,
-) -> Option<Vec<DefinitionResult<'a>>> {
- let (name_text, origin_selection_range) = context
- .find_label_name_key()
- .or_else(|| context.find_label_name_command())?;
-
- for document in &context.project.documents {
- let DocumentData::Tex(data) = &document.data else { continue };
-
- let Some(label) = data
- .semantics
- .labels
- .iter()
- .filter(|label| label.kind == LabelKind::Definition)
- .find(|label| label.name.text == name_text) else { continue };
-
- let target_selection_range = label.name.range;
- let target_range = render_label(context.workspace, &context.project, label)
- .map_or(target_selection_range, |label| label.range);
-
- return Some(vec![DefinitionResult {
- origin_selection_range,
- target: document,
- target_range,
- target_selection_range,
- }]);
- }
-
- None
-}
diff --git a/support/texlab/crates/texlab/src/features/definition/string.rs b/support/texlab/crates/texlab/src/features/definition/string.rs
deleted file mode 100644
index 420d2818ea..0000000000
--- a/support/texlab/crates/texlab/src/features/definition/string.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-use base_db::DocumentData;
-use rowan::ast::AstNode;
-use syntax::bibtex::{self, HasName};
-
-use crate::util::cursor::CursorContext;
-
-use super::DefinitionResult;
-
-pub(super) fn goto_definition<'a>(
- context: &CursorContext<'a>,
-) -> Option<Vec<DefinitionResult<'a>>> {
- let DocumentData::Bib(data) = &context.document.data else { return None };
-
- let key = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)?;
-
- bibtex::Value::cast(key.parent()?)?;
-
- let origin_selection_range = key.text_range();
-
- data.root_node()
- .children()
- .filter_map(bibtex::StringDef::cast)
- .find_map(|string| {
- let string_name = string.name_token().filter(|k| k.text() == key.text())?;
- Some(vec![DefinitionResult {
- origin_selection_range,
- target: context.document,
- target_selection_range: string_name.text_range(),
- target_range: string.syntax().text_range(),
- }])
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/hover.rs b/support/texlab/crates/texlab/src/features/hover.rs
index 0c70c48099..bde71261c3 100644
--- a/support/texlab/crates/texlab/src/features/hover.rs
+++ b/support/texlab/crates/texlab/src/features/hover.rs
@@ -1,39 +1,47 @@
-mod citation;
-mod component;
-mod entry_type;
-mod field;
-mod label;
-mod string_ref;
-
use base_db::Workspace;
-use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind, Position, Url};
-use rowan::TextRange;
+use hover::{HoverData, HoverParams};
-use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
+use crate::util::line_index_ext::LineIndexExt;
-pub fn find(workspace: &Workspace, uri: &Url, position: Position) -> Option<Hover> {
- let context = CursorContext::new(workspace, uri, position, ())?;
- log::debug!("[Hover] Cursor: {:?}", context.cursor);
+pub fn find(
+ workspace: &Workspace,
+ uri: &lsp_types::Url,
+ position: lsp_types::Position,
+) -> Option<lsp_types::Hover> {
+ let document = workspace.lookup(uri)?;
+ let offset = document.line_index.offset_lsp(position);
+ let params = HoverParams::new(workspace, document, offset);
+ let hover = ::hover::find(&params)?;
- let result = label::find_hover(&context)
- .or_else(|| citation::find_hover(&context))
- .or_else(|| component::find_hover(&context))
- .or_else(|| string_ref::find_hover(&context))
- .or_else(|| field::find_hover(&context))
- .or_else(|| entry_type::find_hover(&context))?;
+ let contents = match hover.data {
+ HoverData::Citation(text) => lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::Markdown,
+ value: text,
+ },
+ HoverData::Package(description) => lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::PlainText,
+ value: description.into(),
+ },
+ HoverData::EntryType(type_) => lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::Markdown,
+ value: type_.documentation?.into(),
+ },
+ HoverData::FieldType(type_) => lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::Markdown,
+ value: type_.documentation.into(),
+ },
+ HoverData::Label(label) => lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::PlainText,
+ value: label.reference(),
+ },
+ HoverData::StringRef(text) => lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::PlainText,
+ value: text,
+ },
+ };
- Some(Hover {
- contents: HoverContents::Markup(MarkupContent {
- kind: result.value_kind,
- value: result.value,
- }),
- range: Some(context.document.line_index.line_col_lsp_range(result.range)),
+ Some(lsp_types::Hover {
+ contents: lsp_types::HoverContents::Markup(contents),
+ range: Some(document.line_index.line_col_lsp_range(hover.range)),
})
}
-
-#[derive(Debug, Clone)]
-struct HoverResult {
- range: TextRange,
- value: String,
- value_kind: MarkupKind,
-}
diff --git a/support/texlab/crates/texlab/src/features/hover/citation.rs b/support/texlab/crates/texlab/src/features/hover/citation.rs
deleted file mode 100644
index 1f76404871..0000000000
--- a/support/texlab/crates/texlab/src/features/hover/citation.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use lsp_types::MarkupKind;
-use rowan::ast::AstNode;
-use syntax::bibtex;
-
-use crate::util::cursor::CursorContext;
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let (key, range) = context
- .find_citation_key_word()
- .or_else(|| context.find_citation_key_command())
- .or_else(|| context.find_entry_key())?;
-
- let value = context.project.documents.iter().find_map(|document| {
- let data = document.data.as_bib()?;
- let root = bibtex::Root::cast(data.root_node())?;
- let entry = root.find_entry(&key)?;
- citeproc::render(&entry)
- })?;
-
- Some(HoverResult {
- range,
- value,
- value_kind: MarkupKind::Markdown,
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/hover/component.rs b/support/texlab/crates/texlab/src/features/hover/component.rs
deleted file mode 100644
index 89954ebf0f..0000000000
--- a/support/texlab/crates/texlab/src/features/hover/component.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-use base_db::{semantics::tex::LinkKind, DocumentData};
-use lsp_types::MarkupKind;
-
-use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let DocumentData::Tex(data) = &context.document.data else { return None };
- data.semantics
- .links
- .iter()
- .filter(|link| matches!(link.kind, LinkKind::Sty | LinkKind::Cls))
- .filter(|link| link.path.range.contains_inclusive(context.offset))
- .find_map(|link| {
- let value = COMPONENT_DATABASE.documentation(&link.path.text)?.value;
- Some(HoverResult {
- value,
- value_kind: MarkupKind::PlainText,
- range: link.path.range,
- })
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/hover/entry_type.rs b/support/texlab/crates/texlab/src/features/hover/entry_type.rs
deleted file mode 100644
index da737a148f..0000000000
--- a/support/texlab/crates/texlab/src/features/hover/entry_type.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use base_db::data::BibtexEntryType;
-use lsp_types::MarkupKind;
-use syntax::bibtex;
-
-use crate::util::cursor::CursorContext;
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let name = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::TYPE)?;
-
- let documentation = BibtexEntryType::find(&name.text()[1..]).and_then(|ty| ty.documentation)?;
- Some(HoverResult {
- range: name.text_range(),
- value: String::from(documentation),
- value_kind: MarkupKind::Markdown,
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/hover/field.rs b/support/texlab/crates/texlab/src/features/hover/field.rs
deleted file mode 100644
index d16bea6157..0000000000
--- a/support/texlab/crates/texlab/src/features/hover/field.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-use base_db::data::BibtexFieldType;
-use lsp_types::MarkupKind;
-use rowan::ast::AstNode;
-use syntax::bibtex;
-
-use crate::util::cursor::CursorContext;
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let name = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)?;
-
- bibtex::Field::cast(name.parent()?)?;
-
- let docs = BibtexFieldType::find(name.text())?.documentation;
- Some(HoverResult {
- range: name.text_range(),
- value: docs.into(),
- value_kind: MarkupKind::Markdown,
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/hover/label.rs b/support/texlab/crates/texlab/src/features/hover/label.rs
deleted file mode 100644
index bb5768d2b0..0000000000
--- a/support/texlab/crates/texlab/src/features/hover/label.rs
+++ /dev/null
@@ -1,26 +0,0 @@
-use base_db::{semantics::tex::LabelKind, util::render_label};
-use lsp_types::MarkupKind;
-
-use crate::util::cursor::CursorContext;
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let (name_text, range) = context
- .find_label_name_key()
- .or_else(|| context.find_label_name_command())?;
-
- context
- .project
- .documents
- .iter()
- .filter_map(|document| document.data.as_tex())
- .flat_map(|data| data.semantics.labels.iter())
- .find(|label| label.kind == LabelKind::Definition && label.name.text == name_text)
- .and_then(|label| render_label(context.workspace, &context.project, label))
- .map(|label| HoverResult {
- range,
- value: label.reference(),
- value_kind: MarkupKind::PlainText,
- })
-}
diff --git a/support/texlab/crates/texlab/src/features/hover/string_ref.rs b/support/texlab/crates/texlab/src/features/hover/string_ref.rs
deleted file mode 100644
index 52310ad68e..0000000000
--- a/support/texlab/crates/texlab/src/features/hover/string_ref.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use base_db::DocumentData;
-use citeproc::field::text::TextFieldData;
-use lsp_types::MarkupKind;
-use rowan::ast::AstNode;
-use syntax::bibtex::{self, HasName, HasValue};
-
-use crate::util::cursor::CursorContext;
-
-use super::HoverResult;
-
-pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
- let DocumentData::Bib(data) = &context.document.data else { return None };
-
- let name = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)
- .filter(|token| {
- let parent = token.parent().unwrap();
- bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
- })?;
-
- for string in data
- .root_node()
- .children()
- .filter_map(bibtex::StringDef::cast)
- {
- if string
- .name_token()
- .map_or(false, |token| token.text() == name.text())
- {
- let value = TextFieldData::parse(&string.value()?)?.text;
- return Some(HoverResult {
- range: name.text_range(),
- value,
- value_kind: MarkupKind::PlainText,
- });
- }
- }
-
- None
-}
diff --git a/support/texlab/crates/texlab/src/features/reference.rs b/support/texlab/crates/texlab/src/features/reference.rs
index 21243e0b6c..eb0807c7fc 100644
--- a/support/texlab/crates/texlab/src/features/reference.rs
+++ b/support/texlab/crates/texlab/src/features/reference.rs
@@ -1,39 +1,33 @@
-mod entry;
-mod label;
-mod string;
+use base_db::Workspace;
+use references::{ReferenceKind, ReferenceParams};
-use base_db::{Document, Workspace};
-use lsp_types::{Location, Position, ReferenceContext, Url};
-use rowan::TextRange;
-
-use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
+use crate::util::line_index_ext::LineIndexExt;
pub fn find_all(
workspace: &Workspace,
- uri: &Url,
- position: Position,
- params: &ReferenceContext,
-) -> Option<Vec<Location>> {
- let mut results = Vec::new();
- let context = CursorContext::new(workspace, uri, position, params)?;
- log::debug!("[References] Cursor: {:?}", context.cursor);
- label::find_all_references(&context, &mut results);
- entry::find_all_references(&context, &mut results);
- string::find_all_references(&context, &mut results);
+ uri: &lsp_types::Url,
+ position: lsp_types::Position,
+ context: &lsp_types::ReferenceContext,
+) -> Option<Vec<lsp_types::Location>> {
+ let document = workspace.lookup(uri)?;
+ let offset = document.line_index.offset_lsp(position);
+ let params = ReferenceParams {
+ workspace,
+ document,
+ offset,
+ };
- let locations = results
+ let mut results = Vec::new();
+ for result in references::find_all(params)
.into_iter()
- .map(|result| Location {
- uri: result.document.uri.clone(),
- range: result.document.line_index.line_col_lsp_range(result.range),
- })
- .collect();
-
- Some(locations)
-}
+ .filter(|result| result.kind == ReferenceKind::Reference || context.include_declaration)
+ {
+ let document = result.document;
+ let uri = document.uri.clone();
+ let range = document.line_index.line_col_lsp_range(result.range);
+ let location = lsp_types::Location::new(uri, range);
+ results.push(location);
+ }
-#[derive(Debug)]
-struct ReferenceResult<'a> {
- document: &'a Document,
- range: TextRange,
+ Some(results)
}
diff --git a/support/texlab/crates/texlab/src/features/reference/entry.rs b/support/texlab/crates/texlab/src/features/reference/entry.rs
deleted file mode 100644
index d84e7fa4cb..0000000000
--- a/support/texlab/crates/texlab/src/features/reference/entry.rs
+++ /dev/null
@@ -1,56 +0,0 @@
-use base_db::DocumentData;
-use lsp_types::ReferenceContext;
-use rowan::ast::AstNode;
-use syntax::{
- bibtex::{self, HasName},
- latex,
-};
-
-use crate::util::cursor::CursorContext;
-
-use super::ReferenceResult;
-
-pub(super) fn find_all_references<'a>(
- context: &CursorContext<'a, &ReferenceContext>,
- results: &mut Vec<ReferenceResult<'a>>,
-) -> Option<()> {
- let (key_text, _) = context
- .find_citation_key_word()
- .or_else(|| context.find_citation_key_command())
- .or_else(|| context.find_entry_key())?;
-
- for document in &context.project.documents {
- match &document.data {
- DocumentData::Tex(data) => {
- data.root_node()
- .descendants()
- .filter_map(latex::Citation::cast)
- .filter_map(|citation| citation.key_list())
- .flat_map(|keys| keys.keys())
- .filter(|key| key.to_string() == key_text)
- .map(|key| latex::small_range(&key))
- .for_each(|range| {
- results.push(ReferenceResult { document, range });
- });
- }
- DocumentData::Bib(data) if context.params.include_declaration => {
- data.root_node()
- .children()
- .filter_map(bibtex::Entry::cast)
- .filter_map(|entry| entry.name_token())
- .filter(|key| key.text() == key_text)
- .map(|key| key.text_range())
- .for_each(|range| {
- results.push(ReferenceResult { document, range });
- });
- }
- DocumentData::Bib(_)
- | DocumentData::Aux(_)
- | DocumentData::Log(_)
- | DocumentData::Root
- | DocumentData::Tectonic => {}
- };
- }
-
- Some(())
-}
diff --git a/support/texlab/crates/texlab/src/features/reference/label.rs b/support/texlab/crates/texlab/src/features/reference/label.rs
deleted file mode 100644
index 3f0433e0f9..0000000000
--- a/support/texlab/crates/texlab/src/features/reference/label.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-use base_db::{semantics::tex::LabelKind, DocumentData};
-use lsp_types::ReferenceContext;
-
-use crate::util::cursor::CursorContext;
-
-use super::ReferenceResult;
-
-pub(super) fn find_all_references<'a>(
- context: &CursorContext<'a, &ReferenceContext>,
- results: &mut Vec<ReferenceResult<'a>>,
-) -> Option<()> {
- let (name_text, _) = context
- .find_label_name_key()
- .or_else(|| context.find_label_name_command())?;
-
- for document in &context.project.documents {
- let DocumentData::Tex(data) = &document.data else { continue };
-
- for label in data
- .semantics
- .labels
- .iter()
- .filter(|label| label.name.text == name_text)
- .filter(|label| {
- label.kind != LabelKind::Definition || context.params.include_declaration
- })
- {
- results.push(ReferenceResult {
- document,
- range: label.name.range,
- });
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/crates/texlab/src/features/reference/string.rs b/support/texlab/crates/texlab/src/features/reference/string.rs
deleted file mode 100644
index eca592309c..0000000000
--- a/support/texlab/crates/texlab/src/features/reference/string.rs
+++ /dev/null
@@ -1,44 +0,0 @@
-use base_db::DocumentData;
-use lsp_types::ReferenceContext;
-use rowan::ast::AstNode;
-use syntax::bibtex::{self, HasName};
-
-use crate::util::cursor::CursorContext;
-
-use super::ReferenceResult;
-
-pub(super) fn find_all_references<'a>(
- context: &CursorContext<'a, &ReferenceContext>,
- results: &mut Vec<ReferenceResult<'a>>,
-) -> Option<()> {
- let name_text = context
- .cursor
- .as_bib()
- .filter(|token| token.kind() == bibtex::NAME)
- .filter(|token| {
- let parent = token.parent().unwrap();
- bibtex::Value::can_cast(parent.kind()) || bibtex::StringDef::can_cast(parent.kind())
- })?
- .text();
-
- let DocumentData::Bib(data) = &context.document.data else { return None };
-
- for node in data.root_node().descendants() {
- if let Some(name) = bibtex::StringDef::cast(node.clone())
- .and_then(|string| string.name_token())
- .filter(|name| context.params.include_declaration && name.text() == name_text)
- .or_else(|| {
- bibtex::Value::cast(node)
- .and_then(|token| token.syntax().first_token())
- .filter(|name| name.text() == name_text)
- })
- {
- results.push(ReferenceResult {
- document: context.document,
- range: name.text_range(),
- });
- }
- }
-
- Some(())
-}
diff --git a/support/texlab/crates/texlab/src/features/rename/entry.rs b/support/texlab/crates/texlab/src/features/rename/entry.rs
index fd1fb183f9..6a6037c999 100644
--- a/support/texlab/crates/texlab/src/features/rename/entry.rs
+++ b/support/texlab/crates/texlab/src/features/rename/entry.rs
@@ -1,10 +1,6 @@
-use base_db::DocumentData;
-use rowan::{ast::AstNode, TextRange};
+use base_db::{Document, DocumentData};
+use rowan::TextRange;
use rustc_hash::FxHashMap;
-use syntax::{
- bibtex::{self, HasName},
- latex,
-};
use crate::util::cursor::CursorContext;
@@ -24,46 +20,35 @@ pub(super) fn rename<'a>(context: &CursorContext<'a, Params>) -> Option<RenameRe
.find_citation_key_word()
.or_else(|| context.find_entry_key())?;
- let mut changes = FxHashMap::default();
+ let mut changes: FxHashMap<&Document, Vec<Indel>> = FxHashMap::default();
for document in &context.project.documents {
- match &document.data {
- DocumentData::Tex(data) => {
- let root = data.root_node();
-
- let edits: Vec<_> = root
- .descendants()
- .filter_map(latex::Citation::cast)
- .filter_map(|citation| citation.key_list())
- .flat_map(|keys| keys.keys())
- .filter(|key| key.to_string() == key_text)
- .map(|key| Indel {
- delete: latex::small_range(&key),
- insert: context.params.new_name.clone(),
- })
- .collect();
-
- changes.insert(*document, edits);
- }
- DocumentData::Bib(data) => {
- let root = data.root_node();
- let edits: Vec<_> = root
- .descendants()
- .filter_map(bibtex::Entry::cast)
- .filter_map(|entry| entry.name_token())
- .filter(|key| key.text() == key_text)
- .map(|key| Indel {
- delete: key.text_range(),
- insert: context.params.new_name.clone(),
- })
- .collect();
-
- changes.insert(*document, edits);
- }
- DocumentData::Aux(_)
- | DocumentData::Log(_)
- | DocumentData::Root
- | DocumentData::Tectonic => {}
- };
+ if let DocumentData::Tex(data) = &document.data {
+ let edits = data
+ .semantics
+ .citations
+ .iter()
+ .filter(|citation| citation.name.text == key_text)
+ .map(|citation| Indel {
+ delete: citation.name.range,
+ insert: context.params.new_name.clone(),
+ })
+ .collect();
+
+ changes.insert(document, edits);
+ } else if let DocumentData::Bib(data) = &document.data {
+ let edits = data
+ .semantics
+ .entries
+ .iter()
+ .filter(|entry| entry.name.text == key_text)
+ .map(|entry| Indel {
+ delete: entry.name.range,
+ insert: context.params.new_name.clone(),
+ })
+ .collect();
+
+ changes.insert(document, edits);
+ }
}
Some(RenameResult { changes })
diff --git a/support/texlab/crates/texlab/src/server.rs b/support/texlab/crates/texlab/src/server.rs
index 2a62abe96d..ef919bab74 100644
--- a/support/texlab/crates/texlab/src/server.rs
+++ b/support/texlab/crates/texlab/src/server.rs
@@ -13,6 +13,7 @@ use anyhow::Result;
use base_db::{util::LineCol, Config, Owner, Workspace};
use commands::{BuildCommand, CleanCommand, CleanTarget, ForwardSearch};
use crossbeam_channel::{Receiver, Sender};
+use diagnostics::{DiagnosticManager, DiagnosticSource};
use distro::{Distro, Language};
use lsp_server::{Connection, ErrorCode, Message, RequestId};
use lsp_types::{notification::*, request::*, *};
@@ -31,8 +32,7 @@ use crate::{
symbols,
},
util::{
- self, capabilities::ClientCapabilitiesExt, components::COMPONENT_DATABASE,
- line_index_ext::LineIndexExt, normalize_uri,
+ self, capabilities::ClientCapabilitiesExt, line_index_ext::LineIndexExt, normalize_uri,
},
};
@@ -63,6 +63,7 @@ pub struct Server {
client: LspClient,
client_capabilities: Arc<ClientCapabilities>,
client_info: Option<Arc<ClientInfo>>,
+ diagnostic_manager: DiagnosticManager,
chktex_diagnostics: FxHashMap<Url, Vec<Diagnostic>>,
watcher: FileWatcher,
pool: ThreadPool,
@@ -84,6 +85,7 @@ impl Server {
client_capabilities: Default::default(),
client_info: Default::default(),
chktex_diagnostics: Default::default(),
+ diagnostic_manager: DiagnosticManager::default(),
watcher,
pool: threadpool::Builder::new().build(),
pending_builds: Default::default(),
@@ -254,20 +256,35 @@ impl Server {
}
fn update_workspace(&mut self) {
+ let mut checked_paths = FxHashSet::default();
let mut workspace = self.workspace.write();
- workspace.discover();
+ workspace.discover(&mut checked_paths);
self.watcher.watch(&mut workspace);
+
+ for document in checked_paths
+ .iter()
+ .filter_map(|path| workspace.lookup_path(path))
+ {
+ self.diagnostic_manager.update(&workspace, document);
+ }
+
drop(workspace);
self.publish_diagnostics_with_delay();
}
fn publish_diagnostics(&mut self) -> Result<()> {
let workspace = self.workspace.read();
- let mut all_diagnostics = util::diagnostics::collect(&workspace);
+
+ let mut all_diagnostics =
+ util::diagnostics::collect(&workspace, &mut self.diagnostic_manager);
for (uri, diagnostics) in &self.chktex_diagnostics {
- let Some(document) = workspace.lookup(uri) else { continue };
- let Some(existing) = all_diagnostics.get_mut(document) else { continue };
+ let Some(document) = workspace.lookup(uri) else {
+ continue;
+ };
+ let Some(existing) = all_diagnostics.get_mut(document) else {
+ continue;
+ };
existing.extend(diagnostics.iter().cloned());
}
@@ -369,7 +386,12 @@ impl Server {
self.update_workspace();
- if self.workspace.read().config().diagnostics.chktex.on_open {
+ let workspace = self.workspace.read();
+ self.diagnostic_manager
+ .update(&workspace, workspace.lookup(&uri).unwrap());
+
+ if workspace.config().diagnostics.chktex.on_open {
+ drop(workspace);
self.run_chktex(&uri);
}
@@ -383,7 +405,9 @@ impl Server {
let mut workspace = self.workspace.write();
for change in params.content_changes {
- let Some(document) = workspace.lookup(&uri) else { return Ok(()) };
+ let Some(document) = workspace.lookup(&uri) else {
+ return Ok(());
+ };
match change.range {
Some(range) => {
let range = document.line_index.offset_lsp_range(range);
@@ -406,6 +430,9 @@ impl Server {
};
}
+ self.diagnostic_manager
+ .update(&workspace, workspace.lookup(&uri).unwrap());
+
drop(workspace);
self.update_workspace();
@@ -449,8 +476,12 @@ impl Server {
fn run_chktex(&mut self, uri: &Url) {
let workspace = self.workspace.read();
- let Some(document) = workspace.lookup(uri) else { return };
- let Some(command) = util::chktex::Command::new(&workspace, document) else { return };
+ let Some(document) = workspace.lookup(uri) else {
+ return;
+ };
+ let Some(command) = util::chktex::Command::new(&workspace, document) else {
+ return;
+ };
let sender = self.internal_tx.clone();
let uri = document.uri.clone();
@@ -534,9 +565,15 @@ impl Server {
.map(|data| serde_json::from_value(data).unwrap())
{
Some(CompletionItemData::Package | CompletionItemData::Class) => {
- item.documentation = COMPONENT_DATABASE
- .documentation(&item.label)
- .map(Documentation::MarkupContent);
+ item.documentation = completion_data::DATABASE
+ .meta(&item.label)
+ .and_then(|meta| meta.description.as_deref())
+ .map(|value| {
+ Documentation::MarkupContent(MarkupContent {
+ kind: MarkupKind::PlainText,
+ value: value.into(),
+ })
+ });
}
Some(CompletionItemData::Citation { uri, key }) => {
if let Some(data) = workspace
@@ -863,6 +900,10 @@ impl Server {
{
if let Some(language) = Language::from_path(&path) {
changed |= workspace.load(&path, language, Owner::Server).is_ok();
+
+ if let Some(document) = workspace.lookup_path(&path) {
+ self.diagnostic_manager.update(&workspace, document);
+ }
}
}
}
@@ -915,7 +956,8 @@ impl Server {
let line_index = &document.line_index;
let position = line_index.offset_lsp(params.text_document_position.position);
- let Some(result) = commands::change_environment(document, position, &params.new_name) else {
+ let Some(result) = commands::change_environment(document, position, &params.new_name)
+ else {
anyhow::bail!("No environment found at the current position");
};
diff --git a/support/texlab/crates/texlab/src/server/extensions.rs b/support/texlab/crates/texlab/src/server/extensions.rs
index 2d9d17a5b3..ce876c7892 100644
--- a/support/texlab/crates/texlab/src/server/extensions.rs
+++ b/support/texlab/crates/texlab/src/server/extensions.rs
@@ -1,3 +1,5 @@
+#![allow(non_camel_case_types)]
+
use commands::ForwardSearchError;
use lsp_types::{Position, Range, TextDocumentIdentifier, TextDocumentPositionParams};
use serde::{Deserialize, Serialize};
diff --git a/support/texlab/crates/texlab/src/server/options.rs b/support/texlab/crates/texlab/src/server/options.rs
index ba5b645402..ddf20f0a5d 100644
--- a/support/texlab/crates/texlab/src/server/options.rs
+++ b/support/texlab/crates/texlab/src/server/options.rs
@@ -1,3 +1,4 @@
+use std::path::PathBuf;
use std::time::Duration;
use base_db::{Config, Formatter, SynctexConfig};
@@ -68,6 +69,9 @@ pub struct BuildOptions {
pub args: Option<Vec<String>>,
pub on_save: bool,
pub forward_search_after: bool,
+ pub aux_directory: Option<String>,
+ pub log_directory: Option<String>,
+ pub filename: Option<String>,
}
#[derive(Debug, PartialEq, Eq, Clone, Default, Serialize, Deserialize)]
@@ -153,7 +157,20 @@ impl From<Options> for Config {
config.build.args = value.build.args.unwrap_or(config.build.args);
config.build.on_save = value.build.on_save;
config.build.forward_search_after = value.build.forward_search_after;
- config.build.output_dir = value.aux_directory.unwrap_or_else(|| String::from("."));
+
+ config.build.aux_dir = value
+ .build
+ .aux_directory
+ .or_else(|| value.aux_directory.clone())
+ .unwrap_or_else(|| String::from("."));
+
+ config.build.log_dir = value
+ .build
+ .log_directory
+ .or_else(|| value.aux_directory)
+ .unwrap_or_else(|| String::from("."));
+
+ config.build.output_filename = value.build.filename.map(PathBuf::from);
config.diagnostics.allowed_patterns = value
.diagnostics
diff --git a/support/texlab/crates/texlab/src/util.rs b/support/texlab/crates/texlab/src/util.rs
index b1e1adc396..5a4257efaa 100644
--- a/support/texlab/crates/texlab/src/util.rs
+++ b/support/texlab/crates/texlab/src/util.rs
@@ -1,6 +1,5 @@
pub mod capabilities;
pub mod chktex;
-pub mod components;
pub mod cursor;
pub mod diagnostics;
pub mod line_index_ext;
diff --git a/support/texlab/crates/texlab/src/util/components.rs b/support/texlab/crates/texlab/src/util/components.rs
deleted file mode 100644
index 9d51cd039d..0000000000
--- a/support/texlab/crates/texlab/src/util/components.rs
+++ /dev/null
@@ -1,116 +0,0 @@
-use std::io::Read;
-
-use base_db::{semantics::tex::LinkKind, Project};
-use flate2::read::GzDecoder;
-use itertools::Itertools;
-use lsp_types::{MarkupContent, MarkupKind};
-use once_cell::sync::Lazy;
-use serde::Deserialize;
-use smol_str::SmolStr;
-
-#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ComponentDatabase {
- pub components: Vec<Component>,
- pub metadata: Vec<ComponentMetadata>,
-}
-
-impl ComponentDatabase {
- pub fn find(&self, name: &str) -> Option<&Component> {
- self.components.iter().find(|component| {
- component
- .file_names
- .iter()
- .any(|file_name| file_name == name)
- })
- }
-
- pub fn linked_components(&self, project: &Project) -> Vec<&Component> {
- project
- .documents
- .iter()
- .filter_map(|document| document.data.as_tex())
- .flat_map(|data| data.semantics.links.iter())
- .filter_map(|link| match link.kind {
- LinkKind::Sty => Some(format!("{}.sty", link.path.text)),
- LinkKind::Cls => Some(format!("{}.cls", link.path.text)),
- _ => None,
- })
- .filter_map(|name| self.find(&name))
- .chain(std::iter::once(self.kernel()))
- .flat_map(|comp| {
- comp.references
- .iter()
- .filter_map(|name| self.find(name))
- .chain(std::iter::once(comp))
- })
- .unique_by(|comp| &comp.file_names)
- .collect()
- }
-
- pub fn kernel(&self) -> &Component {
- self.components
- .iter()
- .find(|component| component.file_names.is_empty())
- .unwrap()
- }
-
- pub fn documentation(&self, name: &str) -> Option<MarkupContent> {
- let metadata = self
- .metadata
- .iter()
- .find(|metadata| metadata.name == name)?;
-
- let desc = metadata.description.clone()?;
- Some(MarkupContent {
- kind: MarkupKind::PlainText,
- value: desc,
- })
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Component {
- pub file_names: Vec<SmolStr>,
- pub references: Vec<SmolStr>,
- pub commands: Vec<ComponentCommand>,
- pub environments: Vec<SmolStr>,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ComponentCommand {
- pub name: SmolStr,
- pub image: Option<String>,
- pub glyph: Option<SmolStr>,
- pub parameters: Vec<ComponentParameter>,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ComponentParameter(pub Vec<ComponentArgument>);
-
-#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ComponentArgument {
- pub name: SmolStr,
- pub image: Option<String>,
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct ComponentMetadata {
- pub name: String,
- pub caption: Option<String>,
- pub description: Option<String>,
-}
-
-const JSON_GZ: &[u8] = include_bytes!("../../data/components.json.gz");
-
-pub static COMPONENT_DATABASE: Lazy<ComponentDatabase> = Lazy::new(|| {
- let mut decoder = GzDecoder::new(JSON_GZ);
- let mut buf = String::new();
- decoder.read_to_string(&mut buf).unwrap();
- serde_json::from_str(&buf).unwrap()
-});
diff --git a/support/texlab/crates/texlab/src/util/cursor.rs b/support/texlab/crates/texlab/src/util/cursor.rs
index a39d0e7bf7..16b00776d4 100644
--- a/support/texlab/crates/texlab/src/util/cursor.rs
+++ b/support/texlab/crates/texlab/src/util/cursor.rs
@@ -295,4 +295,22 @@ impl<'a, T> CursorContext<'a, T> {
})
.or_else(|| Some((String::new(), TextRange::empty(self.offset), group)))
}
+
+ pub fn included_packages(&self) -> impl Iterator<Item = &completion_data::Package<'_>> + '_ {
+ let db = &completion_data::DATABASE;
+ self.project
+ .documents
+ .iter()
+ .filter_map(|document| document.data.as_tex())
+ .flat_map(|data| data.semantics.links.iter())
+ .filter_map(|link| link.package_name())
+ .filter_map(|name| db.find(&name))
+ .chain(std::iter::once(db.kernel()))
+ .flat_map(|pkg| {
+ pkg.references
+ .iter()
+ .filter_map(|name| db.find(name))
+ .chain(std::iter::once(pkg))
+ })
+ }
}
diff --git a/support/texlab/crates/texlab/src/util/diagnostics.rs b/support/texlab/crates/texlab/src/util/diagnostics.rs
index b6f1efbce9..4a26765897 100644
--- a/support/texlab/crates/texlab/src/util/diagnostics.rs
+++ b/support/texlab/crates/texlab/src/util/diagnostics.rs
@@ -1,106 +1,191 @@
-use base_db::{diagnostics::ErrorCode, util::filter_regex_patterns, Document, Workspace};
-use distro::Language;
-use lsp_types::{DiagnosticSeverity, NumberOrString};
+use base_db::{util::filter_regex_patterns, Document, Workspace};
+use diagnostics::{
+ types::{BibError, Diagnostic, DiagnosticData, TexError},
+ DiagnosticBuilder, DiagnosticSource,
+};
+use rowan::TextRange;
use rustc_hash::FxHashMap;
use syntax::BuildErrorLevel;
use super::line_index_ext::LineIndexExt;
-pub fn collect(workspace: &Workspace) -> FxHashMap<&Document, Vec<lsp_types::Diagnostic>> {
- let mut results = FxHashMap::default();
-
- for document in workspace.iter() {
- let lsp_diagnostics = document
- .diagnostics
- .iter()
- .map(|diagnostic| create_diagnostic(document, diagnostic))
- .collect::<Vec<_>>();
-
- results.insert(document, lsp_diagnostics);
- }
-
- for document in workspace
+pub fn collect<'db>(
+ workspace: &'db Workspace,
+ source: &mut dyn DiagnosticSource,
+) -> FxHashMap<&'db Document, Vec<lsp_types::Diagnostic>> {
+ let mut builder = DiagnosticBuilder::default();
+ source.publish(workspace, &mut builder);
+ builder
.iter()
- .filter(|document| document.language == Language::Log)
- {
- for (document, diagnostics) in base_db::diagnostics::log::analyze(workspace, document) {
- let lsp_diagnostics = diagnostics
- .iter()
- .map(|diagnostic| create_diagnostic(document, diagnostic))
+ .into_iter()
+ .filter_map(|(uri, diags)| workspace.lookup(uri).map(|document| (document, diags)))
+ .map(|(document, diags)| {
+ let diags = diags
+ .into_iter()
+ .map(|diag| create_diagnostic(workspace, document, &diag))
.collect::<Vec<_>>();
- results.get_mut(document).unwrap().extend(lsp_diagnostics);
- }
- }
-
- results
+ (document, diags)
+ })
+ .collect()
}
fn create_diagnostic(
+ workspace: &Workspace,
document: &Document,
- diagnostic: &base_db::diagnostics::Diagnostic,
+ diagnostic: &Diagnostic,
) -> lsp_types::Diagnostic {
let range = document.line_index.line_col_lsp_range(diagnostic.range);
- let severity = match &diagnostic.code {
- ErrorCode::UnexpectedRCurly
- | ErrorCode::RCurlyInserted
- | ErrorCode::MismatchedEnvironment
- | ErrorCode::ExpectingLCurly
- | ErrorCode::ExpectingKey
- | ErrorCode::ExpectingRCurly
- | ErrorCode::ExpectingEq
- | ErrorCode::ExpectingFieldValue => DiagnosticSeverity::ERROR,
- ErrorCode::Build(error) => match error.level {
- BuildErrorLevel::Error => DiagnosticSeverity::ERROR,
- BuildErrorLevel::Warning => DiagnosticSeverity::WARNING,
+ let severity = match &diagnostic.data {
+ DiagnosticData::Tex(error) => match error {
+ TexError::UnexpectedRCurly => lsp_types::DiagnosticSeverity::ERROR,
+ TexError::ExpectingRCurly => lsp_types::DiagnosticSeverity::ERROR,
+ TexError::MismatchedEnvironment => lsp_types::DiagnosticSeverity::ERROR,
+ TexError::UnusedLabel => lsp_types::DiagnosticSeverity::HINT,
+ TexError::UndefinedLabel => lsp_types::DiagnosticSeverity::ERROR,
+ TexError::UndefinedCitation => lsp_types::DiagnosticSeverity::ERROR,
+ TexError::DuplicateLabel(_) => lsp_types::DiagnosticSeverity::ERROR,
+ },
+ DiagnosticData::Bib(error) => match error {
+ BibError::ExpectingLCurly => lsp_types::DiagnosticSeverity::ERROR,
+ BibError::ExpectingKey => lsp_types::DiagnosticSeverity::ERROR,
+ BibError::ExpectingRCurly => lsp_types::DiagnosticSeverity::ERROR,
+ BibError::ExpectingEq => lsp_types::DiagnosticSeverity::ERROR,
+ BibError::ExpectingFieldValue => lsp_types::DiagnosticSeverity::ERROR,
+ BibError::UnusedEntry => lsp_types::DiagnosticSeverity::HINT,
+ BibError::DuplicateEntry(_) => lsp_types::DiagnosticSeverity::ERROR,
+ },
+ DiagnosticData::Build(error) => match error.level {
+ BuildErrorLevel::Error => lsp_types::DiagnosticSeverity::ERROR,
+ BuildErrorLevel::Warning => lsp_types::DiagnosticSeverity::WARNING,
},
};
- let code = match &diagnostic.code {
- ErrorCode::UnexpectedRCurly => Some(1),
- ErrorCode::RCurlyInserted => Some(2),
- ErrorCode::MismatchedEnvironment => Some(3),
- ErrorCode::ExpectingLCurly => Some(4),
- ErrorCode::ExpectingKey => Some(5),
- ErrorCode::ExpectingRCurly => Some(6),
- ErrorCode::ExpectingEq => Some(7),
- ErrorCode::ExpectingFieldValue => Some(8),
- ErrorCode::Build(_) => None,
+ let code = match &diagnostic.data {
+ DiagnosticData::Tex(error) => match error {
+ TexError::UnexpectedRCurly => Some(1),
+ TexError::ExpectingRCurly => Some(2),
+ TexError::MismatchedEnvironment => Some(3),
+ TexError::UnusedLabel => Some(9),
+ TexError::UndefinedLabel => Some(10),
+ TexError::UndefinedCitation => Some(11),
+ TexError::DuplicateLabel(_) => Some(14),
+ },
+ DiagnosticData::Bib(error) => match error {
+ BibError::ExpectingLCurly => Some(4),
+ BibError::ExpectingKey => Some(5),
+ BibError::ExpectingRCurly => Some(6),
+ BibError::ExpectingEq => Some(7),
+ BibError::ExpectingFieldValue => Some(8),
+ BibError::UnusedEntry => Some(12),
+ BibError::DuplicateEntry(_) => Some(13),
+ },
+ DiagnosticData::Build(_) => None,
};
- let source = match &diagnostic.code {
- ErrorCode::UnexpectedRCurly
- | ErrorCode::RCurlyInserted
- | ErrorCode::MismatchedEnvironment
- | ErrorCode::ExpectingLCurly
- | ErrorCode::ExpectingKey
- | ErrorCode::ExpectingRCurly
- | ErrorCode::ExpectingEq
- | ErrorCode::ExpectingFieldValue => "texlab",
- ErrorCode::Build(_) => "latex",
+ let source = match &diagnostic.data {
+ DiagnosticData::Tex(_) | DiagnosticData::Bib(_) => "texlab",
+ DiagnosticData::Build(_) => "latex",
};
- let message = String::from(match &diagnostic.code {
- ErrorCode::UnexpectedRCurly => "Unexpected \"}\"",
- ErrorCode::RCurlyInserted => "Missing \"}\" inserted",
- ErrorCode::MismatchedEnvironment => "Mismatched environment",
- ErrorCode::ExpectingLCurly => "Expecting a curly bracket: \"{\"",
- ErrorCode::ExpectingKey => "Expecting a key",
- ErrorCode::ExpectingRCurly => "Expecting a curly bracket: \"}\"",
- ErrorCode::ExpectingEq => "Expecting an equality sign: \"=\"",
- ErrorCode::ExpectingFieldValue => "Expecting a field value",
- ErrorCode::Build(error) => &error.message,
+ let message = String::from(match &diagnostic.data {
+ DiagnosticData::Tex(error) => match error {
+ TexError::UnexpectedRCurly => "Unexpected \"}\"",
+ TexError::ExpectingRCurly => "Expecting a curly bracket: \"}\"",
+ TexError::MismatchedEnvironment => "Mismatched environment",
+ TexError::UnusedLabel => "Unused label",
+ TexError::UndefinedLabel => "Undefined reference",
+ TexError::UndefinedCitation => "Undefined reference",
+ TexError::DuplicateLabel(_) => "Duplicate label",
+ },
+ DiagnosticData::Bib(error) => match error {
+ BibError::ExpectingLCurly => "Expecting a curly bracket: \"{\"",
+ BibError::ExpectingKey => "Expecting a key",
+ BibError::ExpectingRCurly => "Expecting a curly bracket: \"}\"",
+ BibError::ExpectingEq => "Expecting an equality sign: \"=\"",
+ BibError::ExpectingFieldValue => "Expecting a field value",
+ BibError::UnusedEntry => "Unused entry",
+ BibError::DuplicateEntry(_) => "Duplicate entry key",
+ },
+ DiagnosticData::Build(error) => &error.message,
});
+ let tags = match &diagnostic.data {
+ DiagnosticData::Tex(error) => match error {
+ TexError::UnexpectedRCurly => None,
+ TexError::ExpectingRCurly => None,
+ TexError::MismatchedEnvironment => None,
+ TexError::UnusedLabel => Some(vec![lsp_types::DiagnosticTag::UNNECESSARY]),
+ TexError::UndefinedLabel => None,
+ TexError::UndefinedCitation => None,
+ TexError::DuplicateLabel(_) => None,
+ },
+ DiagnosticData::Bib(error) => match error {
+ BibError::ExpectingLCurly => None,
+ BibError::ExpectingKey => None,
+ BibError::ExpectingRCurly => None,
+ BibError::ExpectingEq => None,
+ BibError::ExpectingFieldValue => None,
+ BibError::UnusedEntry => Some(vec![lsp_types::DiagnosticTag::UNNECESSARY]),
+ BibError::DuplicateEntry(_) => None,
+ },
+ DiagnosticData::Build(_) => None,
+ };
+
+ let related_information = match &diagnostic.data {
+ DiagnosticData::Tex(error) => match error {
+ TexError::UnexpectedRCurly => None,
+ TexError::ExpectingRCurly => None,
+ TexError::MismatchedEnvironment => None,
+ TexError::UnusedLabel => None,
+ TexError::UndefinedLabel => None,
+ TexError::UndefinedCitation => None,
+ TexError::DuplicateLabel(others) => make_conflict_info(workspace, others, "label"),
+ },
+ DiagnosticData::Bib(error) => match error {
+ BibError::ExpectingLCurly => None,
+ BibError::ExpectingKey => None,
+ BibError::ExpectingRCurly => None,
+ BibError::ExpectingEq => None,
+ BibError::ExpectingFieldValue => None,
+ BibError::UnusedEntry => None,
+ BibError::DuplicateEntry(others) => make_conflict_info(workspace, others, "entry"),
+ },
+ DiagnosticData::Build(_) => None,
+ };
+
lsp_types::Diagnostic {
severity: Some(severity),
- code: code.map(NumberOrString::Number),
+ code: code.map(lsp_types::NumberOrString::Number),
source: Some(String::from(source)),
+ tags,
+ related_information,
..lsp_types::Diagnostic::new_simple(range, message)
}
}
+fn make_conflict_info(
+ workspace: &Workspace,
+ locations: &Vec<(lsp_types::Url, TextRange)>,
+ object: &str,
+) -> Option<Vec<lsp_types::DiagnosticRelatedInformation>> {
+ let mut items = Vec::new();
+ for (uri, range) in locations {
+ let range = workspace
+ .lookup(uri)
+ .unwrap()
+ .line_index
+ .line_col_lsp_range(*range);
+
+ let message = format!("conflicting {object} defined here");
+ let location = lsp_types::Location::new(uri.clone(), range);
+ items.push(lsp_types::DiagnosticRelatedInformation { location, message });
+ }
+
+ Some(items)
+}
+
pub fn filter(
all_diagnostics: &mut FxHashMap<&Document, Vec<lsp_types::Diagnostic>>,
workspace: &Workspace,
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document.rs b/support/texlab/crates/texlab/tests/lsp/text_document.rs
index cfab1d04c8..88b6333877 100644
--- a/support/texlab/crates/texlab/tests/lsp/text_document.rs
+++ b/support/texlab/crates/texlab/tests/lsp/text_document.rs
@@ -1,11 +1,8 @@
mod completion;
-mod definition;
mod document_highlight;
mod document_link;
mod document_symbol;
mod folding_range;
mod formatting;
-mod hover;
mod inlay_hint;
-mod references;
mod rename;
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/definition.rs b/support/texlab/crates/texlab/tests/lsp/text_document/definition.rs
deleted file mode 100644
index 78eed633c8..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/definition.rs
+++ /dev/null
@@ -1,156 +0,0 @@
-use itertools::Itertools;
-use lsp_types::{
- request::GotoDefinition, ClientCapabilities, GotoDefinitionParams, GotoDefinitionResponse,
- LocationLink,
-};
-
-use crate::fixture::TestBed;
-
-fn check(fixture: &str) {
- let test_bed = TestBed::new(fixture).unwrap();
- test_bed.initialize(ClientCapabilities::default()).unwrap();
-
- let text_document_position_params = test_bed.cursor().unwrap();
- let cursor = text_document_position_params.position;
-
- let origin_selection = test_bed
- .locations()
- .iter()
- .filter(|location| location.uri == text_document_position_params.text_document.uri)
- .find(|location| cursor >= location.range.start && cursor <= location.range.end);
-
- let mut expected_links: Vec<_> = test_bed
- .locations()
- .iter()
- .filter(|location| Some(*location) != origin_selection)
- .batching(|it| {
- let target_selection_range = it.next()?.range;
- let target = it.next()?;
- Some(LocationLink {
- origin_selection_range: origin_selection.map(|sel| sel.range),
- target_uri: target.uri.clone(),
- target_range: target.range,
- target_selection_range,
- })
- })
- .collect();
-
- let mut actual_links = match test_bed
- .client()
- .send_request::<GotoDefinition>(GotoDefinitionParams {
- text_document_position_params,
- partial_result_params: Default::default(),
- work_done_progress_params: Default::default(),
- })
- .unwrap()
- {
- Some(GotoDefinitionResponse::Link(links)) => links,
- Some(GotoDefinitionResponse::Array(_)) => unreachable!(),
- Some(GotoDefinitionResponse::Scalar(_)) => unreachable!(),
- None => Vec::new(),
- };
-
- sort_links(&mut actual_links);
- sort_links(&mut expected_links);
- assert_eq!(actual_links, expected_links);
-}
-
-fn sort_links(links: &mut Vec<LocationLink>) {
- links.sort_by(|a, b| {
- let left = (&a.target_uri, a.target_range.start);
- let right = (&b.target_uri, b.target_range.start);
- left.cmp(&right)
- });
-}
-
-#[test]
-fn command_definition() {
- check(
- r#"
-%! main.tex
-\DeclareMathOperator{\foo}{foo}
- ^^^^
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-\foo
- |
-^^^^"#,
- )
-}
-
-#[test]
-fn document() {
- check(
- r#"
-%! foo.tex
-\addbibresource{baz.bib}
- |
- ^^^^^^^
-
-%! bar.bib
-@article{foo, bar = {baz}}
-
-%! baz.bib
-@article{foo, bar = {baz}}
-!
-!"#,
- )
-}
-
-#[test]
-fn entry() {
- check(
- r#"
-%! foo.tex
-\addbibresource{baz.bib}
-\cite{foo}
- |
- ^^^
-
-%! bar.bib
-@article{foo, bar = {baz}}
-
-%! baz.bib
-@article{foo, bar = {baz}}
- ^^^
-^^^^^^^^^^^^^^^^^^^^^^^^^^"#,
- )
-}
-
-#[test]
-fn string_simple() {
- check(
- r#"
-%! main.bib
-@string{foo = {bar}}
- ^^^
-^^^^^^^^^^^^^^^^^^^^
-@article{bar, author = foo}
- |
- ^^^"#,
- )
-}
-
-#[test]
-fn string_join() {
- check(
- r#"
-%! main.bib
-@string{foo = {bar}}
- ^^^
-^^^^^^^^^^^^^^^^^^^^
-@article{bar, author = foo # "bar"}
- |
- ^^^"#,
- )
-}
-
-#[test]
-fn string_field() {
- check(
- r#"
-%! main.bib
-@string{foo = {bar}}
-@article{bar, author = foo # "bar"}
- |"#,
- )
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/hover.rs b/support/texlab/crates/texlab/tests/lsp/text_document/hover.rs
deleted file mode 100644
index 5dafceb781..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/hover.rs
+++ /dev/null
@@ -1,246 +0,0 @@
-use insta::assert_json_snapshot;
-use lsp_types::{request::HoverRequest, ClientCapabilities, HoverContents, HoverParams};
-
-use crate::fixture::TestBed;
-
-fn find_hover(fixture: &str) -> Option<HoverContents> {
- let test_bed = TestBed::new(fixture).unwrap();
-
- test_bed.initialize(ClientCapabilities::default()).unwrap();
-
- let text_document_position_params = test_bed.cursor().unwrap();
-
- test_bed
- .client()
- .send_request::<HoverRequest>(HoverParams {
- text_document_position_params,
- work_done_progress_params: Default::default(),
- })
- .unwrap()
- .map(|hover| {
- assert_eq!(hover.range, Some(test_bed.locations()[0].range));
- hover.contents
- })
-}
-
-#[test]
-fn empty_latex_document() {
- assert_eq!(
- find_hover(
- r#"
-%! main.tex
-
-|"#
- ),
- None,
- );
-}
-
-#[test]
-fn empty_bibtex_document() {
- assert_eq!(
- find_hover(
- r#"
-%! main.bib
-
-|"#
- ),
- None,
- );
-}
-
-#[test]
-fn citation_inside_cite() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.bib
-@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}
-
-%! main.tex
-\addbibresource{main.bib}
-\cite{foo}
- |
- ^^^"#
- ));
-}
-
-#[test]
-fn citation_inside_entry() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.bib
-@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}
- |
- ^^^
-
-%! main.tex
-\addbibresource{main.bib}
-\cite{foo}"#
- ));
-}
-
-#[test]
-fn component_known_package() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.tex
-\usepackage{amsmath}
- |
- ^^^^^^^"#
- ));
-}
-
-#[test]
-fn component_unknown_class() {
- assert_eq!(
- find_hover(
- r#"
-%! main.tex
-\documentclass{abcdefghijklmnop}
- |"#
- ),
- None,
- );
-}
-
-#[test]
-fn entry_type_known_type() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.bib
-@article{foo,}
- |
-^^^^^^^^"#
- ));
-}
-
-#[test]
-fn entry_type_unknown_field() {
- assert_eq!(
- find_hover(
- r#"
-%! main.bib
-@foo{bar,}
- |"#
- ),
- None,
- );
-}
-
-#[test]
-fn entry_type_key() {
- assert_eq!(
- find_hover(
- r#"
-%! main.bib
-@foo{bar,}
- |"#
- ),
- None,
- );
-}
-
-#[test]
-fn field_known() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.bib
-@article{foo, author = bar}
- |
- ^^^^^^"#
- ));
-}
-
-#[test]
-fn field_unknown() {
- assert_eq!(
- find_hover(
- r#"
-%! main.bib
-@article{foo, bar = baz}
- |"#
- ),
- None,
- );
-}
-
-#[test]
-fn section() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.tex
-\section{Foo}
-\label{sec:foo}
- |
- ^^^^^^^"#,
- ));
-}
-
-#[test]
-fn string_inside_reference() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.bib
-@string{foo = "Foo"}
-@string{bar = "Bar"}
-@article{baz, author = bar}
- |
- ^^^"#
- ));
-}
-
-#[test]
-fn string_inside_field() {
- assert_eq!(
- find_hover(
- r#"
-%! main.bib
-@string{foo = "Foo"}
-@string{bar = "Bar"}
-@article{baz, author = bar}
- |"#
- ),
- None,
- );
-}
-
-#[test]
-fn label_theorem_child_file() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.tex
-\documentclass{article}
-\newtheorem{lemma}{Lemma}
-\include{child}
-\ref{thm:foo}
- |
- ^^^^^^^
-
-%! child.tex
-\begin{lemma}\label{thm:foo}
- 1 + 1 = 2
-\end{lemma}"#
- ));
-}
-
-#[test]
-fn label_theorem_child_file_mumber() {
- assert_json_snapshot!(find_hover(
- r#"
-%! main.tex
-\documentclass{article}
-\newtheorem{lemma}{Lemma}
-\include{child}
-\ref{thm:foo}
- |
- ^^^^^^^
-
-%! child.tex
-\begin{lemma}[Foo]\label{thm:foo}
- 1 + 1 = 2
-\end{lemma}
-
-%! child.aux
-\newlabel{thm:foo}{{1}{1}{Foo}{lemma.1}{}}"#
- ));
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/references.rs b/support/texlab/crates/texlab/tests/lsp/text_document/references.rs
deleted file mode 100644
index 9933babbd4..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/references.rs
+++ /dev/null
@@ -1,270 +0,0 @@
-use lsp_types::{
- request::References, ClientCapabilities, Location, ReferenceContext, ReferenceParams,
-};
-
-use crate::fixture::TestBed;
-
-fn sort(locations: &mut Vec<Location>) {
- locations.sort_by(|a, b| (&a.uri, a.range.start).cmp(&(&b.uri, b.range.start)));
-}
-
-fn check(fixture: &str, context: ReferenceContext) {
- let test_bed = TestBed::new(fixture).unwrap();
-
- test_bed.initialize(ClientCapabilities::default()).unwrap();
-
- let text_document_position = test_bed.cursor().unwrap();
-
- let mut expected = test_bed.locations().to_vec();
-
- let mut actual = test_bed
- .client()
- .send_request::<References>(ReferenceParams {
- text_document_position,
- context,
- partial_result_params: Default::default(),
- work_done_progress_params: Default::default(),
- })
- .unwrap()
- .unwrap_or_default();
-
- sort(&mut actual);
- sort(&mut expected);
- assert_eq!(actual, expected);
-}
-
-#[test]
-fn entry_definition() {
- check(
- r#"
-%! foo.bib
-@article{foo,}
- |
-
-%! bar.tex
-\cite{foo}
- ^^^
-\addbibresource{foo.bib}
-"#,
- ReferenceContext {
- include_declaration: false,
- },
- )
-}
-
-#[test]
-fn entry_definition_include_decl() {
- check(
- r#"
-%! foo.bib
-@article{foo,}
- |
- ^^^
-
-%! bar.tex
-\cite{foo}
- ^^^
-\addbibresource{foo.bib}
-"#,
- ReferenceContext {
- include_declaration: true,
- },
- )
-}
-
-#[test]
-fn entry_reference() {
- check(
- r#"
-%! foo.bib
-@article{foo,}
-
-%! bar.tex
-\cite{foo}
- |
- ^^^
-\addbibresource{foo.bib}
-"#,
- ReferenceContext {
- include_declaration: false,
- },
- )
-}
-
-#[test]
-fn entry_reference_include_decl() {
- check(
- r#"
-%! foo.bib
-@article{foo,}
- ^^^
-
-%! bar.tex
-\cite{foo}
- |
- ^^^
-\addbibresource{foo.bib}
-"#,
- ReferenceContext {
- include_declaration: true,
- },
- )
-}
-
-#[test]
-fn label_definition() {
- check(
- r#"
-%! foo.tex
-\label{foo}
- |
-
-%! bar.tex
-\ref{foo}
- ^^^
-\input{foo.tex}
-"#,
- ReferenceContext {
- include_declaration: false,
- },
- )
-}
-
-#[test]
-fn label_definition_include_decl() {
- check(
- r#"
-%! foo.tex
-\label{foo}
- |
- ^^^
-
-%! bar.tex
-\ref{foo}
- ^^^
-\input{foo.tex}
-"#,
- ReferenceContext {
- include_declaration: true,
- },
- )
-}
-
-#[test]
-fn label_reference() {
- check(
- r#"
-%! foo.tex
-\label{foo}
-\input{bar.tex}
-
-%! bar.tex
-\ref{foo}
- |
- ^^^
-
-%! baz.tex
-\ref{foo}
- ^^^
-\input{bar.tex}
-"#,
- ReferenceContext {
- include_declaration: false,
- },
- )
-}
-
-#[test]
-fn label_reference_include_decl() {
- check(
- r#"
-%! foo.tex
-\label{foo}
- ^^^
-\input{bar.tex}
-
-%! bar.tex
-\ref{foo}
- |
- ^^^
-
-%! baz.tex
-\ref{foo}
- ^^^
-\input{bar.tex}
-"#,
- ReferenceContext {
- include_declaration: true,
- },
- )
-}
-
-#[test]
-fn string_reference() {
- check(
- r#"
-%! main.bib
-@string{foo = {Foo}}
-@string{bar = {Bar}}
-@article{baz, author = foo}
- |
- ^^^
-"#,
- ReferenceContext {
- include_declaration: false,
- },
- )
-}
-
-#[test]
-fn string_reference_include_decl() {
- check(
- r#"
-%! main.bib
-@string{foo = {Foo}}
- ^^^
-@string{bar = {Bar}}
-@article{baz, author = foo}
- |
- ^^^
-"#,
- ReferenceContext {
- include_declaration: true,
- },
- )
-}
-
-#[test]
-fn string_definition() {
- check(
- r#"
-%! main.bib
-@string{foo = {Foo}}
- |
-@string{bar = {Bar}}
-@article{baz, author = foo}
- ^^^
-"#,
- ReferenceContext {
- include_declaration: false,
- },
- )
-}
-
-#[test]
-fn string_definition_include_decl() {
- check(
- r#"
-%! main.bib
-@string{foo = {Foo}}
- |
- ^^^
-@string{bar = {Bar}}
-@article{baz, author = foo}
- ^^^
-"#,
- ReferenceContext {
- include_declaration: true,
- },
- )
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_cite.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_cite.snap
deleted file mode 100644
index 6e6c6b264c..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_cite.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.bib\n@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}\n\n%! main.tex\n\\addbibresource{main.bib}\n\\cite{foo}\n |\n ^^^\"#)"
----
-{
- "kind": "markdown",
- "value": "F. Bar: \"Baz Qux\". (1337)."
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_entry.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_entry.snap
deleted file mode 100644
index c13c16051d..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__citation_inside_entry.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.bib\n@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}\n |\n ^^^\n\n%! main.tex\n\\addbibresource{main.bib}\n\\cite{foo}\"#)"
----
-{
- "kind": "markdown",
- "value": "F. Bar: \"Baz Qux\". (1337)."
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__component_known_package.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__component_known_package.snap
deleted file mode 100644
index ea1801d4ed..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__component_known_package.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.tex\n\\usepackage{amsmath}\n |\n ^^^^^^^\"#)"
----
-{
- "kind": "plaintext",
- "value": "The package provides the principal packages in the AMS-LaTeX distribution. It adapts for use in LaTeX most of the mathematical features found in AMS-TeX; it is highly recommended as an adjunct to serious mathematical typesetting in LaTeX. When amsmath is loaded, AMS-LaTeX packages amsbsy (for bold symbols), amsopn (for operator names) and amstext (for text embedded in mathematics) are also loaded. amsmath is part of the LaTeX required distribution; however, several contributed packages add still further to its appeal; examples are empheq, which provides functions for decorating and highlighting mathematics, and ntheorem, for specifying theorem (and similar) definitions."
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__entry_type_known_type.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__entry_type_known_type.snap
deleted file mode 100644
index f3b146fb21..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__entry_type_known_type.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.bib\n@article{foo,}\n |\n^^^^^^^^\"#)"
----
-{
- "kind": "markdown",
- "value": "An article in a journal, magazine, newspaper, or other periodical which forms a \n self-contained unit with its own title. The title of the periodical is given in the \n journaltitle field. If the issue has its own title in addition to the main title of \n the periodical, it goes in the issuetitle field. Note that editor and related \n fields refer to the journal while translator and related fields refer to the article.\n\nRequired fields: `author`, `title`, `journaltitle`, `year/date`"
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__field_known.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__field_known.snap
deleted file mode 100644
index 42d630c287..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__field_known.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.bib\n@article{foo, author = bar}\n |\n ^^^^^^\"#)"
----
-{
- "kind": "markdown",
- "value": "The author(s) of the `title`."
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file.snap
deleted file mode 100644
index 19b36c4ebb..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.tex\n\\documentclass{article}\n\\newtheorem{lemma}{Lemma}\n\\include{child}\n\\ref{thm:foo}\n |\n ^^^^^^^\n\n%! child.tex\n\\begin{lemma}\\label{thm:foo}\n 1 + 1 = 2\n\\end{lemma}\"#)"
----
-{
- "kind": "plaintext",
- "value": "Lemma"
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file_mumber.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file_mumber.snap
deleted file mode 100644
index 78ccef8e46..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__label_theorem_child_file_mumber.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.tex\n\\documentclass{article}\n\\newtheorem{lemma}{Lemma}\n\\include{child}\n\\ref{thm:foo}\n |\n ^^^^^^^\n\n%! child.tex\n\\begin{lemma}[Foo]\\label{thm:foo}\n 1 + 1 = 2\n\\end{lemma}\n\n%! child.aux\n\\newlabel{thm:foo}{{1}{1}{Foo}{lemma.1}{}}\"#)"
----
-{
- "kind": "plaintext",
- "value": "Lemma 1 (Foo)"
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__section.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__section.snap
deleted file mode 100644
index c8aa265330..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__section.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.tex\n\\section{Foo}\n\\label{sec:foo}\n |\n ^^^^^^^\"#)"
----
-{
- "kind": "plaintext",
- "value": "Section (Foo)"
-}
diff --git a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__string_inside_reference.snap b/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__string_inside_reference.snap
deleted file mode 100644
index cfcc196e26..0000000000
--- a/support/texlab/crates/texlab/tests/lsp/text_document/snapshots/lsp__text_document__hover__string_inside_reference.snap
+++ /dev/null
@@ -1,8 +0,0 @@
----
-source: tests/lsp/text_document/hover.rs
-expression: "find_hover(r#\"\n%! main.bib\n@string{foo = \"Foo\"}\n@string{bar = \"Bar\"}\n@article{baz, author = bar}\n |\n ^^^\"#)"
----
-{
- "kind": "plaintext",
- "value": "Bar"
-}
diff --git a/support/texlab/texlab.1 b/support/texlab/texlab.1
index 4042ae0f1e..7b58ac6af1 100644
--- a/support/texlab/texlab.1
+++ b/support/texlab/texlab.1
@@ -1,7 +1,7 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.47.13.
-.TH TEXLAB "1" "June 2023" "texlab 5.7.0" "User Commands"
+.TH TEXLAB "1" "July 2023" "texlab 5.8.0" "User Commands"
.SH NAME
-texlab \- manual page for texlab 5.7.0
+texlab \- manual page for texlab 5.8.0
.SH SYNOPSIS
.B texlab
[\fI\,OPTIONS\/\fR]
diff --git a/support/texlab/texlab.pdf b/support/texlab/texlab.pdf
index ed15c250f5..927983b23e 100644
--- a/support/texlab/texlab.pdf
+++ b/support/texlab/texlab.pdf
Binary files differ