author     Norbert Preining <norbert@preining.info>    2023-10-01 03:01:18 +0000
committer  Norbert Preining <norbert@preining.info>    2023-10-01 03:01:18 +0000
commit     8a05034cb5d7ae61b2de6857258caa3e43b496aa (patch)
tree       b9c266e7f98a034ed060178b3c0c162598e68948 /support/texlab/crates/completion/src
parent     61bc2520e547446bddedc290572684496c9347a0 (diff)
CTAN sync 202310010301
Diffstat (limited to 'support/texlab/crates/completion/src')
-rw-r--r--  support/texlab/crates/completion/src/lib.rs                      187
-rw-r--r--  support/texlab/crates/completion/src/providers.rs                 27
-rw-r--r--  support/texlab/crates/completion/src/providers/argument.rs        76
-rw-r--r--  support/texlab/crates/completion/src/providers/citations.rs       71
-rw-r--r--  support/texlab/crates/completion/src/providers/color.rs          116
-rw-r--r--  support/texlab/crates/completion/src/providers/color_model.rs     50
-rw-r--r--  support/texlab/crates/completion/src/providers/command.rs        134
-rw-r--r--  support/texlab/crates/completion/src/providers/entry_type.rs      40
-rw-r--r--  support/texlab/crates/completion/src/providers/environment.rs    135
-rw-r--r--  support/texlab/crates/completion/src/providers/field.rs           66
-rw-r--r--  support/texlab/crates/completion/src/providers/glossary.rs        88
-rw-r--r--  support/texlab/crates/completion/src/providers/import.rs          69
-rw-r--r--  support/texlab/crates/completion/src/providers/include.rs        157
-rw-r--r--  support/texlab/crates/completion/src/providers/label.rs          101
-rw-r--r--  support/texlab/crates/completion/src/providers/tikz_library.rs   149
-rw-r--r--  support/texlab/crates/completion/src/tests.rs                   2071
-rw-r--r--  support/texlab/crates/completion/src/util.rs                      33
-rw-r--r--  support/texlab/crates/completion/src/util/builder.rs              43
-rw-r--r--  support/texlab/crates/completion/src/util/matchers.rs             42
-rw-r--r--  support/texlab/crates/completion/src/util/patterns.rs             75
20 files changed, 3730 insertions(+), 0 deletions(-)
diff --git a/support/texlab/crates/completion/src/lib.rs b/support/texlab/crates/completion/src/lib.rs
new file mode 100644
index 0000000000..6acec28d49
--- /dev/null
+++ b/support/texlab/crates/completion/src/lib.rs
@@ -0,0 +1,187 @@
+mod providers;
+mod util;
+
+use base_db::{
+ data::{BibtexEntryType, BibtexFieldType},
+ semantics::bib,
+ util::RenderedObject,
+ Document, FeatureParams,
+};
+use rowan::{TextRange, TextSize};
+use util::CompletionBuilder;
+
+pub const LIMIT: usize = 50;
+
+#[derive(Debug)]
+pub struct CompletionParams<'a> {
+ pub feature: FeatureParams<'a>,
+ pub offset: TextSize,
+}
+
+#[derive(Debug, Default)]
+pub struct CompletionResult<'a> {
+ pub items: Vec<CompletionItem<'a>>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct CompletionItem<'a> {
+ pub score: i32,
+ pub range: TextRange,
+ pub preselect: bool,
+ pub data: CompletionItemData<'a>,
+}
+
+impl<'a> CompletionItem<'a> {
+ pub fn new_simple(score: i32, range: TextRange, data: CompletionItemData<'a>) -> Self {
+ Self {
+ score,
+ range,
+ preselect: false,
+ data,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum CompletionItemData<'a> {
+ Command(CommandData<'a>),
+ BeginEnvironment,
+ Citation(CitationData<'a>),
+ Environment(EnvironmentData<'a>),
+ GlossaryEntry(GlossaryEntryData),
+ Label(LabelData<'a>),
+ Color(&'a str),
+ ColorModel(&'a str),
+ File(String),
+ Directory(String),
+ Argument(ArgumentData<'a>),
+ Package(&'a str),
+ DocumentClass(&'a str),
+ EntryType(EntryTypeData<'a>),
+ Field(FieldTypeData<'a>),
+ TikzLibrary(&'a str),
+}
+
+impl<'a> CompletionItemData<'a> {
+ pub fn label<'b: 'a>(&'b self) -> &'a str {
+ match self {
+ Self::Command(data) => data.name,
+ Self::BeginEnvironment => "begin",
+ Self::Citation(data) => &data.entry.name.text,
+ Self::Environment(data) => data.name,
+ Self::GlossaryEntry(data) => &data.name,
+ Self::Label(data) => data.name,
+ Self::Color(name) => name,
+ Self::ColorModel(name) => name,
+ Self::File(name) => &name,
+ Self::Directory(name) => &name,
+ Self::Argument(data) => &data.0.name,
+ Self::Package(name) => name,
+ Self::DocumentClass(name) => name,
+ Self::EntryType(data) => data.0.name,
+ Self::Field(data) => data.0.name,
+ Self::TikzLibrary(name) => name,
+ }
+ }
+}
+
+#[derive(PartialEq, Eq)]
+pub struct CommandData<'a> {
+ pub name: &'a str,
+ pub glyph: Option<&'a str>,
+ pub image: Option<&'a str>,
+ pub package: Option<&'a completion_data::Package<'a>>,
+}
+
+impl<'a> std::fmt::Debug for CommandData<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("CommandData")
+ .field("name", &self.name)
+ .field("package", self.package.map_or(&"<user>", |p| &p.file_names))
+ .finish()
+ }
+}
+
+#[derive(PartialEq, Eq)]
+pub struct EnvironmentData<'a> {
+ pub name: &'a str,
+ pub package: Option<&'a completion_data::Package<'a>>,
+}
+
+impl<'a> std::fmt::Debug for EnvironmentData<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("EnvironmentData")
+ .field("name", &self.name)
+ .field("package", self.package.map_or(&"<user>", |p| &p.file_names))
+ .finish()
+ }
+}
+
+#[derive(PartialEq, Eq)]
+pub struct ArgumentData<'a>(pub &'a completion_data::Argument<'a>);
+
+impl<'a> std::fmt::Debug for ArgumentData<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_tuple("ArgumentData").field(&self.0.name).finish()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct CitationData<'a> {
+ pub document: &'a Document,
+ pub entry: &'a bib::Entry,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct GlossaryEntryData {
+ pub name: String,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct LabelData<'a> {
+ pub name: &'a str,
+ pub header: Option<String>,
+ pub footer: Option<&'a str>,
+ pub object: Option<RenderedObject<'a>>,
+ pub keywords: String,
+}
+
+#[derive(PartialEq, Eq)]
+pub struct EntryTypeData<'a>(pub BibtexEntryType<'a>);
+
+impl<'a> std::fmt::Debug for EntryTypeData<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_tuple("EntryTypeData").field(&self.0.name).finish()
+ }
+}
+
+#[derive(PartialEq, Eq)]
+pub struct FieldTypeData<'a>(pub BibtexFieldType<'a>);
+
+impl<'a> std::fmt::Debug for FieldTypeData<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_tuple("FieldTypeData").field(&self.0.name).finish()
+ }
+}
+
+pub fn complete<'a>(params: &'a CompletionParams<'a>) -> CompletionResult<'a> {
+ let mut builder = CompletionBuilder::from(params.feature.workspace);
+ providers::complete_commands(params, &mut builder);
+ providers::complete_environments(params, &mut builder);
+ providers::complete_citations(params, &mut builder);
+ providers::complete_acronyms(params, &mut builder);
+ providers::complete_glossaries(params, &mut builder);
+ providers::complete_labels(params, &mut builder);
+ providers::complete_colors(params, &mut builder);
+ providers::complete_color_models(params, &mut builder);
+ providers::complete_includes(params, &mut builder);
+ providers::complete_arguments(params, &mut builder);
+ providers::complete_imports(params, &mut builder);
+ providers::complete_entry_types(params, &mut builder);
+ providers::complete_fields(params, &mut builder);
+ providers::complete_tikz_libraries(params, &mut builder);
+ builder.finish()
+}
+
+#[cfg(test)]
+mod tests;
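A minimal sketch (not part of the patch) of how this entry point is driven; it mirrors the `check` helper in tests.rs further down in this diff and assumes the same `test_utils` fixture syntax:

    use base_db::FeatureParams;
    use crate::CompletionParams;

    fn run(input: &str) {
        let fixture = test_utils::fixture::Fixture::parse(input);
        // The fixture marks the cursor position with `|`.
        let (offset, spec) = fixture
            .documents
            .iter()
            .find_map(|document| Some((document.cursor?, document)))
            .unwrap();

        let document = fixture.workspace.lookup(&spec.uri).unwrap();
        let feature = FeatureParams::new(&fixture.workspace, document);
        let params = CompletionParams { feature, offset };
        let result = crate::complete(&params);
        // Each item carries a score, a replacement range, and provider-specific data.
        for item in result.items.iter().take(5) {
            println!("{:?}", item.data.label());
        }
    }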
diff --git a/support/texlab/crates/completion/src/providers.rs b/support/texlab/crates/completion/src/providers.rs
new file mode 100644
index 0000000000..5160c85d85
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers.rs
@@ -0,0 +1,27 @@
+mod argument;
+mod citations;
+mod color;
+mod color_model;
+mod command;
+mod entry_type;
+mod environment;
+mod field;
+mod glossary;
+mod import;
+mod include;
+mod label;
+mod tikz_library;
+
+pub use argument::complete_arguments;
+pub use citations::complete_citations;
+pub use color::complete_colors;
+pub use color_model::complete_color_models;
+pub use command::complete_commands;
+pub use entry_type::complete_entry_types;
+pub use environment::complete_environments;
+pub use field::complete_fields;
+pub use glossary::{complete_acronyms, complete_glossaries};
+pub use import::complete_imports;
+pub use include::complete_includes;
+pub use label::complete_labels;
+pub use tikz_library::complete_tikz_libraries;
diff --git a/support/texlab/crates/completion/src/providers/argument.rs b/support/texlab/crates/completion/src/providers/argument.rs
new file mode 100644
index 0000000000..6637cecdd6
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/argument.rs
@@ -0,0 +1,76 @@
+use base_db::semantics::Span;
+use rowan::{ast::AstNode, TokenAtOffset};
+use syntax::latex;
+
+use crate::{
+ util::{included_packages, is_inside_latex_curly, CompletionBuilder},
+ ArgumentData, CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_arguments<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let (cursor, group) = find_argument(params)?;
+
+ let command = latex::GenericCommand::cast(group.syntax().parent()?)?;
+
+ let index = command
+ .syntax()
+ .children()
+ .filter_map(latex::CurlyGroup::cast)
+ .position(|g| g.syntax().text_range() == group.syntax().text_range())?;
+
+ let command_name = command.name()?;
+ let command_name = &command_name.text()[1..];
+
+ for package in included_packages(&params.feature) {
+ for package_command in package
+ .commands
+ .iter()
+ .filter(|command| command.name == command_name)
+ {
+ for (_, param) in package_command
+ .parameters
+ .iter()
+ .enumerate()
+ .filter(|(i, _)| *i == index)
+ {
+ for arg in &param.0 {
+ if let Some(score) = builder.matcher.score(&arg.name, &cursor.text) {
+ let data = CompletionItemData::Argument(ArgumentData(arg));
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+ }
+ }
+ }
+
+ Some(())
+}
+
+fn find_argument(params: &CompletionParams) -> Option<(Span, latex::CurlyGroup)> {
+ let data = params.feature.document.data.as_tex()?;
+ let tokens = data.root_node().token_at_offset(params.offset);
+
+ let (span, token) = match tokens.clone().find(|token| token.kind() == latex::WORD) {
+ Some(token) => (Span::from(&token), token),
+ None if matches!(tokens, TokenAtOffset::Between(_, _)) => {
+ (Span::empty(params.offset), tokens.left_biased()?)
+ }
+ None => return None,
+ };
+
+ let group = latex::CurlyGroup::cast(token.parent()?)
+ .or_else(|| {
+ token
+ .parent()
+ .and_then(|node| node.parent())
+ .and_then(latex::CurlyGroup::cast)
+ })
+ .filter(|group| is_inside_latex_curly(group, params.offset))?;
+
+ Some((span, group))
+}
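A standalone sketch (plain Rust, illustrative names only) of the positional check above: the curly group under the cursor must sit at the same index as the parameter whose values are offered.

    fn main() {
        // Hypothetical curly groups of one command; the empty one is under the cursor.
        let groups = ["A", "", "C"];
        let index = groups.iter().position(|g| g.is_empty()).unwrap();
        // Only parameters whose position matches `index` contribute items.
        assert_eq!(index, 1);
    }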
diff --git a/support/texlab/crates/completion/src/providers/citations.rs b/support/texlab/crates/completion/src/providers/citations.rs
new file mode 100644
index 0000000000..631a899203
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/citations.rs
@@ -0,0 +1,71 @@
+use base_db::semantics::Span;
+use rayon::prelude::{IntoParallelRefIterator, ParallelExtend, ParallelIterator};
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word_list, CompletionBuilder},
+ CitationData, CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_citations<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_citation(params)?;
+
+ for document in &params.feature.project.documents {
+ if let Some(data) = document.data.as_bib() {
+ let items = data.semantics.entries.par_iter().filter_map(|entry| {
+ let score = builder.matcher.score(&entry.name.text, &cursor.text)?;
+ let data = CompletionItemData::Citation(CitationData { document, entry });
+ Some(CompletionItem::new_simple(score, cursor.range, data))
+ });
+
+ builder.items.par_extend(items);
+ }
+ }
+
+ Some(())
+}
+
+fn find_citation(params: &CompletionParams) -> Option<Span> {
+ find_citation_command(params).or_else(|| find_citation_acronym(params))
+}
+
+fn find_citation_command(params: &CompletionParams) -> Option<Span> {
+ let (span, group) = find_curly_group_word_list(params)?;
+ latex::Citation::cast(group.syntax().parent()?)?;
+ Some(span)
+}
+
+fn find_citation_acronym(params: &CompletionParams) -> Option<Span> {
+ let offset = params.offset;
+ let data = params.feature.document.data.as_tex()?;
+ let root = data.root_node();
+ let tokens = root.token_at_offset(offset);
+ let token = tokens
+ .clone()
+ .find(|token| token.kind() == latex::WORD)
+ .or_else(|| tokens.left_biased())?;
+
+ let span = if token.kind() == latex::WORD {
+ let name = latex::Text::cast(token.parent()?)?;
+ Span::new(token.text().into(), latex::small_range(&name))
+ } else {
+ Span::empty(offset)
+ };
+
+ let pair = token
+ .parent_ancestors()
+ .find_map(latex::KeyValuePair::cast)?;
+
+ if pair.key()?.to_string() == "cite" {
+ let body = pair.syntax().parent()?;
+ let group = body.parent()?;
+ latex::AcronymDeclaration::cast(group.parent()?)?;
+ Some(span)
+ } else {
+ None
+ }
+}
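A standalone sketch (not part of the patch) of the rayon pattern used above; the substring `find` stands in for the real fuzzy matcher, and the entry names are made up:

    use rayon::prelude::*;

    fn main() {
        let entries = vec!["knuth:1984", "lamport:1994", "foo:2019"];
        let query = "foo";
        let mut items: Vec<(usize, &str)> = Vec::new();
        // par_iter + filter_map + par_extend, as in complete_citations.
        items.par_extend(
            entries
                .par_iter()
                .filter_map(|name| name.find(query).map(|score| (score, *name))),
        );
        assert_eq!(items, vec![(0, "foo:2019")]);
    }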
diff --git a/support/texlab/crates/completion/src/providers/color.rs b/support/texlab/crates/completion/src/providers/color.rs
new file mode 100644
index 0000000000..cbdd38dc32
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/color.rs
@@ -0,0 +1,116 @@
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word, CompletionBuilder},
+ CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_colors<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let (cursor, group) = find_curly_group_word(params)?;
+ latex::ColorReference::cast(group.syntax().parent()?)?;
+
+ for name in COLORS {
+ if let Some(score) = builder.matcher.score(&name, &cursor.text) {
+ let data = CompletionItemData::Color(name);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ Some(())
+}
+
+const COLORS: &[&str] = &[
+ "black",
+ "blue",
+ "brown",
+ "cyan",
+ "darkgray",
+ "gray",
+ "green",
+ "lightgray",
+ "lime",
+ "magenta",
+ "olive",
+ "orange",
+ "pink",
+ "purple",
+ "red",
+ "teal",
+ "violet",
+ "white",
+ "yellow",
+ "Apricot",
+ "Bittersweet",
+ "Blue",
+ "BlueViolet",
+ "Brown",
+ "CadetBlue",
+ "Cerulean",
+ "Cyan",
+ "DarkOrchid",
+ "ForestGreen",
+ "Goldenrod",
+ "Green",
+ "JungleGreen",
+ "LimeGreen",
+ "Mahogany",
+ "Melon",
+ "Mulberry",
+ "OliveGreen",
+ "OrangeRed",
+ "Peach",
+ "PineGreen",
+ "ProcessBlue",
+ "RawSienna",
+ "RedOrange",
+ "Rhodamine",
+ "RoyalPurple",
+ "Salmon",
+ "Sepia",
+ "SpringGreen",
+ "TealBlue",
+ "Turquoise",
+ "VioletRed",
+ "WildStrawberry",
+ "YellowGreen",
+ "Aquamarine",
+ "Black",
+ "BlueGreen",
+ "BrickRed",
+ "BurntOrange",
+ "CarnationPink",
+ "CornflowerBlue",
+ "Dandelion",
+ "Emerald",
+ "Fuchsia",
+ "Gray",
+ "GreenYellow",
+ "Lavender",
+ "Magenta",
+ "Maroon",
+ "MidnightBlue",
+ "NavyBlue",
+ "Orange",
+ "Orchid",
+ "Periwinkle",
+ "Plum",
+ "Purple",
+ "Red",
+ "RedViolet",
+ "RoyalBlue",
+ "RubineRed",
+ "SeaGreen",
+ "SkyBlue",
+ "Tan",
+ "Thistle",
+ "Violet",
+ "White",
+ "Yellow",
+ "YellowOrange",
+];
diff --git a/support/texlab/crates/completion/src/providers/color_model.rs b/support/texlab/crates/completion/src/providers/color_model.rs
new file mode 100644
index 0000000000..4b991c4807
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/color_model.rs
@@ -0,0 +1,50 @@
+use base_db::semantics::Span;
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word, find_curly_group_word_list, CompletionBuilder},
+ CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_color_models<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = check_color_definition(params).or_else(|| check_color_definition_set(params))?;
+
+ for name in MODEL_NAMES {
+ if let Some(score) = builder.matcher.score(&name, &cursor.text) {
+ let data = CompletionItemData::ColorModel(name);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ Some(())
+}
+
+fn check_color_definition(params: &CompletionParams) -> Option<Span> {
+ let (span, group) = find_curly_group_word(params)?;
+
+ let definition = latex::ColorDefinition::cast(group.syntax().parent()?)?;
+ definition
+ .model()
+ .filter(|model| model.syntax().text_range() == group.syntax().text_range())?;
+
+ Some(span)
+}
+
+fn check_color_definition_set(params: &CompletionParams) -> Option<Span> {
+ let (span, group) = find_curly_group_word_list(params)?;
+
+ let definition = latex::ColorSetDefinition::cast(group.syntax().parent()?)?;
+ definition
+ .model_list()
+ .filter(|model| model.syntax().text_range() == group.syntax().text_range())?;
+
+ Some(span)
+}
+
+const MODEL_NAMES: &[&str] = &["gray", "rgb", "RGB", "HTML", "cmyk"];
diff --git a/support/texlab/crates/completion/src/providers/command.rs b/support/texlab/crates/completion/src/providers/command.rs
new file mode 100644
index 0000000000..ddbf8bc206
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/command.rs
@@ -0,0 +1,134 @@
+use base_db::{semantics::Span, DocumentData};
+use rowan::{TextRange, TextSize};
+use syntax::{bibtex, latex};
+
+use crate::{
+ util::{included_packages, CompletionBuilder, ProviderContext},
+ CommandData, CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_commands<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_command_name(params)?;
+
+ let mut proc = Processor(ProviderContext {
+ builder,
+ params,
+ cursor,
+ });
+
+ proc.add_begin_snippet();
+ proc.add_library();
+ proc.add_user();
+ Some(())
+}
+
+struct Processor<'a, 'b>(ProviderContext<'a, 'b>);
+
+impl<'a, 'b> Processor<'a, 'b> {
+ pub fn add_begin_snippet(&mut self) -> Option<()> {
+ let score = self.0.builder.matcher.score("begin", &self.0.cursor.text)?;
+ let data = CompletionItemData::BeginEnvironment;
+ self.0
+ .builder
+ .items
+ .push(CompletionItem::new_simple(score, self.0.cursor.range, data));
+
+ Some(())
+ }
+
+ pub fn add_library(&mut self) -> Option<()> {
+ for package in included_packages(&self.0.params.feature) {
+ let commands_with_score = package.commands.iter().filter_map(|command| {
+ let matcher = &self.0.builder.matcher;
+ let score = matcher.score(&command.name, &self.0.cursor.text)?;
+ Some((command, score))
+ });
+
+ for (command, score) in commands_with_score {
+ let data = CompletionItemData::Command(CommandData {
+ name: &command.name,
+ glyph: command.glyph.as_deref(),
+ image: command.image.as_deref(),
+ package: Some(package),
+ });
+
+ self.0.builder.items.push(CompletionItem::new_simple(
+ score,
+ self.0.cursor.range,
+ data,
+ ));
+ }
+ }
+
+ Some(())
+ }
+
+ fn add_user(&mut self) {
+ let documents = self.0.params.feature.project.documents.iter();
+ for data in documents.filter_map(|document| document.data.as_tex()) {
+ let commands = data
+ .semantics
+ .commands
+ .iter()
+ .filter(|name| name.range != self.0.cursor.range);
+
+ let commands_with_score = commands.filter_map(|command| {
+ let matcher = &self.0.builder.matcher;
+ let score = matcher.score(&command.text, &self.0.cursor.text)?;
+ Some((command, score))
+ });
+
+ for (command, score) in commands_with_score {
+ let data = CompletionItemData::Command(CommandData {
+ name: &command.text,
+ glyph: None,
+ image: None,
+ package: None,
+ });
+
+ self.0.builder.items.push(CompletionItem::new_simple(
+ score,
+ self.0.cursor.range,
+ data,
+ ));
+ }
+ }
+ }
+}
+
+fn find_command_name(params: &CompletionParams) -> Option<Span> {
+ let offset = params.offset;
+ match &params.feature.document.data {
+ DocumentData::Tex(data) => {
+ let root = data.root_node();
+ find_command_name_ast(&root, latex::COMMAND_NAME, offset)
+ }
+ DocumentData::Bib(data) => {
+ let root = data.root_node();
+ find_command_name_ast(&root, bibtex::COMMAND_NAME, offset)
+ .or_else(|| find_command_name_ast(&root, bibtex::ACCENT_NAME, offset))
+ }
+ _ => None,
+ }
+}
+
+fn find_command_name_ast<L: rowan::Language>(
+ root: &rowan::SyntaxNode<L>,
+ kind: L::Kind,
+ offset: TextSize,
+) -> Option<Span> {
+ let token = root
+ .token_at_offset(offset)
+ .filter(|token| token.text_range().start() != offset)
+ .find(|token| token.kind() == kind)?;
+
+ let full_range = token.text_range();
+ let text = String::from(&token.text()[1..]);
+ Some(Span::new(
+ text,
+ TextRange::new(full_range.start() + TextSize::of('\\'), full_range.end()),
+ ))
+}
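A standalone sketch (std only, hypothetical offsets) of the span adjustment in find_command_name_ast above: the replacement range starts after the backslash, so only the command name itself is edited.

    fn main() {
        let token_text = "\\usepackage";
        let token_start: usize = 10; // hypothetical start offset of the token
        let name = &token_text[1..];
        let range = (token_start + '\\'.len_utf8(), token_start + token_text.len());
        assert_eq!(name, "usepackage");
        assert_eq!(range, (11, 21));
    }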
diff --git a/support/texlab/crates/completion/src/providers/entry_type.rs b/support/texlab/crates/completion/src/providers/entry_type.rs
new file mode 100644
index 0000000000..ce8bffa51e
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/entry_type.rs
@@ -0,0 +1,40 @@
+use base_db::semantics::Span;
+use syntax::bibtex;
+
+use crate::{
+ util::CompletionBuilder, CompletionItem, CompletionItemData, CompletionParams, EntryTypeData,
+};
+
+pub fn complete_entry_types<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_entry_type(params)?;
+
+ for entry_type in base_db::data::BIBTEX_ENTRY_TYPES {
+ if let Some(score) = builder.matcher.score(entry_type.name, &cursor.text) {
+ let data = CompletionItemData::EntryType(EntryTypeData(*entry_type));
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ Some(())
+}
+
+fn find_entry_type(params: &CompletionParams) -> Option<Span> {
+ let data = params.feature.document.data.as_bib()?;
+
+ let token = data
+ .root_node()
+ .token_at_offset(params.offset)
+ .find(|token| token.kind() == bibtex::TYPE)?;
+
+ let range = token.text_range();
+ if range.start() == params.offset {
+ None
+ } else {
+ Some(Span::from(&token))
+ }
+}
diff --git a/support/texlab/crates/completion/src/providers/environment.rs b/support/texlab/crates/completion/src/providers/environment.rs
new file mode 100644
index 0000000000..393dd100ba
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/environment.rs
@@ -0,0 +1,135 @@
+use base_db::semantics::Span;
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word, included_packages, CompletionBuilder, ProviderContext},
+ CompletionItem, CompletionItemData, CompletionParams, EnvironmentData,
+};
+
+pub fn complete_environments<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let (cursor, group) = find_environment(params)?;
+
+ let begin = group
+ .syntax()
+ .parent()
+ .and_then(|node| node.parent())
+ .and_then(latex::Environment::cast)
+ .and_then(|env| env.begin())
+ .and_then(|begin| begin.name())
+ .and_then(|name| name.key())
+ .map_or_else(|| String::new(), |name| name.to_string());
+
+ let mut proc = Processor {
+ inner: ProviderContext {
+ builder,
+ params,
+ cursor,
+ },
+ begin,
+ };
+
+ proc.add_library();
+ proc.add_theorem();
+ proc.add_user();
+ Some(())
+}
+
+struct Processor<'a, 'b> {
+ inner: ProviderContext<'a, 'b>,
+ begin: String,
+}
+
+impl<'a, 'b> Processor<'a, 'b> {
+ fn add_library(&mut self) {
+ for package in included_packages(&self.inner.params.feature) {
+ let envs_with_score = package.environments.iter().filter_map(|env| {
+ let matcher = &self.inner.builder.matcher;
+ let score = matcher.score(&env, &self.inner.cursor.text)?;
+ Some((*env, score))
+ });
+
+ for (name, score) in envs_with_score {
+ let data = CompletionItemData::Environment(EnvironmentData {
+ name,
+ package: Some(package),
+ });
+
+ self.inner.builder.items.push(CompletionItem {
+ score,
+ data,
+ range: self.inner.cursor.range,
+ preselect: name == self.begin,
+ });
+ }
+ }
+ }
+
+ fn add_theorem(&mut self) {
+ let documents = self.inner.params.feature.project.documents.iter();
+ for theorem in documents
+ .filter_map(|document| document.data.as_tex())
+ .flat_map(|data| data.semantics.theorem_definitions.iter())
+ {
+ let matcher = &self.inner.builder.matcher;
+ let name = theorem.name.text.as_str();
+ if let Some(score) = matcher.score(&name, &self.inner.cursor.text) {
+ let data = CompletionItemData::Environment(EnvironmentData {
+ name,
+ package: None,
+ });
+
+ self.inner.builder.items.push(CompletionItem {
+ score,
+ data,
+ range: self.inner.cursor.range,
+ preselect: name == self.begin,
+ });
+ }
+ }
+ }
+
+ fn add_user(&mut self) {
+ let documents = self.inner.params.feature.project.documents.iter();
+ for data in documents.filter_map(|document| document.data.as_tex()) {
+ let envs = data
+ .semantics
+ .environments
+ .iter()
+ .filter(|name| name.range != self.inner.cursor.range);
+
+ let envs_with_score = envs.filter_map(|env| {
+ let matcher = &self.inner.builder.matcher;
+ let score = matcher.score(&env.text, &self.inner.cursor.text)?;
+ Some((&env.text, score))
+ });
+
+ for (name, score) in envs_with_score {
+ let data = CompletionItemData::Environment(EnvironmentData {
+ name,
+ package: None,
+ });
+
+ self.inner.builder.items.push(CompletionItem {
+ score,
+ data,
+ range: self.inner.cursor.range,
+ preselect: name == &self.begin,
+ });
+ }
+ }
+ }
+}
+
+fn find_environment(params: &CompletionParams) -> Option<(Span, latex::CurlyGroupWord)> {
+ let (span, group) = find_curly_group_word(params)?;
+ let parent = group.syntax().parent()?;
+ if matches!(parent.kind(), latex::BEGIN | latex::END) {
+ Some((span, group))
+ } else {
+ None
+ }
+}
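A standalone sketch of the preselect rule above: when completing the name inside \end{...}, the candidate that equals the enclosing \begin name is preselected.

    fn main() {
        let begin = "document"; // name read from the matching \begin
        let candidates = ["abstract", "center", "document"];
        let preselect: Vec<bool> = candidates.iter().map(|name| *name == begin).collect();
        assert_eq!(preselect, [false, false, true]);
    }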
diff --git a/support/texlab/crates/completion/src/providers/field.rs b/support/texlab/crates/completion/src/providers/field.rs
new file mode 100644
index 0000000000..2dcd7fd26b
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/field.rs
@@ -0,0 +1,66 @@
+use base_db::semantics::Span;
+use rowan::{ast::AstNode, TokenAtOffset};
+use syntax::bibtex::{self, HasName};
+
+use crate::{
+ util::CompletionBuilder, CompletionItem, CompletionItemData, CompletionParams, FieldTypeData,
+};
+
+pub fn complete_fields<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_field(params)?;
+
+ for field in base_db::data::BIBTEX_FIELD_TYPES {
+ if let Some(score) = builder.matcher.score(field.name, &cursor.text) {
+ let data = CompletionItemData::Field(FieldTypeData(*field));
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ Some(())
+}
+
+fn find_field(params: &CompletionParams) -> Option<Span> {
+ let token = select_token(params)?;
+ if token.kind() == bibtex::TYPE {
+ return None;
+ }
+
+ let parent = token.parent()?;
+ if let Some(entry) = bibtex::Entry::cast(parent.clone()) {
+ if entry.name_token()?.text_range() == token.text_range() {
+ return None;
+ }
+ } else {
+ bibtex::Field::cast(parent)?;
+ }
+
+ Some(if token.kind() == bibtex::NAME {
+ Span::from(&token)
+ } else {
+ Span::empty(params.offset)
+ })
+}
+
+fn select_token(params: &CompletionParams) -> Option<bibtex::SyntaxToken> {
+ let data = params.feature.document.data.as_bib()?;
+ Some(match data.root_node().token_at_offset(params.offset) {
+ TokenAtOffset::Between(_, r) if r.kind() == bibtex::TYPE => r,
+ TokenAtOffset::Between(l, _) if l.kind() == bibtex::TYPE => l,
+ TokenAtOffset::Between(l, _) if l.kind() == bibtex::COMMAND_NAME => l,
+ TokenAtOffset::Between(l, _) if l.kind() == bibtex::ACCENT_NAME => l,
+ TokenAtOffset::Between(_, r) if r.kind() == bibtex::WORD => r,
+ TokenAtOffset::Between(_, r) if r.kind() == bibtex::NAME => r,
+ TokenAtOffset::Between(l, _) if l.kind() == bibtex::WORD => l,
+ TokenAtOffset::Between(l, _) if l.kind() == bibtex::NAME => l,
+ TokenAtOffset::Between(_, r) if r.kind() == bibtex::COMMAND_NAME => r,
+ TokenAtOffset::Between(_, r) if r.kind() == bibtex::ACCENT_NAME => r,
+ TokenAtOffset::Between(_, r) => r,
+ TokenAtOffset::Single(t) => t,
+ TokenAtOffset::None => return None,
+ })
+}
diff --git a/support/texlab/crates/completion/src/providers/glossary.rs b/support/texlab/crates/completion/src/providers/glossary.rs
new file mode 100644
index 0000000000..c22f5b5888
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/glossary.rs
@@ -0,0 +1,88 @@
+use base_db::semantics::Span;
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word, CompletionBuilder, ProviderContext},
+ CompletionItem, CompletionItemData, CompletionParams, GlossaryEntryData,
+};
+
+pub fn complete_acronyms<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_acronym(params)?;
+ let mut proc = Processor(ProviderContext {
+ builder,
+ params,
+ cursor,
+ });
+
+ proc.add_acronyms();
+ Some(())
+}
+
+pub fn complete_glossaries<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let cursor = find_glossary(params)?;
+ let mut proc = Processor(ProviderContext {
+ builder,
+ params,
+ cursor,
+ });
+
+ proc.add_acronyms();
+ proc.add_glossaries();
+ Some(())
+}
+
+struct Processor<'a, 'b>(ProviderContext<'a, 'b>);
+
+impl<'a, 'b> Processor<'a, 'b> {
+ pub fn add_acronyms(&mut self) {
+ self.add_generic(|node| latex::AcronymDefinition::cast(node)?.name());
+ }
+
+ pub fn add_glossaries(&mut self) {
+ self.add_generic(|node| latex::GlossaryEntryDefinition::cast(node)?.name());
+ }
+
+ fn add_generic<F>(&mut self, extract: F)
+ where
+ F: Fn(latex::SyntaxNode) -> Option<latex::CurlyGroupWord>,
+ {
+ let documents = self.0.params.feature.project.documents.iter();
+ for data in documents.filter_map(|document| document.data.as_tex()) {
+ for name in data
+ .root_node()
+ .descendants()
+ .filter_map(|node| extract(node))
+ .filter_map(|name| name.key())
+ .map(|name| name.to_string())
+ {
+ if let Some(score) = self.0.builder.matcher.score(&name, &self.0.cursor.text) {
+ let data = CompletionItemData::GlossaryEntry(GlossaryEntryData { name });
+ self.0.builder.items.push(CompletionItem::new_simple(
+ score,
+ self.0.cursor.range,
+ data,
+ ));
+ }
+ }
+ }
+ }
+}
+
+fn find_acronym(params: &CompletionParams) -> Option<Span> {
+ let (cursor, group) = find_curly_group_word(params)?;
+ latex::AcronymReference::cast(group.syntax().parent()?)?;
+ Some(cursor)
+}
+
+fn find_glossary(params: &CompletionParams) -> Option<Span> {
+ let (cursor, group) = find_curly_group_word(params)?;
+ latex::GlossaryEntryReference::cast(group.syntax().parent()?)?;
+ Some(cursor)
+}
diff --git a/support/texlab/crates/completion/src/providers/import.rs b/support/texlab/crates/completion/src/providers/import.rs
new file mode 100644
index 0000000000..c506026a99
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/import.rs
@@ -0,0 +1,69 @@
+use rowan::ast::AstNode;
+use rustc_hash::FxHashSet;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word_list, CompletionBuilder},
+ CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_imports<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let (cursor, group) = find_curly_group_word_list(params)?;
+
+ let kind = group.syntax().parent()?.kind();
+ let extension = match kind {
+ latex::PACKAGE_INCLUDE => "sty",
+ latex::CLASS_INCLUDE => "cls",
+ _ => return Some(()),
+ };
+
+ let mut file_names = FxHashSet::default();
+ for file_name in completion_data::DATABASE
+ .iter()
+ .flat_map(|package| package.file_names.iter())
+ .filter(|file_name| file_name.ends_with(extension))
+ {
+ file_names.insert(file_name);
+ let stem = &file_name[0..file_name.len() - 4];
+ if kind == latex::PACKAGE_INCLUDE {
+ if let Some(score) = builder.matcher.score(&stem, &cursor.text) {
+ let data = CompletionItemData::Package(stem);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ } else if let Some(score) = builder.matcher.score(&stem, &cursor.text) {
+ let data = CompletionItemData::DocumentClass(stem);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ let file_name_db = &params.feature.workspace.distro().file_name_db;
+ for file_name in file_name_db
+ .iter()
+ .map(|(file_name, _)| file_name)
+ .filter(|file_name| file_name.ends_with(extension) && !file_names.contains(file_name))
+ {
+ let stem = &file_name[0..file_name.len() - 4];
+ if kind == latex::PACKAGE_INCLUDE {
+ if let Some(score) = builder.matcher.score(&stem, &cursor.text) {
+ let data = CompletionItemData::Package(stem);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ } else if let Some(score) = builder.matcher.score(&stem, &cursor.text) {
+ let data = CompletionItemData::DocumentClass(stem);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ Some(())
+}
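A standalone sketch of the stem computation above; it relies on the three-letter extensions used here (.sty/.cls), so the last four bytes are dropped:

    fn main() {
        let file_name = "amsmath.sty";
        let stem = &file_name[0..file_name.len() - 4];
        assert_eq!(stem, "amsmath");
    }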
diff --git a/support/texlab/crates/completion/src/providers/include.rs b/support/texlab/crates/completion/src/providers/include.rs
new file mode 100644
index 0000000000..2dd204cfdc
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/include.rs
@@ -0,0 +1,157 @@
+use std::{
+ fs,
+ path::{Path, PathBuf},
+};
+
+use base_db::{DocumentData, FeatureParams};
+use rowan::{ast::AstNode, TextLen, TextRange};
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word_list, CompletionBuilder},
+ CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_includes<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ if params.feature.document.path.is_none() {
+ return None;
+ }
+
+ let (cursor, group) = find_curly_group_word_list(params)?;
+
+ let include = group.syntax().parent()?;
+ let (include_extension, extensions): (bool, &[&str]) = match include.kind() {
+ latex::PACKAGE_INCLUDE => (false, &["sty"]),
+ latex::CLASS_INCLUDE => (false, &["cls"]),
+ latex::LATEX_INCLUDE => {
+ let include = latex::Include::cast(include.clone())?;
+ (
+ matches!(include.command()?.text(), "\\input" | "\\subfile"),
+ &["tex"],
+ )
+ }
+ latex::BIBLATEX_INCLUDE => (true, &["bib"]),
+ latex::BIBTEX_INCLUDE => (false, &["bib"]),
+ latex::GRAPHICS_INCLUDE => (true, &["pdf", "png", "jpg", "jpeg", "bmp"]),
+ latex::SVG_INCLUDE => (true, &["svg"]),
+ latex::INKSCAPE_INCLUDE => (true, &["pdf", "eps", "ps", "png"]),
+ latex::VERBATIM_INCLUDE => (true, &[]),
+ _ => return None,
+ };
+
+ let segment_range = if cursor.text.is_empty() {
+ cursor.range
+ } else {
+ let start = cursor.range.end() - cursor.text.split('/').last()?.text_len();
+ TextRange::new(start, cursor.range.end())
+ };
+
+ let segment_text = &params.feature.document.text[std::ops::Range::from(segment_range)];
+
+ let mut dirs = vec![current_dir(&params.feature, &cursor.text, None)];
+ if include.kind() == latex::GRAPHICS_INCLUDE {
+ for document in &params.feature.project.documents {
+ let DocumentData::Tex(data) = &document.data else {
+ continue;
+ };
+
+ for graphics_path in &data.semantics.graphics_paths {
+ dirs.push(current_dir(
+ &params.feature,
+ &cursor.text,
+ Some(graphics_path),
+ ));
+ }
+ }
+ }
+
+ for entry in dirs
+ .into_iter()
+ .flatten()
+ .filter_map(|dir| fs::read_dir(dir).ok())
+ .flatten()
+ .flatten()
+ {
+ let mut path = entry.path();
+
+ let file_type = entry.file_type().ok()?;
+ if file_type.is_file() && is_included(&path, extensions) {
+ if !include_extension {
+ remove_extension(&mut path);
+ }
+
+ let name = String::from(path.file_name()?.to_str()?);
+ if let Some(score) = builder.matcher.score(&name, segment_text) {
+ builder.items.push(CompletionItem::new_simple(
+ score,
+ cursor.range,
+ CompletionItemData::File(name),
+ ));
+ }
+ } else if file_type.is_dir() {
+ let name = String::from(path.file_name()?.to_str()?);
+ if let Some(score) = builder.matcher.score(&name, segment_text) {
+ builder.items.push(CompletionItem::new_simple(
+ score,
+ cursor.range,
+ CompletionItemData::Directory(name),
+ ));
+ }
+ }
+ }
+
+ Some(())
+}
+
+fn current_dir(
+ params: &FeatureParams,
+ path_text: &str,
+ graphics_path: Option<&str>,
+) -> Option<PathBuf> {
+ let workspace = &params.workspace;
+ let parent = workspace
+ .parents(params.document)
+ .iter()
+ .next()
+ .map_or(params.document, Clone::clone);
+
+ let path = workspace.current_dir(&parent.dir).to_file_path().ok()?;
+
+ let mut path = PathBuf::from(path.to_str()?.replace('\\', "/"));
+ if !path_text.is_empty() {
+ if let Some(graphics_path) = graphics_path {
+ path.push(graphics_path);
+ }
+
+ path.push(path_text);
+ if !path_text.ends_with('/') {
+ path.pop();
+ }
+ }
+
+ Some(path)
+}
+
+fn is_included(file: &Path, allowed_extensions: &[&str]) -> bool {
+ allowed_extensions.is_empty()
+ || file
+ .extension()
+ .and_then(std::ffi::OsStr::to_str)
+ .map(str::to_lowercase)
+ .map(|ext| allowed_extensions.contains(&ext.as_str()))
+ .unwrap_or_default()
+}
+
+fn remove_extension(path: &mut PathBuf) {
+ if let Some(stem) = path
+ .file_stem()
+ .and_then(std::ffi::OsStr::to_str)
+ .map(ToOwned::to_owned)
+ {
+ path.pop();
+ path.push(stem);
+ }
+}
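A standalone sketch (std only) of the two helpers above: extension matching is case-insensitive, and stripping the extension leaves a path that fits directly into the include command.

    use std::path::PathBuf;

    fn main() {
        let mut path = PathBuf::from("figures/plot.PNG");
        // is_included lower-cases the extension, so "PNG" passes the "png" filter.
        let ext = path.extension().and_then(|e| e.to_str()).map(str::to_lowercase);
        assert_eq!(ext.as_deref(), Some("png"));
        // remove_extension swaps the file name for its stem.
        let stem = path.file_stem().and_then(|s| s.to_str()).map(ToOwned::to_owned).unwrap();
        path.pop();
        path.push(stem);
        assert_eq!(path, PathBuf::from("figures/plot"));
    }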
diff --git a/support/texlab/crates/completion/src/providers/label.rs b/support/texlab/crates/completion/src/providers/label.rs
new file mode 100644
index 0000000000..8af2873457
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/label.rs
@@ -0,0 +1,101 @@
+use base_db::{
+ semantics::{tex::LabelKind, Span},
+ util::{render_label, RenderedObject},
+ DocumentData,
+};
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word, find_curly_group_word_list, CompletionBuilder},
+ CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_labels<'a>(
+ params: &'a CompletionParams<'a>,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let FindResult { cursor, is_math } =
+ find_reference(params).or_else(|| find_reference_range(params))?;
+
+ for document in &params.feature.project.documents {
+ let DocumentData::Tex(data) = &document.data else {
+ continue;
+ };
+
+ for label in data
+ .semantics
+ .labels
+ .iter()
+ .filter(|label| label.kind == LabelKind::Definition)
+ {
+ match render_label(params.feature.workspace, &params.feature.project, label) {
+ Some(rendered_label) => {
+ if is_math && !matches!(rendered_label.object, RenderedObject::Equation) {
+ continue;
+ }
+
+ let header = rendered_label.detail();
+ let footer = match &rendered_label.object {
+ RenderedObject::Float { caption, .. } => Some(*caption),
+ _ => None,
+ };
+
+ let keywords = format!("{} {}", label.name.text, rendered_label.reference());
+
+ if let Some(score) = builder.matcher.score(&keywords, &cursor.text) {
+ let data = CompletionItemData::Label(crate::LabelData {
+ name: &label.name.text,
+ header,
+ footer,
+ object: Some(rendered_label.object),
+ keywords,
+ });
+
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+ None => {
+ if let Some(score) = builder.matcher.score(&label.name.text, &cursor.text) {
+ let data = CompletionItemData::Label(crate::LabelData {
+ name: &label.name.text,
+ header: None,
+ footer: None,
+ object: None,
+ keywords: label.name.text.clone(),
+ });
+
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+ }
+ }
+ }
+
+ Some(())
+}
+
+struct FindResult {
+ cursor: Span,
+ is_math: bool,
+}
+
+fn find_reference(params: &CompletionParams) -> Option<FindResult> {
+ let (cursor, group) = find_curly_group_word_list(params)?;
+ let reference = latex::LabelReference::cast(group.syntax().parent()?)?;
+ let is_math = reference.command()?.text() == "\\eqref";
+ Some(FindResult { cursor, is_math })
+}
+
+fn find_reference_range(params: &CompletionParams) -> Option<FindResult> {
+ let (cursor, group) = find_curly_group_word(params)?;
+ latex::LabelReferenceRange::cast(group.syntax().parent()?)?;
+ Some(FindResult {
+ cursor,
+ is_math: false,
+ })
+}
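A standalone sketch of the keyword string built above (the rendered reference text is hypothetical): label name and rendered reference are concatenated, so the matcher can hit either one.

    fn main() {
        let name = "sec:foo";
        let reference = "Section 1 (Foo)"; // hypothetical output of render_label
        let keywords = format!("{} {}", name, reference);
        assert!(keywords.contains("sec:foo"));
        assert!(keywords.contains("Foo"));
    }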
diff --git a/support/texlab/crates/completion/src/providers/tikz_library.rs b/support/texlab/crates/completion/src/providers/tikz_library.rs
new file mode 100644
index 0000000000..10a0aa44ba
--- /dev/null
+++ b/support/texlab/crates/completion/src/providers/tikz_library.rs
@@ -0,0 +1,149 @@
+use rowan::ast::AstNode;
+use syntax::latex;
+
+use crate::{
+ util::{find_curly_group_word_list, CompletionBuilder},
+ CompletionItem, CompletionItemData, CompletionParams,
+};
+
+pub fn complete_tikz_libraries<'a>(
+ params: &'a CompletionParams,
+ builder: &mut CompletionBuilder<'a>,
+) -> Option<()> {
+ let (cursor, group) = find_curly_group_word_list(params)?;
+
+ let import = latex::TikzLibraryImport::cast(group.syntax().parent()?)?;
+
+ let libraries = if import.command()?.text() == "\\usepgflibrary" {
+ PGF_LIBRARIES
+ } else {
+ TIKZ_LIBRARIES
+ };
+
+ for name in libraries {
+ if let Some(score) = builder.matcher.score(&name, &cursor.text) {
+ let data = CompletionItemData::TikzLibrary(name);
+ builder
+ .items
+ .push(CompletionItem::new_simple(score, cursor.range, data));
+ }
+ }
+
+ Some(())
+}
+
+static PGF_LIBRARIES: &[&str] = &[
+ "arrows",
+ "arrows.meta",
+ "arrows.spaced",
+ "curvilinear",
+ "datavisualization.barcharts",
+ "datavisualization.formats.functions",
+ "datavisualization.polar",
+ "decorations.footprints",
+ "decorations.fractals",
+ "decorations.markings",
+ "decorations.pathmorphing",
+ "decorations.pathreplacing",
+ "decorations.shapes",
+ "decorations.text",
+ "fadings",
+ "fixedpointarithmetic",
+ "fpu",
+ "intersections",
+ "lindenmayersystems",
+ "luamath",
+ "patterns",
+ "patterns.meta",
+ "plothandlers",
+ "plotmarks",
+ "profiler",
+ "shadings",
+ "shapes.arrows",
+ "shapes.callouts",
+ "shapes",
+ "shapes.gates.ee",
+ "shapes.gates.ee.IEC",
+ "shapes.gates.logic",
+ "shapes.gates.logic.IEC",
+ "shapes.gates.logic.US",
+ "shapes.geometric",
+ "shapes.misc",
+ "shapes.multipart",
+ "shapes.symbols",
+ "snakes",
+ "svg.path",
+];
+
+static TIKZ_LIBRARIES: &[&str] = &[
+ "3d",
+ "angles",
+ "arrows",
+ "automata",
+ "babel",
+ "backgrounds",
+ "bending",
+ "calc",
+ "calendar",
+ "chains",
+ "circuits",
+ "circuits.ee",
+ "circuits.ee.IEC",
+ "circuits.logic.CDH",
+ "circuits.logic",
+ "circuits.logic.IEC",
+ "circuits.logic.US",
+ "datavisualization.3d",
+ "datavisualization.barcharts",
+ "datavisualization",
+ "datavisualization.formats.functions",
+ "datavisualization.polar",
+ "datavisualization.sparklines",
+ "decorations",
+ "decorations.footprints",
+ "decorations.fractals",
+ "decorations.markings",
+ "decorations.pathmorphing",
+ "decorations.pathreplacing",
+ "decorations.shapes",
+ "decorations.text",
+ "er",
+ "fadings",
+ "fit",
+ "fixedpointarithmetic",
+ "folding",
+ "fpu",
+ "graphs",
+ "graphs.standard",
+ "intersections",
+ "lindenmayersystems",
+ "math",
+ "matrix",
+ "mindmap",
+ "patterns",
+ "patterns.meta",
+ "petri",
+ "plothandlers",
+ "plotmarks",
+ "positioning",
+ "quotes",
+ "scopes",
+ "shadings",
+ "shadows",
+ "shapes.arrows",
+ "shapes.callouts",
+ "shapes",
+ "shapes.gates.logic.IEC",
+ "shapes.gates.logic.US",
+ "shapes.geometric",
+ "shapes.misc",
+ "shapes.multipart",
+ "shapes.symbols",
+ "snakes",
+ "spy",
+ "svg.path",
+ "through",
+ "topaths",
+ "trees",
+ "turtle",
+];
diff --git a/support/texlab/crates/completion/src/tests.rs b/support/texlab/crates/completion/src/tests.rs
new file mode 100644
index 0000000000..84cd06447b
--- /dev/null
+++ b/support/texlab/crates/completion/src/tests.rs
@@ -0,0 +1,2071 @@
+use base_db::FeatureParams;
+use expect_test::{expect, Expect};
+use rowan::TextRange;
+
+use crate::CompletionParams;
+
+fn check(input: &str, expect: Expect) {
+ let fixture = test_utils::fixture::Fixture::parse(input);
+
+ let (offset, spec) = fixture
+ .documents
+ .iter()
+ .find_map(|document| Some((document.cursor?, document)))
+ .unwrap();
+
+ let document = fixture.workspace.lookup(&spec.uri).unwrap();
+ let feature = FeatureParams::new(&fixture.workspace, document);
+ let params = CompletionParams { feature, offset };
+ let result = crate::complete(&params);
+
+ let range = spec
+ .ranges
+ .first()
+ .map_or_else(|| TextRange::empty(offset), |range| *range);
+
+ for item in &result.items {
+ assert_eq!(item.range, range);
+ }
+
+ let items = result
+ .items
+ .into_iter()
+ .take(5)
+ .map(|item| item.data)
+ .collect::<Vec<_>>();
+
+ expect.assert_debug_eq(&items);
+}
+
+#[test]
+fn acronym_ref_simple() {
+ check(
+ r#"
+%! main.tex
+\newacronym[longplural={Frames per Second}]{fpsLabel}{FPS}{Frame per Second}
+\acrshort{f}
+ |
+ ^"#,
+ expect![[r#"
+ [
+ GlossaryEntry(
+ GlossaryEntryData {
+ name: "fpsLabel",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn acronym_ref_empty() {
+ check(
+ r#"
+%! main.tex
+\newacronym[longplural={Frames per Second}]{fpsLabel}{FPS}{Frame per Second}
+\acrshort{}
+ |"#,
+ expect![[r#"
+ [
+ GlossaryEntry(
+ GlossaryEntryData {
+ name: "fpsLabel",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn acronym_ref_after_group() {
+ check(
+ r#"
+%! main.tex
+\newacronym[longplural={Frames per Second}]{fpsLabel}{FPS}{Frame per Second}
+\acrshort{}
+ |"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+}
+
+#[test]
+fn acronym_ref_open_brace() {
+ check(
+ r#"
+%! main.tex
+\newacronym[longplural={Frames per Second}]{fpsLabel}{FPS}{Frame per Second}
+\acrshort{f
+ |
+ ^"#,
+ expect![[r#"
+ [
+ GlossaryEntry(
+ GlossaryEntryData {
+ name: "fpsLabel",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn acronym_package_ref() {
+ check(
+ r#"
+%! main.tex
+\acrodef{fpsLabel}[FPS]{Frames per Second}
+\ac{f
+ |
+ ^"#,
+ expect![[r#"
+ [
+ GlossaryEntry(
+ GlossaryEntryData {
+ name: "fpsLabel",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn glossary_ref_simple() {
+ check(
+ r#"
+%! main.tex
+\newacronym[longplural={Frames per Second}]{fpsLabel}{FPS}{Frame per Second}
+\gls{f}
+ |
+ ^"#,
+ expect![[r#"
+ [
+ GlossaryEntry(
+ GlossaryEntryData {
+ name: "fpsLabel",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn glossary_ref_open_brace() {
+ check(
+ r#"
+%! main.tex
+\newacronym[longplural={Frames per Second}]{fpsLabel}{FPS}{Frame per Second}
+\gls{f
+ |
+ ^"#,
+ expect![[r#"
+ [
+ GlossaryEntry(
+ GlossaryEntryData {
+ name: "fpsLabel",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn argument_empty() {
+ check(
+ r#"
+%! main.tex
+\usepackage{amsfonts}
+\mathbb{}
+ |"#,
+ expect![[r#"
+ [
+ Argument(
+ ArgumentData(
+ "A",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "B",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "C",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "D",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "E",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn argument_word() {
+ check(
+ r#"
+%! main.tex
+\usepackage{amsfonts}
+\mathbb{A}
+ |
+ ^"#,
+ expect![[r#"
+ [
+ Argument(
+ ArgumentData(
+ "A",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn argument_open_brace() {
+ check(
+ r#"
+%! main.tex
+\usepackage{amsfonts}
+\mathbb{
+ |
+Test"#,
+ expect![[r#"
+ [
+ Argument(
+ ArgumentData(
+ "A",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "B",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "C",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "D",
+ ),
+ ),
+ Argument(
+ ArgumentData(
+ "E",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn argument_open_brace_unrelated() {
+ check(
+ r#"
+%! main.tex
+\usepackage{amsfonts}
+\mathbb{}{
+ |
+Test"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+}
+
+#[test]
+fn begin_environment_without_snippet_support() {
+ check(
+ r#"
+%! main.tex
+\beg
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ BeginEnvironment,
+ Command(
+ CommandData {
+ name: "begingroup",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "AtBeginDocument",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "AtBeginDvi",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "bigwedge",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn citation() {
+ check(
+ r#"
+%! main.tex
+\documentclass{article}
+\bibliography{main}
+\begin{document}
+\cite{
+ |
+\end{document}
+
+%! main.bib
+@article{foo:2019,
+ author = {Foo Bar},
+ title = {Baz Qux},
+ year = {2019},
+}
+
+@article{bar:2005,}"#,
+ expect![[r#"
+ [
+ Citation(
+ CitationData {
+ document: Document(
+ "file:///texlab/main.bib",
+ ),
+ entry: Entry {
+ name: Span(
+ "bar:2005",
+ 97..105,
+ ),
+ full_range: 88..107,
+ keywords: "bar:2005 @article",
+ category: Article,
+ },
+ },
+ ),
+ Citation(
+ CitationData {
+ document: Document(
+ "file:///texlab/main.bib",
+ ),
+ entry: Entry {
+ name: Span(
+ "foo:2019",
+ 9..17,
+ ),
+ full_range: 0..86,
+ keywords: "foo:2019 @article Foo Bar Baz Qux 2019",
+ category: Article,
+ },
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn citation_open_brace() {
+ check(
+ r#"
+%! main.tex
+\addbibresource{main.bib}
+\cite{
+ |
+
+%! main.bib
+@article{foo,}"#,
+ expect![[r#"
+ [
+ Citation(
+ CitationData {
+ document: Document(
+ "file:///texlab/main.bib",
+ ),
+ entry: Entry {
+ name: Span(
+ "foo",
+ 9..12,
+ ),
+ full_range: 0..14,
+ keywords: "foo @article",
+ category: Article,
+ },
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn citation_open_brace_multiple() {
+ check(
+ r#"
+%! main.tex
+\addbibresource{main.bib}
+\cite{foo,f
+ |
+ ^
+
+%! main.bib
+@article{foo,}"#,
+ expect![[r#"
+ [
+ Citation(
+ CitationData {
+ document: Document(
+ "file:///texlab/main.bib",
+ ),
+ entry: Entry {
+ name: Span(
+ "foo",
+ 9..12,
+ ),
+ full_range: 0..14,
+ keywords: "foo @article",
+ category: Article,
+ },
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn citation_acronym() {
+ check(
+ r#"
+%! main.tex
+\addbibresource{main.bib}
+\DeclareAcronym{foo}{cite={}}
+ |
+
+%! main.bib
+@article{foo,}"#,
+ expect![[r#"
+ [
+ Citation(
+ CitationData {
+ document: Document(
+ "file:///texlab/main.bib",
+ ),
+ entry: Entry {
+ name: Span(
+ "foo",
+ 9..12,
+ ),
+ full_range: 0..14,
+ keywords: "foo @article",
+ category: Article,
+ },
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn citation_after_brace() {
+ check(
+ r#"
+%! main.tex
+\documentclass{article}
+\bibliography{main}
+\begin{document}
+\cite{}
+ |
+\end{document}
+
+%! main.bib
+@article{foo,}"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+}
+
+#[test]
+fn color_model_definition_simple() {
+ check(
+ r#"
+%! main.tex
+\definecolor{foo}{}
+ |"#,
+ expect![[r#"
+ [
+ ColorModel(
+ "HTML",
+ ),
+ ColorModel(
+ "RGB",
+ ),
+ ColorModel(
+ "cmyk",
+ ),
+ ColorModel(
+ "gray",
+ ),
+ ColorModel(
+ "rgb",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn color_model_definition_open_brace() {
+ check(
+ r#"
+%! main.tex
+\definecolor{foo}{
+ |"#,
+ expect![[r#"
+ [
+ ColorModel(
+ "HTML",
+ ),
+ ColorModel(
+ "RGB",
+ ),
+ ColorModel(
+ "cmyk",
+ ),
+ ColorModel(
+ "gray",
+ ),
+ ColorModel(
+ "rgb",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn color_model_definition_set_simple() {
+ check(
+ r#"
+%! main.tex
+\definecolorset{}
+ |"#,
+ expect![[r#"
+ [
+ ColorModel(
+ "HTML",
+ ),
+ ColorModel(
+ "RGB",
+ ),
+ ColorModel(
+ "cmyk",
+ ),
+ ColorModel(
+ "gray",
+ ),
+ ColorModel(
+ "rgb",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn color_model_definition_set_open_brace() {
+ check(
+ r#"
+%! main.tex
+\definecolorset{
+ |"#,
+ expect![[r#"
+ [
+ ColorModel(
+ "HTML",
+ ),
+ ColorModel(
+ "RGB",
+ ),
+ ColorModel(
+ "cmyk",
+ ),
+ ColorModel(
+ "gray",
+ ),
+ ColorModel(
+ "rgb",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn color_simple() {
+ check(
+ r#"
+%! main.tex
+\color{}
+ |"#,
+ expect![[r#"
+ [
+ Color(
+ "Apricot",
+ ),
+ Color(
+ "Aquamarine",
+ ),
+ Color(
+ "Bittersweet",
+ ),
+ Color(
+ "Black",
+ ),
+ Color(
+ "Blue",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn color_word() {
+ check(
+ r#"
+%! main.tex
+\color{re}
+ |
+ ^^"#,
+ expect![[r#"
+ [
+ Color(
+ "red",
+ ),
+ Color(
+ "Red",
+ ),
+ Color(
+ "RedOrange",
+ ),
+ Color(
+ "RedViolet",
+ ),
+ Color(
+ "BrickRed",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn color_open_brace() {
+ check(
+ r#"
+%! main.tex
+\color{
+ |"#,
+ expect![[r#"
+ [
+ Color(
+ "Apricot",
+ ),
+ Color(
+ "Aquamarine",
+ ),
+ Color(
+ "Bittersweet",
+ ),
+ Color(
+ "Black",
+ ),
+ Color(
+ "Blue",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn component_command_simple() {
+ check(
+ r#"
+%! main.tex
+\
+ |"#,
+ expect![[r##"
+ [
+ Command(
+ CommandData {
+ name: "!",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "\"",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "#",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "$",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "%",
+ package: [],
+ },
+ ),
+ ]
+ "##]],
+ );
+}
+
+#[test]
+fn component_command_simple_before() {
+ check(
+ r#"
+%! main.tex
+\
+|"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+}
+
+#[test]
+fn component_command_simple_package() {
+ check(
+ r#"
+%! main.tex
+\usepackage{lipsum}
+\lips
+ |
+ ^^^^"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "lipsum",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "lipsumexp",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumPar",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumProtect",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumRestoreAll",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn component_command_bibtex() {
+ check(
+ r#"
+%! main.bib
+@article{b,
+ c = {\LaT }
+ |
+ ^^^
+}"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "LaTeX",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LaTeXe",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "latexreleaseversion",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LastDeclaredEncoding",
+ package: [],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "last",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn component_environment_simple() {
+ check(
+ r#"
+%! main.tex
+\begin{doc
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "document",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn component_environment_simple_end() {
+ check(
+ r#"
+%! main.tex
+\begin{document}
+\end{
+ |"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "document",
+ package: [],
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "abstract",
+ package: [],
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "array",
+ package: [],
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "center",
+ package: [],
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "csname",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn component_environment_class() {
+ check(
+ r#"
+%! main.tex
+\documentclass{article}
+\begin{thein}
+ |
+ ^^^^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "theindex",
+ package: [
+ "article.cls",
+ ],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn component_environment_command_definition() {
+ check(
+ r#"
+%! main.tex
+\newcommand{\foo}{\begin{doc}
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "document",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_at_empty() {
+ check(
+ r#"
+%! main.bib
+@
+ |
+^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@article",
+ ),
+ ),
+ EntryType(
+ EntryTypeData(
+ "@artwork",
+ ),
+ ),
+ EntryType(
+ EntryTypeData(
+ "@audio",
+ ),
+ ),
+ EntryType(
+ EntryTypeData(
+ "@bibnote",
+ ),
+ ),
+ EntryType(
+ EntryTypeData(
+ "@book",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_before_preamble() {
+ check(
+ r#"
+%! main.bib
+@preamble
+ |
+^^^^^^^^^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@preamble",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_before_string() {
+ check(
+ r#"
+%! main.bib
+@string
+ |
+^^^^^^^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@string",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_before_article() {
+ check(
+ r#"
+%! main.bib
+@article
+ |
+^^^^^^^^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@article",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_after_preamble() {
+ check(
+ r#"
+%! main.bib
+@preamble{
+ |
+^^^^^^^^^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@preamble",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_after_string() {
+ check(
+ r#"
+%! main.bib
+@string{
+ |
+^^^^^^^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@string",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn entry_type_complete_entry() {
+ check(
+ r#"
+%! main.bib
+@article{foo, author = {foo}}
+ |
+^^^^^^^^"#,
+ expect![[r#"
+ [
+ EntryType(
+ EntryTypeData(
+ "@article",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn field_empty_entry_open() {
+ check(
+ r#"
+%! main.bib
+@article{foo,
+ |"#,
+ expect![[r#"
+ [
+ Field(
+ FieldTypeData(
+ "abstract",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "addendum",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "address",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "afterword",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "annotation",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn field_empty_entry_closed() {
+ check(
+ r#"
+%! main.bib
+@article{foo,}
+ |"#,
+ expect![[r#"
+ [
+ Field(
+ FieldTypeData(
+ "abstract",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "addendum",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "address",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "afterword",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "annotation",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn field_entry_field_name() {
+ check(
+ r#"
+%! main.bib
+@article{foo, a
+ |
+ ^"#,
+ expect![[r#"
+ [
+ Field(
+ FieldTypeData(
+ "abstract",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "addendum",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "address",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "afterword",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "annotation",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn field_entry_two_fields_name_open() {
+ check(
+ r#"
+%! main.bib
+@article{foo, author = bar, edit
+ |
+ ^^^^"#,
+ expect![[r#"
+ [
+ Field(
+ FieldTypeData(
+ "edition",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editor",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editora",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editoratype",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editorb",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn field_entry_two_fields_name_closed() {
+ check(
+ r#"
+%! main.bib
+@article{foo, author = bar, edit}
+ |
+ ^^^^"#,
+ expect![[r#"
+ [
+ Field(
+ FieldTypeData(
+ "edition",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editor",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editora",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editoratype",
+ ),
+ ),
+ Field(
+ FieldTypeData(
+ "editorb",
+ ),
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn import_package_open_brace() {
+ check(
+ r#"
+%! main.tex
+\usepackage{lips
+ |
+ ^^^^"#,
+ expect![[r#"
+ [
+ Package(
+ "lips",
+ ),
+ Package(
+ "lipsum",
+ ),
+ Package(
+ "lisp-simple-alloc",
+ ),
+ Package(
+ "lisp-string",
+ ),
+ Package(
+ "lwarp-lips",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn import_package_closed_brace() {
+ check(
+ r#"
+%! main.tex
+\usepackage{lips}
+ |
+ ^^^^"#,
+ expect![[r#"
+ [
+ Package(
+ "lips",
+ ),
+ Package(
+ "lipsum",
+ ),
+ Package(
+ "lisp-simple-alloc",
+ ),
+ Package(
+ "lisp-string",
+ ),
+ Package(
+ "lwarp-lips",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn import_class_open_brace() {
+ check(
+ r#"
+%! main.tex
+\documentclass{art \foo
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ DocumentClass(
+ "article",
+ ),
+ DocumentClass(
+ "articleingud",
+ ),
+ DocumentClass(
+ "articoletteracdp",
+ ),
+ DocumentClass(
+ "artikel1",
+ ),
+ DocumentClass(
+ "artikel2",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn import_class_closed_brace() {
+ check(
+ r#"
+%! main.tex
+\documentclass{art}
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ DocumentClass(
+ "article",
+ ),
+ DocumentClass(
+ "articleingud",
+ ),
+ DocumentClass(
+ "articoletteracdp",
+ ),
+ DocumentClass(
+ "artikel1",
+ ),
+ DocumentClass(
+ "artikel2",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn label() {
+ check(
+ r#"
+%! foo.tex
+\documentclass{article}
+
+\usepackage{amsmath}
+\usepackage{caption}
+\usepackage{amsthm}
+\newtheorem{lemma}{Lemma}
+
+\begin{document}
+
+\section{Foo}%
+\label{sec:foo}
+
+\begin{equation}%
+\label{eq:foo}
+ 1 + 1 = 2
+\end{equation}
+
+\begin{equation}%
+\label{eq:bar}
+ 1 + 1 = 2
+\end{equation}
+
+\begin{figure}%
+\LaTeX{}
+\caption{Baz}%
+\label{fig:baz}
+\end{figure}
+
+\begin{lemma}%
+\label{thm:foo}
+ 1 + 1 = 2
+\end{lemma}
+
+\include{bar}
+
+\end{document}
+
+%! bar.tex
+\section{Bar}%
+\label{sec:bar}
+
+Lorem ipsum dolor sit amet.
+\ref{}
+ |
+
+%! foo.aux
+\relax
+\@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Baz\relax }}{1}\protected@file@percent }
+\providecommand*\caption@xref[2]{\@setref\relax\@undefined{#1}}
+\newlabel{fig:baz}{{1}{1}}
+\@writefile{toc}{\contentsline {section}{\numberline {1}Foo}{1}\protected@file@percent }
+\newlabel{sec:foo}{{1}{1}}
+\newlabel{eq:foo}{{1}{1}}
+\newlabel{eq:bar}{{2}{1}}
+\newlabel{thm:foo}{{1}{1}}
+\@input{bar.aux}"#,
+ expect![[r#"
+ [
+ Label(
+ LabelData {
+ name: "eq:bar",
+ header: Some(
+ "Equation (2)",
+ ),
+ footer: None,
+ object: Some(
+ Equation,
+ ),
+ keywords: "eq:bar Equation (2)",
+ },
+ ),
+ Label(
+ LabelData {
+ name: "eq:foo",
+ header: Some(
+ "Equation (1)",
+ ),
+ footer: None,
+ object: Some(
+ Equation,
+ ),
+ keywords: "eq:foo Equation (1)",
+ },
+ ),
+ Label(
+ LabelData {
+ name: "fig:baz",
+ header: Some(
+ "Figure 1",
+ ),
+ footer: Some(
+ "Baz",
+ ),
+ object: Some(
+ Float {
+ kind: Figure,
+ caption: "Baz",
+ },
+ ),
+ keywords: "fig:baz Figure 1: Baz",
+ },
+ ),
+ Label(
+ LabelData {
+ name: "sec:bar",
+ header: Some(
+ "Section (Bar)",
+ ),
+ footer: None,
+ object: Some(
+ Section {
+ prefix: "Section",
+ text: "Bar",
+ },
+ ),
+ keywords: "sec:bar Section (Bar)",
+ },
+ ),
+ Label(
+ LabelData {
+ name: "sec:foo",
+ header: Some(
+ "Section 1 (Foo)",
+ ),
+ footer: None,
+ object: Some(
+ Section {
+ prefix: "Section",
+ text: "Foo",
+ },
+ ),
+ keywords: "sec:foo Section 1 (Foo)",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn theorem_begin() {
+ check(
+ r#"
+%! main.tex
+\newtheorem{lemma}{Lemma}
+\begin{lem
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "lemma",
+ package: "<user>",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn theorem_end() {
+ check(
+ r#"
+%! main.tex
+\newtheorem{lemma}{Lemma}
+\begin{}
+\end{lem
+ |
+ ^^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "lemma",
+ package: "<user>",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn tikz_library_open_brace() {
+ check(
+ r#"
+%! main.tex
+\usepgflibrary{
+ |"#,
+ expect![[r#"
+ [
+ TikzLibrary(
+ "arrows",
+ ),
+ TikzLibrary(
+ "arrows.meta",
+ ),
+ TikzLibrary(
+ "arrows.spaced",
+ ),
+ TikzLibrary(
+ "curvilinear",
+ ),
+ TikzLibrary(
+ "datavisualization.barcharts",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn tikz_library_closed_brace() {
+ check(
+ r#"
+%! main.tex
+\usepgflibrary{}
+ |"#,
+ expect![[r#"
+ [
+ TikzLibrary(
+ "arrows",
+ ),
+ TikzLibrary(
+ "arrows.meta",
+ ),
+ TikzLibrary(
+ "arrows.spaced",
+ ),
+ TikzLibrary(
+ "curvilinear",
+ ),
+ TikzLibrary(
+ "datavisualization.barcharts",
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_user_command() {
+ check(
+ r#"
+%! main.tex
+\foobar
+\fooba
+ |
+ ^^^^^
+\begin{foo}
+\end{foo}
+\begin{fo}"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "foobar",
+ package: "<user>",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_user_environment() {
+ check(
+ r#"
+%! main.tex
+\foobar
+\fooba
+\begin{foo}
+\end{foo}
+\begin{fo}
+ |
+ ^^"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "foo",
+ package: "<user>",
+ },
+ ),
+ Environment(
+ EnvironmentData {
+ name: "filecontents",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_project_resolution_import() {
+ check(
+ r#"
+%! main.tex
+\documentclass{article}
+\import{sub}{sub.tex}
+\lipsu
+ |
+ ^^^^^
+
+%! sub/sub.tex
+\input{child.tex}
+
+%! sub/child.tex
+\usepackage{lipsum}"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "lipsum",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "lipsumexp",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumPar",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumProtect",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumRestoreAll",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_project_resolution_texlabroot() {
+ check(
+ r#"
+%! src/main.tex
+\documentclass{article}
+\include{src/foo}
+\lipsu
+ |
+ ^^^^^
+
+%! src/foo.tex
+\include{src/bar}
+
+%! src/bar.tex
+\usepackage{lipsum}
+
+%! .texlabroot"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "lipsum",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "lipsumexp",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumPar",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumProtect",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ Command(
+ CommandData {
+ name: "LipsumRestoreAll",
+ package: [
+ "lipsum.sty",
+ ],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn issue_857_1() {
+ check(
+ r#"
+%! bug.tex
+\documentclass{article}
+\newcommand{\ö}{foo}
+\newcommand{\öö}{bar}
+\newcommand{\ööabc}{baz}
+\begin{document}
+\ö
+ |
+ ^
+\end{document}
+"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "ö",
+ package: "<user>",
+ },
+ ),
+ Command(
+ CommandData {
+ name: "öö",
+ package: "<user>",
+ },
+ ),
+ Command(
+ CommandData {
+ name: "ööabc",
+ package: "<user>",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn issue_864() {
+ check(
+ r#"
+%! bug.tex
+\documentclass{article}
+\def\あいうえお{}
+\begin{document}
+\あ
+ |
+ ^
+\end{document}"#,
+ expect![[r#"
+ [
+ Command(
+ CommandData {
+ name: "あいうえお",
+ package: "<user>",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn issue_883() {
+ check(
+ r#"
+%! bug.tex
+\begin{doc
+ |
+ ^^^
+% Comment"#,
+ expect![[r#"
+ [
+ Environment(
+ EnvironmentData {
+ name: "document",
+ package: [],
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn issue_885() {
+ check(
+ r#"
+%! main.tex
+\documentclass{book}
+\usepackage{import}
+\begin{document}
+\subincludefrom{part 1}{main}
+\include{part 2/main}
+
+\ref{sec}
+ |
+ ^^^
+\end{document}
+
+%! part 1/main.tex
+\part{1}
+\label{part 1}
+\subimport{chapter 1}{main}
+
+%! part 1/chapter 1/main.tex
+\chapter{1}
+\label{chapter 1}
+\subimport{./}{section 1}
+%\subimport{}{section 1}
+
+%! part 1/chapter 1/section 1.tex
+\section{1}
+\label{section 1}
+
+%! part 2/main.tex
+\part{2}
+\label{part 2}
+\input{part 2/chapter 2/main}
+
+%! part 2/chapter 2/main.tex
+\chapter{2}
+\label{chapter 2}
+\input{part 2/chapter 2/section 2}
+
+%! part 2/chapter 2/section 2.tex
+\section{2}
+\label{section 2}
+"#,
+ expect![[r#"
+ [
+ Label(
+ LabelData {
+ name: "section 1",
+ header: Some(
+ "Section (1)",
+ ),
+ footer: None,
+ object: Some(
+ Section {
+ prefix: "Section",
+ text: "1",
+ },
+ ),
+ keywords: "section 1 Section (1)",
+ },
+ ),
+ Label(
+ LabelData {
+ name: "section 2",
+ header: Some(
+ "Section (2)",
+ ),
+ footer: None,
+ object: Some(
+ Section {
+ prefix: "Section",
+ text: "2",
+ },
+ ),
+ keywords: "section 2 Section (2)",
+ },
+ ),
+ ]
+ "#]],
+ );
+}
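
The tests above use a compact fixture notation instead of real files on disk: a line beginning with `%! name` opens a virtual document, a `|` on the following line marks the cursor position, a run of `^` marks the text range the completion is expected to replace, and the `expect![[...]]` blocks hold snapshots of the resulting completion data. The standalone sketch below shows one plausible way such markers can be parsed; it only illustrates the notation, with hypothetical names, and is not the `check` helper used by the crate.

#[derive(Debug, Default)]
struct Fixture {
    files: Vec<(String, String)>,           // (name, text)
    cursor: Option<(String, usize, usize)>, // (file, line, column) of the `|` marker
    range: Option<(usize, usize)>,          // column span covered by the `^` run
}

fn parse_fixture(input: &str) -> Fixture {
    let mut fx = Fixture::default();
    for raw in input.lines() {
        if let Some(name) = raw.strip_prefix("%! ") {
            // A new virtual document starts here.
            fx.files.push((name.trim().to_string(), String::new()));
        } else if raw.trim_start().starts_with('^') {
            // The `^` run gives the expected replacement range on the marked line.
            let start = raw.find('^').unwrap();
            let end = raw.rfind('^').unwrap() + 1;
            fx.range = Some((start, end));
        } else if let Some(col) = raw.find('|') {
            // The cursor applies to the last line of the current document.
            if let Some((name, text)) = fx.files.last() {
                let line = text.lines().count().saturating_sub(1);
                fx.cursor = Some((name.clone(), line, col));
            }
        } else if let Some((_, text)) = fx.files.last_mut() {
            text.push_str(raw);
            text.push('\n');
        }
    }
    fx
}

fn main() {
    let fx = parse_fixture("%! main.tex\n\\begin{doc\n          |\n          ^^^\n");
    println!("{fx:?}");
}
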
diff --git a/support/texlab/crates/completion/src/util.rs b/support/texlab/crates/completion/src/util.rs
new file mode 100644
index 0000000000..6e64866979
--- /dev/null
+++ b/support/texlab/crates/completion/src/util.rs
@@ -0,0 +1,33 @@
+mod builder;
+pub mod matchers;
+mod patterns;
+
+pub use builder::*;
+pub use patterns::*;
+
+pub fn included_packages<'a>(
+ params: &'a base_db::FeatureParams<'a>,
+) -> impl Iterator<Item = &completion_data::Package<'_>> + 'a {
+ let db = &completion_data::DATABASE;
+ let documents = params.project.documents.iter();
+ let links = documents
+ .filter_map(|document| document.data.as_tex())
+ .flat_map(|data| data.semantics.links.iter());
+
+ links
+ .filter_map(|link| link.package_name())
+ .filter_map(|name| db.find(&name))
+ .chain(std::iter::once(db.kernel()))
+ .flat_map(|pkg| {
+ pkg.references
+ .iter()
+ .filter_map(|name| db.find(name))
+ .chain(std::iter::once(pkg))
+ })
+}
+
+pub struct ProviderContext<'a, 'b> {
+ pub builder: &'b mut CompletionBuilder<'a>,
+ pub params: &'a crate::CompletionParams<'a>,
+ pub cursor: base_db::semantics::Span,
+}
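
`included_packages` walks every document in the current project, collects the package and class links from their semantics, resolves each link against the bundled completion database, always adds the LaTeX kernel, and then expands each hit into its recorded references plus itself. A rough standalone restatement of that expansion step, with a plain `HashMap` standing in for `completion_data::DATABASE` and invented package names, might look like this:

use std::collections::HashMap;

// `direct` are the packages linked from the project, `refs` maps a package to
// the packages it is recorded to load, `kernel` is always included.
fn expand<'a>(
    direct: &[&'a str],
    refs: &HashMap<&'a str, Vec<&'a str>>,
    kernel: &'a str,
) -> Vec<&'a str> {
    direct
        .iter()
        .copied()
        .chain(std::iter::once(kernel))
        .flat_map(|pkg| {
            refs.get(pkg)
                .cloned()
                .unwrap_or_default()
                .into_iter()
                .chain(std::iter::once(pkg))
        })
        .collect()
}

fn main() {
    let mut refs = HashMap::new();
    refs.insert("amsmath.sty", vec!["amstext.sty", "amsbsy.sty"]);
    let direct = ["amsmath.sty"];
    // -> ["amstext.sty", "amsbsy.sty", "amsmath.sty", "latex.ltx"]
    println!("{:?}", expand(&direct, &refs, "latex.ltx"));
}
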
diff --git a/support/texlab/crates/completion/src/util/builder.rs b/support/texlab/crates/completion/src/util/builder.rs
new file mode 100644
index 0000000000..4ae0818e58
--- /dev/null
+++ b/support/texlab/crates/completion/src/util/builder.rs
@@ -0,0 +1,43 @@
+use base_db::{MatchingAlgo, Workspace};
+use fuzzy_matcher::skim::SkimMatcherV2;
+
+use crate::{CompletionItem, CompletionResult};
+
+use super::matchers::{self, Matcher};
+
+pub struct CompletionBuilder<'a> {
+ pub matcher: Box<dyn Matcher>,
+ pub items: Vec<CompletionItem<'a>>,
+}
+
+impl<'a> From<&Workspace> for CompletionBuilder<'a> {
+ fn from(workspace: &Workspace) -> Self {
+ let matcher: Box<dyn Matcher> = match workspace.config().completion.matcher {
+ MatchingAlgo::Skim => Box::<SkimMatcherV2>::default(),
+ MatchingAlgo::SkimIgnoreCase => Box::new(SkimMatcherV2::default().ignore_case()),
+ MatchingAlgo::Prefix => Box::new(matchers::Prefix),
+ MatchingAlgo::PrefixIgnoreCase => Box::new(matchers::PrefixIgnoreCase),
+ };
+
+ Self {
+ matcher,
+ items: Vec::new(),
+ }
+ }
+}
+
+impl<'a> CompletionBuilder<'a> {
+ pub fn finish(mut self) -> CompletionResult<'a> {
+ self.items.sort_by(|a, b| {
+ b.preselect
+ .cmp(&a.preselect)
+ .then_with(|| b.score.cmp(&a.score))
+ .then_with(|| a.data.label().cmp(b.data.label()))
+ });
+
+ self.items.dedup_by(|a, b| a.data.label() == b.data.label());
+ self.items.truncate(crate::LIMIT);
+ let Self { items, .. } = self;
+ CompletionResult { items }
+ }
+}
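
`finish` orders candidates by preselection first, then by descending matcher score, then by label, drops items that share a label, and caps the list at `LIMIT` (50). A minimal sketch of the same comparator on plain tuples, just to make the tie-breaking visible:

fn main() {
    // (preselect, score, label)
    let mut items = vec![
        (false, 10, "beta"),
        (true, 1, "alpha"),
        (false, 10, "alpha"),
        (false, 10, "alpha"), // duplicate label, removed by dedup
    ];

    items.sort_by(|a, b| {
        b.0.cmp(&a.0)                    // preselected items first
            .then_with(|| b.1.cmp(&a.1)) // then higher scores
            .then_with(|| a.2.cmp(b.2))  // then labels in ascending order
    });
    items.dedup_by(|a, b| a.2 == b.2);
    items.truncate(50);

    // -> [(true, 1, "alpha"), (false, 10, "beta")]
    println!("{items:?}");
}
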
diff --git a/support/texlab/crates/completion/src/util/matchers.rs b/support/texlab/crates/completion/src/util/matchers.rs
new file mode 100644
index 0000000000..fb53d1c032
--- /dev/null
+++ b/support/texlab/crates/completion/src/util/matchers.rs
@@ -0,0 +1,42 @@
+pub trait Matcher: Send + Sync {
+ fn score(&self, choice: &str, pattern: &str) -> Option<i32>;
+}
+
+impl<T: fuzzy_matcher::FuzzyMatcher> Matcher for T {
+ fn score(&self, choice: &str, pattern: &str) -> Option<i32> {
+ fuzzy_matcher::FuzzyMatcher::fuzzy_match(self, choice, pattern)
+ }
+}
+
+#[derive(Debug)]
+pub struct Prefix;
+
+impl Matcher for Prefix {
+ fn score(&self, choice: &str, pattern: &str) -> Option<i32> {
+ if choice.starts_with(pattern) {
+ Some(-(choice.len() as i32))
+ } else {
+ None
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct PrefixIgnoreCase;
+
+impl Matcher for PrefixIgnoreCase {
+ fn score(&self, choice: &str, pattern: &str) -> Option<i32> {
+ if pattern.len() > choice.len() {
+ return None;
+ }
+
+ let mut cs = choice.chars();
+ for p in pattern.chars() {
+ if !cs.next().unwrap().eq_ignore_ascii_case(&p) {
+ return None;
+ }
+ }
+
+ Some(-(choice.len() as i32))
+ }
+}
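
Both prefix matchers score a hit as the negated candidate length, so among all candidates sharing the typed prefix the shortest one sorts first once `CompletionBuilder::finish` compares scores; anything that is not a prefix match is filtered out entirely. A small sketch with hypothetical inputs:

// Same scoring idea as `Prefix` above, reproduced only to show the expected values.
fn prefix_score(choice: &str, pattern: &str) -> Option<i32> {
    choice.starts_with(pattern).then(|| -(choice.len() as i32))
}

fn main() {
    assert_eq!(prefix_score("lipsum", "lip"), Some(-6));
    assert_eq!(prefix_score("lipsumexp", "lip"), Some(-9)); // ranks below "lipsum"
    assert_eq!(prefix_score("listings", "lip"), None);      // not a prefix match
}
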
diff --git a/support/texlab/crates/completion/src/util/patterns.rs b/support/texlab/crates/completion/src/util/patterns.rs
new file mode 100644
index 0000000000..60bbaa526f
--- /dev/null
+++ b/support/texlab/crates/completion/src/util/patterns.rs
@@ -0,0 +1,75 @@
+use base_db::semantics::Span;
+use rowan::{ast::AstNode, TextRange, TextSize};
+use syntax::latex;
+
+use crate::CompletionParams;
+
+pub fn find_curly_group_word(params: &CompletionParams) -> Option<(Span, latex::CurlyGroupWord)> {
+ let offset = params.offset;
+ let data = params.feature.document.data.as_tex()?;
+ let root = data.root_node();
+ let tokens = root.token_at_offset(offset);
+ let token = tokens
+ .clone()
+ .find(|token| token.kind() == latex::WORD)
+ .or_else(|| tokens.left_biased())?;
+
+ let key = latex::Key::cast(token.parent()?);
+
+ let group = key
+ .as_ref()
+ .and_then(|key| key.syntax().parent())
+ .unwrap_or(token.parent()?);
+
+ let group =
+ latex::CurlyGroupWord::cast(group).filter(|group| is_inside_latex_curly(group, offset))?;
+
+ let span = key.map_or_else(|| Span::empty(offset), |key| Span::from(&key));
+ Some((span, group))
+}
+
+pub fn find_curly_group_word_list(
+ params: &CompletionParams,
+) -> Option<(Span, latex::CurlyGroupWordList)> {
+ let offset = params.offset;
+ let data = params.feature.document.data.as_tex()?;
+ let root = data.root_node();
+ let tokens = root.token_at_offset(offset);
+ let token = tokens
+ .clone()
+ .find(|token| token.kind() == latex::WORD)
+ .or_else(|| tokens.left_biased())?;
+
+ let key = latex::Key::cast(token.parent()?);
+
+ let group = key
+ .as_ref()
+ .and_then(|key| key.syntax().parent())
+ .unwrap_or(token.parent()?);
+
+ let group = latex::CurlyGroupWordList::cast(group)
+ .filter(|group| is_inside_latex_curly(group, offset))?;
+
+ let span = key.map_or_else(
+ || Span::empty(offset),
+ |key| {
+ let range = if group
+ .syntax()
+ .last_token()
+ .map_or(false, |tok| tok.kind() != latex::R_CURLY)
+ {
+ TextRange::new(latex::small_range(&key).start(), token.text_range().end())
+ } else {
+ latex::small_range(&key)
+ };
+
+ Span::new(token.text().into(), range)
+ },
+ );
+
+ Some((span, group))
+}
+
+pub fn is_inside_latex_curly(group: &impl latex::HasCurly, offset: TextSize) -> bool {
+ latex::small_range(group).contains(offset) || group.right_curly().is_none()
+}
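
`is_inside_latex_curly` treats the cursor as inside the group either when the offset falls within the group's text range or when the group has no closing brace yet, which is what keeps completion working on half-typed input such as `\begin{doc`. Restated as a sketch with a plain `Range<u32>` in place of rowan's `TextRange`:

fn is_inside_curly(range: std::ops::Range<u32>, has_right_curly: bool, offset: u32) -> bool {
    range.contains(&offset) || !has_right_curly
}

fn main() {
    // `\begin{document}` with the curly group spanning offsets 6..16:
    assert!(is_inside_curly(6..16, true, 10));
    assert!(!is_inside_curly(6..16, true, 20));
    // Unterminated `\begin{doc`: any offset still counts as inside.
    assert!(is_inside_curly(6..10, false, 42));
}
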