// path: root/support/texlab/crates/diagnostics/src/citations.rs
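//! Diagnostics for citations: undefined citations, unused bibliography
//! entries, and duplicate entry keys.
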
use std::borrow::Cow;

use base_db::{
    semantics::{bib::Entry, tex::Citation},
    util::queries::{self, Object},
    Document, Project, Workspace,
};
use rustc_hash::FxHashSet;

use crate::{
    types::{BibError, Diagnostic, DiagnosticData, TexError},
    DiagnosticBuilder, DiagnosticSource,
};

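/// Bibliographies with more entries than this are skipped when checking for
/// unused entries.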
const MAX_UNUSED_ENTRIES: usize = 1000;

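/// A [`DiagnosticSource`] that reports citation-related problems:
/// undefined citations, unused entries, and duplicate entry keys.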
#[derive(Default)]
pub struct CitationErrors;

impl DiagnosticSource for CitationErrors {
    fn publish<'db>(
        &'db mut self,
        workspace: &'db Workspace,
        builder: &mut DiagnosticBuilder<'db>,
    ) {
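        // Diagnose each document against the project that contains it.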
        for document in workspace.iter() {
            let project = workspace.project(document);
            detect_undefined_citations(&project, document, builder);
            detect_unused_entries(&project, document, builder);
        }

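        // Duplicate keys are detected across the whole workspace rather than per project.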
        detect_duplicate_entries(workspace, builder);
    }
}

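/// Reports a [`TexError::UndefinedCitation`] for every citation in `document`
/// whose key does not match any entry in the project's bibliographies.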
fn detect_undefined_citations<'db>(
    project: &Project<'db>,
    document: &'db Document,
    builder: &mut DiagnosticBuilder<'db>,
) {
    let Some(data) = document.data.as_tex() else {
        return;
    };

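    // Collect the keys of every bibliography entry visible from this project.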
    let entries: FxHashSet<&str> = Entry::find_all(project)
        .map(|(_, entry)| entry.name_text())
        .collect();

    for citation in &data.semantics.citations {
        let name = citation.name_text();
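        // A wildcard citation such as `\nocite{*}` refers to the entire
        // bibliography, so `*` is never reported as undefined.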
        if name != "*" && !entries.contains(name) {
            let diagnostic = Diagnostic {
                range: citation.name.range,
                data: DiagnosticData::Tex(TexError::UndefinedCitation),
            };

            builder.push(&document.uri, Cow::Owned(diagnostic));
        }
    }
}

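/// Reports a [`BibError::UnusedEntry`] for every entry in `document` that is
/// never cited anywhere in the project.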
fn detect_unused_entries<'db>(
    project: &Project<'db>,
    document: &'db Document,
    builder: &mut DiagnosticBuilder<'db>,
) {
    let Some(data) = document.data.as_bib() else {
        return;
    };

    // If this is a huge bibliography, then don't bother checking for unused entries.
    if data.semantics.entries.len() > MAX_UNUSED_ENTRIES {
        return;
    }

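    // Collect the keys of every citation made anywhere in the project.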
    let citations: FxHashSet<&str> = Citation::find_all(project)
        .map(|(_, citation)| citation.name_text())
        .collect();

    for entry in &data.semantics.entries {
        if !citations.contains(entry.name.text.as_str()) {
            let diagnostic = Diagnostic {
                range: entry.name.range,
                data: DiagnosticData::Bib(BibError::UnusedEntry),
            };

            builder.push(&document.uri, Cow::Owned(diagnostic));
        }
    }
}

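/// Reports a [`BibError::DuplicateEntry`] for every entry whose key is defined
/// more than once in the workspace.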
fn detect_duplicate_entries<'db>(workspace: &'db Workspace, builder: &mut DiagnosticBuilder<'db>) {
    for conflict in queries::Conflict::find_all::<Entry>(workspace) {
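        // Attach the locations of the other definitions of the same key so
        // the client can link to them.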
        let others = conflict
            .rest
            .iter()
            .map(|location| (location.document.uri.clone(), location.range))
            .collect();

        let diagnostic = Diagnostic {
            range: conflict.main.range,
            data: DiagnosticData::Bib(BibError::DuplicateEntry(others)),
        };

        builder.push(&conflict.main.document.uri, Cow::Owned(diagnostic));
    }
}