Diffstat (limited to 'support/texlab/src')
 -rw-r--r--  support/texlab/src/citation.rs                  |  33
 -rw-r--r--  support/texlab/src/citation/name.rs             | 279
 -rw-r--r--  support/texlab/src/citation/name/parser.lalrpop | 150
 -rw-r--r--  support/texlab/src/citation/ris.rs              |  24
 -rw-r--r--  support/texlab/src/component_db.rs              |  13
 -rw-r--r--  support/texlab/src/features/completion.rs       |  11
 -rw-r--r--  support/texlab/src/features/hover/citation.rs   |   4
 -rw-r--r--  support/texlab/src/server.rs                    |  92
 8 files changed, 118 insertions(+), 488 deletions(-)
diff --git a/support/texlab/src/citation.rs b/support/texlab/src/citation.rs
index daa4b60997..96a5a2eccf 100644
--- a/support/texlab/src/citation.rs
+++ b/support/texlab/src/citation.rs
@@ -1,12 +1,11 @@
mod bibutils;
-mod name;
mod ris;
use std::sync::Arc;
-use citeproc::{prelude::SupportedFormat, ClusterPosition, Processor};
+use citeproc::{prelude::SupportedFormat, ClusterPosition, InitOptions, Processor};
use citeproc_db::PredefinedLocales;
-use citeproc_io::{Cite, Cluster, Reference};
+use citeproc_io::{Cite, Reference};
use lsp_types::{MarkupContent, MarkupKind};
use once_cell::sync::Lazy;
use regex::Regex;
@@ -67,7 +66,9 @@ fn convert_to_ris(root: &bibtex::SyntaxNode, key: &str) -> Option<RisReference>
let entry = root
.children()
.filter_map(bibtex::Entry::cast)
- .find(|entry| entry.key().map(|key| key.to_string()).as_deref() == Some(key))?;
+ .find(|entry| entry.key().map(|key| key.to_string()).as_deref() == Some(key))
+ .filter(|entry| entry.fields().next().is_some())?;
+
bib_code.push_str(&entry.syntax().to_string());
bib_code = bib_code.replace("\\hypen", "-");
@@ -88,22 +89,24 @@ fn get_doi_url_markdown(ris_reference: &RisReference) -> Option<String> {
}
fn generate_bibliography(reference: Reference) -> Option<String> {
- let locales = Arc::new(PredefinedLocales::bundled_en_us());
- let mut processor = Processor::new(APA_STYLE, locales, false, SupportedFormat::Html).unwrap();
+ let mut processor = Processor::new(InitOptions {
+ style: APA_STYLE,
+ format: SupportedFormat::Html,
+ fetcher: Some(Arc::new(PredefinedLocales::bundled_en_us())),
+ ..InitOptions::default()
+ })
+ .ok()?;
let cite = Cite::basic(&reference.id);
- let cluster = Cluster {
- id: 1,
- cites: vec![cite],
- };
+ let cluster_id = processor.new_cluster("texlab");
processor.insert_reference(reference);
- processor.init_clusters(vec![cluster]);
+ processor.insert_cites(cluster_id, &[cite]);
processor
.set_cluster_order(&[ClusterPosition {
- id: 1,
+ id: cluster_id,
note: Some(1),
}])
.unwrap();
- processor.get_bibliography().pop()
+ Some(processor.get_bibliography().pop()?.value.to_string())
}
#[cfg(test)]
@@ -126,7 +129,7 @@ mod tests {
let expected_md = MarkupContent {
kind: MarkupKind::Markdown,
- value: "Bar, F. (2020). *Baz Qux*.".into(),
+ value: "Bar, Foo. (2020). *Baz Qux*.".into(),
};
assert_eq!(actual_md, expected_md);
@@ -148,7 +151,7 @@ mod tests {
let expected_md = MarkupContent {
kind: MarkupKind::Markdown,
- value: "Bar, F. (2020). *Baz Qux*.".into(),
+ value: "Bar, Foo. (2020). *Baz Qux*.".into(),
};
assert_eq!(actual_md, expected_md);
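For reference, here is the migrated citeproc setup from the hunk above in isolation — a minimal sketch assuming the citeproc, citeproc-db, and citeproc-io versions this change targets; the function name render_bibliography and the "example" cluster label are illustrative:

    // Build a Processor via InitOptions and render a single reference.
    use std::sync::Arc;

    use citeproc::{prelude::SupportedFormat, ClusterPosition, InitOptions, Processor};
    use citeproc_db::PredefinedLocales;
    use citeproc_io::{Cite, Reference};

    fn render_bibliography(style: &str, reference: Reference) -> Option<String> {
        let mut processor = Processor::new(InitOptions {
            style,
            format: SupportedFormat::Html,
            fetcher: Some(Arc::new(PredefinedLocales::bundled_en_us())),
            ..InitOptions::default()
        })
        .ok()?;

        // Clusters are now created by name instead of being built by hand.
        let cite = Cite::basic(&reference.id);
        let cluster_id = processor.new_cluster("example");
        processor.insert_reference(reference);
        processor.insert_cites(cluster_id, &[cite]);
        processor
            .set_cluster_order(&[ClusterPosition {
                id: cluster_id,
                note: Some(1),
            }])
            .ok()?;

        // Bibliography entries now expose their markup through a `value` field.
        Some(processor.get_bibliography().pop()?.value.to_string())
    }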
diff --git a/support/texlab/src/citation/name.rs b/support/texlab/src/citation/name.rs
deleted file mode 100644
index 9e22383a7d..0000000000
--- a/support/texlab/src/citation/name.rs
+++ /dev/null
@@ -1,279 +0,0 @@
-// Ported from: https://github.com/michel-kraemer/citeproc-java/blob/master/citeproc-java/grammars/InternalName.g4
-// Michel Kraemer
-// Apache License 2.0
-use citeproc_io::Name;
-
-use parser::NamesParser;
-
-mod parser {
- #![allow(warnings)]
- include!(concat!(env!("OUT_DIR"), "/citation/name/parser.rs"));
-}
-
-pub fn parse(input: &str) -> Vec<Name> {
- let parser = NamesParser::new();
- parser.parse(input).unwrap_or_else(|_| {
- vec![Name::Literal {
- literal: input.into(),
- }]
- })
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use citeproc_io::PersonName;
-
- #[test]
- fn test_family_only() {
- let name = Name::Person(PersonName {
- family: Some("Thompson".into()),
- given: None,
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Thompson"), vec![name]);
- }
-
- #[test]
- fn test_simple() {
- let name = Name::Person(PersonName {
- family: Some("Thompson".into()),
- given: Some("Ken".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Ken Thompson"), vec![name]);
- }
-
- #[test]
- fn test_middle_name() {
- let name = Name::Person(PersonName {
- family: Some("Ritchie".into()),
- given: Some("Dennis M.".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Dennis M. Ritchie"), vec![name]);
- }
-
- #[test]
- fn test_initials() {
- let name = Name::Person(PersonName {
- family: Some("Johnson".into()),
- given: Some("S. C.".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("S. C. Johnson"), vec![name]);
- }
-
- #[test]
- fn test_non_dropping_particle() {
- let name = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: Some("Michael".into()),
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Michael van Gerwen"), vec![name]);
- }
-
- #[test]
- fn test_non_dropping_particle_family_only() {
- let name = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: None,
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("van Gerwen"), vec![name]);
- }
-
- #[test]
- fn test_comma() {
- let name = Name::Person(PersonName {
- family: Some("Thompson".into()),
- given: Some("Ken".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Thompson, Ken"), vec![name]);
- }
-
- #[test]
- fn test_comma_junior() {
- let name = Name::Person(PersonName {
- family: Some("Friedman".into()),
- given: Some("George".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: Some("Jr.".into()),
- });
- assert_eq!(parse("Friedman, Jr., George"), vec![name]);
- }
-
- #[test]
- fn test_comma_no_junior() {
- let name = Name::Person(PersonName {
- family: Some("Familya Familyb".into()),
- given: Some("Given".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Familya Familyb, Given"), vec![name]);
- }
-
- #[test]
- fn test_comma_initials() {
- let name = Name::Person(PersonName {
- family: Some("Ritchie".into()),
- given: Some("Dennis M.".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Ritchie, Dennis M."), vec![name]);
- }
-
- #[test]
- fn test_comma_non_dropping_particle() {
- let name = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: Some("Michael".into()),
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("van Gerwen, Michael"), vec![name]);
- }
-
- #[test]
- fn test_comma_non_dropping_particles() {
- let name = Name::Person(PersonName {
- family: Some("Voort".into()),
- given: Some("Vincent".into()),
- non_dropping_particle: Some("Van der".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(parse("Van der Voort, Vincent"), vec![name]);
- }
-
- #[test]
- fn test_and() {
- let name1 = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: Some("Michael".into()),
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- let name2 = Name::Person(PersonName {
- family: Some("Voort".into()),
- given: Some("Vincent".into()),
- non_dropping_particle: Some("van der".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(
- parse("Michael van Gerwen and Vincent van der Voort"),
- vec![name1, name2]
- );
- }
-
- #[test]
- fn test_and_comma1() {
- let name1 = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: Some("Michael".into()),
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- let name2 = Name::Person(PersonName {
- family: Some("Voort".into()),
- given: Some("Vincent".into()),
- non_dropping_particle: Some("Van der".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(
- parse("van Gerwen, Michael and Van der Voort, Vincent"),
- vec![name1, name2]
- );
- }
-
- #[test]
- fn test_and_comma2() {
- let name1 = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: Some("Michael".into()),
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- let name2 = Name::Person(PersonName {
- family: Some("Voort".into()),
- given: Some("Vincent".into()),
- non_dropping_particle: Some("van der".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(
- parse("van Gerwen, Michael and van der Voort, Vincent"),
- vec![name1, name2]
- );
- }
-
- #[test]
- fn test_and_comma_mix() {
- let name1 = Name::Person(PersonName {
- family: Some("Gerwen".into()),
- given: Some("Michael".into()),
- non_dropping_particle: Some("van".into()),
- dropping_particle: None,
- suffix: None,
- });
- let name2 = Name::Person(PersonName {
- family: Some("Voort".into()),
- given: Some("Vincent".into()),
- non_dropping_particle: Some("van der".into()),
- dropping_particle: None,
- suffix: None,
- });
- assert_eq!(
- parse("van Gerwen, Michael and Vincent van der Voort"),
- vec![name1, name2]
- );
- }
-
- #[test]
- fn test_junior() {
- let name = Name::Person(PersonName {
- family: Some("Friedman".into()),
- given: Some("George".into()),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: Some("Jr.".into()),
- });
- assert_eq!(parse("George Friedman, Jr."), vec![name]);
- }
-
- #[test]
- fn test_non_parseable() {
- let literal = "Jerry Peek and Tim O'Reilly and Mike Loukides and other authors of the Nutshell handbooks";
- let name = Name::Literal {
- literal: literal.into(),
- };
- assert_eq!(parse(literal), vec![name]);
- }
-}
diff --git a/support/texlab/src/citation/name/parser.lalrpop b/support/texlab/src/citation/name/parser.lalrpop
deleted file mode 100644
index e7bce8a302..0000000000
--- a/support/texlab/src/citation/name/parser.lalrpop
+++ /dev/null
@@ -1,150 +0,0 @@
-// Ported from: https://github.com/michel-kraemer/citeproc-java/blob/master/citeproc-java/grammars/InternalName.g4
-// Michel Kraemer
-// Apache License 2.0
-use citeproc_io::{Name, PersonName};
-use itertools::Itertools;
-
-grammar;
-
-pub Names: Vec<Name> = And<Name>;
-
-And<T>: Vec<T> = {
- <v:(<T> "and")*> <e:T?> => match e {
- None => v,
- Some(e) => {
- let mut v = v;
- v.push(e);
- v
- }
- }
-};
-
-Name: Name = {
- <np1:UWord+> <np2:Von> <fam:Last> "," <fst:First> => {
- let (fst1, fst2) = fst;
- let name = PersonName {
- family: Some(fam.join(" ")),
- given: fst2,
- non_dropping_particle: Some(format!("{} {}", np1.join(" "), np2)),
- dropping_particle: None,
- suffix: fst1,
- };
- Name::Person(name)
- },
- <np:Von> <fam:Last> "," <fst:First> => {
- let (fst1, fst2) = fst;
- let name = PersonName {
- family: Some(fam.join(" ")),
- given: fst2,
- non_dropping_particle: Some(np),
- dropping_particle: None,
- suffix: fst1,
- };
- Name::Person(name)
- },
- <last:Last> "," <fst:First> => {
- let (fst1, fst2) = fst;
- let first_text = format!(
- "{} {}",
- fst1.as_ref().map(|s| s.as_str()).unwrap_or_default(),
- fst2.as_ref().map(|s| s.as_str()).unwrap_or_default(),
- ).trim().to_owned();
-
- let name = if first_text == "Jr." {
- if last.len() == 1 {
- PersonName {
- family: Some(last.join(" ")),
- given: None,
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: Some(first_text),
- }
- } else {
- let mut last = last.into_iter();
- let given = last.next().map(ToOwned::to_owned);
- PersonName {
- family: Some(last.join(" ")),
- given,
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: Some(first_text),
- }
- }
- } else {
- PersonName {
- family: Some(last.join(" ")),
- given: fst2,
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: fst1,
- }
- };
- Name::Person(name)
- },
- <np:Von> <fam:Last> => {
- let name = PersonName {
- family: Some(fam.join(" ")),
- given: None,
- non_dropping_particle: Some(np),
- dropping_particle: None,
- suffix: None,
- };
- Name::Person(name)
- },
- <giv:UWord+> <np:Von> <fam:Last> => {
- let name = PersonName {
- family: Some(fam.join(" ")),
- given: Some(giv.join(" ")),
- non_dropping_particle: Some(np),
- dropping_particle: None,
- suffix: None,
- };
- Name::Person(name)
- },
- <giv:UWord+> <fam:Word> => {
- let name = PersonName {
- family: Some(fam.into()),
- given: Some(giv.join(" ")),
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- };
- Name::Person(name)
- },
- <fam:Word> => {
- let name = PersonName {
- family: Some(fam.into()),
- given: None,
- non_dropping_particle: None,
- dropping_particle: None,
- suffix: None,
- };
- Name::Person(name)
- },
-};
-
-First: (Option<String>, Option<String>) = {
- <a:Word*> "," <b:Word*> => (Some(a.join(" ")), Some(b.join(" "))),
- <a:Word*> => (None, Some(a.join(" "))),
-};
-
-Last: Vec<&'input str> = {
- LWord => vec![(<>)],
- UWord+ => (<>),
-};
-
-Von: String = {
- LWord => String::from(<>),
- <a:Von> <b:LWord> => format!("{} {}", a, b),
- <a:Von> <b:UWord+> <c:LWord> => format!("{} {} {}", a, b.join(" "), c),
-};
-
-Word: &'input str = {
- UWord => (<>),
- LWord => (<>),
-};
-
-
-UWord: &'input str = r"[A-Z\u00C0-\uFFFF(?][A-Z\u00C0-\uFFFF(?a-z\-)&/.]+" => (<>);
-
-LWord: &'input str = r"[a-z\-)&/.][A-Z\u00C0-\uFFFF(?a-z\-)&/.]+" => (<>);
diff --git a/support/texlab/src/citation/ris.rs b/support/texlab/src/citation/ris.rs
index 5b666160a3..041fb654a0 100644
--- a/support/texlab/src/citation/ris.rs
+++ b/support/texlab/src/citation/ris.rs
@@ -1,13 +1,11 @@
// Ported from: https://github.com/michel-kraemer/citeproc-java/tree/master/citeproc-java/templates
// Michel Kraemer
// Apache License 2.0
-use citeproc_io::{Date, DateOrRange, Name, NumericValue, Reference};
+use citeproc_io::{unicode::is_latin_cyrillic, Date, DateOrRange, Name, NumberLike, Reference};
use csl::*;
use fnv::FnvHashMap;
use serde::{Deserialize, Serialize};
-use super::name;
-
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum RisType {
@@ -303,7 +301,7 @@ impl Into<Reference> for RisReference {
let csl_type = self.ty.expect("RIS type is missing").csl();
let mut date: FnvHashMap<DateVariable, DateOrRange> = FnvHashMap::default();
let mut name: FnvHashMap<NameVariable, Vec<Name>> = FnvHashMap::default();
- let mut number: FnvHashMap<NumberVariable, NumericValue> = FnvHashMap::default();
+ let mut number: FnvHashMap<NumberVariable, NumberLike> = FnvHashMap::default();
let mut ordinary: FnvHashMap<Variable, String> = FnvHashMap::default();
if let Some(access_date) = self.access_date {
@@ -389,7 +387,7 @@ impl Into<Reference> for RisReference {
(Some(start_page), Some(end_page)) => {
number.insert(
NumberVariable::Page,
- NumericValue::Str(format!("{}-{}", start_page, end_page)),
+ NumberLike::Str(format!("{}-{}", start_page, end_page).into()),
);
}
(Some(page), None) | (None, Some(page)) => {
@@ -434,24 +432,30 @@ impl Into<Reference> for RisReference {
}
}
-fn parse_number(value: String) -> NumericValue {
+fn parse_number(value: String) -> NumberLike {
match value.parse() {
- Ok(value) => NumericValue::num(value),
- Err(_) => NumericValue::Str(value),
+ Ok(value) => NumberLike::Num(value),
+ Err(_) => NumberLike::Str(value.into()),
}
}
fn parse_authors(authors: Vec<String>) -> Vec<Name> {
authors
.into_iter()
- .flat_map(|author| name::parse(&author))
+ .map(|author| Name::Literal {
+ is_latin_cyrillic: is_latin_cyrillic(&author),
+ literal: author.into(),
+ })
.collect()
}
fn parse_date_or_range(value: String) -> DateOrRange {
parse_date(&value)
.map(DateOrRange::Single)
- .unwrap_or_else(|| DateOrRange::Literal(value))
+ .unwrap_or_else(|| DateOrRange::Literal {
+ literal: value.into(),
+ circa: false,
+ })
}
fn parse_date(value: &str) -> Option<Date> {
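The hunk above also tracks renamed citeproc-io types (NumericValue becomes NumberLike, DateOrRange::Literal becomes a struct variant) and drops the hand-written name grammar in favour of literal names. A standalone sketch of the new shapes, assuming the crate version this change targets (the helper names mirror those in ris.rs):

    use citeproc_io::{unicode::is_latin_cyrillic, DateOrRange, Name, NumberLike};

    // Numbers are stored as `NumberLike` instead of `NumericValue`.
    fn parse_number(value: String) -> NumberLike {
        match value.parse() {
            Ok(value) => NumberLike::Num(value),
            Err(_) => NumberLike::Str(value.into()),
        }
    }

    // With the name parser removed, authors become literal names and
    // citeproc is left to format them.
    fn parse_author(author: String) -> Name {
        Name::Literal {
            is_latin_cyrillic: is_latin_cyrillic(&author),
            literal: author.into(),
        }
    }

    // `DateOrRange::Literal` is now a struct variant carrying a `circa` flag.
    fn fallback_date(value: String) -> DateOrRange {
        DateOrRange::Literal {
            literal: value.into(),
            circa: false,
        }
    }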
diff --git a/support/texlab/src/component_db.rs b/support/texlab/src/component_db.rs
index a0c728a7ae..7676feff5c 100644
--- a/support/texlab/src/component_db.rs
+++ b/support/texlab/src/component_db.rs
@@ -1,3 +1,6 @@
+use std::io::Read;
+
+use flate2::read::GzDecoder;
use itertools::Itertools;
use lsp_types::{MarkupContent, MarkupKind};
use once_cell::sync::Lazy;
@@ -131,7 +134,11 @@ pub struct ComponentMetadata {
pub description: Option<String>,
}
-const JSON: &str = include_str!("../data/components.json");
+const JSON_GZ: &[u8] = include_bytes!("../data/components.json.gz");
-pub static COMPONENT_DATABASE: Lazy<ComponentDatabase> =
- Lazy::new(|| serde_json::from_str(JSON).unwrap());
+pub static COMPONENT_DATABASE: Lazy<ComponentDatabase> = Lazy::new(|| {
+ let mut decoder = GzDecoder::new(JSON_GZ);
+ let mut buf = String::new();
+ decoder.read_to_string(&mut buf).unwrap();
+ serde_json::from_str(&buf).unwrap()
+});
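The component database is now embedded as gzip-compressed bytes and inflated on first use. A self-contained sketch of that pattern (DATABASE and the serde_json::Value target type stand in for texlab's COMPONENT_DATABASE / ComponentDatabase):

    use std::io::Read;

    use flate2::read::GzDecoder;
    use once_cell::sync::Lazy;

    // Compressed JSON compiled into the binary.
    const JSON_GZ: &[u8] = include_bytes!("../data/components.json.gz");

    static DATABASE: Lazy<serde_json::Value> = Lazy::new(|| {
        // Inflate the embedded bytes, then parse the JSON once, lazily.
        let mut decoder = GzDecoder::new(JSON_GZ);
        let mut buf = String::new();
        decoder
            .read_to_string(&mut buf)
            .expect("embedded database is valid gzip");
        serde_json::from_str(&buf).expect("embedded database is valid JSON")
    });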
diff --git a/support/texlab/src/features/completion.rs b/support/texlab/src/features/completion.rs
index 29559bd808..f9ad8554cd 100644
--- a/support/texlab/src/features/completion.rs
+++ b/support/texlab/src/features/completion.rs
@@ -22,6 +22,7 @@ mod util;
use std::borrow::Cow;
use cancellation::CancellationToken;
+use cstree::TextSize;
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use lsp_types::{
CompletionItem, CompletionList, CompletionParams, CompletionTextEdit, Documentation,
@@ -147,7 +148,15 @@ fn dedup(items: Vec<InternalCompletionItem>) -> Vec<InternalCompletionItem> {
fn score(context: &CursorContext<CompletionParams>, items: &mut Vec<InternalCompletionItem>) {
let pattern: Cow<str> = match &context.cursor {
- Cursor::Latex(token) if token.kind().is_command_name() => token.text().trim_end().into(),
+ Cursor::Latex(token) if token.kind().is_command_name() => {
+ if token.text_range().start() + TextSize::from(1) == context.offset {
+ // Handle cases similar to this one correctly:
+ // $\|$ % (| is the cursor)
+ "\\".into()
+ } else {
+ token.text().trim_end().into()
+ }
+ }
Cursor::Latex(token) if token.kind() == latex::WORD => {
if let Some(key) = latex::Key::cast(token.parent()) {
key.to_string().into()
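The completion change special-cases a cursor that sits directly after the backslash of a command name (as in `$\|$` with `|` marking the cursor), so the matcher scores against "\\" rather than the neighbouring command text. The check in isolation, as a small sketch (the function name is illustrative; TextRange and TextSize are the cstree re-exports already used in this crate):

    use cstree::{TextRange, TextSize};

    // True when the cursor is exactly one character past the token start,
    // i.e. right after the leading `\` of a command name.
    fn cursor_is_after_backslash(token_range: TextRange, offset: TextSize) -> bool {
        token_range.start() + TextSize::from(1) == offset
    }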
diff --git a/support/texlab/src/features/hover/citation.rs b/support/texlab/src/features/hover/citation.rs
index 1fd6d6c06b..c097a922f7 100644
--- a/support/texlab/src/features/hover/citation.rs
+++ b/support/texlab/src/features/hover/citation.rs
@@ -95,7 +95,7 @@ mod tests {
let expected_hover = Hover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
- value: "Bar, F. (1337). *Baz Qux*.".into(),
+ value: "Bar, Foo. (1337). *Baz Qux*.".into(),
}),
range: Some(Range::new_simple(1, 6, 1, 9)),
};
@@ -124,7 +124,7 @@ mod tests {
let expected_hover = Hover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
- value: "Bar, F. (1337). *Baz Qux*.".into(),
+ value: "Bar, Foo. (1337). *Baz Qux*.".into(),
}),
range: Some(Range::new_simple(0, 9, 0, 12)),
};
diff --git a/support/texlab/src/server.rs b/support/texlab/src/server.rs
index 6806e5f8e5..000118a9a2 100644
--- a/support/texlab/src/server.rs
+++ b/support/texlab/src/server.rs
@@ -42,7 +42,7 @@ use crate::{
FeatureRequest, ForwardSearchResult,
},
req_queue::{IncomingData, ReqQueue},
- DocumentLanguage, ServerContext, Uri, Workspace, WorkspaceSource,
+ Document, DocumentLanguage, LineIndexExt, ServerContext, Uri, Workspace, WorkspaceSource,
};
pub struct Server {
@@ -92,7 +92,7 @@ impl Server {
text_document_sync: Some(TextDocumentSyncCapability::Options(
TextDocumentSyncOptions {
open_close: Some(true),
- change: Some(TextDocumentSyncKind::Full),
+ change: Some(TextDocumentSyncKind::Incremental),
will_save: None,
will_save_wait_until: None,
save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
@@ -176,6 +176,7 @@ impl Server {
let req_queue = Arc::clone(&self.req_queue);
let sender = self.connection.sender.clone();
let context = Arc::clone(&self.context);
+ let workspace = Arc::clone(&self.workspace);
self.pool.execute(move || {
register_config_capability(&req_queue, &sender, &context.client_capabilities);
pull_config(
@@ -184,6 +185,15 @@ impl Server {
&context.options,
&context.client_capabilities.lock().unwrap(),
);
+
+ for document in workspace.documents() {
+ workspace.open(
+ Arc::clone(&document.uri),
+ document.text.clone(),
+ document.language(),
+ WorkspaceSource::Client,
+ );
+ }
});
Ok(())
}
@@ -257,17 +267,35 @@ impl Server {
fn did_change(&self, mut params: DidChangeTextDocumentParams) -> Result<()> {
let uri = params.text_document.uri.into();
- assert_eq!(params.content_changes.len(), 1);
let old_document = self.workspace.get(&uri);
let old_text = old_document.as_ref().map(|document| document.text.as_str());
- let new_text = params.content_changes.pop().unwrap().text;
-
let uri = Arc::new(uri);
+ let language = self
+ .workspace
+ .get(&uri)
+ .map(|document| document.data.language())
+ .unwrap_or(DocumentLanguage::Latex);
+
+ let new_document = match &old_document {
+ Some(old_document) => params
+ .content_changes
+ .into_iter()
+ .fold(Arc::clone(&old_document), |old_document, change| {
+ self.merge_text_changes(&old_document, language, change)
+ }),
+ None => self.workspace.open(
+ Arc::clone(&uri),
+ params.content_changes.pop().unwrap().text,
+ language,
+ WorkspaceSource::Client,
+ ),
+ };
+
let line = match old_text {
Some(old_text) => old_text
.lines()
- .zip(new_text.lines())
+ .zip(new_document.text.lines())
.position(|(a, b)| a != b)
.unwrap_or_default() as u32,
None => 0,
@@ -276,33 +304,45 @@ impl Server {
.positions_by_uri
.insert(Arc::clone(&uri), Position::new(line, 0));
- let language = self
- .workspace
- .get(&uri)
- .map(|document| document.data.language())
- .unwrap_or(DocumentLanguage::Latex);
-
- let document = self
- .workspace
- .open(uri, new_text, language, WorkspaceSource::Client);
-
let should_lint = { self.context.options.read().unwrap().chktex.on_edit };
- if let Some(document) = self
- .workspace
- .get(document.uri.as_ref())
- .filter(|_| should_lint)
- {
+ if should_lint {
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
workspace: Arc::clone(&self.workspace),
- document,
+ document: new_document,
})?;
};
Ok(())
}
+ fn merge_text_changes(
+ &self,
+ old_document: &Document,
+ new_language: DocumentLanguage,
+ change: TextDocumentContentChangeEvent,
+ ) -> Arc<Document> {
+ let new_text = match change.range {
+ Some(range) => {
+ let range = old_document.line_index.offset_lsp_range(range);
+ let mut new_text = String::new();
+ new_text.push_str(&old_document.text[..range.start().into()]);
+ new_text.push_str(&change.text);
+ new_text.push_str(&old_document.text[range.end().into()..]);
+ new_text
+ }
+ None => change.text,
+ };
+
+ self.workspace.open(
+ Arc::clone(&old_document.uri),
+ new_text,
+ new_language,
+ WorkspaceSource::Client,
+ )
+ }
+
fn did_save(&self, params: DidSaveTextDocumentParams) -> Result<()> {
let uri = params.text_document.uri.into();
@@ -358,12 +398,8 @@ impl Server {
let sender = self.connection.sender.clone();
let cx = Arc::clone(&self.context);
self.pool.execute(move || {
- pull_config(
- &req_queue,
- &sender,
- &cx.options,
- &cx.client_capabilities.lock().unwrap(),
- );
+ let client_capabilities = &cx.client_capabilities.lock().unwrap().clone();
+ pull_config(&req_queue, &sender, &cx.options, &client_capabilities);
});
Some(FeatureRequest {
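With text sync switched to Incremental, did_change now folds each TextDocumentContentChangeEvent into the previous document via merge_text_changes. A minimal sketch of applying one such event; the position_to_offset helper here is illustrative and counts UTF-8 bytes, whereas texlab's LineIndexExt::offset_lsp_range handles the UTF-16 positions LSP actually sends:

    use lsp_types::{Position, TextDocumentContentChangeEvent};

    // Translate an LSP (line, character) position into a byte offset.
    // Simplified: clamps to line/document length and treats `character`
    // as a byte count rather than UTF-16 code units.
    fn position_to_offset(text: &str, position: Position) -> usize {
        let mut offset = 0;
        for (i, line) in text.split_inclusive('\n').enumerate() {
            if i as u32 == position.line {
                return offset + (position.character as usize).min(line.len());
            }
            offset += line.len();
        }
        text.len()
    }

    fn apply_change(old_text: &str, change: TextDocumentContentChangeEvent) -> String {
        match change.range {
            Some(range) => {
                // Splice the replacement text into the old document.
                let start = position_to_offset(old_text, range.start);
                let end = position_to_offset(old_text, range.end);
                let mut new_text = String::new();
                new_text.push_str(&old_text[..start]);
                new_text.push_str(&change.text);
                new_text.push_str(&old_text[end..]);
                new_text
            }
            // A change without a range replaces the whole document.
            None => change.text,
        }
    }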