Diffstat (limited to 'support/texlab/src/syntax/latex')
-rw-r--r--  support/texlab/src/syntax/latex/analysis.rs    913
-rw-r--r--  support/texlab/src/syntax/latex/ast.rs         529
-rw-r--r--  support/texlab/src/syntax/latex/env.rs         123
-rw-r--r--  support/texlab/src/syntax/latex/finder.rs       74
-rw-r--r--  support/texlab/src/syntax/latex/glossary.rs     58
-rw-r--r--  support/texlab/src/syntax/latex/lexer.rs       117
-rw-r--r--  support/texlab/src/syntax/latex/math.rs        199
-rw-r--r--  support/texlab/src/syntax/latex/mod.rs         912
-rw-r--r--  support/texlab/src/syntax/latex/parser.rs      177
-rw-r--r--  support/texlab/src/syntax/latex/printer.rs      77
-rw-r--r--  support/texlab/src/syntax/latex/structure.rs   250
11 files changed, 2003 insertions, 1426 deletions
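
The hunks below replace the visitor-based per-feature modules (env.rs, finder.rs, glossary.rs, math.rs and the old printer/structure code) with an index-based Tree in ast.rs and a SymbolTable built in analysis.rs, exposed through the new open/OpenParams entry point in mod.rs. For orientation, a minimal usage sketch modeled on the open_simple test helper added in mod.rs; the sample document, test name, and assertion counts are illustrative only, and the snippet assumes it lives inside the crate next to those tests:

    use crate::{
        protocol::{Options, Uri},
        syntax::latex::{open, OpenParams},
        tex::Resolver,
    };
    use std::env;

    #[test]
    fn symbol_table_usage_sketch() {
        // Lex, parse and analyze a document in one call, following the
        // open_simple helper from the tests in mod.rs below.
        let table = open(OpenParams {
            text: r"\begin{document}\section{Intro}\label{sec:intro}\end{document}",
            uri: &Uri::parse("http://www.foo.com/bar.tex").unwrap(),
            resolver: &Resolver::default(),
            options: &Options::default(),
            current_dir: &env::current_dir().unwrap(),
        });

        // The per-feature visitors are replaced by plain fields on SymbolTable.
        assert!(table.is_standalone);
        assert_eq!(table.sections.len(), 1);
        assert_eq!(table.labels.len(), 1);

        // SymbolTable derefs to Tree, so node accessors work on it directly;
        // Label::names resolves the label's argument tokens through the tree.
        let names = table.labels[0].names(&table);
        assert_eq!(names[0].text(), "sec:intro");
    }

Internally, SymbolTable::analyze fans the per-entity parse functions out over a rayon::scope, so each field above is filled by its own parallel task.
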
diff --git a/support/texlab/src/syntax/latex/analysis.rs b/support/texlab/src/syntax/latex/analysis.rs
new file mode 100644
index 0000000000..af8b756f31
--- /dev/null
+++ b/support/texlab/src/syntax/latex/analysis.rs
@@ -0,0 +1,913 @@
+use crate::{
+ protocol::{Options, Position, Range, RangeExt, Uri},
+ syntax::{generic_ast::AstNodeIndex, lang_data::*, latex::ast::*, text::SyntaxNode},
+ tex::Resolver,
+};
+use itertools::{iproduct, Itertools};
+use serde::{Deserialize, Serialize};
+use std::{borrow::Cow, ops::Deref, path::Path};
+
+#[derive(Debug, Clone)]
+pub struct SymbolTableParams<'a> {
+ pub tree: Tree,
+ pub uri: &'a Uri,
+ pub resolver: &'a Resolver,
+ pub options: &'a Options,
+ pub current_dir: &'a Path,
+}
+
+#[derive(Debug, Clone)]
+pub struct SymbolTable {
+ pub(crate) tree: Tree,
+ pub commands: Vec<AstNodeIndex>,
+ pub environments: Vec<Environment>,
+ pub is_standalone: bool,
+ pub includes: Vec<Include>,
+ pub imports: Vec<Import>,
+ pub components: Vec<String>,
+ pub citations: Vec<Citation>,
+ pub command_definitions: Vec<CommandDefinition>,
+ pub glossary_entries: Vec<GlossaryEntry>,
+ pub equations: Vec<Equation>,
+ pub inlines: Vec<Inline>,
+ pub math_operators: Vec<MathOperator>,
+ pub theorem_definitions: Vec<TheoremDefinition>,
+ pub sections: Vec<Section>,
+ pub labels: Vec<Label>,
+ pub label_numberings: Vec<LabelNumbering>,
+ pub captions: Vec<Caption>,
+ pub items: Vec<Item>,
+}
+
+impl SymbolTable {
+ pub fn analyze(params: SymbolTableParams) -> Self {
+ let SymbolTableParams {
+ tree,
+ uri,
+ resolver,
+ options,
+ current_dir,
+ } = params;
+
+ let commands: Vec<_> = tree.commands().collect();
+ let ctx = SymbolContext {
+ tree: &tree,
+ commands: &commands,
+ uri,
+ resolver,
+ options,
+ current_dir,
+ };
+
+ let mut environments = None;
+ let mut includes = None;
+ let mut imports = None;
+ let mut citations = None;
+ let mut command_definitions = None;
+ let mut glossary_entries = None;
+ let mut equations = None;
+ let mut inlines = None;
+ let mut math_operators = None;
+ let mut theorem_definitions = None;
+ let mut sections = None;
+ let mut labels = None;
+ let mut label_numberings = None;
+ let mut captions = None;
+ let mut items = None;
+
+ rayon::scope(|s| {
+ s.spawn(|_| environments = Some(Environment::parse(ctx)));
+ s.spawn(|_| includes = Some(Include::parse(ctx)));
+ s.spawn(|_| imports = Some(Import::parse(ctx)));
+ s.spawn(|_| citations = Some(Citation::parse(ctx)));
+ s.spawn(|_| command_definitions = Some(CommandDefinition::parse(ctx)));
+ s.spawn(|_| glossary_entries = Some(GlossaryEntry::parse(ctx)));
+ s.spawn(|_| equations = Some(Equation::parse(ctx)));
+ s.spawn(|_| inlines = Some(Inline::parse(ctx)));
+ s.spawn(|_| math_operators = Some(MathOperator::parse(ctx)));
+ s.spawn(|_| theorem_definitions = Some(TheoremDefinition::parse(ctx)));
+ s.spawn(|_| sections = Some(Section::parse(ctx)));
+ s.spawn(|_| labels = Some(Label::parse(ctx)));
+ s.spawn(|_| label_numberings = Some(LabelNumbering::parse(ctx)));
+ s.spawn(|_| captions = Some(Caption::parse(ctx)));
+ s.spawn(|_| items = Some(Item::parse(ctx)));
+ });
+
+ let is_standalone = environments
+ .as_ref()
+ .unwrap()
+ .iter()
+ .any(|env| env.is_root(&tree));
+
+ let components = includes
+ .as_ref()
+ .unwrap()
+ .iter()
+ .flat_map(|include| include.components(&tree))
+ .collect();
+
+ Self {
+ tree,
+ commands,
+ environments: environments.unwrap(),
+ is_standalone,
+ includes: includes.unwrap(),
+ imports: imports.unwrap(),
+ components,
+ citations: citations.unwrap(),
+ command_definitions: command_definitions.unwrap(),
+ glossary_entries: glossary_entries.unwrap(),
+ equations: equations.unwrap(),
+ inlines: inlines.unwrap(),
+ math_operators: math_operators.unwrap(),
+ theorem_definitions: theorem_definitions.unwrap(),
+ sections: sections.unwrap(),
+ labels: labels.unwrap(),
+ label_numberings: label_numberings.unwrap(),
+ captions: captions.unwrap(),
+ items: items.unwrap(),
+ }
+ }
+
+ pub fn is_direct_child(&self, env: Environment, pos: Position) -> bool {
+ env.range(&self.tree).contains(pos)
+ && !self
+ .environments
+ .iter()
+ .filter(|e| e.left.parent != env.left.parent)
+ .filter(|e| env.range(&self.tree).contains(e.range(&self.tree).start))
+ .any(|e| e.range(&self.tree).contains(pos))
+ }
+
+ pub fn is_enum_item(&self, enumeration: Environment, item: Item) -> bool {
+ let item_range = self.tree[item.parent].range();
+ enumeration.range(&self.tree).contains(item_range.start)
+ && !self
+ .environments
+ .iter()
+ .filter(|env| env.left.parent != enumeration.left.parent)
+ .filter(|env| env.left.is_enum(&self.tree))
+ .filter(|env| {
+ enumeration
+ .range(&self.tree)
+ .contains(env.range(&self.tree).start)
+ })
+ .any(|env| env.range(&self.tree).contains(item_range.start))
+ }
+
+ pub fn find_label_by_range(&self, range: Range) -> Option<&Label> {
+ self.labels
+ .iter()
+ .filter(|label| label.kind == LatexLabelKind::Definition)
+ .filter(|label| label.names(&self).len() == 1)
+ .find(|label| range.contains(self[label.parent].range().start))
+ }
+
+ pub fn find_label_by_environment(&self, env: Environment) -> Option<&Label> {
+ self.labels
+ .iter()
+ .filter(|label| label.kind == LatexLabelKind::Definition)
+ .filter(|label| label.names(&self.tree).len() == 1)
+ .find(|label| self.is_direct_child(env, self.tree[label.parent].start()))
+ }
+}
+
+impl Deref for SymbolTable {
+ type Target = Tree;
+
+ fn deref(&self) -> &Self::Target {
+ &self.tree
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct SymbolContext<'a> {
+ tree: &'a Tree,
+ commands: &'a [AstNodeIndex],
+ uri: &'a Uri,
+ resolver: &'a Resolver,
+ options: &'a Options,
+ current_dir: &'a Path,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct EnvironmentDelimiter {
+ pub parent: AstNodeIndex,
+}
+
+impl EnvironmentDelimiter {
+ pub fn name(self, tree: &Tree) -> Option<&Token> {
+ tree.extract_word(self.parent, GroupKind::Group, 0)
+ }
+
+ pub fn is_math(self, tree: &Tree) -> bool {
+ self.is_special(tree, LANGUAGE_DATA.math_environments.iter())
+ }
+
+ pub fn is_enum(self, tree: &Tree) -> bool {
+ self.is_special(tree, LANGUAGE_DATA.enum_environments.iter())
+ }
+
+ fn is_special<'a, I: Iterator<Item = &'a String>>(self, tree: &Tree, mut values: I) -> bool {
+ match self.name(tree) {
+ Some(name) => values.any(|env| env == name.text()),
+ None => false,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Environment {
+ pub left: EnvironmentDelimiter,
+ pub right: EnvironmentDelimiter,
+}
+
+impl Environment {
+ pub fn is_root(self, tree: &Tree) -> bool {
+ self.left
+ .name(tree)
+ .iter()
+ .chain(self.right.name(tree).iter())
+ .any(|name| name.text() == "document")
+ }
+
+ pub fn range(self, tree: &Tree) -> Range {
+ let start = tree[self.left.parent].start();
+ let end = tree[self.right.parent].end();
+ Range::new(start, end)
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ let mut stack = Vec::new();
+ let mut envs = Vec::new();
+ for parent in ctx.commands {
+ if let Some((delim, delim_cmd)) = Self::parse_delimiter(ctx.tree, *parent) {
+ if delim_cmd.name.text() == "\\begin" {
+ stack.push(delim);
+ } else if let Some(left) = stack.pop() {
+ envs.push(Self { left, right: delim });
+ }
+ }
+ }
+ envs
+ }
+
+ fn parse_delimiter(
+ tree: &Tree,
+ parent: AstNodeIndex,
+ ) -> Option<(EnvironmentDelimiter, &Command)> {
+ let cmd = tree.as_command(parent)?;
+ if cmd.name.text() != "\\begin" && cmd.name.text() != "\\end" {
+ return None;
+ }
+
+ let group = tree.extract_group(parent, GroupKind::Group, 0)?;
+ if tree.extract_word(parent, GroupKind::Group, 0).is_some()
+ || tree.children(group).next().is_none()
+ || tree.as_group(group)?.right.is_none()
+ {
+ Some((EnvironmentDelimiter { parent }, cmd))
+ } else {
+ None
+ }
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Include {
+ pub parent: AstNodeIndex,
+ pub arg_index: usize,
+ pub kind: LatexIncludeKind,
+ pub all_targets: Vec<Vec<Uri>>,
+ pub include_extension: bool,
+}
+
+impl Include {
+ pub fn paths<'a>(&self, tree: &'a Tree) -> Vec<&'a Token> {
+ tree.extract_comma_separated_words(self.parent, GroupKind::Group, self.arg_index)
+ .unwrap()
+ }
+
+ pub fn components<'a>(&self, tree: &'a Tree) -> impl Iterator<Item = String> + 'a {
+ let kind = self.kind;
+ self.paths(tree)
+ .into_iter()
+ .filter_map(move |path| match kind {
+ LatexIncludeKind::Package => Some(format!("{}.sty", path.text())),
+ LatexIncludeKind::Class => Some(format!("{}.cls", path.text())),
+ LatexIncludeKind::Latex
+ | LatexIncludeKind::Bibliography
+ | LatexIncludeKind::Image
+ | LatexIncludeKind::Svg
+ | LatexIncludeKind::Pdf
+ | LatexIncludeKind::Everything => None,
+ })
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ iproduct!(ctx.commands, LANGUAGE_DATA.include_commands.iter())
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexIncludeCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ let mut all_targets = Vec::new();
+ let paths = ctx
+ .tree
+ .extract_comma_separated_words(parent, GroupKind::Group, desc.index)?;
+ for path in paths {
+ let mut targets = Vec::new();
+ let base_url = base_url(ctx)?;
+ targets.push(base_url.join(path.text()).ok()?.into());
+
+ if let Some(extensions) = desc.kind.extensions() {
+ for extension in extensions {
+ let path = format!("{}.{}", path.text(), extension);
+ targets.push(base_url.join(&path).ok()?.into());
+ }
+ }
+
+ if let Some(target) = Self::resolve_distro_file(ctx, desc, path.text()) {
+ targets.push(target);
+ }
+ all_targets.push(targets);
+ }
+
+ let include = Self {
+ parent,
+ arg_index: desc.index,
+ kind: desc.kind,
+ all_targets,
+ include_extension: desc.include_extension,
+ };
+ Some(include)
+ }
+
+ fn resolve_distro_file(
+ ctx: SymbolContext,
+ desc: &LatexIncludeCommand,
+ name: &str,
+ ) -> Option<Uri> {
+ let mut path = ctx.resolver.files_by_name.get(name);
+ if let Some(extensions) = desc.kind.extensions() {
+ for extension in extensions {
+ path = path.or_else(|| {
+ let full_name = format!("{}.{}", name, extension);
+ ctx.resolver.files_by_name.get(&full_name)
+ });
+ }
+ }
+ path.and_then(|p| Uri::from_file_path(p).ok())
+ }
+}
+
+fn base_url(ctx: SymbolContext) -> Option<Uri> {
+ if let Some(root_directory) = ctx
+ .options
+ .latex
+ .as_ref()
+ .and_then(|opts| opts.root_directory.as_ref())
+ {
+ let file_name = ctx.uri.path_segments()?.last()?;
+ let path = ctx.current_dir.join(root_directory).join(file_name);
+ Uri::from_file_path(path).ok()
+ } else {
+ Some(ctx.uri.clone())
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Import {
+ pub parent: AstNodeIndex,
+ pub targets: Vec<Uri>,
+}
+
+impl Import {
+ pub fn dir<'a>(&self, tree: &'a Tree) -> &'a Token {
+ tree.extract_word(self.parent, GroupKind::Group, 0).unwrap()
+ }
+
+ pub fn file<'a>(&self, tree: &'a Tree) -> &'a Token {
+ tree.extract_word(self.parent, GroupKind::Group, 1).unwrap()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ ctx.commands
+ .iter()
+ .filter_map(|parent| Self::parse_single(ctx, *parent))
+ .collect()
+ }
+
+ fn parse_single(ctx: SymbolContext, parent: AstNodeIndex) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != "\\import" && cmd.name.text() != "\\subimport" {
+ return None;
+ }
+
+ let dir = ctx.tree.extract_word(parent, GroupKind::Group, 0)?;
+ let file = ctx.tree.extract_word(parent, GroupKind::Group, 1)?;
+
+ let mut targets = Vec::new();
+ let base_url = base_url(ctx)?.join(dir.text()).ok()?;
+ targets.push(base_url.join(file.text()).ok()?.into());
+ targets.push(base_url.join(&format!("{}.tex", file.text())).ok()?.into());
+ Some(Self { parent, targets })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Citation {
+ parent: AstNodeIndex,
+ arg_index: usize,
+}
+
+impl Citation {
+ pub fn keys(self, tree: &Tree) -> Vec<&Token> {
+ tree.extract_comma_separated_words(self.parent, GroupKind::Group, self.arg_index)
+ .unwrap()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ iproduct!(ctx.commands, LANGUAGE_DATA.citation_commands.iter())
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexCitationCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ ctx.tree
+ .extract_comma_separated_words(parent, GroupKind::Group, desc.index)?;
+
+ Some(Self {
+ parent,
+ arg_index: desc.index,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct CommandDefinition {
+ pub parent: AstNodeIndex,
+ pub definition: AstNodeIndex,
+ pub definition_index: usize,
+ pub implementation: AstNodeIndex,
+ pub implementation_index: usize,
+ pub arg_count_index: usize,
+}
+
+impl CommandDefinition {
+ pub fn definition_name(self, tree: &Tree) -> &str {
+ tree.as_command(self.definition).unwrap().name.text()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ let def = LANGUAGE_DATA.command_definition_commands.iter();
+ iproduct!(ctx.commands, def)
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexCommandDefinitionCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ let group_kind = GroupKind::Group;
+ let implementation =
+ ctx.tree
+ .extract_group(parent, group_kind, desc.implementation_index)?;
+
+ let def_group = ctx
+ .tree
+ .extract_group(parent, group_kind, desc.definition_index)?;
+
+ let mut def_children = ctx.tree.children(def_group);
+ let definition = def_children.next()?;
+ ctx.tree.as_command(definition)?;
+ Some(Self {
+ parent,
+ definition,
+ definition_index: desc.definition_index,
+ implementation,
+ implementation_index: desc.implementation_index,
+ arg_count_index: desc.arg_count_index,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct GlossaryEntry {
+ pub parent: AstNodeIndex,
+ pub label_index: usize,
+ pub kind: LatexGlossaryEntryKind,
+}
+
+impl GlossaryEntry {
+ pub fn label(self, tree: &Tree) -> &Token {
+ tree.extract_word(self.parent, GroupKind::Group, self.label_index)
+ .unwrap()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ let entry = LANGUAGE_DATA.glossary_entry_definition_commands.iter();
+ iproduct!(ctx.commands, entry)
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexGlossaryEntryDefinitionCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ ctx.tree
+ .extract_word(parent, GroupKind::Group, desc.label_index)?;
+
+ Some(Self {
+ parent,
+ label_index: desc.label_index,
+ kind: desc.kind,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Equation {
+ pub left: AstNodeIndex,
+ pub right: AstNodeIndex,
+}
+
+impl Equation {
+ pub fn range(self, tree: &Tree) -> Range {
+ let start = tree[self.left].start();
+ let end = tree[self.right].end();
+ Range::new(start, end)
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ let mut equations = Vec::new();
+ let mut left = None;
+ for node in ctx.commands {
+ let cmd = ctx.tree.as_command(*node).unwrap();
+ let name = cmd.name.text();
+ if name == "\\[" || name == "\\(" {
+ left = Some(node);
+ } else if name == "\\]" || name == "\\)" {
+ if let Some(begin) = left {
+ equations.push(Self {
+ left: *begin,
+ right: *node,
+ });
+ left = None;
+ }
+ }
+ }
+ equations
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Inline {
+ pub left: AstNodeIndex,
+ pub right: AstNodeIndex,
+}
+
+impl Inline {
+ pub fn range(self, tree: &Tree) -> Range {
+ let start = tree[self.left].start();
+ let end = tree[self.right].end();
+ Range::new(start, end)
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ let mut inlines = Vec::new();
+ let mut left = None;
+ for node in ctx
+ .tree
+ .nodes()
+ .into_iter()
+ .filter(|node| ctx.tree.as_math(*node).is_some())
+ .sorted_by_key(|node| ctx.tree[*node].start())
+ {
+ if let Some(l) = left {
+ inlines.push(Inline {
+ left: l,
+ right: node,
+ });
+ left = None;
+ } else {
+ left = Some(node);
+ }
+ }
+ inlines
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct MathOperator {
+ pub parent: AstNodeIndex,
+ pub definition: AstNodeIndex,
+ pub definition_index: usize,
+ pub implementation: AstNodeIndex,
+ pub implementation_index: usize,
+}
+
+impl MathOperator {
+ pub fn definition_name(self, tree: &Tree) -> &str {
+ tree.as_command(self.definition).unwrap().name.text()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ iproduct!(ctx.commands, LANGUAGE_DATA.math_operator_commands.iter())
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexMathOperatorCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ let group_kind = GroupKind::Group;
+ let def_group = ctx
+ .tree
+ .extract_group(parent, group_kind, desc.definition_index)?;
+ let implementation =
+ ctx.tree
+ .extract_group(parent, group_kind, desc.implementation_index)?;
+
+ let mut def_children = ctx.tree.children(def_group);
+ let definition = def_children.next()?;
+ Some(Self {
+ parent,
+ definition,
+ definition_index: desc.definition_index,
+ implementation,
+ implementation_index: desc.implementation_index,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct TheoremDefinition {
+ pub parent: AstNodeIndex,
+ pub arg_index: usize,
+}
+
+impl TheoremDefinition {
+ pub fn name(self, tree: &Tree) -> &Token {
+ tree.extract_word(self.parent, GroupKind::Group, self.arg_index)
+ .unwrap()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ let thm = LANGUAGE_DATA.theorem_definition_commands.iter();
+ iproduct!(ctx.commands, thm)
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexTheoremDefinitionCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ let group_kind = GroupKind::Group;
+ ctx.tree.extract_word(parent, group_kind, desc.index)?;
+
+ Some(Self {
+ parent,
+ arg_index: desc.index,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Section {
+ pub parent: AstNodeIndex,
+ pub arg_index: usize,
+ pub level: i32,
+ pub prefix: Cow<'static, str>,
+}
+
+impl Section {
+ pub fn print(&self, tree: &Tree) -> Option<String> {
+ tree.print_group_content(self.parent, GroupKind::Group, self.arg_index)
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ iproduct!(ctx.commands, LANGUAGE_DATA.section_commands.iter())
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &'static LatexSectionCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ let group_kind = GroupKind::Group;
+ ctx.tree.extract_group(parent, group_kind, desc.index)?;
+
+ Some(Self {
+ parent,
+ arg_index: desc.index,
+ level: desc.level,
+ prefix: Cow::from(&desc.prefix),
+ })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Label {
+ pub parent: AstNodeIndex,
+ pub arg_index: usize,
+ pub kind: LatexLabelKind,
+}
+
+impl Label {
+ pub fn names(self, tree: &Tree) -> Vec<&Token> {
+ tree.extract_comma_separated_words(self.parent, GroupKind::Group, self.arg_index)
+ .unwrap()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ iproduct!(ctx.commands, LANGUAGE_DATA.label_commands.iter())
+ .filter_map(|(parent, desc)| Self::parse_single(ctx, *parent, desc))
+ .collect()
+ }
+
+ fn parse_single(
+ ctx: SymbolContext,
+ parent: AstNodeIndex,
+ desc: &LatexLabelCommand,
+ ) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != desc.name {
+ return None;
+ }
+
+ ctx.tree
+ .extract_comma_separated_words(parent, GroupKind::Group, desc.index)?;
+
+ Some(Self {
+ parent,
+ arg_index: desc.index,
+ kind: desc.kind,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct LabelNumbering {
+ pub parent: AstNodeIndex,
+ pub number: String,
+}
+
+impl LabelNumbering {
+ pub fn name<'a>(&self, tree: &'a Tree) -> &'a Token {
+ tree.extract_word(self.parent, GroupKind::Group, 0).unwrap()
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ ctx.commands
+ .iter()
+ .filter_map(|parent| Self::parse_single(ctx, *parent))
+ .collect()
+ }
+
+ fn parse_single(ctx: SymbolContext, parent: AstNodeIndex) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != "\\newlabel" {
+ return None;
+ }
+
+ ctx.tree.extract_word(parent, GroupKind::Group, 0)?;
+
+ let arg = ctx.tree.extract_group(parent, GroupKind::Group, 1)?;
+ let mut analyzer = FirstText::default();
+ analyzer.visit(ctx.tree, arg);
+ Some(Self {
+ parent,
+ number: analyzer.text?,
+ })
+ }
+}
+
+#[derive(Debug, Default)]
+struct FirstText {
+ text: Option<String>,
+}
+
+impl Visitor for FirstText {
+ fn visit(&mut self, tree: &Tree, node: AstNodeIndex) {
+ if let Some(text) = tree.as_text(node) {
+ self.text = Some(text.words.iter().map(Token::text).join(" "));
+ }
+
+ if self.text.is_none() {
+ tree.walk(self, node);
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Caption {
+ pub parent: AstNodeIndex,
+ pub arg_index: usize,
+}
+
+impl Caption {
+ pub fn print(self, tree: &Tree) -> Option<String> {
+ tree.print_group_content(self.parent, GroupKind::Group, self.arg_index)
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ ctx.commands
+ .iter()
+ .flat_map(|parent| Self::parse_single(ctx, *parent))
+ .collect()
+ }
+
+ fn parse_single(ctx: SymbolContext, parent: AstNodeIndex) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != "\\caption" {
+ return None;
+ }
+
+ ctx.tree.extract_group(parent, GroupKind::Group, 0)?;
+ Some(Self {
+ parent,
+ arg_index: 0,
+ })
+ }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct Item {
+ pub parent: AstNodeIndex,
+}
+
+impl Item {
+ pub fn name(self, tree: &Tree) -> Option<String> {
+ tree.print_group_content(self.parent, GroupKind::Options, 0)
+ }
+
+ fn parse(ctx: SymbolContext) -> Vec<Self> {
+ ctx.commands
+ .iter()
+ .filter_map(|parent| Self::parse_single(ctx, *parent))
+ .collect()
+ }
+
+ fn parse_single(ctx: SymbolContext, parent: AstNodeIndex) -> Option<Self> {
+ let cmd = ctx.tree.as_command(parent)?;
+ if cmd.name.text() != "\\item" {
+ return None;
+ }
+
+ Some(Self { parent })
+ }
+}
diff --git a/support/texlab/src/syntax/latex/ast.rs b/support/texlab/src/syntax/latex/ast.rs
index 0af980b595..ab7c6863e1 100644
--- a/support/texlab/src/syntax/latex/ast.rs
+++ b/support/texlab/src/syntax/latex/ast.rs
@@ -1,11 +1,15 @@
-use crate::range::RangeExt;
-use crate::syntax::text::{Span, SyntaxNode};
-use itertools::Itertools;
-use lsp_types::Range;
-use std::sync::Arc;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum LatexTokenKind {
+use crate::{
+ protocol::{Position, Range, RangeExt},
+ syntax::{
+ generic_ast::{Ast, AstNodeIndex},
+ text::{Span, SyntaxNode},
+ },
+};
+use serde::{Deserialize, Serialize};
+use std::ops::Deref;
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
+pub enum TokenKind {
Word,
Command,
Math,
@@ -16,14 +20,14 @@ pub enum LatexTokenKind {
EndOptions,
}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexToken {
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub struct Token {
pub span: Span,
- pub kind: LatexTokenKind,
+ pub kind: TokenKind,
}
-impl LatexToken {
- pub fn new(span: Span, kind: LatexTokenKind) -> Self {
+impl Token {
+ pub fn new(span: Span, kind: TokenKind) -> Self {
Self { span, kind }
}
@@ -32,314 +36,379 @@ impl LatexToken {
}
}
-impl SyntaxNode for LatexToken {
+impl SyntaxNode for Token {
fn range(&self) -> Range {
self.span.range()
}
}
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
-pub struct LatexRoot {
- pub children: Vec<LatexContent>,
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
+pub struct Root {
+ pub range: Range,
}
-impl LatexRoot {
- pub fn new(children: Vec<LatexContent>) -> Self {
- Self { children }
+impl SyntaxNode for Root {
+ fn range(&self) -> Range {
+ self.range
}
}
-impl SyntaxNode for LatexRoot {
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
+pub enum GroupKind {
+ Group,
+ Options,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub struct Group {
+ pub range: Range,
+ pub left: Token,
+ pub right: Option<Token>,
+ pub kind: GroupKind,
+}
+
+impl SyntaxNode for Group {
fn range(&self) -> Range {
- if self.children.is_empty() {
- Range::new_simple(0, 0, 0, 0)
- } else {
- Range::new(
- self.children[0].start(),
- self.children[self.children.len() - 1].end(),
- )
- }
+ self.range
}
}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub enum LatexContent {
- Group(Arc<LatexGroup>),
- Command(Arc<LatexCommand>),
- Text(Arc<LatexText>),
- Comma(Arc<LatexComma>),
- Math(Arc<LatexMath>),
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub struct Command {
+ pub range: Range,
+ pub name: Token,
}
-impl LatexContent {
- pub fn accept<T: LatexVisitor>(&self, visitor: &mut T) {
- match self {
- LatexContent::Group(group) => visitor.visit_group(Arc::clone(&group)),
- LatexContent::Command(command) => visitor.visit_command(Arc::clone(&command)),
- LatexContent::Text(text) => visitor.visit_text(Arc::clone(&text)),
- LatexContent::Comma(comma) => visitor.visit_comma(Arc::clone(&comma)),
- LatexContent::Math(math) => visitor.visit_math(Arc::clone(&math)),
- }
+impl Command {
+ pub fn short_name_range(&self) -> Range {
+ Range::new_simple(
+ self.name.start().line,
+ self.name.start().character + 1,
+ self.name.end().line,
+ self.name.end().character,
+ )
}
}
-impl SyntaxNode for LatexContent {
+impl SyntaxNode for Command {
fn range(&self) -> Range {
- match self {
- LatexContent::Group(group) => group.range(),
- LatexContent::Command(command) => command.range(),
- LatexContent::Text(text) => text.range(),
- LatexContent::Comma(comma) => comma.range(),
- LatexContent::Math(math) => math.range(),
- }
+ self.range
}
}
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum LatexGroupKind {
- Group,
- Options,
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub struct Text {
+ pub range: Range,
+ pub words: Vec<Token>,
}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexGroup {
- pub range: Range,
- pub left: LatexToken,
- pub children: Vec<LatexContent>,
- pub right: Option<LatexToken>,
- pub kind: LatexGroupKind,
+impl SyntaxNode for Text {
+ fn range(&self) -> Range {
+ self.range
+ }
}
-impl LatexGroup {
- pub fn new(
- left: LatexToken,
- children: Vec<LatexContent>,
- right: Option<LatexToken>,
- kind: LatexGroupKind,
- ) -> Self {
- let end = if let Some(ref right) = right {
- right.end()
- } else if !children.is_empty() {
- children[children.len() - 1].end()
- } else {
- left.end()
- };
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub struct Comma {
+ pub range: Range,
+ pub token: Token,
+}
- Self {
- range: Range::new(left.start(), end),
- left,
- children,
- right,
- kind,
- }
+impl SyntaxNode for Comma {
+ fn range(&self) -> Range {
+ self.range
}
}
-impl SyntaxNode for LatexGroup {
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub struct Math {
+ pub range: Range,
+ pub token: Token,
+}
+
+impl SyntaxNode for Math {
fn range(&self) -> Range {
self.range
}
}
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
+pub enum Node {
+ Root(Root),
+ Group(Group),
+ Command(Command),
+ Text(Text),
+ Comma(Comma),
+ Math(Math),
+}
+
+impl SyntaxNode for Node {
+ fn range(&self) -> Range {
+ match self {
+ Self::Root(root) => root.range(),
+ Self::Group(group) => group.range(),
+ Self::Command(cmd) => cmd.range(),
+ Self::Text(text) => text.range(),
+ Self::Comma(comma) => comma.range(),
+ Self::Math(math) => math.range(),
+ }
+ }
+}
+
#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexCommand {
- pub range: Range,
- pub name: LatexToken,
- pub options: Vec<Arc<LatexGroup>>,
- pub args: Vec<Arc<LatexGroup>>,
- pub groups: Vec<Arc<LatexGroup>>,
+pub struct Tree {
+ pub inner: Ast<Node>,
+ pub root: AstNodeIndex,
}
-impl LatexCommand {
- pub fn new(
- name: LatexToken,
- options: Vec<Arc<LatexGroup>>,
- args: Vec<Arc<LatexGroup>>,
- ) -> Self {
- let groups: Vec<Arc<LatexGroup>> = args
- .iter()
- .chain(options.iter())
- .sorted_by_key(|group| group.range.start)
- .map(Arc::clone)
- .collect();
-
- let end = if let Some(group) = groups.last() {
- group.end()
- } else {
- name.end()
- };
+impl Deref for Tree {
+ type Target = Ast<Node>;
- Self {
- range: Range::new(name.start(), end),
- name,
- options,
- args,
- groups,
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+
+impl Tree {
+ pub fn walk<V: Visitor>(&self, visitor: &mut V, parent: AstNodeIndex) {
+ for child in self.children(parent) {
+ visitor.visit(self, child);
}
}
- pub fn short_name_range(&self) -> Range {
- Range::new_simple(
- self.name.start().line,
- self.name.start().character + 1,
- self.name.end().line,
- self.name.end().character,
- )
+ pub fn find(&self, pos: Position) -> Vec<AstNodeIndex> {
+ let mut finder = Finder::new(pos);
+ finder.visit(self, self.root);
+ finder.results
}
- pub fn extract_text(&self, index: usize) -> Option<&LatexText> {
- if self.args.len() > index && self.args[index].children.len() == 1 {
- if let LatexContent::Text(ref text) = self.args[index].children[0] {
- Some(text)
- } else {
- None
- }
+ pub fn find_command_by_short_name_range(&self, pos: Position) -> Option<AstNodeIndex> {
+ self.find(pos).into_iter().find(|node| {
+ self.as_command(*node)
+ .filter(|cmd| {
+ cmd.name.range().contains(pos) && cmd.name.start().character != pos.character
+ })
+ .is_some()
+ })
+ }
+
+ pub fn print(&self, node: AstNodeIndex) -> String {
+ let start_position = self[node].start();
+ let mut printer = Printer::new(start_position);
+ printer.visit(self, node);
+ printer.output
+ }
+
+ pub fn commands<'a>(&'a self) -> impl Iterator<Item = AstNodeIndex> + 'a {
+ self.inner
+ .nodes()
+ .into_iter()
+ .filter(move |node| self.as_command(*node).is_some())
+ }
+
+ pub fn as_group(&self, node: AstNodeIndex) -> Option<&Group> {
+ if let Node::Group(group) = &self[node] {
+ Some(group)
} else {
None
}
}
- pub fn extract_word(&self, index: usize) -> Option<&LatexToken> {
- let text = self.extract_text(index)?;
- if text.words.len() == 1 {
- Some(&text.words[0])
+ pub fn as_command(&self, node: AstNodeIndex) -> Option<&Command> {
+ if let Node::Command(cmd) = &self[node] {
+ Some(cmd)
} else {
None
}
}
- pub fn has_word(&self, index: usize) -> bool {
- self.extract_word(index).is_some()
- }
-
- pub fn extract_comma_separated_words(&self, index: usize) -> Vec<&LatexToken> {
- let mut words = Vec::new();
- for child in &self.args[index].children {
- if let LatexContent::Text(text) = child {
- for word in &text.words {
- words.push(word);
- }
- }
+ pub fn as_text(&self, node: AstNodeIndex) -> Option<&Text> {
+ if let Node::Text(text) = &self[node] {
+ Some(text)
+ } else {
+ None
}
- words
}
- pub fn has_comma_separated_words(&self, index: usize) -> bool {
- if self.args.len() <= index {
- return false;
- }
-
- for node in &self.args[index].children {
- match node {
- LatexContent::Text(_) | LatexContent::Comma(_) => (),
- LatexContent::Command(_) | LatexContent::Group(_) | LatexContent::Math(_) => {
- return false;
- }
- }
+ pub fn as_math(&self, node: AstNodeIndex) -> Option<&Math> {
+ if let Node::Math(math) = &self[node] {
+ Some(math)
+ } else {
+ None
}
- true
}
-}
-impl SyntaxNode for LatexCommand {
- fn range(&self) -> Range {
- self.range
+ pub fn extract_group(
+ &self,
+ parent: AstNodeIndex,
+ group_kind: GroupKind,
+ index: usize,
+ ) -> Option<AstNodeIndex> {
+ self.children(parent)
+ .filter(|child| {
+ self.as_group(*child)
+ .filter(|group| group.kind == group_kind)
+ .is_some()
+ })
+ .nth(index)
}
-}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexText {
- pub range: Range,
- pub words: Vec<LatexToken>,
-}
-
-impl LatexText {
- pub fn new(words: Vec<LatexToken>) -> Self {
- Self {
- range: Range::new(words[0].start(), words[words.len() - 1].end()),
- words,
+ pub fn extract_text(
+ &self,
+ parent: AstNodeIndex,
+ group_kind: GroupKind,
+ index: usize,
+ ) -> Option<&Text> {
+ let group = self.extract_group(parent, group_kind, index)?;
+ let mut contents = self.children(group);
+ let text = self.as_text(contents.next()?);
+ if contents.next().is_none() {
+ text
+ } else {
+ None
}
}
-}
-impl SyntaxNode for LatexText {
- fn range(&self) -> Range {
- self.range
+ pub fn extract_word(
+ &self,
+ parent: AstNodeIndex,
+ group_kind: GroupKind,
+ index: usize,
+ ) -> Option<&Token> {
+ let text = self.extract_text(parent, group_kind, index)?;
+ if text.words.len() == 1 {
+ Some(&text.words[0])
+ } else {
+ None
+ }
}
-}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexComma {
- pub token: LatexToken,
-}
+ pub fn extract_comma_separated_words(
+ &self,
+ parent: AstNodeIndex,
+ group_kind: GroupKind,
+ index: usize,
+ ) -> Option<Vec<&Token>> {
+ let group = self.extract_group(parent, group_kind, index)?;
+ let mut words = Vec::new();
+ for child in self.children(group) {
+ match &self[child] {
+ Node::Root(_) | Node::Group(_) | Node::Command(_) | Node::Math(_) => return None,
+ Node::Text(text) => {
+ for word in &text.words {
+ words.push(word);
+ }
+ }
+ Node::Comma(_) => (),
+ }
+ }
+ Some(words)
+ }
-impl LatexComma {
- pub fn new(token: LatexToken) -> Self {
- Self { token }
+ pub fn print_group_content(
+ &self,
+ parent: AstNodeIndex,
+ group_kind: GroupKind,
+ index: usize,
+ ) -> Option<String> {
+ let arg = self.extract_group(parent, group_kind, index)?;
+ let text = self.print(arg);
+ self.as_group(arg)?.right.as_ref()?;
+ Some(text[1..text.len() - 1].trim().into())
}
}
-impl SyntaxNode for LatexComma {
- fn range(&self) -> Range {
- self.token.range()
- }
+pub trait Visitor {
+ fn visit(&mut self, tree: &Tree, node: AstNodeIndex);
}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexMath {
- pub token: LatexToken,
+#[derive(Debug)]
+struct Finder {
+ position: Position,
+ results: Vec<AstNodeIndex>,
}
-impl LatexMath {
- pub fn new(token: LatexToken) -> Self {
- Self { token }
+impl Finder {
+ fn new(position: Position) -> Self {
+ Self {
+ position,
+ results: Vec::new(),
+ }
}
}
-impl SyntaxNode for LatexMath {
- fn range(&self) -> Range {
- self.token.range()
+impl Visitor for Finder {
+ fn visit(&mut self, tree: &Tree, node: AstNodeIndex) {
+ if tree[node].range().contains(self.position) {
+ self.results.push(node);
+ tree.walk(self, node);
+ }
}
}
-pub trait LatexVisitor {
- fn visit_root(&mut self, root: Arc<LatexRoot>);
-
- fn visit_group(&mut self, group: Arc<LatexGroup>);
-
- fn visit_command(&mut self, command: Arc<LatexCommand>);
-
- fn visit_text(&mut self, text: Arc<LatexText>);
-
- fn visit_comma(&mut self, comma: Arc<LatexComma>);
-
- fn visit_math(&mut self, math: Arc<LatexMath>);
+#[derive(Debug)]
+struct Printer {
+ output: String,
+ position: Position,
}
-pub struct LatexWalker;
-
-impl LatexWalker {
- pub fn walk_root<T: LatexVisitor>(visitor: &mut T, root: Arc<LatexRoot>) {
- for child in &root.children {
- child.accept(visitor);
+impl Printer {
+ fn new(start_position: Position) -> Self {
+ Self {
+ output: String::new(),
+ position: start_position,
}
}
- pub fn walk_group<T: LatexVisitor>(visitor: &mut T, group: Arc<LatexGroup>) {
- for child in &group.children {
- child.accept(visitor);
+ fn synchronize(&mut self, position: Position) {
+ while self.position.line < position.line {
+ self.output.push('\n');
+ self.position.line += 1;
+ self.position.character = 0;
}
- }
- pub fn walk_command<T: LatexVisitor>(visitor: &mut T, command: Arc<LatexCommand>) {
- for arg in &command.groups {
- visitor.visit_group(Arc::clone(&arg));
+ while self.position.character < position.character {
+ self.output.push(' ');
+ self.position.character += 1;
}
- }
- pub fn walk_text<T: LatexVisitor>(_visitor: &mut T, _text: Arc<LatexText>) {}
+ assert_eq!(self.position, position);
+ }
- pub fn walk_comma<T: LatexVisitor>(_visitor: &mut T, _comma: Arc<LatexComma>) {}
+ fn print_token(&mut self, token: &Token) {
+ self.synchronize(token.start());
+ self.output.push_str(token.text());
+ self.position.character += token.end().character - token.start().character;
+ self.synchronize(token.end());
+ }
+}
- pub fn walk_math<T: LatexVisitor>(_visitor: &mut T, _math: Arc<LatexMath>) {}
+impl Visitor for Printer {
+ fn visit(&mut self, tree: &Tree, node: AstNodeIndex) {
+ match &tree[node] {
+ Node::Root(_) => tree.walk(self, node),
+ Node::Group(group) => {
+ self.print_token(&group.left);
+ tree.walk(self, node);
+ if let Some(right) = &group.right {
+ self.print_token(right);
+ }
+ }
+ Node::Command(cmd) => {
+ self.print_token(&cmd.name);
+ tree.walk(self, node);
+ }
+ Node::Text(text) => {
+ for word in &text.words {
+ self.print_token(word);
+ }
+ }
+ Node::Comma(comma) => {
+ self.print_token(&comma.token);
+ }
+ Node::Math(math) => {
+ self.print_token(&math.token);
+ }
+ }
+ }
}
diff --git a/support/texlab/src/syntax/latex/env.rs b/support/texlab/src/syntax/latex/env.rs
deleted file mode 100644
index 163db02ca8..0000000000
--- a/support/texlab/src/syntax/latex/env.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-use super::ast::*;
-use crate::syntax::language::*;
-use crate::syntax::text::SyntaxNode;
-use lsp_types::Range;
-use std::sync::Arc;
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexEnvironmentDelimiter {
- pub command: Arc<LatexCommand>,
-}
-
-impl LatexEnvironmentDelimiter {
- pub fn name(&self) -> Option<&LatexToken> {
- self.command.extract_word(0)
- }
-
- pub fn is_math(&self) -> bool {
- if let Some(name) = self.name() {
- LANGUAGE_DATA
- .math_environments
- .iter()
- .any(|env| env == name.text())
- } else {
- false
- }
- }
-
- pub fn is_enum(&self) -> bool {
- if let Some(name) = self.name() {
- LANGUAGE_DATA
- .enum_environments
- .iter()
- .any(|env| env == name.text())
- } else {
- false
- }
- }
-}
-
-impl SyntaxNode for LatexEnvironmentDelimiter {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexEnvironment {
- pub left: LatexEnvironmentDelimiter,
- pub right: LatexEnvironmentDelimiter,
-}
-
-impl LatexEnvironment {
- pub fn is_root(&self) -> bool {
- self.left
- .name()
- .iter()
- .chain(self.right.name().iter())
- .any(|name| name.text() == "document")
- }
-
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut stack = Vec::new();
- let mut environments = Vec::new();
- for command in commands {
- if let Some(delimiter) = Self::parse_delimiter(command) {
- if delimiter.command.name.text() == "\\begin" {
- stack.push(delimiter);
- } else if let Some(begin) = stack.pop() {
- environments.push(Self {
- left: begin,
- right: delimiter,
- });
- }
- }
- }
- environments
- }
-
- fn parse_delimiter(command: &Arc<LatexCommand>) -> Option<LatexEnvironmentDelimiter> {
- if command.name.text() != "\\begin" && command.name.text() != "\\end" {
- return None;
- }
-
- if command.args.is_empty() {
- return None;
- }
-
- if command.has_word(0)
- || command.args[0].children.is_empty()
- || command.args[0].right.is_none()
- {
- let delimiter = LatexEnvironmentDelimiter {
- command: Arc::clone(&command),
- };
- Some(delimiter)
- } else {
- None
- }
- }
-}
-
-impl SyntaxNode for LatexEnvironment {
- fn range(&self) -> Range {
- Range::new(self.left.start(), self.right.end())
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexEnvironmentInfo {
- pub environments: Vec<LatexEnvironment>,
- pub is_standalone: bool,
-}
-
-impl LatexEnvironmentInfo {
- pub fn parse(commands: &[Arc<LatexCommand>]) -> Self {
- let environments = LatexEnvironment::parse(commands);
- let is_standalone = environments.iter().any(LatexEnvironment::is_root);
- Self {
- environments,
- is_standalone,
- }
- }
-}
diff --git a/support/texlab/src/syntax/latex/finder.rs b/support/texlab/src/syntax/latex/finder.rs
deleted file mode 100644
index b59388dc07..0000000000
--- a/support/texlab/src/syntax/latex/finder.rs
+++ /dev/null
@@ -1,74 +0,0 @@
-use super::ast::*;
-use crate::range::RangeExt;
-use crate::syntax::text::SyntaxNode;
-use lsp_types::Position;
-use std::sync::Arc;
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub enum LatexNode {
- Root(Arc<LatexRoot>),
- Group(Arc<LatexGroup>),
- Command(Arc<LatexCommand>),
- Text(Arc<LatexText>),
- Comma(Arc<LatexComma>),
- Math(Arc<LatexMath>),
-}
-
-#[derive(Debug)]
-pub struct LatexFinder {
- pub position: Position,
- pub results: Vec<LatexNode>,
-}
-
-impl LatexFinder {
- pub fn new(position: Position) -> Self {
- Self {
- position,
- results: Vec::new(),
- }
- }
-}
-
-impl LatexVisitor for LatexFinder {
- fn visit_root(&mut self, root: Arc<LatexRoot>) {
- if root.range().contains(self.position) {
- self.results.push(LatexNode::Root(Arc::clone(&root)));
- LatexWalker::walk_root(self, root);
- }
- }
-
- fn visit_group(&mut self, group: Arc<LatexGroup>) {
- if group.range.contains(self.position) {
- self.results.push(LatexNode::Group(Arc::clone(&group)));
- LatexWalker::walk_group(self, group);
- }
- }
-
- fn visit_command(&mut self, command: Arc<LatexCommand>) {
- if command.range.contains(self.position) {
- self.results.push(LatexNode::Command(Arc::clone(&command)));
- LatexWalker::walk_command(self, command);
- }
- }
-
- fn visit_text(&mut self, text: Arc<LatexText>) {
- if text.range.contains(self.position) {
- self.results.push(LatexNode::Text(Arc::clone(&text)));
- LatexWalker::walk_text(self, text);
- }
- }
-
- fn visit_comma(&mut self, comma: Arc<LatexComma>) {
- if comma.range().contains(self.position) {
- self.results.push(LatexNode::Comma(Arc::clone(&comma)));
- LatexWalker::walk_comma(self, comma);
- }
- }
-
- fn visit_math(&mut self, math: Arc<LatexMath>) {
- if math.range().contains(self.position) {
- self.results.push(LatexNode::Math(Arc::clone(&math)));
- LatexWalker::walk_math(self, math);
- }
- }
-}
diff --git a/support/texlab/src/syntax/latex/glossary.rs b/support/texlab/src/syntax/latex/glossary.rs
deleted file mode 100644
index 14f87ef130..0000000000
--- a/support/texlab/src/syntax/latex/glossary.rs
+++ /dev/null
@@ -1,58 +0,0 @@
-use super::ast::*;
-use crate::syntax::language::*;
-use crate::syntax::text::SyntaxNode;
-use lsp_types::Range;
-use std::sync::Arc;
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexGlossaryEntry {
- pub command: Arc<LatexCommand>,
- pub label_index: usize,
- pub kind: LatexGlossaryEntryKind,
-}
-
-impl SyntaxNode for LatexGlossaryEntry {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-impl LatexGlossaryEntry {
- pub fn label(&self) -> &LatexToken {
- self.command.extract_word(self.label_index).unwrap()
- }
-
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut entries = Vec::new();
- for command in commands {
- for LatexGlossaryEntryDefinitionCommand {
- name,
- label_index,
- kind,
- } in &LANGUAGE_DATA.glossary_entry_definition_commands
- {
- if command.name.text() == name && command.has_word(*label_index) {
- entries.push(Self {
- command: Arc::clone(&command),
- label_index: *label_index,
- kind: *kind,
- });
- }
- }
- }
- entries
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexGlossaryInfo {
- pub entries: Vec<LatexGlossaryEntry>,
-}
-
-impl LatexGlossaryInfo {
- pub fn parse(commands: &[Arc<LatexCommand>]) -> Self {
- Self {
- entries: LatexGlossaryEntry::parse(commands),
- }
- }
-}
diff --git a/support/texlab/src/syntax/latex/lexer.rs b/support/texlab/src/syntax/latex/lexer.rs
index f753d7afce..e14f1f6d85 100644
--- a/support/texlab/src/syntax/latex/lexer.rs
+++ b/support/texlab/src/syntax/latex/lexer.rs
@@ -1,40 +1,41 @@
-use super::ast::{LatexToken, LatexTokenKind};
+use super::ast::{Token, TokenKind};
use crate::syntax::text::CharStream;
-pub struct LatexLexer<'a> {
+#[derive(Debug)]
+pub struct Lexer<'a> {
stream: CharStream<'a>,
}
-impl<'a> LatexLexer<'a> {
+impl<'a> Lexer<'a> {
pub fn new(text: &'a str) -> Self {
- LatexLexer {
+ Self {
stream: CharStream::new(text),
}
}
- fn single_char(&mut self, kind: LatexTokenKind) -> LatexToken {
+ fn single_char(&mut self, kind: TokenKind) -> Token {
self.stream.start_span();
self.stream.next();
let span = self.stream.end_span();
- LatexToken::new(span, kind)
+ Token::new(span, kind)
}
- fn math(&mut self) -> LatexToken {
+ fn math(&mut self) -> Token {
self.stream.start_span();
self.stream.next();
if self.stream.satifies(|c| *c == '$') {
self.stream.next();
}
let span = self.stream.end_span();
- LatexToken::new(span, LatexTokenKind::Math)
+ Token::new(span, TokenKind::Math)
}
- fn command(&mut self) -> LatexToken {
+ fn command(&mut self) -> Token {
let span = self.stream.command();
- LatexToken::new(span, LatexTokenKind::Command)
+ Token::new(span, TokenKind::Command)
}
- fn word(&mut self) -> LatexToken {
+ fn word(&mut self) -> Token {
self.stream.start_span();
self.stream.next();
while self.stream.satifies(|c| is_word_char(*c)) {
@@ -42,36 +43,36 @@ impl<'a> LatexLexer<'a> {
}
let span = self.stream.end_span();
- LatexToken::new(span, LatexTokenKind::Word)
+ Token::new(span, TokenKind::Word)
}
}
-impl<'a> Iterator for LatexLexer<'a> {
- type Item = LatexToken;
+impl<'a> Iterator for Lexer<'a> {
+ type Item = Token;
- fn next(&mut self) -> Option<LatexToken> {
+ fn next(&mut self) -> Option<Token> {
loop {
match self.stream.peek() {
Some('%') => {
self.stream.skip_rest_of_line();
}
Some('{') => {
- return Some(self.single_char(LatexTokenKind::BeginGroup));
+ return Some(self.single_char(TokenKind::BeginGroup));
}
Some('}') => {
- return Some(self.single_char(LatexTokenKind::EndGroup));
+ return Some(self.single_char(TokenKind::EndGroup));
}
Some('[') => {
- return Some(self.single_char(LatexTokenKind::BeginOptions));
+ return Some(self.single_char(TokenKind::BeginOptions));
}
Some(']') => {
- return Some(self.single_char(LatexTokenKind::EndOptions));
+ return Some(self.single_char(TokenKind::EndOptions));
}
Some('$') => {
return Some(self.math());
}
Some(',') => {
- return Some(self.single_char(LatexTokenKind::Comma));
+ return Some(self.single_char(TokenKind::Comma));
}
Some('\\') => {
return Some(self.command());
@@ -106,74 +107,70 @@ fn is_word_char(c: char) -> bool {
#[cfg(test)]
mod tests {
use super::*;
- use crate::syntax::text::Span;
- use lsp_types::{Position, Range};
-
- fn verify<'a>(
- lexer: &mut LatexLexer<'a>,
- line: u64,
- character: u64,
- text: &str,
- kind: LatexTokenKind,
- ) {
+ use crate::{
+ protocol::{Position, Range},
+ syntax::text::Span,
+ };
+
+ fn verify<'a>(lexer: &mut Lexer<'a>, line: u64, character: u64, text: &str, kind: TokenKind) {
let start = Position::new(line, character);
let end = Position::new(line, character + text.chars().count() as u64);
let range = Range::new(start, end);
let span = Span::new(range, text.to_owned());
- let token = LatexToken::new(span, kind);
+ let token = Token::new(span, kind);
assert_eq!(Some(token), lexer.next());
}
#[test]
- fn test_word() {
- let mut lexer = LatexLexer::new("foo bar baz");
- verify(&mut lexer, 0, 0, "foo", LatexTokenKind::Word);
- verify(&mut lexer, 0, 4, "bar", LatexTokenKind::Word);
- verify(&mut lexer, 0, 8, "baz", LatexTokenKind::Word);
+ fn word() {
+ let mut lexer = Lexer::new("foo bar baz");
+ verify(&mut lexer, 0, 0, "foo", TokenKind::Word);
+ verify(&mut lexer, 0, 4, "bar", TokenKind::Word);
+ verify(&mut lexer, 0, 8, "baz", TokenKind::Word);
assert_eq!(None, lexer.next());
}
#[test]
- fn test_command() {
- let mut lexer = LatexLexer::new("\\foo\\bar@baz\n\\foo*");
- verify(&mut lexer, 0, 0, "\\foo", LatexTokenKind::Command);
- verify(&mut lexer, 0, 4, "\\bar@baz", LatexTokenKind::Command);
- verify(&mut lexer, 1, 0, "\\foo*", LatexTokenKind::Command);
+ fn command() {
+ let mut lexer = Lexer::new("\\foo\\bar@baz\n\\foo*");
+ verify(&mut lexer, 0, 0, "\\foo", TokenKind::Command);
+ verify(&mut lexer, 0, 4, "\\bar@baz", TokenKind::Command);
+ verify(&mut lexer, 1, 0, "\\foo*", TokenKind::Command);
assert_eq!(None, lexer.next());
}
#[test]
- fn test_escape_sequence() {
- let mut lexer = LatexLexer::new("\\%\\**");
- verify(&mut lexer, 0, 0, "\\%", LatexTokenKind::Command);
- verify(&mut lexer, 0, 2, "\\*", LatexTokenKind::Command);
- verify(&mut lexer, 0, 4, "*", LatexTokenKind::Word);
+ fn escape_sequence() {
+ let mut lexer = Lexer::new("\\%\\**");
+ verify(&mut lexer, 0, 0, "\\%", TokenKind::Command);
+ verify(&mut lexer, 0, 2, "\\*", TokenKind::Command);
+ verify(&mut lexer, 0, 4, "*", TokenKind::Word);
assert_eq!(None, lexer.next());
}
#[test]
- fn test_group_delimiter() {
- let mut lexer = LatexLexer::new("{}[]");
- verify(&mut lexer, 0, 0, "{", LatexTokenKind::BeginGroup);
- verify(&mut lexer, 0, 1, "}", LatexTokenKind::EndGroup);
- verify(&mut lexer, 0, 2, "[", LatexTokenKind::BeginOptions);
- verify(&mut lexer, 0, 3, "]", LatexTokenKind::EndOptions);
+ fn group_delimiter() {
+ let mut lexer = Lexer::new("{}[]");
+ verify(&mut lexer, 0, 0, "{", TokenKind::BeginGroup);
+ verify(&mut lexer, 0, 1, "}", TokenKind::EndGroup);
+ verify(&mut lexer, 0, 2, "[", TokenKind::BeginOptions);
+ verify(&mut lexer, 0, 3, "]", TokenKind::EndOptions);
assert_eq!(None, lexer.next());
}
#[test]
- fn test_math() {
- let mut lexer = LatexLexer::new("$$ $ $");
- verify(&mut lexer, 0, 0, "$$", LatexTokenKind::Math);
- verify(&mut lexer, 0, 3, "$", LatexTokenKind::Math);
- verify(&mut lexer, 0, 5, "$", LatexTokenKind::Math);
+ fn math() {
+ let mut lexer = Lexer::new("$$ $ $");
+ verify(&mut lexer, 0, 0, "$$", TokenKind::Math);
+ verify(&mut lexer, 0, 3, "$", TokenKind::Math);
+ verify(&mut lexer, 0, 5, "$", TokenKind::Math);
assert_eq!(None, lexer.next());
}
#[test]
- fn test_line_comment() {
- let mut lexer = LatexLexer::new(" %foo \nfoo");
- verify(&mut lexer, 1, 0, "foo", LatexTokenKind::Word);
+ fn line_comment() {
+ let mut lexer = Lexer::new(" %foo \nfoo");
+ verify(&mut lexer, 1, 0, "foo", TokenKind::Word);
assert_eq!(None, lexer.next());
}
}
diff --git a/support/texlab/src/syntax/latex/math.rs b/support/texlab/src/syntax/latex/math.rs
deleted file mode 100644
index c234232c14..0000000000
--- a/support/texlab/src/syntax/latex/math.rs
+++ /dev/null
@@ -1,199 +0,0 @@
-use super::ast::*;
-use crate::syntax::language::*;
-use crate::syntax::text::SyntaxNode;
-use lsp_types::Range;
-use std::sync::Arc;
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexEquation {
- pub left: Arc<LatexCommand>,
- pub right: Arc<LatexCommand>,
-}
-
-impl LatexEquation {
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut equations = Vec::new();
- let mut left = None;
- for command in commands {
- let name = command.name.text();
- if name == "\\[" || name == "\\(" {
- left = Some(command);
- } else if name == "\\]" || name == "\\)" {
- if let Some(begin) = left {
- equations.push(Self {
- left: Arc::clone(&begin),
- right: Arc::clone(&command),
- });
- left = None;
- }
- }
- }
- equations
- }
-}
-
-impl SyntaxNode for LatexEquation {
- fn range(&self) -> Range {
- Range::new(self.left.start(), self.right.end())
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexInline {
- pub left: Arc<LatexMath>,
- pub right: Arc<LatexMath>,
-}
-
-impl LatexInline {
- fn parse(root: Arc<LatexRoot>) -> Vec<Self> {
- let mut analyzer = LatexInlineAnalyzer::default();
- analyzer.visit_root(root);
- analyzer.inlines
- }
-}
-
-impl SyntaxNode for LatexInline {
- fn range(&self) -> Range {
- Range::new(self.left.start(), self.right.end())
- }
-}
-
-#[derive(Debug, Default)]
-struct LatexInlineAnalyzer {
- inlines: Vec<LatexInline>,
- left: Option<Arc<LatexMath>>,
-}
-
-impl LatexVisitor for LatexInlineAnalyzer {
- fn visit_root(&mut self, root: Arc<LatexRoot>) {
- LatexWalker::walk_root(self, root);
- }
-
- fn visit_group(&mut self, group: Arc<LatexGroup>) {
- LatexWalker::walk_group(self, group);
- }
-
- fn visit_command(&mut self, command: Arc<LatexCommand>) {
- LatexWalker::walk_command(self, command);
- }
-
- fn visit_text(&mut self, text: Arc<LatexText>) {
- LatexWalker::walk_text(self, text);
- }
-
- fn visit_comma(&mut self, comma: Arc<LatexComma>) {
- LatexWalker::walk_comma(self, comma);
- }
-
- fn visit_math(&mut self, math: Arc<LatexMath>) {
- if let Some(left) = &self.left {
- let inline = LatexInline {
- left: Arc::clone(&left),
- right: Arc::clone(&math),
- };
- self.inlines.push(inline);
- self.left = None;
- } else {
- self.left = Some(Arc::clone(&math));
- }
- LatexWalker::walk_math(self, math);
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexMathOperator {
- pub command: Arc<LatexCommand>,
- pub definition: Arc<LatexCommand>,
- pub definition_index: usize,
- pub implementation_index: usize,
-}
-
-impl LatexMathOperator {
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut operators = Vec::new();
- for command in commands {
- for LatexMathOperatorCommand {
- name,
- definition_index,
- implementation_index,
- } in &LANGUAGE_DATA.math_operator_commands
- {
- if command.name.text() == name
- && command.args.len() > *definition_index
- && command.args.len() > *implementation_index
- {
- let definition = command.args[0].children.iter().next();
- if let Some(LatexContent::Command(definition)) = definition {
- operators.push(Self {
- command: Arc::clone(command),
- definition: Arc::clone(definition),
- definition_index: *definition_index,
- implementation_index: *implementation_index,
- })
- }
- }
- }
- }
- operators
- }
-}
-
-impl SyntaxNode for LatexMathOperator {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexTheoremDefinition {
- pub command: Arc<LatexCommand>,
- pub index: usize,
-}
-
-impl LatexTheoremDefinition {
- pub fn name(&self) -> &LatexToken {
- self.command.extract_word(self.index).unwrap()
- }
-
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut definitions = Vec::new();
- for command in commands {
- for LatexTheoremDefinitionCommand { name, index } in
- &LANGUAGE_DATA.theorem_definition_commands
- {
- if command.name.text() == name && command.has_word(*index) {
- definitions.push(Self {
- command: Arc::clone(&command),
- index: *index,
- });
- }
- }
- }
- definitions
- }
-}
-
-impl SyntaxNode for LatexTheoremDefinition {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexMathInfo {
- pub equations: Vec<LatexEquation>,
- pub inlines: Vec<LatexInline>,
- pub operators: Vec<LatexMathOperator>,
- pub theorem_definitions: Vec<LatexTheoremDefinition>,
-}
-
-impl LatexMathInfo {
- pub fn parse(root: Arc<LatexRoot>, commands: &[Arc<LatexCommand>]) -> Self {
- Self {
- equations: LatexEquation::parse(commands),
- inlines: LatexInline::parse(root),
- operators: LatexMathOperator::parse(commands),
- theorem_definitions: LatexTheoremDefinition::parse(commands),
- }
- }
-}
diff --git a/support/texlab/src/syntax/latex/mod.rs b/support/texlab/src/syntax/latex/mod.rs
index dc24682752..cee83ab04e 100644
--- a/support/texlab/src/syntax/latex/mod.rs
+++ b/support/texlab/src/syntax/latex/mod.rs
@@ -1,358 +1,686 @@
+mod analysis;
mod ast;
-mod env;
-mod finder;
-mod glossary;
mod lexer;
-mod math;
mod parser;
-mod printer;
-mod structure;
-
-pub use self::ast::*;
-pub use self::env::*;
-pub use self::finder::LatexNode;
-pub use self::glossary::*;
-pub use self::math::*;
-pub use self::printer::LatexPrinter;
-pub use self::structure::*;
-
-use self::finder::LatexFinder;
-use self::lexer::LatexLexer;
-use self::parser::LatexParser;
-use super::language::*;
-use super::text::SyntaxNode;
-use crate::range::RangeExt;
-use crate::workspace::Uri;
-use lsp_types::{Position, Range};
-use path_clean::PathClean;
-use std::path::PathBuf;
-use std::sync::Arc;
-
-#[derive(Debug, Default)]
-struct LatexCommandAnalyzer {
- commands: Vec<Arc<LatexCommand>>,
+
+pub use self::{analysis::*, ast::*};
+
+use self::{lexer::Lexer, parser::Parser};
+use crate::{
+ protocol::{Options, Uri},
+ tex::Resolver,
+};
+use std::path::Path;
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub struct OpenParams<'a> {
+ pub text: &'a str,
+ pub uri: &'a Uri,
+ pub resolver: &'a Resolver,
+ pub options: &'a Options,
+ pub current_dir: &'a Path,
}
-impl LatexCommandAnalyzer {
- fn parse(root: Arc<LatexRoot>) -> Vec<Arc<LatexCommand>> {
- let mut analyzer = Self::default();
- analyzer.visit_root(root);
- analyzer.commands
- }
+pub fn open(params: OpenParams) -> SymbolTable {
+ let OpenParams {
+ text,
+ uri,
+ resolver,
+ options,
+ current_dir,
+ } = params;
+
+ let lexer = Lexer::new(text);
+ let parser = Parser::new(lexer);
+ let tree = parser.parse();
+
+ let params = SymbolTableParams {
+ tree,
+ uri,
+ resolver,
+ options,
+ current_dir,
+ };
+ SymbolTable::analyze(params)
}
-impl LatexVisitor for LatexCommandAnalyzer {
- fn visit_root(&mut self, root: Arc<LatexRoot>) {
- LatexWalker::walk_root(self, root);
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ protocol::{Options, Range, RangeExt, Uri},
+ syntax::{generic_ast::AstNodeIndex, text::SyntaxNode},
+ tex::Resolver,
+ };
+ use indoc::indoc;
+ use std::env;
+
+ fn open_simple(text: &str) -> SymbolTable {
+ open(OpenParams {
+ text: text.trim(),
+ uri: &Uri::parse("http://www.foo.com/bar.tex").unwrap(),
+ resolver: &Resolver::default(),
+ options: &Options::default(),
+ current_dir: &env::current_dir().unwrap(),
+ })
}
- fn visit_group(&mut self, group: Arc<LatexGroup>) {
- LatexWalker::walk_group(self, group);
+ #[derive(Debug, Default)]
+ struct TreeTraversal {
+ nodes: Vec<AstNodeIndex>,
}
- fn visit_command(&mut self, command: Arc<LatexCommand>) {
- self.commands.push(Arc::clone(&command));
- LatexWalker::walk_command(self, command);
+ impl Visitor for TreeTraversal {
+ fn visit(&mut self, tree: &Tree, node: AstNodeIndex) {
+ self.nodes.push(node);
+ tree.walk(self, node);
+ }
}
- fn visit_text(&mut self, text: Arc<LatexText>) {
- LatexWalker::walk_text(self, text);
- }
+ mod range {
+ use super::*;
- fn visit_comma(&mut self, comma: Arc<LatexComma>) {
- LatexWalker::walk_comma(self, comma);
- }
+ fn verify(expected_ranges: Vec<Range>, text: &str) {
+ let table = open_simple(text);
- fn visit_math(&mut self, math: Arc<LatexMath>) {
- LatexWalker::walk_math(self, math);
- }
-}
+ let mut traversal = TreeTraversal::default();
+ traversal.visit(&table.tree, table.tree.root);
+ let actual_ranges: Vec<_> = traversal
+ .nodes
+ .into_iter()
+ .map(|node| table[node].range())
+ .collect();
+ assert_eq!(actual_ranges, expected_ranges);
+ }
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexCitation {
- pub command: Arc<LatexCommand>,
- pub index: usize,
-}
+ #[test]
+ fn command() {
+ verify(
+ vec![
+ Range::new_simple(0, 0, 2, 14),
+ Range::new_simple(0, 0, 0, 23),
+ Range::new_simple(0, 14, 0, 23),
+ Range::new_simple(0, 15, 0, 22),
+ Range::new_simple(1, 0, 1, 20),
+ Range::new_simple(1, 11, 1, 20),
+ Range::new_simple(1, 12, 1, 19),
+ Range::new_simple(2, 0, 2, 14),
+ Range::new_simple(2, 4, 2, 9),
+ Range::new_simple(2, 5, 2, 8),
+ Range::new_simple(2, 9, 2, 14),
+ Range::new_simple(2, 10, 2, 13),
+ ],
+ indoc!(
+ r#"
+ \documentclass{article}
+ \usepackage{amsmath}
+ \foo[bar]{baz}
+ "#
+ ),
+ );
+ }
-impl LatexCitation {
- pub fn keys(&self) -> Vec<&LatexToken> {
- self.command.extract_comma_separated_words(0)
- }
+ #[test]
+ fn text() {
+ verify(
+ vec![
+ Range::new_simple(0, 0, 0, 11),
+ Range::new_simple(0, 0, 0, 11),
+ ],
+ indoc!(
+ r#"
+ foo bar baz
+ "#
+ ),
+ );
+ }
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut citations = Vec::new();
- for command in commands {
- for LatexCitationCommand { name, index } in &LANGUAGE_DATA.citation_commands {
- if command.name.text() == name && command.has_comma_separated_words(*index) {
- citations.push(Self {
- command: Arc::clone(command),
- index: *index,
- });
- }
- }
+ #[test]
+ fn text_bracket() {
+ verify(
+ vec![Range::new_simple(0, 0, 0, 5), Range::new_simple(0, 0, 0, 5)],
+ indoc!(
+ r#"
+ ]foo[
+ "#
+ ),
+ );
}
- citations
- }
-}
-impl SyntaxNode for LatexCitation {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
+ #[test]
+ fn group() {
+ verify(
+ vec![
+ Range::new_simple(0, 0, 0, 15),
+ Range::new_simple(0, 0, 0, 15),
+ Range::new_simple(0, 2, 0, 5),
+ Range::new_simple(0, 6, 0, 13),
+ Range::new_simple(0, 8, 0, 11),
+ ],
+ indoc!(
+ r#"
+ { foo { bar } }
+ "#
+ ),
+ );
+ }
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexInclude {
- pub command: Arc<LatexCommand>,
- pub index: usize,
- pub kind: LatexIncludeKind,
- pub all_targets: Vec<Vec<Uri>>,
- pub include_extension: bool,
-}
+ #[test]
+ fn group_incomplete() {
+ verify(
+ vec![Range::new_simple(0, 1, 0, 2), Range::new_simple(0, 1, 0, 2)],
+ indoc!(
+ r#"
+ }{
+ "#
+ ),
+ );
+ }
-impl LatexInclude {
- pub fn paths(&self) -> Vec<&LatexToken> {
- self.command.extract_comma_separated_words(self.index)
+ #[test]
+ fn math() {
+ verify(
+ vec![
+ Range::new_simple(0, 0, 0, 9),
+ Range::new_simple(0, 0, 0, 1),
+ Range::new_simple(0, 2, 0, 7),
+ Range::new_simple(0, 8, 0, 9),
+ ],
+ indoc!(
+ r#"
+ $ x = 1 $
+ "#
+ ),
+ );
+ }
+
+ #[test]
+ fn comma() {
+ verify(
+ vec![
+ Range::new_simple(0, 0, 0, 8),
+ Range::new_simple(0, 0, 0, 3),
+ Range::new_simple(0, 3, 0, 4),
+ Range::new_simple(0, 5, 0, 8),
+ ],
+ indoc!(
+ r#"
+ foo, bar
+ "#
+ ),
+ );
+ }
}
- pub fn components(&self) -> Vec<String> {
- let mut components = Vec::new();
- for path in self.paths() {
- match self.kind {
- LatexIncludeKind::Package => components.push(format!("{}.sty", path.text())),
- LatexIncludeKind::Class => components.push(format!("{}.cls", path.text())),
- LatexIncludeKind::Latex
- | LatexIncludeKind::Bibliography
- | LatexIncludeKind::Image
- | LatexIncludeKind::Svg
- | LatexIncludeKind::Pdf
- | LatexIncludeKind::Everything => (),
- }
+ mod command {
+ use super::*;
+
+ fn verify(expected_names: Vec<&str>, text: &str) {
+ let table = open(OpenParams {
+ text,
+ uri: &Uri::parse("http://www.foo.com/bar.tex").unwrap(),
+ resolver: &Resolver::default(),
+ options: &Options::default(),
+ current_dir: &env::current_dir().unwrap(),
+ });
+
+ let actual_names: Vec<_> = table
+ .commands
+ .iter()
+ .map(|node| table.tree.as_command(*node).unwrap().name.text())
+ .collect();
+
+ assert_eq!(actual_names, expected_names);
+ }
+
+ #[test]
+ fn basic() {
+ verify(
+ vec!["\\documentclass", "\\usepackage", "\\begin", "\\end"],
+ indoc!(
+ r#"
+ \documentclass{article}
+ \usepackage{amsmath}
+ \begin{document}
+ Hello World
+ \end{document}
+ "#
+ ),
+ );
+ }
+
+ #[test]
+ fn star() {
+ verify(
+ vec!["\\section*", "\\subsection*"],
+ indoc!(
+ r#"
+ \section*{Foo}
+ \subsection**{Bar}
+ "#
+ ),
+ );
+ }
+
+ #[test]
+ fn at() {
+ verify(vec!["\\foo@bar"], indoc!(r#"\foo@bar"#));
+ }
+
+ #[test]
+ fn escape() {
+ verify(vec!["\\%"], indoc!(r#"\%foo"#))
}
- components
}
- fn parse(uri: &Uri, commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut includes = Vec::new();
- for command in commands {
- for description in &LANGUAGE_DATA.include_commands {
- if let Some(include) = Self::parse_single(uri, &command, &description) {
- includes.push(include);
- }
- }
+ mod environment {
+ use super::*;
+
+ fn verify(expected_names: Vec<(&str, &str)>, text: &str) {
+ let table = open_simple(text);
+ let actual_names: Vec<_> = table
+ .environments
+ .iter()
+ .map(|env| {
+ (
+ env.left
+ .name(&table.tree)
+ .map(Token::text)
+ .unwrap_or_default(),
+ env.right
+ .name(&table.tree)
+ .map(Token::text)
+ .unwrap_or_default(),
+ )
+ })
+ .collect();
+
+ assert_eq!(actual_names, expected_names);
+ }
+
+ #[test]
+ fn nested() {
+ verify(
+ vec![("b", "b"), ("a", "a")],
+ indoc!(
+ r#"
+ \begin{a}
+ \begin{b}
+ \end{b}
+ \end{a}
+ "#
+ ),
+ );
+ }
+
+ #[test]
+ fn empty_name() {
+ verify(
+ vec![("a", ""), ("", "b")],
+ indoc!(
+ r#"
+ \begin{a}
+ \end{}
+ \begin{}
+ \end{b}
+ "#
+ ),
+ );
+ }
+
+ #[test]
+ fn incomplete() {
+ verify(
+ Vec::new(),
+ indoc!(
+ r#"
+ \end{a}
+ \begin{a}
+ "#
+ ),
+ );
+ }
+
+ #[test]
+ fn standalone_true() {
+ let table = open_simple(r#"\begin{document}\end{document}"#);
+ assert!(table.is_standalone);
+ }
+
+ #[test]
+ fn standalone_false() {
+ let table = open_simple(r#"\begin{doc}\end{doc}"#);
+ assert!(!table.is_standalone);
}
- includes
}
- fn parse_single(
- uri: &Uri,
- command: &Arc<LatexCommand>,
- description: &LatexIncludeCommand,
- ) -> Option<Self> {
- if command.name.text() != description.name {
- return None;
+ mod include {
+ use super::*;
+
+ fn verify(expected_targets: Vec<Vec<&str>>, resolver: Resolver, text: &str) {
+ let table = open(OpenParams {
+ text,
+ uri: &Uri::parse("http://www.foo.com/dir1/dir2/foo.tex").unwrap(),
+ resolver: &resolver,
+ options: &Options::default(),
+ current_dir: &env::current_dir().unwrap(),
+ });
+
+ assert_eq!(table.includes.len(), 1);
+ let include = &table.includes[0];
+ let actual_targets: Vec<Vec<&str>> = include
+ .all_targets
+ .iter()
+ .map(|targets| targets.iter().map(|target| target.as_str()).collect())
+ .collect();
+
+ assert_eq!(actual_targets, expected_targets);
+ }
+
+ #[test]
+ fn same_directory() {
+ verify(
+ vec![vec![
+ "http://www.foo.com/dir1/dir2/bar",
+ "http://www.foo.com/dir1/dir2/bar.tex",
+ ]],
+ Resolver::default(),
+ indoc!(r#"\include{bar}"#),
+ );
+ }
+
+ #[test]
+ fn two_paths() {
+ verify(
+ vec![
+ vec![
+ "http://www.foo.com/dir1/dir2/bar.tex",
+ "http://www.foo.com/dir1/dir2/bar.tex.tex",
+ ],
+ vec![
+ "http://www.foo.com/dir1/dir2/baz.tex",
+ "http://www.foo.com/dir1/dir2/baz.tex.tex",
+ ],
+ ],
+ Resolver::default(),
+ indoc!(r#"\input{bar.tex, ./baz.tex}"#),
+ );
+ }
+
+ #[test]
+ fn sub_directory() {
+ verify(
+ vec![vec![
+ "http://www.foo.com/dir1/dir2/dir3/bar",
+ "http://www.foo.com/dir1/dir2/dir3/bar.tex",
+ ]],
+ Resolver::default(),
+ indoc!(r#"\include{dir3/bar}"#),
+ );
}
- if command.args.len() <= description.index {
- return None;
+ #[test]
+ fn parent_directory() {
+ verify(
+ vec![vec![
+ "http://www.foo.com/dir1/bar",
+ "http://www.foo.com/dir1/bar.tex",
+ ]],
+ Resolver::default(),
+ indoc!(r#"\include{../bar}"#),
+ );
}
- let mut all_targets = Vec::new();
- for relative_path in command.extract_comma_separated_words(description.index) {
- let mut path = uri.to_file_path().ok()?;
- path.pop();
- path.push(relative_path.text());
- path = PathBuf::from(path.to_string_lossy().into_owned().replace('\\', "/"));
- path = path.clean();
- let path = path.to_str()?.to_owned();
-
- let mut targets = Vec::new();
- targets.push(Uri::from_file_path(&path).ok()?);
- if let Some(extensions) = description.kind.extensions() {
- for extension in extensions {
- let path = format!("{}.{}", &path, extension);
- targets.push(Uri::from_file_path(&path).ok()?);
- }
- }
- all_targets.push(targets);
+ #[test]
+ fn distro_file() {
+ let mut resolver = Resolver::default();
+ let path = env::current_dir().unwrap().join("biblatex-examples.bib");
+ resolver
+ .files_by_name
+ .insert("biblatex-examples.bib".into(), path.clone());
+ verify(
+ vec![vec![
+ "http://www.foo.com/dir1/dir2/biblatex-examples.bib",
+ "http://www.foo.com/dir1/dir2/biblatex-examples.bib.bib",
+ Uri::from_file_path(&path).unwrap().as_str(),
+ ]],
+ resolver,
+ indoc!(r#"\addbibresource{biblatex-examples.bib}"#),
+ );
}
- let include = Self {
- command: Arc::clone(command),
- index: description.index,
- kind: description.kind,
- all_targets,
- include_extension: description.include_extension,
- };
- Some(include)
+ #[test]
+ fn component() {
+ let table = open(OpenParams {
+ text: indoc!(
+ r#"
+ \documentclass{article}
+ \usepackage{amsmath}
+ \usepackage{geometry, lipsum}
+ "#
+ ),
+ uri: &Uri::parse("http://www.foo.com/bar.tex").unwrap(),
+ resolver: &Resolver::default(),
+ options: &Options::default(),
+ current_dir: &env::current_dir().unwrap(),
+ });
+ assert_eq!(
+ table.components,
+ vec!["article.cls", "amsmath.sty", "geometry.sty", "lipsum.sty"]
+ );
+ }
}
-}
-impl SyntaxNode for LatexInclude {
- fn range(&self) -> Range {
- self.command.range()
+ #[test]
+ fn citation() {
+ let table = open_simple(indoc!(
+ r#"
+ \cite{key1}
+ \cite{key2, key3}
+ \nocite{*}
+ "#
+ ));
+
+ let expected_keys = vec![vec!["key1"], vec!["key2", "key3"], vec!["*"]];
+
+ let actual_keys: Vec<Vec<&str>> = table
+ .citations
+ .iter()
+ .map(|cit| cit.keys(&table.tree).into_iter().map(Token::text).collect())
+ .collect();
+
+ assert_eq!(actual_keys, expected_keys);
}
-}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexCommandDefinition {
- pub command: Arc<LatexCommand>,
- pub definition: Arc<LatexCommand>,
- pub definition_index: usize,
- pub implementation: Arc<LatexGroup>,
- pub implementation_index: usize,
- pub argument_count_index: usize,
-}
+ #[test]
+ fn command_definition() {
+ let table = open_simple(indoc!(
+ r#"
+ \newcommand{\foo}{Foo}
+ \newcommand[2]{\bar}{Bar}
+ \renewcommand{\baz}{Baz}
+ \qux
+ "#
+ ));
+
+ let expected_cmds = vec!["\\foo", "\\bar", "\\baz"];
+
+ let actual_cmds: Vec<&str> = table
+ .command_definitions
+ .iter()
+ .map(|def| def.definition_name(&table.tree))
+ .collect();
-impl LatexCommandDefinition {
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut definitions = Vec::new();
- for command in commands {
- for LatexCommandDefinitionCommand {
- name,
- definition_index,
- argument_count_index,
- implementation_index,
- } in &LANGUAGE_DATA.command_definition_commands
- {
- if command.name.text() == name
- && command.args.len() > *definition_index
- && command.args.len() > *implementation_index
- {
- let definition = command.args[0].children.iter().next();
- if let Some(LatexContent::Command(definition)) = definition {
- definitions.push(Self {
- command: Arc::clone(command),
- definition: Arc::clone(definition),
- definition_index: *definition_index,
- implementation: Arc::clone(&command.args[*implementation_index]),
- implementation_index: *implementation_index,
- argument_count_index: *argument_count_index,
- })
- }
- }
- }
- }
- definitions
+ assert_eq!(actual_cmds, expected_cmds);
}
-}
-impl SyntaxNode for LatexCommandDefinition {
- fn range(&self) -> Range {
- self.command.range()
+ #[test]
+ fn glossary_entry() {
+ let table = open_simple(indoc!(
+ r#"
+ \newglossaryentry{foo}{...}
+ \newacronym{bar}{...}
+ "#
+ ));
+
+ let expected_entries = vec!["foo", "bar"];
+
+ let actual_entries: Vec<&str> = table
+ .glossary_entries
+ .iter()
+ .map(|entry| entry.label(&table.tree).text())
+ .collect();
+
+ assert_eq!(actual_entries, expected_entries);
}
-}
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexSyntaxTree {
- pub root: Arc<LatexRoot>,
- pub commands: Vec<Arc<LatexCommand>>,
- pub includes: Vec<LatexInclude>,
- pub components: Vec<String>,
- pub env: LatexEnvironmentInfo,
- pub structure: LatexStructureInfo,
- pub citations: Vec<LatexCitation>,
- pub math: LatexMathInfo,
- pub command_definitions: Vec<LatexCommandDefinition>,
- pub glossary: LatexGlossaryInfo,
-}
+ #[test]
+ fn equation() {
+ let table = open_simple(indoc!(
+ r#"
+ \[
+ e^{i \pi} + 1 = 0
+ \]
+ \] \[
+ "#
+ ));
+
+ assert_eq!(table.equations.len(), 1);
+ }
-impl LatexSyntaxTree {
- pub fn parse(uri: &Uri, text: &str) -> Self {
- let lexer = LatexLexer::new(text);
- let mut parser = LatexParser::new(lexer);
- let root = Arc::new(parser.root());
- let commands = LatexCommandAnalyzer::parse(Arc::clone(&root));
- let includes = LatexInclude::parse(uri, &commands);
- let components = includes.iter().flat_map(LatexInclude::components).collect();
- let env = LatexEnvironmentInfo::parse(&commands);
- let structure = LatexStructureInfo::parse(&commands);
- let citations = LatexCitation::parse(&commands);
- let math = LatexMathInfo::parse(Arc::clone(&root), &commands);
- let command_definitions = LatexCommandDefinition::parse(&commands);
- let glossary = LatexGlossaryInfo::parse(&commands);
- Self {
- root,
- commands,
- includes,
- components,
- env,
- structure,
- citations,
- math,
- command_definitions,
- glossary,
- }
+ #[test]
+ fn inline() {
+ let table = open_simple(indoc!(
+ r#"
+ $ x $
+ $
+ "#
+ ));
+
+ assert_eq!(table.inlines.len(), 1);
}
- pub fn find(&self, position: Position) -> Vec<LatexNode> {
- let mut finder = LatexFinder::new(position);
- finder.visit_root(Arc::clone(&self.root));
- finder.results
+ #[test]
+ fn math_operator() {
+ let table = open_simple(indoc!(
+ r#"
+ \DeclareMathOperator{\foo}{foo}
+ "#
+ ));
+
+ assert_eq!(table.math_operators.len(), 1);
+ assert_eq!(
+ table.math_operators[0].definition_name(&table.tree),
+ "\\foo"
+ );
}
- pub fn find_command_by_name(&self, position: Position) -> Option<Arc<LatexCommand>> {
- for result in self.find(position) {
- if let LatexNode::Command(command) = result {
- if command.name.range().contains(position)
- && command.name.start().character != position.character
- {
- return Some(command);
- }
- }
- }
- None
+ #[test]
+ fn theorem_definition() {
+ let table = open_simple(indoc!(
+ r#"
+ \newtheorem{lemma}{Lemma}
+ "#
+ ));
+
+ assert_eq!(table.theorem_definitions.len(), 1);
+ assert_eq!(
+ table.theorem_definitions[0].name(&table.tree).text(),
+ "lemma"
+ );
}
- pub fn find_label_by_range(&self, range: Range) -> Option<&LatexLabel> {
- self.structure
- .labels
- .iter()
- .filter(|label| label.kind == LatexLabelKind::Definition)
- .filter(|label| label.names().len() == 1)
- .find(|label| range.contains(label.start()))
+ #[test]
+ fn section() {
+ let table = open_simple(indoc!(
+ r#"
+ \section{Introduction to \LaTeX}
+ \subsection*{Foo
+ "#
+ ));
+ assert_eq!(table.sections.len(), 2);
+ assert_eq!(
+ table.sections[0].print(&table.tree).unwrap(),
+ "Introduction to \\LaTeX"
+ );
+ assert_eq!(table.sections[1].print(&table.tree), None);
}
- pub fn find_label_by_environment(&self, environment: &LatexEnvironment) -> Option<&LatexLabel> {
- self.structure
+ #[test]
+ fn label() {
+ let table = open_simple(indoc!(
+ r#"
+ \label{foo}
+ \ref{bar, baz}
+ "#
+ ));
+
+ let expected_names = vec![vec!["foo"], vec!["bar", "baz"]];
+
+ let actual_names: Vec<Vec<&str>> = table
.labels
.iter()
- .filter(|label| label.kind == LatexLabelKind::Definition)
- .filter(|label| label.names().len() == 1)
- .find(|label| self.is_direct_child(environment, label.start()))
+ .map(|label| {
+ label
+ .names(&table.tree)
+ .into_iter()
+ .map(Token::text)
+ .collect()
+ })
+ .collect();
+
+ assert_eq!(actual_names, expected_names);
}
- pub fn is_enumeration_item(&self, enumeration: &LatexEnvironment, item: &LatexItem) -> bool {
- enumeration.range().contains(item.start())
- && !self
- .env
- .environments
- .iter()
- .filter(|env| *env != enumeration)
- .filter(|env| env.left.is_enum() && enumeration.range().contains(env.start()))
- .any(|env| env.range().contains(item.start()))
+ #[test]
+ fn label_numbering() {
+ let table = open_simple(indoc!(
+ r#"
+ \newlabel{foo}{{1}{1}}
+ "#
+ ));
+
+ assert_eq!(table.label_numberings.len(), 1);
+ assert_eq!(table.label_numberings[0].name(&table.tree).text(), "foo");
+ assert_eq!(table.label_numberings[0].number, "1");
}
- pub fn is_direct_child(&self, environment: &LatexEnvironment, position: Position) -> bool {
- environment.range().contains(position)
- && !self
- .env
- .environments
- .iter()
- .filter(|env| *env != environment)
- .filter(|env| environment.range().contains(env.start()))
- .any(|env| env.range().contains(position))
+ #[test]
+ fn caption() {
+ let table = open_simple(indoc!(
+ r#"
+ \caption{Foo \LaTeX Bar}
+ "#
+ ));
+
+ assert_eq!(table.captions.len(), 1);
+ assert_eq!(
+ table.captions[0].print(&table.tree).unwrap(),
+ "Foo \\LaTeX Bar"
+ );
}
-}
-pub fn extract_group(content: &LatexGroup) -> String {
- if content.children.is_empty() || content.right.is_none() {
- return String::new();
+ #[test]
+ fn item_without_name() {
+ let table = open_simple(indoc!(
+ r#"
+ \item
+ "#
+ ));
+
+ assert_eq!(table.items.len(), 1);
+ assert_eq!(table.items[0].name(&table.tree), None);
}
- let mut printer = LatexPrinter::new(content.children[0].start());
- for child in &content.children {
- child.accept(&mut printer);
+ #[test]
+ fn item_with_name() {
+ let table = open_simple(indoc!(
+ r#"
+ \item[foo bar]
+ "#
+ ));
+
+ assert_eq!(table.items.len(), 1);
+ assert_eq!(table.items[0].name(&table.tree).unwrap(), "foo bar");
}
- printer.output
}
diff --git a/support/texlab/src/syntax/latex/parser.rs b/support/texlab/src/syntax/latex/parser.rs
index 40693071c2..4ace4fbcd6 100644
--- a/support/texlab/src/syntax/latex/parser.rs
+++ b/support/texlab/src/syntax/latex/parser.rs
@@ -1,62 +1,84 @@
use super::ast::*;
+use crate::{
+ protocol::{Range, RangeExt},
+ syntax::{
+ generic_ast::{Ast, AstNodeIndex},
+ text::SyntaxNode,
+ },
+};
use std::iter::Peekable;
-use std::sync::Arc;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-enum LatexScope {
+enum Scope {
Root,
Group,
Options,
}
-pub struct LatexParser<I: Iterator<Item = LatexToken>> {
+#[derive(Debug)]
+pub struct Parser<I: Iterator<Item = Token>> {
+ tree: Ast<Node>,
tokens: Peekable<I>,
}
-impl<I: Iterator<Item = LatexToken>> LatexParser<I> {
+impl<I: Iterator<Item = Token>> Parser<I> {
pub fn new(tokens: I) -> Self {
- LatexParser {
+ Self {
+ tree: Ast::new(),
tokens: tokens.peekable(),
}
}
- pub fn root(&mut self) -> LatexRoot {
- let children = self.content(LatexScope::Root);
- LatexRoot::new(children)
+ pub fn parse(mut self) -> Tree {
+ let children = self.content(Scope::Root);
+
+ let range = if children.is_empty() {
+ Range::new_simple(0, 0, 0, 0)
+ } else {
+ let start = self.tree[children[0]].start();
+ let end = self.tree[children[children.len() - 1]].end();
+ Range::new(start, end)
+ };
+
+ let root = self.tree.add_node(Node::Root(Root { range }));
+ self.connect(root, &children);
+ Tree {
+ inner: self.tree,
+ root,
+ }
}
- fn content(&mut self, scope: LatexScope) -> Vec<LatexContent> {
+ fn content(&mut self, scope: Scope) -> Vec<AstNodeIndex> {
let mut children = Vec::new();
while let Some(ref token) = self.tokens.peek() {
match token.kind {
- LatexTokenKind::Word | LatexTokenKind::BeginOptions => {
- children.push(LatexContent::Text(self.text(scope)));
+ TokenKind::Word | TokenKind::BeginOptions => {
+ children.push(self.text(scope));
}
- LatexTokenKind::Command => {
- children.push(LatexContent::Command(self.command()));
+ TokenKind::Command => {
+ children.push(self.command());
}
- LatexTokenKind::Comma => {
- let node = LatexComma::new(self.tokens.next().unwrap());
- children.push(LatexContent::Comma(Arc::new(node)));
+ TokenKind::Comma => {
+ children.push(self.comma());
}
- LatexTokenKind::Math => {
- children.push(LatexContent::Math(self.math()));
+ TokenKind::Math => {
+ children.push(self.math());
}
- LatexTokenKind::BeginGroup => {
- children.push(LatexContent::Group(self.group(LatexGroupKind::Group)));
+ TokenKind::BeginGroup => {
+ children.push(self.group(GroupKind::Group));
}
- LatexTokenKind::EndGroup => {
- if scope == LatexScope::Root {
+ TokenKind::EndGroup => {
+ if scope == Scope::Root {
self.tokens.next();
} else {
return children;
}
}
- LatexTokenKind::EndOptions => {
- if scope == LatexScope::Options {
+ TokenKind::EndOptions => {
+ if scope == Scope::Options {
return children;
} else {
- children.push(LatexContent::Text(self.text(scope)));
+ children.push(self.text(scope));
}
}
}
@@ -64,37 +86,17 @@ impl<I: Iterator<Item = LatexToken>> LatexParser<I> {
children
}
- fn command(&mut self) -> Arc<LatexCommand> {
- let name = self.tokens.next().unwrap();
-
- let mut options = Vec::new();
- let mut args = Vec::new();
- while let Some(token) = self.tokens.peek() {
- match token.kind {
- LatexTokenKind::BeginGroup => {
- args.push(self.group(LatexGroupKind::Group));
- }
- LatexTokenKind::BeginOptions => {
- options.push(self.group(LatexGroupKind::Options));
- }
- _ => {
- break;
- }
- }
- }
- Arc::new(LatexCommand::new(name, options, args))
- }
-
- fn group(&mut self, kind: LatexGroupKind) -> Arc<LatexGroup> {
+ fn group(&mut self, kind: GroupKind) -> AstNodeIndex {
let left = self.tokens.next().unwrap();
let scope = match kind {
- LatexGroupKind::Group => LatexScope::Group,
- LatexGroupKind::Options => LatexScope::Options,
+ GroupKind::Group => Scope::Group,
+ GroupKind::Options => Scope::Options,
};
+
let children = self.content(scope);
let right_kind = match kind {
- LatexGroupKind::Group => LatexTokenKind::EndGroup,
- LatexGroupKind::Options => LatexTokenKind::EndOptions,
+ GroupKind::Group => TokenKind::EndGroup,
+ GroupKind::Options => TokenKind::EndOptions,
};
let right = if self.next_of_kind(right_kind) {
@@ -103,33 +105,82 @@ impl<I: Iterator<Item = LatexToken>> LatexParser<I> {
None
};
- Arc::new(LatexGroup::new(left, children, right, kind))
+ let end = right
+ .as_ref()
+ .map(SyntaxNode::end)
+ .or_else(|| children.last().map(|child| self.tree[*child].end()))
+ .unwrap_or_else(|| left.end());
+ let range = Range::new(left.start(), end);
+
+ let node = self.tree.add_node(Node::Group(Group {
+ range,
+ left,
+ kind,
+ right,
+ }));
+ self.connect(node, &children);
+ node
}
- fn text(&mut self, scope: LatexScope) -> Arc<LatexText> {
+ fn command(&mut self) -> AstNodeIndex {
+ let name = self.tokens.next().unwrap();
+ let mut children = Vec::new();
+ while let Some(token) = self.tokens.peek() {
+ match token.kind {
+ TokenKind::BeginGroup => children.push(self.group(GroupKind::Group)),
+ TokenKind::BeginOptions => children.push(self.group(GroupKind::Options)),
+ _ => break,
+ }
+ }
+
+ let end = children
+ .last()
+ .map(|child| self.tree[*child].end())
+ .unwrap_or_else(|| name.end());
+ let range = Range::new(name.start(), end);
+
+ let node = self.tree.add_node(Node::Command(Command { range, name }));
+ self.connect(node, &children);
+ node
+ }
+
+ fn text(&mut self, scope: Scope) -> AstNodeIndex {
let mut words = Vec::new();
while let Some(ref token) = self.tokens.peek() {
let kind = token.kind;
- let opts = kind == LatexTokenKind::EndOptions && scope != LatexScope::Options;
- if kind == LatexTokenKind::Word || kind == LatexTokenKind::BeginOptions || opts {
+ let opts = kind == TokenKind::EndOptions && scope != Scope::Options;
+ if kind == TokenKind::Word || kind == TokenKind::BeginOptions || opts {
words.push(self.tokens.next().unwrap());
} else {
break;
}
}
- Arc::new(LatexText::new(words))
+ let range = Range::new(words[0].start(), words[words.len() - 1].end());
+ self.tree.add_node(Node::Text(Text { range, words }))
}
- fn math(&mut self) -> Arc<LatexMath> {
+ fn comma(&mut self) -> AstNodeIndex {
let token = self.tokens.next().unwrap();
- Arc::new(LatexMath::new(token))
+ let range = token.range();
+ self.tree.add_node(Node::Comma(Comma { range, token }))
}
- fn next_of_kind(&mut self, kind: LatexTokenKind) -> bool {
- if let Some(ref token) = self.tokens.peek() {
- token.kind == kind
- } else {
- false
+ fn math(&mut self) -> AstNodeIndex {
+ let token = self.tokens.next().unwrap();
+ let range = token.range();
+ self.tree.add_node(Node::Math(Math { range, token }))
+ }
+
+ fn connect(&mut self, parent: AstNodeIndex, children: &[AstNodeIndex]) {
+ for child in children {
+ self.tree.add_edge(parent, *child);
}
}
+
+ fn next_of_kind(&mut self, kind: TokenKind) -> bool {
+ self.tokens
+ .peek()
+ .filter(|token| token.kind == kind)
+ .is_some()
+ }
}
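
The new Parser above is a recursive-descent parser over a Peekable token stream: each scope consumes tokens until it reaches a closing token that belongs to an enclosing scope, unmatched delimiters are tolerated rather than rejected, and finished nodes are pushed into the arena with ranges computed from their children. A reduced, self-contained sketch of that control flow, with simplified token and node types assumed purely for illustration:

use std::iter::Peekable;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Tok {
    Word,
    BeginGroup,
    EndGroup,
}

#[derive(Debug, PartialEq)]
enum Node {
    Word,
    Group(Vec<Node>),
}

struct Parser<I: Iterator<Item = Tok>> {
    tokens: Peekable<I>,
}

impl<I: Iterator<Item = Tok>> Parser<I> {
    fn new(tokens: I) -> Self {
        Self { tokens: tokens.peekable() }
    }

    // Consume content until an unmatched `}` (when inside a group) or end of input.
    fn content(&mut self, in_group: bool) -> Vec<Node> {
        let mut children = Vec::new();
        while let Some(tok) = self.tokens.peek().copied() {
            match tok {
                Tok::Word => {
                    self.tokens.next();
                    children.push(Node::Word);
                }
                Tok::BeginGroup => {
                    self.tokens.next();
                    children.push(Node::Group(self.content(true)));
                    // An unterminated group simply ends at EOF, like the real parser.
                    if self.tokens.peek() == Some(&Tok::EndGroup) {
                        self.tokens.next();
                    }
                }
                Tok::EndGroup => {
                    if in_group {
                        return children;
                    }
                    // A stray `}` at the top level is skipped instead of failing.
                    self.tokens.next();
                }
            }
        }
        children
    }
}

fn main() {
    let tokens = vec![Tok::Word, Tok::BeginGroup, Tok::Word, Tok::EndGroup];
    let mut parser = Parser::new(tokens.into_iter());
    let nodes = parser.content(false);
    assert_eq!(nodes, vec![Node::Word, Node::Group(vec![Node::Word])]);
}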
diff --git a/support/texlab/src/syntax/latex/printer.rs b/support/texlab/src/syntax/latex/printer.rs
deleted file mode 100644
index ce03778033..0000000000
--- a/support/texlab/src/syntax/latex/printer.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-use super::ast::*;
-use crate::syntax::text::*;
-use lsp_types::Position;
-use std::sync::Arc;
-
-#[derive(Debug)]
-pub struct LatexPrinter {
- pub output: String,
- position: Position,
-}
-
-impl LatexPrinter {
- pub fn new(start_position: Position) -> Self {
- Self {
- output: String::new(),
- position: start_position,
- }
- }
-
- fn synchronize(&mut self, position: Position) {
- while self.position.line < position.line {
- self.output.push('\n');
- self.position.line += 1;
- }
-
- while self.position.character < position.character {
- self.output.push(' ');
- self.position.character += 1;
- }
- }
-
- fn print_token(&mut self, token: &LatexToken) {
- self.synchronize(token.start());
- self.output.push_str(token.text());
- self.position.character += token.end().character - token.start().character;
- self.synchronize(token.end());
- }
-}
-
-impl LatexVisitor for LatexPrinter {
- fn visit_root(&mut self, root: Arc<LatexRoot>) {
- for child in &root.children {
- child.accept(self);
- }
- }
-
- fn visit_group(&mut self, group: Arc<LatexGroup>) {
- self.print_token(&group.left);
- for child in &group.children {
- child.accept(self);
- }
- if let Some(right) = &group.right {
- self.print_token(right);
- }
- }
-
- fn visit_command(&mut self, command: Arc<LatexCommand>) {
- self.print_token(&command.name);
- for group in &command.groups {
- self.visit_group(Arc::clone(&group));
- }
- }
-
- fn visit_text(&mut self, text: Arc<LatexText>) {
- for word in &text.words {
- self.print_token(word);
- }
- }
-
- fn visit_comma(&mut self, comma: Arc<LatexComma>) {
- self.print_token(&comma.token);
- }
-
- fn visit_math(&mut self, math: Arc<LatexMath>) {
- self.print_token(&math.token)
- }
-}
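
The deleted printer.rs reproduced extracted text by padding the output with newlines and spaces until each token's original line and column were reached, which the print(...) calls in the mod.rs tests above appear to replace. A standalone sketch of that padding idea; Position is a simplified stand-in for the LSP type, and unlike the removed code this version resets the column after a line break for clarity:

#[derive(Debug, Clone, Copy)]
struct Position {
    line: u64,
    character: u64,
}

struct Printer {
    output: String,
    position: Position,
}

impl Printer {
    fn new(start: Position) -> Self {
        Self { output: String::new(), position: start }
    }

    // Emit filler until the output cursor reaches `target`.
    fn synchronize(&mut self, target: Position) {
        while self.position.line < target.line {
            self.output.push('\n');
            self.position.line += 1;
            self.position.character = 0;
        }
        while self.position.character < target.character {
            self.output.push(' ');
            self.position.character += 1;
        }
    }

    fn print_token(&mut self, start: Position, text: &str) {
        self.synchronize(start);
        self.output.push_str(text);
        self.position.character += text.len() as u64;
    }
}

fn main() {
    let mut printer = Printer::new(Position { line: 0, character: 0 });
    printer.print_token(Position { line: 0, character: 4 }, "foo");
    assert_eq!(printer.output, "    foo");
}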
diff --git a/support/texlab/src/syntax/latex/structure.rs b/support/texlab/src/syntax/latex/structure.rs
deleted file mode 100644
index 79c9aac216..0000000000
--- a/support/texlab/src/syntax/latex/structure.rs
+++ /dev/null
@@ -1,250 +0,0 @@
-use super::ast::*;
-use crate::range::RangeExt;
-use crate::syntax::language::*;
-use crate::syntax::text::{CharStream, SyntaxNode};
-use itertools::Itertools;
-use lsp_types::Range;
-use std::sync::Arc;
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexSection {
- pub command: Arc<LatexCommand>,
- pub index: usize,
- pub level: i32,
- pub prefix: &'static str,
-}
-
-impl LatexSection {
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut sections = Vec::new();
- for command in commands {
- for LatexSectionCommand {
- name,
- index,
- level,
- prefix,
- } in &LANGUAGE_DATA.section_commands
- {
- if command.name.text() == name && command.args.len() > *index {
- sections.push(Self {
- command: Arc::clone(command),
- index: *index,
- level: *level,
- prefix: prefix.as_ref(),
- })
- }
- }
- }
- sections
- }
-
- pub fn extract_text(&self, text: &str) -> Option<String> {
- let content = &self.command.args[self.index];
- let right = content.right.as_ref()?;
- let range = Range::new_simple(
- content.left.start().line,
- content.left.start().character + 1,
- right.end().line,
- right.end().character - 1,
- );
- Some(CharStream::extract(&text, range))
- }
-}
-
-impl SyntaxNode for LatexSection {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexLabel {
- pub command: Arc<LatexCommand>,
- index: usize,
- pub kind: LatexLabelKind,
-}
-
-impl LatexLabel {
- pub fn names(&self) -> Vec<&LatexToken> {
- self.command.extract_comma_separated_words(self.index)
- }
-
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut labels = Vec::new();
- for command in commands {
- for LatexLabelCommand { name, index, kind } in &LANGUAGE_DATA.label_commands {
- if command.name.text() == name && command.has_comma_separated_words(*index) {
- labels.push(Self {
- command: Arc::clone(command),
- index: *index,
- kind: *kind,
- });
- }
- }
- }
- labels
- }
-}
-
-impl SyntaxNode for LatexLabel {
- fn range(&self) -> Range {
- self.command.range
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexLabelNumbering {
- pub command: Arc<LatexCommand>,
- pub number: String,
-}
-
-impl LatexLabelNumbering {
- pub fn name(&self) -> &LatexToken {
- self.command.extract_word(0).unwrap()
- }
-
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- commands
- .iter()
- .map(Arc::clone)
- .filter_map(Self::parse_single)
- .collect()
- }
-
- fn parse_single(command: Arc<LatexCommand>) -> Option<Self> {
- #[derive(Debug, Default)]
- struct FirstText {
- text: Option<Arc<LatexText>>,
- }
-
- impl LatexVisitor for FirstText {
- fn visit_root(&mut self, root: Arc<LatexRoot>) {
- LatexWalker::walk_root(self, root);
- }
-
- fn visit_group(&mut self, group: Arc<LatexGroup>) {
- LatexWalker::walk_group(self, group);
- }
-
- fn visit_command(&mut self, command: Arc<LatexCommand>) {
- LatexWalker::walk_command(self, command);
- }
-
- fn visit_text(&mut self, text: Arc<LatexText>) {
- if self.text.is_none() {
- self.text = Some(text);
- }
- }
-
- fn visit_comma(&mut self, comma: Arc<LatexComma>) {
- LatexWalker::walk_comma(self, comma);
- }
-
- fn visit_math(&mut self, math: Arc<LatexMath>) {
- LatexWalker::walk_math(self, math);
- }
- }
-
- if command.name.text() != "\\newlabel" || !command.has_word(0) {
- return None;
- }
-
- let mut analyzer = FirstText::default();
- analyzer.visit_group(Arc::clone(command.args.get(1)?));
- let number = analyzer
- .text?
- .words
- .iter()
- .map(|word| word.text())
- .join(" ");
-
- Some(Self {
- command: Arc::clone(&command),
- number,
- })
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexCaption {
- pub command: Arc<LatexCommand>,
- pub index: usize,
-}
-
-impl LatexCaption {
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut captions = Vec::new();
- for command in commands {
- if command.name.text() == "\\caption" && !command.args.is_empty() {
- captions.push(Self {
- command: Arc::clone(&command),
- index: 0,
- });
- }
- }
- captions
- }
-}
-
-impl SyntaxNode for LatexCaption {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexItem {
- pub command: Arc<LatexCommand>,
-}
-
-impl LatexItem {
- fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
- let mut items = Vec::new();
- for command in commands {
- if command.name.text() == "\\item" {
- items.push(Self {
- command: Arc::clone(&command),
- });
- }
- }
- items
- }
-
- pub fn name(&self) -> Option<String> {
- if let Some(options) = self.command.options.get(0) {
- if options.children.len() == 1 {
- if let LatexContent::Text(text) = &options.children[0] {
- return Some(text.words.iter().map(|word| word.text()).join(" "));
- }
- }
- }
- None
- }
-}
-
-impl SyntaxNode for LatexItem {
- fn range(&self) -> Range {
- self.command.range()
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone)]
-pub struct LatexStructureInfo {
- pub sections: Vec<LatexSection>,
- pub labels: Vec<LatexLabel>,
- pub label_numberings: Vec<LatexLabelNumbering>,
- pub captions: Vec<LatexCaption>,
- pub items: Vec<LatexItem>,
-}
-
-impl LatexStructureInfo {
- pub fn parse(commands: &[Arc<LatexCommand>]) -> Self {
- Self {
- sections: LatexSection::parse(commands),
- labels: LatexLabel::parse(commands),
- label_numberings: LatexLabelNumbering::parse(commands),
- captions: LatexCaption::parse(commands),
- items: LatexItem::parse(commands),
- }
- }
-}