author     Karl Berry <karl@freefriends.org>  2025-01-14 20:45:54 +0000
committer  Karl Berry <karl@freefriends.org>  2025-01-14 20:45:54 +0000
commit     d4b22dd6191f504b8012edea893e82abf9020d37 (patch)
tree       0b7127e758e7eb455171f79228a4ed1600dafcd9 /Master
parent     3cc25bc5f342ff292377cd9a8f870a159f6a590b (diff)
expltools (14jan25)
git-svn-id: svn://tug.org/texlive/trunk@73452 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master')
-rw-r--r--  Master/texmf-dist/doc/support/expltools/CHANGES.md  13
-rw-r--r--  Master/texmf-dist/doc/support/expltools/README.md  7
-rw-r--r--  Master/texmf-dist/doc/support/expltools/e201.tex  5
-rw-r--r--  Master/texmf-dist/doc/support/expltools/e203.tex  5
-rw-r--r--  Master/texmf-dist/doc/support/expltools/e208.tex  5
-rw-r--r--  Master/texmf-dist/doc/support/expltools/e209.tex  2
-rw-r--r--  Master/texmf-dist/doc/support/expltools/project-proposal.pdf  bin 67243 -> 67243 bytes
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s204.tex  6
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s205-01.tex  3
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s205-02.tex  3
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s205-03.tex  3
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s205-04.tex  5
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s206-01.tex  7
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s206-02.tex  7
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s206-03.tex  6
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s207-01.tex  2
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s207-02.tex  2
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s207-03.tex  2
-rw-r--r--  Master/texmf-dist/doc/support/expltools/s207-04.tex  2
-rw-r--r--  Master/texmf-dist/doc/support/expltools/w200.tex  8
-rw-r--r--  Master/texmf-dist/doc/support/expltools/w202.tex  2
-rw-r--r--  Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md  154
-rw-r--r--  Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf  bin 2780745 -> 2780970 bytes
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-cli.lua  18
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua  255
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-obsolete.lua  16
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-parsers.lua  387
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-preprocessing-comments.lua  108
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua  169
-rwxr-xr-x  Master/texmf-dist/scripts/expltools/explcheck-utils.lua  15
30 files changed, 864 insertions, 353 deletions
diff --git a/Master/texmf-dist/doc/support/expltools/CHANGES.md b/Master/texmf-dist/doc/support/expltools/CHANGES.md
index 3192d358ed9..b63171ff6f6 100644
--- a/Master/texmf-dist/doc/support/expltools/CHANGES.md
+++ b/Master/texmf-dist/doc/support/expltools/CHANGES.md
@@ -1,5 +1,18 @@
# Changes
+## expltools 2025-01-14
+
+### explcheck v0.4.0
+
+#### Development
+
+- Add lexical analysis. (#21)
+
+#### Fixes
+
+- Do not detect error E102 (expl3 material in non-expl3 parts) when the
+ command-line option `--expect-expl3-everywhere` has been specified. (#21)
+
## expltools 2024-12-23
### explcheck v0.3.0
diff --git a/Master/texmf-dist/doc/support/expltools/README.md b/Master/texmf-dist/doc/support/expltools/README.md
index 4b2f33cdcdf..08bd113d004 100644
--- a/Master/texmf-dist/doc/support/expltools/README.md
+++ b/Master/texmf-dist/doc/support/expltools/README.md
@@ -41,6 +41,7 @@ For example, here is Lua code that applies the preprocessing step to the code fr
``` lua
local new_issues = require("explcheck-issues")
local preprocessing = require("explcheck-preprocessing")
+local lexical_analysis = require("explcheck-lexical-analysis")
-- LuaTeX users must initialize Kpathsea Lua module searchers first.
local using_luatex, kpse = pcall(require, "kpse")
@@ -56,10 +57,12 @@ local file = assert(io.open(filename, "r"))
local content = assert(file:read("*a"))
assert(file:close())
-local line_starting_byte_numbers = preprocessing(issues, content)
+local line_starting_byte_numbers, expl_ranges = preprocessing(issues, content)
+local tokens = lexical_analysis(issues, content, expl_ranges)
print(
- "There were " .. #issues.warnings .. " warnings "
+ "There were " .. #tokens .. " tokens, "
+ .. #issues.warnings .. " warnings, "
.. "and " .. #issues.errors .. " errors "
.. "in the file " .. filename .. "."
)
diff --git a/Master/texmf-dist/doc/support/expltools/e201.tex b/Master/texmf-dist/doc/support/expltools/e201.tex
new file mode 100644
index 00000000000..c4913215184
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/e201.tex
@@ -0,0 +1,5 @@
+\cs_new:Nn
+ \example:bar % error on this line
+ { foo }
+ { bar }
+ { baz }
diff --git a/Master/texmf-dist/doc/support/expltools/e203.tex b/Master/texmf-dist/doc/support/expltools/e203.tex
new file mode 100644
index 00000000000..d9bc2a06c88
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/e203.tex
@@ -0,0 +1,5 @@
+\msg_log:n % error on this line
+ {
+ Foo~bar~
+ \c_one_thousand % error on this line
+ }
diff --git a/Master/texmf-dist/doc/support/expltools/e208.tex b/Master/texmf-dist/doc/support/expltools/e208.tex
new file mode 100644
index 00000000000..8285df3ea43
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/e208.tex
@@ -0,0 +1,5 @@
+\tl_new:N
+ \g_example_tl
+\tl_gset:Nn
+ \g_example_tl
+ { Hello,~ } } % error on this line
diff --git a/Master/texmf-dist/doc/support/expltools/e209.tex b/Master/texmf-dist/doc/support/expltools/e209.tex
new file mode 100644
index 00000000000..67e7c9200b5
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/e209.tex
@@ -0,0 +1,2 @@
+^^7f % error on this line
+\fo^^?o % error on this line
diff --git a/Master/texmf-dist/doc/support/expltools/project-proposal.pdf b/Master/texmf-dist/doc/support/expltools/project-proposal.pdf
index a4bc78b18b5..10c6dd8b45f 100644
--- a/Master/texmf-dist/doc/support/expltools/project-proposal.pdf
+++ b/Master/texmf-dist/doc/support/expltools/project-proposal.pdf
Binary files differ
diff --git a/Master/texmf-dist/doc/support/expltools/s204.tex b/Master/texmf-dist/doc/support/expltools/s204.tex
new file mode 100644
index 00000000000..bb9e445adba
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s204.tex
@@ -0,0 +1,6 @@
+\cs_new:Npn \foo_bar:Nn #1#2
+{
+ \cs_if_exist:NTF#1 % warning on this line
+ { \__foo_bar:n {#2} }
+ { \__foo_bar:nn{#2}{literal} } % warning on this line
+}
diff --git a/Master/texmf-dist/doc/support/expltools/s205-01.tex b/Master/texmf-dist/doc/support/expltools/s205-01.tex
new file mode 100644
index 00000000000..c23826d78f5
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s205-01.tex
@@ -0,0 +1,3 @@
+\cs_new:Nn
+ \description: % warning on this line
+ { foo }
diff --git a/Master/texmf-dist/doc/support/expltools/s205-02.tex b/Master/texmf-dist/doc/support/expltools/s205-02.tex
new file mode 100644
index 00000000000..dfec7a62794
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s205-02.tex
@@ -0,0 +1,3 @@
+\cs_gset:Npn
+ \module__description: % warning on this line
+ { foo }
diff --git a/Master/texmf-dist/doc/support/expltools/s205-03.tex b/Master/texmf-dist/doc/support/expltools/s205-03.tex
new file mode 100644
index 00000000000..1422d5aca0c
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s205-03.tex
@@ -0,0 +1,3 @@
+\cs_set_eq:Nn
+ \_module_description: % warning on this line
+ { foo }
diff --git a/Master/texmf-dist/doc/support/expltools/s205-04.tex b/Master/texmf-dist/doc/support/expltools/s205-04.tex
new file mode 100644
index 00000000000..85e74b9a64d
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s205-04.tex
@@ -0,0 +1,5 @@
+\cs_generate_from_arg_count:NNnn
+ \__module_description:
+ \cs_new:Npn
+ { 0 }
+ { foo }
diff --git a/Master/texmf-dist/doc/support/expltools/s206-01.tex b/Master/texmf-dist/doc/support/expltools/s206-01.tex
new file mode 100644
index 00000000000..00e6878d0f4
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s206-01.tex
@@ -0,0 +1,7 @@
+\tl_new:N
+ \g_description_tl % warning on this line
+\box_new:N
+ \l__description_box % warning on this line
+\int_const:Nn
+ \c_description % warning on this line
+ { 123 }
diff --git a/Master/texmf-dist/doc/support/expltools/s206-02.tex b/Master/texmf-dist/doc/support/expltools/s206-02.tex
new file mode 100644
index 00000000000..09c43c51711
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s206-02.tex
@@ -0,0 +1,7 @@
+\regex_new:N
+ \g_module_description_regex
+\coffin_new:N
+ \l_module_description_coffin
+\str_const:Nn
+ \c__module_description_str
+ { foo }
diff --git a/Master/texmf-dist/doc/support/expltools/s206-03.tex b/Master/texmf-dist/doc/support/expltools/s206-03.tex
new file mode 100644
index 00000000000..d3f9fd77e54
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s206-03.tex
@@ -0,0 +1,6 @@
+\tl_use:N
+ \l_tmpa_tl
+\int_use:N
+ \l_tmpb_int
+\str_use:N
+ \l_tmpa_str
diff --git a/Master/texmf-dist/doc/support/expltools/s207-01.tex b/Master/texmf-dist/doc/support/expltools/s207-01.tex
new file mode 100644
index 00000000000..d9aef68f3ce
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s207-01.tex
@@ -0,0 +1,2 @@
+\quark_new:N
+ \foo_bar % error on this line
diff --git a/Master/texmf-dist/doc/support/expltools/s207-02.tex b/Master/texmf-dist/doc/support/expltools/s207-02.tex
new file mode 100644
index 00000000000..203015441ea
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s207-02.tex
@@ -0,0 +1,2 @@
+\quark_new:N
+ \q_foo_bar
diff --git a/Master/texmf-dist/doc/support/expltools/s207-03.tex b/Master/texmf-dist/doc/support/expltools/s207-03.tex
new file mode 100644
index 00000000000..4bbd7f59156
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s207-03.tex
@@ -0,0 +1,2 @@
+\scan_new:N
+ \foo_bar % error on this line
diff --git a/Master/texmf-dist/doc/support/expltools/s207-04.tex b/Master/texmf-dist/doc/support/expltools/s207-04.tex
new file mode 100644
index 00000000000..6faa3114114
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/s207-04.tex
@@ -0,0 +1,2 @@
+\scan_new:N
+ \s_foo_bar
diff --git a/Master/texmf-dist/doc/support/expltools/w200.tex b/Master/texmf-dist/doc/support/expltools/w200.tex
new file mode 100644
index 00000000000..90a0a29661f
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/w200.tex
@@ -0,0 +1,8 @@
+\cs_new:Npn
+ \show_until_if:w % warning on this line
+ #1 \if^^zw % warning on this line
+ { \tl_show:n {#1} }
+\show_until_if:^^7 % warning on this line
+ \tex_if:D % warning on this line
+ \if_charcode:^^77 % warning on this line
+ \if^^3aw % warning on this line
diff --git a/Master/texmf-dist/doc/support/expltools/w202.tex b/Master/texmf-dist/doc/support/expltools/w202.tex
new file mode 100644
index 00000000000..5e7cfc31670
--- /dev/null
+++ b/Master/texmf-dist/doc/support/expltools/w202.tex
@@ -0,0 +1,2 @@
+\str_lower_case:n % warning on this line
+ { FOO BAR }
diff --git a/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md b/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md
index 9dc0b564cd7..bf77cb70706 100644
--- a/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md
+++ b/Master/texmf-dist/doc/support/expltools/warnings-and-errors-02-lexical-analysis.md
@@ -1,157 +1,65 @@
# Lexical analysis
In the lexical analysis step, the expl3 analysis tool converts the expl3 parts of the input files into a list of `\TeX`{=tex} tokens.
-## “Weird” and “Do not use” argument specifiers {.w}
+## “Weird” and “Do not use” argument specifiers {.w label=w200}
Some control sequence tokens correspond to functions with `w` (weird) or `D` (do not use) argument specifiers.
-``` tex
-\cs_new:Npn
- \show_until_if:w % warning on this line
- #1 \if:w % warning on this line
- { \tl_show:n {#1} }
-\show_until_if:w % warning on this line
- \tex_if:D % warning on this line
- \if_charcode:w % warning on this line
- \if:w % warning on this line
-```
+ /w200.tex
The above example has been taken from @latexteam2024interfaces [Chapter 24].
-## Unknown argument specifiers {.e}
+## Unknown argument specifiers {.e label=e201}
Some control sequence tokens correspond to functions with unknown argument specifiers. [@latexteam2024interfaces, Section 1.1]
-``` tex
-\cs_new:Nn
- \example:bar % error on this line
- { foo }
- { bar }
- { baz }
-```
+ /e201.tex
-## Deprecated control sequences {.w}
+## Deprecated control sequences {.w label=w202}
Some control sequence tokens correspond to deprecated expl3 control sequences from `l3obsolete.txt` [@josephwright2024obsolete].
-<!-- vi "$(texdoc -Il l3obsolete.txt | sed -r 's/\s*[0-9]+\s*//')" -->
-``` tex
-\str_lower_case:n % warning on this line
- { FOO BAR }
-```
+ /w202.tex
-## Removed control sequences {.e}
+## Removed control sequences {.e label=e203}
Some control sequence tokens correspond to removed expl3 control sequences from `l3obsolete.txt` [@josephwright2024obsolete].
-<!-- vi "$(texdoc -Il l3obsolete.txt | sed -r 's/\s*[0-9]+\s*//')" -->
-``` tex
-\msg_log:n % error on this line
- {
- Foo~bar~
- \c_one_thousand % error on this line
- }
-```
+ /e203.tex
-## Missing stylistic whitespaces {.s}
+## Missing stylistic whitespaces {.s label=s204}
Some control sequences and curly braces are not surrounded by whitespaces [@latexteam2024programming, Section 6] [@latexteam2024style, Section 3].
-``` tex
-\cs_new:Npn \foo_bar:Nn #1#2
-{
- \cs_if_exist:NTF#1 % warning on this line
- { \__foo_bar:n {#2} }
- { \__foo_bar:nn{#2}{literal} } % warning on this line
-}
-```
+ /s204.tex
-## Malformed function name {.s}
+## Malformed function name {.s label=s205}
Some functions have names that are not in the format `\texttt{\textbackslash\meta{module}\_\meta{description}:\meta{arg-spec}}`{=tex} [@latexteam2024programming, Section 3.2].
-``` tex
-\cs_new:Nn
- \description: % warning on this line
- { foo }
-```
-
-``` tex
-\cs_new:Nn
- \module__description: % warning on this line
- { foo }
-```
-
-``` tex
-\cs_new:Nn
- \_module_description: % warning on this line
- { foo }
-```
-
-``` tex
-\cs_new:Nn
- \__module_description:
- { foo }
-```
-
-## Malformed variable or constant name {.s}
+ /s205-01.tex
+ /s205-02.tex
+ /s205-03.tex
+ /s205-04.tex
+
+## Malformed variable or constant name {.s label=s206}
Some expl3 variables and constants have names that are not in the format `\texttt{\textbackslash\meta{scope}\_\meta{module}\_\meta{description}\_\meta{type}}`{=tex} [@latexteam2024programming, Section 3.2], where the `\meta{module}`{=tex} part is optional.
-``` tex
-\tl_new:Nn
- \g_description_box % warning on this line
-\tl_new:Nn
- \l__description_box % warning on this line
-\tl_const:Nn
- \c_description % warning on this line
- { foo }
-```
-
-``` tex
-\tl_new:Nn
- \g_module_description_box
-\tl_new:Nn
- \l_module_description_box
-\tl_const:Nn
- \c__module_description_box
- { foo }
-```
+ /s206-01.tex
+ /s206-02.tex
An exception is made for scratch variables [@latexteam2024interfaces, Section 1.1.1]:
-``` tex
-\tl_use:N
- \l_tmpa_tl
-\int_use:N
- \l_tmpb_int
-\str_use:N
- \l_tmpa_str
-```
+ /s206-03.tex
-## Malformed quark or scan mark name {.s}
+## Malformed quark or scan mark name {.s label=s207}
Some expl3 quarks and scan marks have names that do not start with `\q_` and `\s_`, respectively [@latexteam2024programming, Chapter 19].
-``` tex
-\quark_new:N
- \foo_bar % error on this line
-```
-
-``` tex
-\quark_new:N
- \q_foo_bar
-```
+ /s207-01.tex
+ /s207-02.tex
+ /s207-03.tex
+ /s207-04.tex
-``` tex
-\scan_new:N
- \foo_bar % error on this line
-```
+## Too many closing braces {.e label=e208}
+An expl3 part of the input file contains too many closing braces.
-``` tex
-\scan_new:N
- \s_foo_bar
-```
+ /e208.tex
-## Too many closing braces {.e}
-An expl3 part of the input file contains too many closing braces.
+## Invalid characters {.e label=e209}
+An expl3 part of the input file contains invalid characters.
-``` tex
-\tl_new:N
- \g_example_tl
-\tl_gset:Nn
- \g_example_tl
- { Hello,~ } } % error on this line
-```
+ /e209.tex
diff --git a/Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf b/Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf
index 53c8011a94e..6f291b78cc4 100644
--- a/Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf
+++ b/Master/texmf-dist/doc/support/expltools/warnings-and-errors.pdf
Binary files differ
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-cli.lua b/Master/texmf-dist/scripts/expltools/explcheck-cli.lua
index c1da9e63873..371927ac014 100755
--- a/Master/texmf-dist/scripts/expltools/explcheck-cli.lua
+++ b/Master/texmf-dist/scripts/expltools/explcheck-cli.lua
@@ -5,7 +5,7 @@ local new_issues = require("explcheck-issues")
local format = require("explcheck-format")
local preprocessing = require("explcheck-preprocessing")
--- local lexical_analysis = require("explcheck-lexical-analysis")
+local lexical_analysis = require("explcheck-lexical-analysis")
-- local syntactic_analysis = require("explcheck-syntactic-analysis")
-- local semantic_analysis = require("explcheck-semantic-analysis")
-- local pseudo_flow_analysis = require("explcheck-pseudo-flow-analysis")
@@ -100,14 +100,16 @@ local function main(pathnames, options)
local issues = new_issues()
-- Run all processing steps.
- local line_starting_byte_numbers, _ = preprocessing(issues, content, {
- expect_expl3_everywhere = options.expect_expl3_everywhere,
- max_line_length = options.max_line_length,
- })
+ local line_starting_byte_numbers, expl_ranges, tokens -- luacheck: ignore tokens
+
+ line_starting_byte_numbers, expl_ranges = preprocessing(issues, content, options)
+
if #issues.errors > 0 then
goto continue
end
- -- lexical_analysis(issues)
+
+ tokens = lexical_analysis(issues, content, expl_ranges, options)
+
-- syntactic_analysis(issues)
-- semantic_analysis(issues)
-- pseudo_flow_analysis(issues)
@@ -150,8 +152,8 @@ local function print_usage()
end
local function print_version()
- print("explcheck (expltools 2024-12-23) v0.3.0")
- print("Copyright (c) 2024 Vít Starý Novotný")
+ print("explcheck (expltools 2025-01-14) v0.4.0")
+ print("Copyright (c) 2024-2025 Vít Starý Novotný")
print("Licenses: LPPL 1.3 or later, GNU GPL v2 or later")
end
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua b/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua
new file mode 100755
index 00000000000..7699f50633f
--- /dev/null
+++ b/Master/texmf-dist/scripts/expltools/explcheck-lexical-analysis.lua
@@ -0,0 +1,255 @@
+-- The lexical analysis step of static analysis converts expl3 parts of the input files into TeX tokens.
+
+local parsers = require("explcheck-parsers")
+local obsolete = require("explcheck-obsolete")
+
+local lpeg = require("lpeg")
+
+-- Tokenize the content and register any issues.
+local function lexical_analysis(issues, all_content, expl_ranges, options) -- luacheck: ignore issues options
+
+ -- Process bytes within a given range similarly to TeX's input processor (TeX's "eyes" [1]) and produce lines.
+ --
+ -- See also:
+ -- - Section 31 on page 16 and Section 362 on page 142 of Knuth (1986) [1]
+ -- - Section 7 on page 36 and Section 8 on page 42 of Knuth (1986) [2]
+ -- - Section 1.2 on page 12 of Olsak (2001) [3]
+ --
+ -- [1]: Donald Ervin Knuth. 1986. TeX: The Program. Addison-Wesley, USA.
+ -- [2]: Donald Ervin Knuth. 1986. The TeXbook. Addison-Wesley, USA.
+ -- [3]: Petr Olsak. 2001. TeXbook naruby. Konvoj, Brno.
+ -- https://petr.olsak.net/ftp/olsak/tbn/tbn.pdf
+ --
+ local function get_lines(range)
+ local range_start, range_end = table.unpack(range)
+ local content = all_content:sub(range_start, range_end - 1)
+ for _, line in ipairs(lpeg.match(parsers.tex_lines, content)) do
+ local line_start, line_text, line_end = table.unpack(line)
+ local map_back = (function(line_start, line_text, line_end) -- luacheck: ignore line_start line_text line_end
+ return function (index)
+ assert(index > 0)
+ assert(index <= #line_text + #parsers.expl3_endlinechar)
+ if index > 0 and index <= #line_text then
+ local mapped_index = range_start + line_start + index - 2 -- a line character
+ assert(line_text:sub(index, index) == all_content:sub(mapped_index, mapped_index))
+ return mapped_index
+ elseif index > #line_text and index <= #line_text + #parsers.expl3_endlinechar then
+ return range_start + line_end - 2 -- an \endlinechar
+ else
+ assert(false)
+ end
+ end
+ end)(line_start, line_text, line_end)
+ coroutine.yield(line_text .. parsers.expl3_endlinechar, map_back)
+ end
+ end
+
+ -- Tokenize a line, similarly to TeX's token processor (TeX's "mouth" [1]).
+ --
+ -- See also:
+ -- - Section 303 on page 122 of Knuth (1986) [1]
+ -- - Section 7 on page 36 and Section 8 on page 42 of Knuth (1986) [2]
+ -- - Section 1.3 on page 19 of Olsak (2001) [3]
+ --
+ -- [1]: Donald Ervin Knuth. 1986. TeX: The Program. Addison-Wesley, USA.
+ -- [2]: Donald Ervin Knuth. 1986. The TeXbook. Addison-Wesley, USA.
+ -- [3]: Petr Olsak. 2001. TeXbook naruby. Konvoj, Brno.
+ -- https://petr.olsak.net/ftp/olsak/tbn/tbn.pdf
+ --
+ local function get_tokens(lines)
+ local tokens = {}
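+ -- Each entry is a token of the form {type, payload, catcode, start byte, end byte}, where the payload is either a csname or a single character.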
+ local state
+ local num_open_groups_upper_estimate = 0
+ for line_text, map_back in lines do
+ state = "N"
+ local character_index = 1
+
+ local function get_character_and_catcode(index)
+ assert(index <= #line_text)
+ local character = line_text:sub(index, index)
+ local catcode = lpeg.match(parsers.determine_expl3_catcode, character)
+ -- Process TeX's double circumflex convention (^^X and ^^XX).
+ local actual_character, index_increment = lpeg.match(parsers.double_superscript_convention, line_text, index)
+ if actual_character ~= nil then
+ local actual_catcode = lpeg.match(parsers.determine_expl3_catcode, actual_character)
+ return actual_character, actual_catcode, index_increment -- double circumflex convention
+ else
+ return character, catcode, 1 -- single character
+ end
+ end
+
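+ -- Track the catcode of the previous significant character, starting at 9 (ignored), so that the start of a line counts as preceded by whitespace.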
+ local previous_catcode = 9
+ while character_index <= #line_text do
+ local character, catcode, character_index_increment = get_character_and_catcode(character_index)
+ local range_start = map_back(character_index)
+ local range_end = range_start + 1
+ if ( -- a potential missing stylistic whitespace
+ previous_catcode == 0 -- right after a control sequence
+ or previous_catcode == 1 or previous_catcode == 2 -- or a begin/end grouping
+ ) then
+ if (
+ catcode ~= 0 and catcode ~= 1 -- for a control sequence or begin grouping, we will handle the lack of whitespace elsewhere
+ and not (previous_catcode == 2 and character == ",") -- allow a comma after end grouping without a whitespace in between
+ and not (previous_catcode == 1 and catcode == 6) -- allow a parameter after begin grouping without a whitespace in between
+ and catcode ~= 9 and catcode ~= 10
+ ) then
+ issues:add('s204', 'missing stylistic whitespaces', range_start, range_end)
+ end
+ end
+ if catcode == 0 then -- control sequence
+ local csname_table = {}
+ local csname_index = character_index + character_index_increment
+ local previous_csname_index = csname_index
+ if csname_index <= #line_text then
+ local csname_index_increment
+ character, catcode, csname_index_increment = get_character_and_catcode(csname_index)
+ table.insert(csname_table, character)
+ csname_index = csname_index + csname_index_increment
+ if catcode == 11 then -- control word
+ state = "S"
+ while csname_index <= #line_text do
+ character, catcode, csname_index_increment = get_character_and_catcode(csname_index)
+ if catcode == 11 then
+ table.insert(csname_table, character)
+ previous_csname_index = csname_index
+ csname_index = csname_index + csname_index_increment
+ else
+ break
+ end
+ end
+ elseif catcode == 10 then -- escaped space
+ state = "S"
+ else -- control symbol
+ state = "M"
+ end
+ end
+ local csname = table.concat(csname_table)
+ range_end = map_back(previous_csname_index) + 1
+ table.insert(tokens, {"control sequence", csname, 0, range_start, range_end})
+ if previous_catcode ~= 9 and previous_catcode ~= 10 then
+ issues:add('s204', 'missing stylistic whitespaces', range_start, range_end)
+ end
+ previous_catcode = 0
+ character_index = csname_index
+ elseif catcode == 5 then -- end of line
+ if state == "N" then
+ table.insert(tokens, {"control sequence", "par", range_start, range_end})
+ elseif state == "M" then
+ table.insert(tokens, {"character", " ", 10, range_start, range_end})
+ end
+ character_index = character_index + character_index_increment
+ elseif catcode == 9 then -- ignored character
+ previous_catcode = catcode
+ character_index = character_index + character_index_increment
+ elseif catcode == 10 then -- space
+ if state == "M" then
+ table.insert(tokens, {"character", " ", 10, range_start, range_end})
+ end
+ previous_catcode = catcode
+ character_index = character_index + character_index_increment
+ elseif catcode == 14 then -- comment character
+ character_index = #line_text + 1
+ elseif catcode == 15 then -- invalid character
+ issues:add('e209', 'invalid characters', range_start, range_end)
+ character_index = character_index + character_index_increment
+ else
+ if catcode == 1 or catcode == 2 then -- begin/end grouping
+ if catcode == 1 then
+ num_open_groups_upper_estimate = num_open_groups_upper_estimate + 1
+ elseif catcode == 2 then
+ if num_open_groups_upper_estimate > 0 then
+ num_open_groups_upper_estimate = num_open_groups_upper_estimate - 1
+ else
+ issues:add('e208', 'too many closing braces', range_start, range_end)
+ end
+ end
+ if previous_catcode ~= 9 and previous_catcode ~= 10 and not (previous_catcode == 6 and catcode == 2) then
+ issues:add('s204', 'missing stylistic whitespaces', range_start, range_end)
+ end
+ previous_catcode = catcode
+ elseif ( -- maybe a parameter?
+ previous_catcode == 6 and catcode == 12
+ and lpeg.match(parsers.decimal_digit, character) ~= nil
+ ) then
+ previous_catcode = 6
+ else -- some other character
+ previous_catcode = catcode
+ end
+ table.insert(tokens, {"character", character, catcode, range_start, range_end})
+ state = "M"
+ character_index = character_index + character_index_increment
+ end
+ end
+ end
+ return tokens
+ end
+
+ -- Tokenize the content.
+ local all_tokens = {}
+ for _, expl_range in ipairs(expl_ranges) do
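+ -- Wrap the get_lines() coroutine in a plain iterator, so that get_tokens() can consume the lines lazily.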
+ local lines = (function()
+ local co = coroutine.create(function()
+ get_lines(expl_range)
+ end)
+ return function()
+ local _, line_text, map_back = coroutine.resume(co)
+ return line_text, map_back
+ end
+ end)()
+ local tokens = get_tokens(lines)
+ table.insert(all_tokens, tokens)
+ end
+
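+ -- Check the tokens for "weird" or unknown argument specifiers, obsolete control sequences, and malformed names.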
+ for _, tokens in ipairs(all_tokens) do
+ for token_index, token in ipairs(tokens) do
+ local token_type, payload, catcode, range_start, range_end = table.unpack(token) -- luacheck: ignore catcode
+ if token_type == "control sequence" then
+ local csname = payload
+ local _, _, argument_specifiers = csname:find(":(.*)")
+ if argument_specifiers ~= nil then
+ if lpeg.match(parsers.weird_argument_specifiers, argument_specifiers) then
+ issues:add('w200', '"weird" and "do not use" argument specifiers', range_start, range_end)
+ end
+ if lpeg.match(parsers.argument_specifiers, argument_specifiers) == nil then
+ issues:add('e201', 'unknown argument specifiers', range_start, range_end)
+ end
+ end
+ if lpeg.match(obsolete.deprecated_csname, csname) ~= nil then
+ issues:add('w202', 'deprecated control sequences', range_start, range_end)
+ end
+ if lpeg.match(obsolete.removed_csname, csname) ~= nil then
+ issues:add('e203', 'removed control sequences', range_start, range_end)
+ end
+ if token_index + 1 <= #tokens then
+ local next_token = tokens[token_index + 1]
+ local next_token_type, next_csname, _, next_range_start, next_range_end = table.unpack(next_token)
+ if next_token_type == "control sequence" then
+ if (
+ lpeg.match(parsers.expl3_function_assignment_csname, csname) ~= nil
+ and lpeg.match(parsers.expl3_function_csname, next_csname) == nil
+ ) then
+ issues:add('s205', 'malformed function name', next_range_start, next_range_end)
+ end
+ if (
+ lpeg.match(parsers.expl3_variable_or_constant_use_csname, csname) ~= nil
+ and lpeg.match(parsers.expl3_variable_or_constant_csname, next_csname) == nil
+ and lpeg.match(parsers.expl3_scratch_variable_csname, next_csname) == nil
+ ) then
+ issues:add('s206', 'malformed variable or constant name', next_range_start, next_range_end)
+ end
+ if (
+ lpeg.match(parsers.expl3_quark_or_scan_mark_definition_csname, csname) ~= nil
+ and lpeg.match(parsers.expl3_quark_or_scan_mark_csname, next_csname) == nil
+ ) then
+ issues:add('s207', 'malformed quark or scan mark name', next_range_start, next_range_end)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ return all_tokens
+end
+
+return lexical_analysis
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-obsolete.lua b/Master/texmf-dist/scripts/expltools/explcheck-obsolete.lua
new file mode 100755
index 00000000000..6a451eca205
--- /dev/null
+++ b/Master/texmf-dist/scripts/expltools/explcheck-obsolete.lua
@@ -0,0 +1,16 @@
+-- LPEG parsers for checking whether the name of a standard expl3 function/variable is obsolete.
+
+local lpeg = require("lpeg")
+local P = lpeg.P
+
+local eof = P(-1)
+local regular_character = P(1)
+local wildcard = regular_character^0
+
+-- luacheck: push no max line length
+local obsolete = {}
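+-- Prefix trees that match the names of removed and deprecated expl3 control sequences listed in l3obsolete.txt.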
+obsolete.removed_csname = P("x") * (P("e") * (P("t") * (P("e") * (P("x") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("n") * (P("g") * (P("i") * (P("n") * (P("e") * (P("_p:") + P(":") * (P("F") + P("T") + P("TF"))))))))))) + wildcard * P(":D"))))))) + P("c") * (P("s") * (P("_") * (P("g") * (P("u") * (P("n") * (P("d") * (P("e") * (P("f") * (P("i") * (P("n") * (P("e") * (P(":") * (P("c") + P("N")))))))))) + P("n") * (P("e") * (P("w") * (P("_") * (P("e") * (P("q") * (P(":") * (P("c") * (P("c") + P("N")) + P("Nc")))) + P("p") * (P("r") * (P("o") * (P("t") * (P("e") * (P("c") * (P("t") * (P("e") * (P("d") * (P("_") * (P("n") * (P("o") * (P("p") * (P("a") * (P("r") * (P(":") * (P("c") * (P("p") * (P("n") + P("x"))) + P("N") * (P("p") * (P("n") + P("x")))))))))) + P(":") * (P("c") * (P("p") * (P("n") + P("x"))) + P("N") * (P("p") * (P("n") + P("x"))))))))))))) + P("n") * (P("o") * (P("p") * (P("a") * (P("r") * (P(":") * (P("c") * (P("p") * (P("n") + P("x"))) + P("N") * (P("p") * (P("n") + P("x")))))))))) + P(":") * (P("c") * (P("p") * (P("n") + P("x"))) + P("N") * (P("p") * (P("n") + P("x")))))))) + P("set_eq:NwN"))) + P("h") * (P("a") * (P("r") * (P("_") * (P("m") * (P("a") * (P("k") * (P("e") * (P("_") * (P("i") * (P("g") * (P("n") * (P("o") * (P("r") * (P("e") * (P(":") * (P("N") + P("n"))))))) + P("n") * (P("v") * (P("a") * (P("l") * (P("i") * (P("d") * (P(":") * (P("N") + P("n"))))))))) + P("m") * (P("a") * (P("t") * (P("h") * (P("_") * (P("s") * (P("u") * (P("p") * (P("e") * (P("r") * (P("s") * (P("c") * (P("r") * (P("i") * (P("p") * (P("t") * (P(":") * (P("N") + P("n"))))))))))) + P("b") * (P("s") * (P("c") * (P("r") * (P("i") * (P("p") * (P("t") * (P(":") * (P("N") + P("n")))))))))) + P("h") * (P("i") * (P("f") * (P("t") * (P(":") * (P("N") + P("n")))))))))))) + P("l") * (P("e") * (P("t") * (P("t") * (P("e") * (P("r") * (P(":") * (P("N") + P("n")))))))) + P("o") * (P("t") * (P("h") * (P("e") * (P("r") * (P(":") * (P("N") + P("n"))))))) + P("a") * (P("c") * (P("t") * (P("i") * (P("v") * (P("e") * (P(":") * (P("N") + P("n"))))))) + P("l") * (P("i") * (P("g") * (P("n") * (P("m") * (P("e") * (P("n") * (P("t") * (P("_") * (P("t") * (P("a") * (P("b") * (P(":") * (P("N") + P("n"))))))))))))))) + P("p") * (P("a") * (P("r") * (P("a") * (P("m") * (P("e") * (P("t") * (P("e") * (P("r") * (P(":") * (P("N") + P("n"))))))))))) + P("c") * (P("o") * (P("m") * (P("m") * (P("e") * (P("n") * (P("t") * (P(":") * (P("N") + P("n"))))))))) + P("b") * (P("e") * (P("g") * (P("i") * (P("n") * (P("_") * (P("g") * (P("r") * (P("o") * (P("u") * (P("p") * (P(":") * (P("N") + P("n"))))))))))))) + P("e") * (P("s") * (P("c") * (P("a") * (P("p") * (P("e") * (P(":") * (P("N") + P("n"))))))) + P("n") * (P("d") * (P("_") * (P("g") * (P("r") * (P("o") * (P("u") * (P("p") * (P(":") * (P("N") + P("n"))))))) + P("l") * (P("i") * (P("n") * (P("e") * (P(":") * (P("N") + P("n")))))))))) + P("s") * (P("p") * (P("a") * (P("c") * (P("e") * (P(":") * (P("N") + P("n")))))))))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("sfcode:w") + P("mathcode:w") + P("lccode:w") + P("uccode:w") + P("catcode:w")))) + P("h") * (P("o") * (P("w") * (P("_") * (P("v") * (P("a") * (P("l") * (P("u") * (P("e") * (P("_") * (P("sfcode:w") + P("mathcode:w") + P("lccode:w") + P("uccode:w") + P("catcode:w")))))))))))) + P("v") * (P("a") * (P("l") * (P("u") * (P("e") * (P("_") * (P("sfcode:w") + P("mathcode:w") + P("lccode:w") + P("uccode:w") + P("catcode:w")))))))))) + P("k_if_free_cs:N")) + P("_") * (P("zero") + P("xetex_is_engine_bool") + 
P("keys_code_root_tl") + P("job_name_tl") + P("m") * (P("ath_shift_token") + P("inus_one")) + P("l") * (P("etter_token") + P("uatex_is_engine_bool")) + P("o") * (P("ther_char_token") + P("n") * (P("e") + P("e") * (P("_") * (P("thousand") + P("hundred"))))) + P("nine") + P("a") * (P("ctive_char_token") + P("lignment_tab_token")) + P("pdftex_is_engine_bool") + P("catcode_active_tl") + P("undefined_fp") + P("e") * (P("ight") + P("leven") + P("mpty_toks")) + P("t") * (P("e") * (P("n") + P("n_thousand") + P("rm_ior")) + P("h") * (P("i") * (P("r") * (P("t") * (P("een") + P("y_two")))) + P("ree")) + P("w") * (P("o") + P("elve") + P("o") * (P("_") * (P("h") * (P("u") * (P("n") * (P("d") * (P("r") * (P("e") * (P("d") * (P("_") * (P("f") * (P("i") * (P("f") * (P("t") * (P("y") * (P("_") * (P("six") + P("five"))))))))))))))))))) + P("s") * (P("i") * (P("x") + P("xteen")) + P("tring_cctab") + P("even")) + P("f") * (P("i") * (P("ve") + P("fteen")) + P("o") * (P("u") * (P("r") + P("rteen"))))) + P("l") * (P("i") * (P("s") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("q") * (P("_") * (P("p") * (P(":") * (P("c") * (P("c") + P("N")) + P("Nc")))) + P(":") * (P("c") * (P("c") * (P("F") + P("T") + P("TF")) + P("N") * (P("F") + P("T") + P("TF"))) + P("N") * (P("c") * (P("F") + P("T") + P("TF"))))))))) + P("t") * (P("op:cN") + P("r") * (P("i") * (P("m") * (P("_") * (P("s") * (P("p") * (P("a") * (P("c") * (P("e") * (P("s") * (P(":") * (P("c") + P("N"))))))))))))) + P("remove_element:Nn") + P("u") * (P("s") * (P("e") * (P(":") * (P("c") + P("N"))))) + P("l") * (P("e") * (P("n") * (P("g") * (P("t") * (P("h") * (P(":") * (P("c") + P("N") + P("n")))))))) + P("g") * (P("t") * (P("r") * (P("i") * (P("m") * (P("_") * (P("s") * (P("p") * (P("a") * (P("c") * (P("e") * (P("s") * (P(":") * (P("c") + P("N"))))))))))))) + P("remove_element:Nn")) + P("d") * (P("i") * (P("s") * (P("p") * (P("l") * (P("a") * (P("y") * (P(":") * (P("c") + P("N"))))))))))))))) + P("b") * (P("o") * (P("x") * (P("_") * (P("u") * (P("s") * (P("e") * (P("_") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N"))))))))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("e") * (P("q") * (P("_") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") * (P("c") + P("N")) + P("N") * (P("c") + P("N"))))))))))))))) + P("g") * (P("s") * (P("e") * (P("t") * (P("_") * (P("e") * (P("q") * (P("_") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") * (P("c") + P("N")) + P("N") * (P("c") + P("N")))))))))))))))) + P("r") * (P("e") * (P("s") * (P("i") * (P("z") * (P("e") * (P(":") * (P("cnn") + P("Nnn")))))))))))) + P("e") * (P("t") * (P("e") * (P("x") * (P("_") * (wildcard * P(".:D")))))) + P("d") * (P("i") * (P("m") * (P("_") * (P("e") * (P("v") * (P("a") * (P("l") * (P("_end:") + P(":w"))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("m") * (P("a") * (P("x") * (P(":") * (P("cn") + P("Nn")))) + P("i") * (P("n") * (P(":") * (P("cn") + P("Nn"))))))))) + P("case:nnn") + P("g") * (P("s") * (P("e") * (P("t") * (P("_") * (P("m") * (P("a") * (P("x") * (P(":") * (P("cn") + P("Nn")))) + P("i") * (P("n") * (P(":") * (P("cn") + P("Nn")))))))))))))) + P("g") * (P("_") * (P("t") * (P("m") * (P("p") * (P("a_toks") + P("c_toks") + P("b_toks")))) + P("file_current_name_tl")) + P("roup_execute_after:N")) + P("f") * (P("i") * (P("l") * (P("e") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("x") * (P("i") * (P("s") * (P("t") * (P("_") * (P("i") * (P("n") * (P("p") * (P("u") * (P("t") * (P(":") * 
(P("n") * (P("T") + P("TF"))))))))))))))))) + P("list:") + P("p") * (P("a") * (P("t") * (P("h") * (P("_") * (P("include:n") + P("remove:n")))))) + P("add_path:nN"))))) + P("p") * (P("_") * (P("i") * (P("f") * (P("_") * (P("u") * (P("n") * (P("d") * (P("e") * (P("f") * (P("i") * (P("n") * (P("e") * (P("d") * (P("_p:N") + P(":") * (P("N") * (P("F") + P("T") + P("TF")))))))))))) + P("z") * (P("e") * (P("r") * (P("o") * (P("_p:N") + P(":") * (P("N") * (P("F") + P("T") + P("TF")))))))))) + P("m") * (P("u") * (P("l") * (P(":") * (P("cn") + P("Nn"))))) + P("l") * (P("n") * (P(":") * (P("cn") + P("Nn")))) + P("t") * (P("a") * (P("n") * (P(":") * (P("cn") + P("Nn"))))) + P("n") * (P("e") * (P("g") * (P(":") * (P("c") + P("N"))))) + P("a") * (P("b") * (P("s") * (P(":") * (P("c") + P("N"))))) + P("p") * (P("o") * (P("w") * (P(":") * (P("cn") + P("Nn"))))) + P("c") * (P("o") * (P("m") * (P("p") * (P("a") * (P("r") * (P("e") * (P(":") * (P("N") * (P("N") * (P("N") * (P("F") + P("T") + P("TF")))))))))) + P("s") * (P(":") * (P("cn") + P("Nn"))))) + P("r") * (P("o") * (P("u") * (P("n") * (P("d") * (P("_") * (P("p") * (P("l") * (P("a") * (P("c") * (P("e") * (P("s") * (P(":") * (P("cn") + P("Nn")))))))) + P("f") * (P("i") * (P("g") * (P("u") * (P("r") * (P("e") * (P("s") * (P(":") * (P("cn") + P("Nn"))))))))))))))) + P("e") * (P("x") * (P("p") * (P(":") * (P("cn") + P("Nn"))))) + P("d") * (P("i") * (P("v") * (P(":") * (P("cn") + P("Nn"))))) + P("g") * (P("m") * (P("u") * (P("l") * (P(":") * (P("cn") + P("Nn"))))) + P("l") * (P("n") * (P(":") * (P("cn") + P("Nn")))) + P("n") * (P("e") * (P("g") * (P(":") * (P("c") + P("N"))))) + P("a") * (P("b") * (P("s") * (P(":") * (P("c") + P("N"))))) + P("p") * (P("o") * (P("w") * (P(":") * (P("cn") + P("Nn"))))) + P("c") * (P("o") * (P("s") * (P(":") * (P("cn") + P("Nn"))))) + P("r") * (P("o") * (P("u") * (P("n") * (P("d") * (P("_") * (P("p") * (P("l") * (P("a") * (P("c") * (P("e") * (P("s") * (P(":") * (P("cn") + P("Nn")))))))) + P("f") * (P("i") * (P("g") * (P("u") * (P("r") * (P("e") * (P("s") * (P(":") * (P("cn") + P("Nn"))))))))))))))) + P("e") * (P("x") * (P("p") * (P(":") * (P("cn") + P("Nn"))))) + P("d") * (P("i") * (P("v") * (P(":") * (P("cn") + P("Nn"))))) + P("t") * (P("a") * (P("n") * (P(":") * (P("cn") + P("Nn"))))) + P("s") * (P("i") * (P("n") * (P(":") * (P("cn") + P("Nn")))))) + P("s") * (P("i") * (P("n") * (P(":") * (P("cn") + P("Nn")))))))) + P("i") * (P("n") * (P("t") * (P("_") * (P("c") * (P("ase:nnn") + P("o") * (P("n") * (P("v") * (P("e") * (P("r") * (P("t") * (P("_") * (P("to_base_ten:nn") + P("f") * (P("r") * (P("o") * (P("m") * (P("_") * (P("symbols:nn") + P("base_ten:nn")))))))))))))) + P("e") * (P("v") * (P("a") * (P("l") * (P("_end:") + P(":w"))))) + P("t") * (P("o") * (P("_") * (P("symbol:n") + P("hexadecimal:n") + P("octal:n") + P("binary:n")))) + P("value:w") + P("f") * (P("r") * (P("o") * (P("m") * (P("_") * (P("hexadecimal:n") + P("octal:n") + P("binary:n"))))))))) + P("o") * (P("w") * (P("_") * (P("wrap:xnnnN") + P("l") * (P("ist_streams:") + P("og_streams:")) + P("open_streams:") + P("n") * (P("o") * (P("w") * (P("_") * (P("w") * (P("h") * (P("e") * (P("n") * (P("_") * (P("a") * (P("v") * (P("a") * (P("i") * (P("l") * (P(":") * (P("N") * (P("n") + P("x"))))))))))))) + P("b") * (P("u") * (P("f") * (P("f") * (P("e") * (P("r") * (P("_") * (P("s") * (P("a") * (P("f") * (P("e") * (P(":") * (P("N") * (P("n") + P("x")))))))))))))))))))) + P("r") * (P("_") * (P("s") * (P("t") * (P("r") * (P("_") * (P("gto:NN") + P("to:NN"))))) + P("to:NN") + 
P("l") * (P("ist_streams:") + P("og_streams:")) + P("open_streams:") + P("g") * (P("et_str:NN") + P("to:NN"))))) + P("f_num:w")) + P("h") * (P("b") * (P("o") * (P("x") * (P("_") * (P("u") * (P("n") * (P("p") * (P("a") * (P("c") * (P("k") * (P("_") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N")))))))))))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("i") * (P("n") * (P("l") * (P("i") * (P("n") * (P("e") * (P("_") * (P("end:") + P("b") * (P("e") * (P("g") * (P("i") * (P("n") * (P(":") * (P("c") + P("N")))))))))))))))))) + P("g") * (P("s") * (P("e") * (P("t") * (P("_") * (P("i") * (P("n") * (P("l") * (P("i") * (P("n") * (P("e") * (P("_") * (P("end:") + P("b") * (P("e") * (P("g") * (P("i") * (P("n") * (P(":") * (P("c") + P("N")))))))))))))))))))))))) + P("K") * (P("V") * (P("_") * (P("p") * (P("r") * (P("o") * (P("c") * (P("e") * (P("s") * (P("s") * (P("_") * (P("s") * (P("p") * (P("a") * (P("c") * (P("e") * (P("_") * (P("r") * (P("e") * (P("m") * (P("o") * (P("v") * (P("a") * (P("l") * (P("_") * (P("sanitize:NNn") + P("no_sanitize:NNn"))))))))))))))) + P("no_space_removal_no_sanitize:NNn")))))))))))) + P("m") * (P("s") * (P("g") * (P("_") * (P("i") * (P("n") * (P("t") * (P("e") * (P("r") * (P("r") * (P("u") * (P("p") * (P("t") * (P(":") * (P("xxx") + P("nn"))))))))))) + P("c") * (P("l") * (P("a") * (P("s") * (P("s") * (P("_") * (P("set:nn") + P("new:nn"))))))) + P("t") * (P("e") * (P("r") * (P("m") * (P(":") * (P("n") + P("x"))))) + P("wo_newlines:") + P("r") * (P("a") * (P("c") * (P("e") * (P(":") * (P("n") * (P("n") + P("n") * (P("x") + P("x") * (P("x") + P("x") * (P("x") + P("xx"))))))))))) + P("l") * (P("o") * (P("g") * (P(":") * (P("n") + P("x"))))) + P("d") * (P("i") * (P("r") * (P("e") * (P("c") * (P("t") * (P("_") * (P("interrupt:xxxxx") + P("log:xx") + P("term:xx")))))))) + P("g") * (P("e") * (P("n") * (P("e") * (P("r") * (P("i") * (P("c") * (P("_") * (P("s") * (P("e") * (P("t") * (P(":") * (P("n") * (P("n") + P("nn")))))) + P("n") * (P("e") * (P("w") * (P(":") * (P("n") * (P("n") + P("nn")))))))))))))) + P("newline:"))))) + P("l") * (P("_") * (P("iow_line_length_int") + P("last_box") + P("t") * (P("m") * (P("p") * (P("a_toks") + P("c") * (P("_") * (P("int") + P("toks"))) + P("b_toks"))) + P("l_replace_toks"))) + P("u") * (P("a") * (P("_") * (P("escape_x:n") + P("shipout_x:n") + P("now_x:n")) + P("t") * (P("e") * (P("x") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("n") * (P("g") * (P("i") * (P("n") * (P("e") * (P("_p:") + P(":") * (P("F") + P("T") + P("TF"))))))))))) + wildcard * P(":D")))))))) + P("q") * (P("u") * (P("a") * (P("r") * (P("k") * (P("_") * (P("i") * (P("f") * (P("_") * (P("r") * (P("e") * (P("c") * (P("u") * (P("r") * (P("s") * (P("i") * (P("o") * (P("n") * (P("_") * (P("t") * (P("a") * (P("i") * (P("l") * (P("_") * (P("b") * (P("r") * (P("e") * (P("a") * (P("k") * (P(":") * (P("N") + P("n"))))))))))))))))))))))))))))))) + P("p") * (P("e") * (P("e") * (P("k") * (P("_") * (P("after:NN") + P("gafter:NN"))))) + P("d") * (P("f") * (P("t") * (P("e") * (P("x") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("n") * (P("g") * (P("i") * (P("n") * (P("e") * (P("_p:") + P(":") * (P("F") + P("T") + P("TF"))))))))))) + wildcard * P(":D"))))))) + P("t") * (P("e") * (P("x") * (P("_") * (wildcard * P(":D"))))) + P("r") * (P("g") * (P("_") * (P("s") * (P("et_map_functions:Nn") + P("t") * (P("e") * (P("p") * (P("w") * (P("i") * (P("s") * (P("e") * (P("_") * (P("inline:nnnn") + P("variable:nnnNn") + P("function:nnnN")))))))))) + P("c") * 
(P("a") * (P("s") * (P("e") * (P("_") * (P("int:nnn") + P("dim:nnn") + P("s") * (P("t") * (P("r") * (P(":") * (P("xxn") + P("onn") + P("nnn"))))) + P("t") * (P("l") * (P(":") * (P("cnn") + P("Nnn"))))))))) + P("new_map_functions:Nn"))) + P("o") * (P("p") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("q") * (P("_") * (P("p") * (P(":") * (P("c") * (P("c") + P("N")) + P("N") * (P("c") + P("N"))))) + P(":") * (P("c") * (P("c") * (P("F") + P("T") + P("TF")) + P("N") * (P("F") + P("T") + P("TF"))) + P("N") * (P("c") * (P("F") + P("T") + P("TF")) + P("N") * (P("F") + P("T") + P("TF")))))) + P("i") * (P("n") * (P(":") * (P("c") * (P("c") * (P("F") + P("T") + P("TF"))))))))) + P("d") * (P("e") * (P("l") * (P(":") * (P("c") * (P("n") + P("V")) + P("N") * (P("n") + P("V"))))) + P("i") * (P("s") * (P("p") * (P("l") * (P("a") * (P("y") * (P(":") * (P("c") + P("N"))))))))) + P("g") * (P("e") * (P("t") * (P("_gdel:NnN") + P(":") * (P("cn") + P("Nn")))) + P("d") * (P("e") * (P("l") * (P(":") * (P("c") * (P("n") + P("V")) + P("N") * (P("n") + P("V")))))) + P("g") * (P("e") * (P("t") * (P(":") * (P("c") * (P("VN") + P("nN")) + P("N") * (P("VN") + P("nN")))))) + P("put:ccx"))))))) + P("s") * (P("k") * (P("i") * (P("p") * (P("_") * (P("i") * (P("f") * (P("_") * (P("i") * (P("n") * (P("f") * (P("i") * (P("n") * (P("i") * (P("t") * (P("e") * (P("_") * (P("g") * (P("l") * (P("u") * (P("e") * (P("_p:n") + P(":") * (P("n") * (P("F") + P("T") + P("TF"))))))))))))))))))))))) + P("e") * (P("q") * (P("_") * (P("u") * (P("s") * (P("e") * (P(":") * (P("c") + P("N"))))) + P("d") * (P("i") * (P("s") * (P("p") * (P("l") * (P("a") * (P("y") * (P(":") * (P("c") + P("N"))))))))) + P("t") * (P("o") * (P("p") * (P(":") * (P("cN") + P("NN"))))) + P("l") * (P("e") * (P("n") * (P("g") * (P("t") * (P("h") * (P(":") * (P("c") + P("N"))))))))))) + P("t") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P("_") * (P("x") * (P(":") * (P("n") * (P("n") + P("n") * (P("F") + P("n") + P("T") + P("TF")))))) + P(":") * (P("onn") + P("nnn")))))) + P("i") * (P("f") * (P("_") * (P("e") * (P("q") * (P("_") * (P("x") * (P("_p:nn") + P(":") * (P("n") * (P("n") * (P("F") + P("T") + P("TF"))))) + P("p:xx")) + P(":") * (P("x") * (P("x") * (P("F") + P("T") + P("TF")))))))))))) + P("o") * (P("r") * (P("t") * (P("_") * (P("ordered:") + P("reversed:"))))) + P("can_align_safe_stop:")) + P("u") * (P("p") * (P("t") * (P("e") * (P("x") * (P("_") * (wildcard * P(":D")))))) + P("s") * (P("e") * (P("_") * (P("i") * (P("_") * (P("a") * (P("f") * (P("t") * (P("e") * (P("r") * (P("_") * (P("else:nw") + P("o") * (P("r") * (P("else:nw") + P(":nw"))) + P("fi:nw")))))))))))) + P("t") * (P("e") * (P("x") * (P("_") * (wildcard * P(":D")))))) + P("t") * (P("o") * (P("k") * (P("e") * (P("n") * (P("_") * (P("i") * (P("f") * (P("_") * (P("m") * (P("a") * (P("t") * (P("h") * (P("_") * (P("s") * (P("h") * (P("i") * (P("f") * (P("t") * (P("_p:N") + P(":") * (P("N") * (P("F") + P("T") + P("TF"))))))))))))) + P("o") * (P("t") * (P("h") * (P("e") * (P("r") * (P("_") * (P("c") * (P("h") * (P("a") * (P("r") * (P("_p:N") + P(":") * (P("N") * (P("F") + P("T") + P("TF"))))))))))))) + P("a") * (P("c") * (P("t") * (P("i") * (P("v") * (P("e") * (P("_") * (P("c") * (P("h") * (P("a") * (P("r") * (P("_p:N") + P(":") * (P("N") * (P("F") + P("T") + P("TF"))))))))))))) + P("l") * (P("i") * (P("g") * (P("n") * (P("m") * (P("e") * (P("n") * (P("t") * (P("_") * (P("t") * (P("a") * (P("b") * (P("_p:N") + P(":") * (P("N") * (P("F") + P("T") + P("TF"))))))))))))))))))) + P("g") 
* (P("e") * (P("t") * (P("_") * (P("arg_spec:N") + P("prefix_spec:N") + P("replacement_spec:N"))))) + P("new:Nn")))) + P("s") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("m") * (P("p") * (P("t") * (P("y") * (P("_") * (P("p") * (P(":") * (P("c") + P("N")))) + P(":") * (P("c") * (P("F") + P("T") + P("TF")) + P("N") * (P("F") + P("T") + P("TF"))))))) + P("q") * (P("_") * (P("p") * (P(":") * (P("c") * (P("c") + P("N")) + P("N") * (P("c") + P("N"))))) + P(":") * (P("c") * (P("c") * (P("F") + P("T") + P("TF")) + P("N") * (P("F") + P("T") + P("TF"))) + P("N") * (P("c") * (P("F") + P("T") + P("TF")) + P("N") * (P("F") + P("T") + P("TF"))))))))) + P("p") * (P("u") * (P("t") * (P("_") * (P("r") * (P("i") * (P("g") * (P("h") * (P("t") * (P(":") * (P("c") * (P("n") + P("o") + P("V")) + P("N") * (P("f") + P("n") + P("o") + P("V") + P("x")))))))) + P("l") * (P("e") * (P("f") * (P("t") * (P(":") * (P("c") * (P("n") + P("o") + P("V")) + P("N") * (P("n") + P("o") + P("V") + P("x"))))))))))) + P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N"))))))) + P("u") * (P("s") * (P("e") * (P("_") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N"))))))) + P("g") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N"))))))))) + P(":") * (P("c") + P("N"))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("e") * (P("q") * (P(":") * (P("c") * (P("c") + P("N")) + P("N") * (P("c") + P("N")))))) + P(":") * (P("c") * (P("f") + P("n") + P("o") + P("V") + P("v") + P("x")) + P("N") * (P("f") + P("n") + P("o") + P("V") + P("v") + P("x"))))) + P("h") * (P("o") * (P("w") * (P(":") * (P("c") + P("N")))))) + P("g") * (P("p") * (P("u") * (P("t") * (P("_") * (P("r") * (P("i") * (P("g") * (P("h") * (P("t") * (P(":") * (P("c") * (P("n") + P("o") + P("V")) + P("N") * (P("n") + P("o") + P("V") + P("x")))))))) + P("l") * (P("e") * (P("f") * (P("t") * (P(":") * (P("c") * (P("n") + P("o") + P("V")) + P("N") * (P("n") + P("o") + P("V") + P("x"))))))))))) + P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N"))))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("e") * (P("q") * (P(":") * (P("c") * (P("c") + P("N")) + P("N") * (P("c") + P("N")))))) + P(":") * (P("c") * (P("n") + P("o") + P("V") + P("x")) + P("N") * (P("n") + P("o") + P("V") + P("x"))))))) + P("n") * (P("e") * (P("w") * (P(":") * (P("c") + P("N"))))))))) + P("l") * (P("_") * (P("i") * (P("f") * (P("_") * (P("e") * (P("m") * (P("p") * (P("t") * (P("y") * (P(":") * (P("x") * (P("F") + P("T") + P("TF")))))))) + P("h") * (P("e") * (P("a") * (P("d") * (P("_") * (P("s") * (P("p") * (P("a") * (P("c") * (P("e") * (P("_p:n") + P(":") * (P("n") * (P("F") + P("T") + P("TF")))))))) + P("g") * (P("r") * (P("o") * (P("u") * (P("p") * (P("_p:n") + P(":") * (P("n") * (P("F") + P("T") + P("TF")))))))) + P("N") * (P("_") * (P("t") * (P("y") * (P("p") * (P("e") * (P("_p:n") + P(":") * (P("n") * (P("F") + P("T") + P("TF"))))))))))))))))) + P("h") * (P("e") * (P("a") * (P("d") * (P("_") * (P("i") * (P("i") * (P("i") * (P(":") * (P("f") + P("n") + P("w")))) + P(":") * (P("n") + P("w")))))))) + P("l") * (P("e") * (P("n") * (P("g") * (P("t") * (P("h") * (P(":") * (P("c") + P("n") + P("N") + P("o") + P("V")))))))) + P("n") * (P("e") * (P("w") * (P(":") * (P("cn") + P("N") * (P("n") + P("x")))))) + P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("cnn") + P("Nnn")))))) + P("r") * (P("e") * (P("m") * (P("o") * (P("v") * (P("e") * (P("_") * (P("a") * (P("l") * (P("l") * (P("_") * (P("i") * (P("n") * 
(P(":") * (P("cn") + P("Nn")))))))) + P("i") * (P("n") * (P(":") * (P("cn") + P("Nn"))))))))) + P("p") * (P("l") * (P("a") * (P("c") * (P("e") * (P("_") * (P("a") * (P("l") * (P("l") * (P("_") * (P("i") * (P("n") * (P(":") * (P("cnn") + P("Nnn")))))))) + P("i") * (P("n") * (P(":") * (P("cnn") + P("Nnn")))))))))))) + P("e") * (P("l") * (P("t") * (P("_") * (P("c") * (P("o") * (P("u") * (P("n") * (P("t") * (P(":") * (P("c") + P("n") + P("N") + P("o") + P("V"))))))))))) + P("t") * (P("ail:w") + P("o") * (P("_") * (P("uppercase:n") + P("lowercase:n")))) + P("g") * (P("s") * (P("e") * (P("t") * (P("_") * (P("f") * (P("r") * (P("o") * (P("m") * (P("_") * (P("f") * (P("i") * (P("l") * (P("e") * (P("_") * (P("x") * (P(":") * (P("cnn") + P("Nnn")))) + P(":") * (P("cnn") + P("Nnn")))))))))))) + P(":Nc")))) + P("r") * (P("e") * (P("m") * (P("o") * (P("v") * (P("e") * (P("_") * (P("a") * (P("l") * (P("l") * (P("_") * (P("i") * (P("n") * (P(":") * (P("cn") + P("Nn")))))))) + P("i") * (P("n") * (P(":") * (P("cn") + P("Nn"))))))))) + P("p") * (P("l") * (P("a") * (P("c") * (P("e") * (P("_") * (P("a") * (P("l") * (P("l") * (P("_") * (P("i") * (P("n") * (P(":") * (P("cnn") + P("Nnn")))))))) + P("i") * (P("n") * (P(":") * (P("cnn") + P("Nnn"))))))))))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("f") * (P("r") * (P("o") * (P("m") * (P("_") * (P("f") * (P("i") * (P("l") * (P("e") * (P("_") * (P("x") * (P(":") * (P("cnn") + P("Nnn")))) + P(":") * (P("cnn") + P("Nnn")))))))))))) + P(":Nc"))) + P("h") * (P("o") * (P("w") * (P("_") * (P("a") * (P("n") * (P("a") * (P("l") * (P("y") * (P("s") * (P("i") * (P("s") * (P(":") * (P("N") + P("n")))))))))))))))))) + P("E") * (P("x") * (P("p") * (P("l") * (P("S") * (P("y") * (P("n") * (P("t") * (P("a") * (P("x") * (P("N") * (P("a") * (P("m") * (P("e") * (P("s") * (P("O") * (P("n") + P("ff"))))))))))))))))) + P("v") * (P("b") * (P("o") * (P("x") * (P("_") * (P("u") * (P("n") * (P("p") * (P("a") * (P("c") * (P("k") * (P("_") * (P("c") * (P("l") * (P("e") * (P("a") * (P("r") * (P(":") * (P("c") + P("N")))))))))))))) + P("s") * (P("e") * (P("t") * (P("_") * (P("i") * (P("n") * (P("l") * (P("i") * (P("n") * (P("e") * (P("_") * (P("end:") + P("b") * (P("e") * (P("g") * (P("i") * (P("n") * (P(":") * (P("c") + P("N")))))))))))))))))) + P("g") * (P("s") * (P("e") * (P("t") * (P("_") * (P("i") * (P("n") * (P("l") * (P("i") * (P("n") * (P("e") * (P("_") * (P("end:") + P("b") * (P("e") * (P("g") * (P("i") * (P("n") * (P(":") * (P("c") + P("N")))))))))))))))))))))))) * eof
+obsolete.deprecated_csname = P("i") * (P("o") * (P("w") * (P("_") * (P("s") * (P("h") * (P("i") * (P("p") * (P("o") * (P("u") * (P("t") * (P("_") * (P("x") * (P(":") * (P("c") * (P("n") + P("x")) + P("N") * (P("n") + P("x")))))))))))))))) + P("p") * (P("e") * (P("e") * (P("k") * (P("_") * (P("c") * (P("a") * (P("t") * (P("c") * (P("o") * (P("d") * (P("e") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N")))))))) + P("h") * (P("a") * (P("r") * (P("c") * (P("o") * (P("d") * (P("e") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N")))))))))) + P("m") * (P("e") * (P("a") * (P("n") * (P("i") * (P("n") * (P("g") * (P("_") * (P("ignore_spaces:N") + P("remove_ignore_spaces:N"))))))))))))) + P("d") * (P("f") * (P("_") * (P("o") * (P("b") * (P("j") * (P("e") * (P("c") * (P("t") * (P("_") * (P("w") * (P("r") * (P("i") * (P("t") * (P("e") * (P(":") * (P("n") * (P("n") + P("x")))))))) + P("new:nn"))))))))))) + P("r") * (P("o") * (P("p") * (P("_") * (P("g") * (P("p") * (P("u") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("n") * (P("e") * (P("w") * (P(":") * (P("c") * (P("Vn") + P("n") * (P("n") + P("V"))) + P("N") * (P("Vn") + P("n") * (P("n") + P("V"))))))))))))))) + P("p") * (P("u") * (P("t") * (P("_") * (P("i") * (P("f") * (P("_") * (P("n") * (P("e") * (P("w") * (P(":") * (P("c") * (P("Vn") + P("n") * (P("n") + P("V"))) + P("N") * (P("Vn") + P("n") * (P("n") + P("V"))))))))))))))))))) + P("k") * (P("e") * (P("y") * (P("s") * (P("_") * (P("s") * (P("e") * (P("t") * (P("_") * (P("f") * (P("i") * (P("l") * (P("t") * (P("e") * (P("r") * (P(":") * (P("n") * (P("n") * (P("n") + P("V") + P("v") + P("o") + P("V") * (P("N") + P("nN")) + P("v") * (P("N") + P("nN")) + P("o") * (P("N") + P("nN")) + P("n") * (P("N") + P("nN")))))))))))))))))))) + P("t") * (P("e") * (P("x") * (P("t") * (P("_") * (P("t") * (P("i") * (P("t") * (P("l") * (P("e") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("nn"))))))))))))))) + P("l") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("c") * (P("n") + P("n") * (P("F") + P("T") + P("TF"))) + P("N") * (P("n") + P("n") * (P("F") + P("T") + P("TF")))))))) + P("b") * (P("u") * (P("i") * (P("l") * (P("d") * (P("_") * (P("clear:N") + P("g") * (P("clear:N") + P("et:NN")))))))) + P("m") * (P("i") * (P("x") * (P("e") * (P("d") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("nn")))))))))))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("nn")))))))))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("nn"))))))))))))))) + P("m") * (P("s") * (P("g") * (P("_") * (P("g") * (P("s") * (P("e") * (P("t") * (P(":") * (P("n") * (P("n") * (P("n") + P("nn")))))))))))) + P("l") * (P("_") * (P("k") * (P("e") * (P("y") * (P("s") * (P("_") * (P("key_tl") + P("path_tl")))))) + P("t") * (P("e") * (P("x") * (P("t") * (P("_") * (P("accents_tl") + P("letterlike_tl")))))))) + P("s") * (P("e") * (P("q") * (P("_") * (P("i") * (P("n") * (P("d") * (P("e") * (P("x") * (P("e") * (P("d") * (P("_") * (P("m") * (P("a") * (P("p") * (P("_") * (P("inline:Nn") + P("function:NN"))))))))))))) + P("gset_map_x:NNn") + P("set_map_x:NNn")))) + P("t") * (P("r") * (P("_") * (P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("f") + P("n")))))))))))) + P("declare_eight_bit_encoding:nnn") + P("l") * (P("o") * (P("w") * (P("e") * 
(P("r") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("f") + P("n")))))))))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_") * (P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("V"))))))) + P("c") * (P("a") * (P("s") * (P("e") * (P(":") * (P("n") + P("V"))))))))))))) + P("ys_load_deprecation:")) + P("c") * (P("s_argument_spec:N") + P("h") * (P("a") * (P("r") * (P("_") * (P("s") * (P("t") * (P("r") * (P("_") * (P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("mixed_case:N") + P("titlecase:N") + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_case:N") + P("case:N"))))))))) + P("u") * (P("p") * (P("p") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("mixed_case:N") + P("t") * (P("itlecase:N") + P("o") * (P("_") * (P("utfviii_bytes:n") + P("nfd:N")))) + P("l") * (P("o") * (P("w") * (P("e") * (P("r") * (P("_case:N") + P("case:N")))))) + P("f") * (P("o") * (P("l") * (P("d") * (P("_case:N") + P("case:N")))))))))) * eof
+-- luacheck: pop
+
+return obsolete
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua b/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua
new file mode 100755
index 00000000000..3806238d70f
--- /dev/null
+++ b/Master/texmf-dist/scripts/expltools/explcheck-parsers.lua
@@ -0,0 +1,387 @@
+-- Common LPEG parsers used by different modules of the static analyzer explcheck.
+
+local lpeg = require("lpeg")
+local C, Cp, Cs, Ct, Cmt, P, R, S = lpeg.C, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.P, lpeg.R, lpeg.S
+
+-- Base parsers
+---- Generic
+local any = P(1)
+local eof = -any
+local fail = P(false)
+
+---- Tokens
+local ampersand = P("&")
+local backslash = P([[\]])
+local circumflex = P("^")
+local colon = P(":")
+local control_character = R("\x00\x1F") + P("\x7F")
+local dollar_sign = P("$")
+local form_feed = P("\x0C")
+local hash_sign = P("#")
+local lbrace = P("{")
+local letter = R("AZ", "az")
+local percent_sign = P("%")
+local rbrace = P("}")
+local tilde = P("~")
+local underscore = P("_")
+local decimal_digit = R("09")
+local lowercase_hexadecimal_digit = decimal_digit + R("af")
+local lower_half_ascii_character = R("\x00\x3F")
+local upper_half_ascii_character = R("\x40\x7F")
+
+---- Spacing
+local newline = (
+ P("\n")
+ + P("\r\n")
+ + P("\r")
+)
+local linechar = any - newline
+local space = S(" ")
+local tab = S("\t")
+
+-- Intermediate parsers
+---- Default expl3 category code table, corresponds to `\c_code_cctab` in expl3
+local expl3_endlinechar = ' ' -- luacheck: ignore expl3_endlinechar
+local expl3_catcodes = {
+ [0] = backslash, -- escape character
+ [1] = lbrace, -- begin grouping
+ [2] = rbrace, -- end grouping
+ [3] = dollar_sign, -- math shift
+ [4] = ampersand, -- alignment tab
+ [5] = newline, -- end of line
+ [6] = hash_sign, -- parameter
+ [7] = circumflex, -- superscript
+ [8] = fail, -- subscript
+ [9] = space + tab, -- ignored character
+ [10] = tilde, -- space
+ [11] = letter + colon + underscore, -- letter
+ [13] = form_feed, -- active character
+ [14] = percent_sign, -- comment character
+ [15] = control_character, -- invalid character
+}
+expl3_catcodes[12] = any -- other
+for catcode, parser in pairs(expl3_catcodes) do
+ if catcode ~= 12 then
+ expl3_catcodes[12] = expl3_catcodes[12] - parser
+ end
+end
+
+local determine_expl3_catcode = fail
+for catcode, parser in pairs(expl3_catcodes) do
+ determine_expl3_catcode = (
+ determine_expl3_catcode
+ + parser / function() return catcode end
+ )
+end
+
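An illustrative sketch (not part of the commit) of how the combined catcode parser can be queried once the module is loaded as explcheck-parsers; each branch captures the expl3 catcode of the matched character:

local lpeg = require("lpeg")
local parsers = require("explcheck-parsers")
print(lpeg.match(parsers.determine_expl3_catcode, "_"))  --> 11 (letter under expl3 catcodes)
print(lpeg.match(parsers.determine_expl3_catcode, "%"))  --> 14 (comment character)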
+---- Syntax recognized by TeX's input and token processors
+local optional_spaces = space^0
+local optional_spaces_and_newline = (
+ optional_spaces
+ * (
+ newline
+ * optional_spaces
+ )^-1
+)
+local blank_line = (
+ optional_spaces
+ * newline
+)
+local blank_or_empty_last_line = (
+ optional_spaces
+ * (
+ newline
+ + eof
+ )
+)
+local tex_line = (
+ (
+ (
+ linechar
+ - (space * #blank_or_empty_last_line)
+ )^1
+ * (
+ blank_or_empty_last_line / ""
+ )
+ )
+ + (
+ (
+ linechar
+ - (space * #blank_line)
+ )^0
+ * (
+ blank_line / ""
+ )
+ )
+)
+local tex_lines = Ct(
+ Ct(
+ Cp()
+ * Cs(tex_line)
+ * Cp()
+ )^0
+)
+
+local double_superscript_convention = (
+ Cmt(
+ C(expl3_catcodes[7]),
+ function(input, position, capture)
+ if input:sub(position, position) == capture then
+ return position + 1
+ else
+ return nil
+ end
+ end
+ )
+ * (
+ C(lowercase_hexadecimal_digit * lowercase_hexadecimal_digit)
+ / function(hexadecimal_digits)
+ return string.char(tonumber(hexadecimal_digits, 16)), 4
+ end
+ + C(lower_half_ascii_character)
+ / function(character)
+ return string.char(string.byte(character) + 64), 3
+ end
+ + C(upper_half_ascii_character)
+ / function(character)
+ return string.char(string.byte(character) - 64), 3
+ end
+ )
+)
+
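An illustrative sketch (not code from the commit) of the double-superscript notation this parser decodes: two lowercase hexadecimal digits give the character with that code, a single character is shifted by 64, and the second capture is the length of the notation in bytes.

local lpeg = require("lpeg")
local parsers = require("explcheck-parsers")
-- "^^41" decodes to "A" and the notation spans 4 bytes:
local character, width = lpeg.match(parsers.double_superscript_convention, "^^41")
assert(character == "A" and width == 4)
-- "^^M" decodes to the character with code 13 (carriage return) and spans 3 bytes:
character, width = lpeg.match(parsers.double_superscript_convention, "^^M")
assert(string.byte(character) == 13 and width == 3)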
+---- Arguments and argument specifiers
+local argument = (
+ expl3_catcodes[1]
+ * (any - expl3_catcodes[2])^0
+ * expl3_catcodes[2]
+)
+
+local weird_argument_specifier = S("wD")
+local argument_specifier = S("NncVvoxefTFp") + weird_argument_specifier
+local argument_specifiers = argument_specifier^0 * eof
+local weird_argument_specifiers = (
+ (
+ argument_specifier
+ - weird_argument_specifier
+ )^0
+ * weird_argument_specifier
+)
+
+---- Function, variable, and constant names
+local expl3_function_csname = (
+ (underscore * underscore)^-1 * letter^1 -- module
+ * underscore
+ * letter * (letter + underscore)^0 -- description
+ * colon
+ * argument_specifier^0 -- argspec
+ * (eof + -letter)
+)
+local expl3_function = expl3_catcodes[0] * expl3_function_csname
+
+local any_type = (
+ letter^1 -- type
+ * (eof + -letter)
+)
+local any_expl3_variable_or_constant = (
+ expl3_catcodes[0]
+ * S("cgl") -- scope
+ * underscore
+ * (
+ letter * (letter + underscore * -#any_type)^0 -- just description
+ + underscore^-1 * letter^1 -- module
+ * underscore
+ * letter * (letter + underscore * -#any_type)^0 -- description
+ )
+ * underscore
+ * any_type
+)
+
+local expl3like_material = (
+ expl3_function
+ + any_expl3_variable_or_constant
+)
+
+---- Comments
+local commented_line_letter = (
+ linechar
+ + newline
+ - expl3_catcodes[0]
+ - expl3_catcodes[14]
+)
+local commented_line = (
+ (
+ (
+ commented_line_letter
+ - newline
+ )^1 -- initial state
+ + (
+ expl3_catcodes[0] -- even backslash
+ * (
+ expl3_catcodes[0]
+ + #newline
+ )
+ )^1
+ + (
+ expl3_catcodes[0]
+ * (
+ expl3_catcodes[14]
+ + commented_line_letter
+ )
+ )
+ )^0
+ * (
+ #expl3_catcodes[14]
+ * Cp()
+ * (
+ (
+ expl3_catcodes[14] -- comment
+ * linechar^0
+ * Cp()
+ * newline
+ * #blank_line -- blank line
+ )
+ + expl3_catcodes[14] -- comment
+ * linechar^0
+ * Cp()
+ * newline
+ * optional_spaces -- leading spaces
+ )
+ + newline
+ )
+)
+
+---- Standard delimiters
+local provides = (
+ expl3_catcodes[0]
+ * P([[ProvidesExpl]])
+ * (
+ P("Package")
+ + P("Class")
+ + P("File")
+ )
+ * optional_spaces_and_newline
+ * argument
+ * optional_spaces_and_newline
+ * argument
+ * optional_spaces_and_newline
+ * argument
+ * optional_spaces_and_newline
+ * argument
+)
+local expl_syntax_on = expl3_catcodes[0] * P([[ExplSyntaxOn]])
+local expl_syntax_off = expl3_catcodes[0] * P([[ExplSyntaxOff]])
+
+---- Assigning functions
+local expl3_function_assignment_csname = (
+ P("cs_")
+ * (
+ (
+ P("new")
+ + P("g")^-1
+ * P("set")
+ )
+ * (
+ P("_eq")
+ + P("_protected")^-1
+ * P("_nopar")^-1
+ )
+ + P("generate_from_arg_count")
+ )
+ * P(":N")
+)
+
+---- Using variables/constants
+local expl3_variable_or_constant_type = (
+ P("bitset")
+ + S("hv")^-1 * P("box")
+ + P("bool")
+ + P("cctab")
+ + P("clist")
+ + P("coffin")
+ + P("dim")
+ + P("flag")
+ + P("fp") * P("array")^-1
+ + P("int") * P("array")^-1
+ + P("ior")
+ + P("iow")
+ + P("muskip")
+ + P("prop")
+ + P("regex")
+ + P("seq")
+ + P("skip")
+ + P("str")
+ + P("tl")
+)
+
+local expl3_variable_or_constant_use_csname = (
+ expl3_variable_or_constant_type
+ * P("_")
+ * (
+ P("const")
+ + P("new")
+ + P("g")^-1
+ * P("set")
+ * P("_eq")^-1
+ + P("use")
+ )
+ * P(":N")
+)
+
+local expl3_variable_or_constant_csname = (
+ S("cgl") -- scope
+ * underscore
+ * (
+ underscore^-1 * letter^1 -- module
+ * underscore
+ * letter * (letter + underscore * -#expl3_variable_or_constant_type)^0 -- description
+ )
+ * underscore
+ * expl3_variable_or_constant_type
+ * eof
+)
+local expl3_scratch_variable_csname = (
+ P("l")
+ * underscore
+ * P("tmp") * S("ab")
+ * underscore
+ * expl3_variable_or_constant_type
+ * eof
+)
+
+---- Defining quarks and scan marks
+local expl3_quark_or_scan_mark_definition_csname = (
+ (
+ P("quark")
+ + P("scan")
+ )
+ * P("_new:N")
+ * eof
+)
+local expl3_quark_or_scan_mark_csname = S("qs") * P("_")
+
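A small illustrative sketch (not part of the commit) of the intended split between these two parsers: the first matches the defining csnames exactly, while the second only checks the leading q_/s_ prefix of a defined quark or scan mark.

local lpeg = require("lpeg")
local parsers = require("explcheck-parsers")
assert(lpeg.match(parsers.expl3_quark_or_scan_mark_definition_csname, "quark_new:N"))
assert(lpeg.match(parsers.expl3_quark_or_scan_mark_definition_csname, "scan_new:N"))
assert(lpeg.match(parsers.expl3_quark_or_scan_mark_csname, "q_stop"))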
+return {
+ any = any,
+ argument_specifiers = argument_specifiers,
+ commented_line = commented_line,
+ decimal_digit = decimal_digit,
+ determine_expl3_catcode = determine_expl3_catcode,
+ double_superscript_convention = double_superscript_convention,
+ eof = eof,
+ fail = fail,
+ expl3like_material = expl3like_material,
+ expl3_endlinechar = expl3_endlinechar,
+ expl3_function_assignment_csname = expl3_function_assignment_csname,
+ expl3_function_csname = expl3_function_csname,
+ expl3_scratch_variable_csname = expl3_scratch_variable_csname,
+ expl3_variable_or_constant_csname = expl3_variable_or_constant_csname,
+ expl3_variable_or_constant_use_csname = expl3_variable_or_constant_use_csname,
+ expl3_quark_or_scan_mark_csname = expl3_quark_or_scan_mark_csname,
+ expl3_quark_or_scan_mark_definition_csname = expl3_quark_or_scan_mark_definition_csname,
+ expl_syntax_off = expl_syntax_off,
+ expl_syntax_on = expl_syntax_on,
+ linechar = linechar,
+ newline = newline,
+ provides = provides,
+ tex_lines = tex_lines,
+ weird_argument_specifiers = weird_argument_specifiers,
+}
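As a hedged sketch of how another module might drive the exported parsers (the csnames below are illustrative examples, not taken from the commit):

local lpeg = require("lpeg")
local parsers = require("explcheck-parsers")

-- A csname following the expl3 function naming convention:
assert(lpeg.match(parsers.expl3_function_csname, "module_foo:Nn"))
-- expl3-looking material including the leading backslash:
assert(lpeg.match(parsers.expl3like_material, [[\l_tmpa_tl]]))
-- Argument specifiers containing a "weird" specifier such as w or D:
assert(lpeg.match(parsers.weird_argument_specifiers, "nnw"))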
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-preprocessing-comments.lua b/Master/texmf-dist/scripts/expltools/explcheck-preprocessing-comments.lua
deleted file mode 100755
index 81ee0ee234c..00000000000
--- a/Master/texmf-dist/scripts/expltools/explcheck-preprocessing-comments.lua
+++ /dev/null
@@ -1,108 +0,0 @@
--- The TeX comment removal part for the preprocessing step of static analysis.
-
-local lpeg = require("lpeg")
-local P, S, Cp, Ct = lpeg.P, lpeg.S, lpeg.Cp, lpeg.Ct
-
--- Define base parsers.
----- Generic
-local any = P(1)
-
----- Tokens
-local percent_sign = P("%")
-local backslash = P([[\]])
-
----- Spacing
-local spacechar = S("\t ")
-local optional_spaces = spacechar^0
-local newline = (
- P("\n")
- + P("\r\n")
- + P("\r")
-)
-local linechar = any - newline
-local blank_line = optional_spaces * newline
-
--- Define intermediate parsers.
-local commented_line_letter = (
- linechar
- + newline
- - backslash
- - percent_sign
-)
-local commented_line = (
- (
- (
- commented_line_letter
- - newline
- )^1 -- initial state
- + (
- backslash -- even backslash
- * (
- backslash
- + #newline
- )
- )^1
- + (
- backslash
- * (
- percent_sign
- + commented_line_letter
- )
- )
- )^0
- * (
- #percent_sign
- * Cp()
- * (
- (
- percent_sign -- comment
- * linechar^0
- * Cp()
- * newline
- * #blank_line -- blank line
- )
- + percent_sign -- comment
- * linechar^0
- * Cp()
- * newline
- * optional_spaces -- leading spaces
- )
- + newline
- )
-)
-
--- Strip TeX comments from a text. Besides the transformed text, also return
--- a function that maps positions in the transformed text back to the original
--- text.
-local function strip_comments(text)
- local transformed_index = 0
- local numbers_of_bytes_removed = {}
- local transformed_text_table = {}
- for index, text_position in ipairs(lpeg.match(Ct(commented_line^1), text)) do
- local span_size = text_position - transformed_index - 1
- if span_size > 0 then
- if index % 2 == 1 then -- chunk of text
- table.insert(transformed_text_table, text:sub(transformed_index + 1, text_position - 1))
- else -- comment
- table.insert(numbers_of_bytes_removed, {transformed_index, span_size})
- end
- transformed_index = transformed_index + span_size
- end
- end
- table.insert(transformed_text_table, text:sub(transformed_index + 1, -1))
- local transformed_text = table.concat(transformed_text_table, "")
- local function map_back(index)
- for _, where_and_number_of_bytes_removed in ipairs(numbers_of_bytes_removed) do
- local where, number_of_bytes_removed = table.unpack(where_and_number_of_bytes_removed)
- if index > where then
- index = index + number_of_bytes_removed
- else
- break
- end
- end
- return index
- end
- return transformed_text, map_back
-end
-
-return strip_comments
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua b/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua
index fbc9eb814ad..f1929fb2e8a 100755
--- a/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua
+++ b/Master/texmf-dist/scripts/expltools/explcheck-preprocessing.lua
@@ -1,104 +1,49 @@
-- The preprocessing step of static analysis determines which parts of the input files contain expl3 code.
-local config = require("explcheck-config")
-local strip_comments = require("explcheck-preprocessing-comments")
+local parsers = require("explcheck-parsers")
+local utils = require("explcheck-utils")
local lpeg = require("lpeg")
-local Cp, P, R, S, V = lpeg.Cp, lpeg.P, lpeg.R, lpeg.S, lpeg.V
-
--- Define base parsers.
----- Generic
-local any = P(1)
-local eof = -any
-local fail = P(false)
-
----- Tokens
-local lbrace = P("{")
-local rbrace = P("}")
-local backslash = P([[\]])
-local letter = R("AZ","az")
-local underscore = P("_")
-local colon = P(":")
-
----- Spacing
-local newline = (
- P("\n")
- + P("\r\n")
- + P("\r")
-)
-local linechar = any - newline
-local spacechar = S("\t ")
-local optional_spaces = spacechar^0
-local optional_spaces_and_newline = (
- optional_spaces
- * (
- newline
- * optional_spaces
- )^-1
-)
-
--- Define intermediate parsers.
----- Parts of TeX syntax
-local argument = (
- lbrace
- * (any - rbrace)^0
- * rbrace
-)
-
-local expl3_function = (
- backslash
- * (underscore * underscore)^-1 * letter^1 -- module
- * underscore
- * letter^1 -- description
- * colon
- * S("NncVvoxefTFpwD")^1 -- argspec
- * (eof + -letter)
-)
-local expl3_variable_or_constant = (
- backslash
- * S("cgl") -- scope
- * underscore
- * (
- letter^1 -- just description
- + underscore^-1 * letter^1 -- module
- * underscore
- * letter^1 -- description
- )
- * underscore
- * letter^1 -- type
- * (eof + -letter)
-)
-local expl3like_material = (
- expl3_function
- + expl3_variable_or_constant
-)
-
----- Standard delimiters
-local provides = (
- P([[\ProvidesExpl]])
- * (
- P("Package")
- + P("Class")
- + P("File")
- )
- * optional_spaces_and_newline
- * argument
- * optional_spaces_and_newline
- * argument
- * optional_spaces_and_newline
- * argument
- * optional_spaces_and_newline
- * argument
-)
-local expl_syntax_on = P([[\ExplSyntaxOn]])
-local expl_syntax_off = P([[\ExplSyntaxOff]])
-
--- Get the value of an option or the default value if unspecified.
-local function get_option(options, key)
- if options == nil or options[key] == nil then
- return config[key]
+local Cp, Ct, P, V = lpeg.Cp, lpeg.Ct, lpeg.P, lpeg.V
+
+-- Strip TeX comments from a text. Besides the transformed text, also return
+-- a function that maps positions in the transformed text back to the original
+-- text.
+local function strip_comments(text)
+ local transformed_index = 0
+ local numbers_of_bytes_removed = {}
+ local transformed_text_table = {}
+ for index, text_position in ipairs(lpeg.match(Ct(parsers.commented_line^1), text)) do
+ local span_size = text_position - transformed_index - 1
+ if span_size > 0 then
+ if index % 2 == 1 then -- chunk of text
+ table.insert(transformed_text_table, text:sub(transformed_index + 1, text_position - 1))
+ else -- comment
+ table.insert(numbers_of_bytes_removed, {transformed_index, span_size})
+ end
+ transformed_index = transformed_index + span_size
+ end
+ end
+ table.insert(transformed_text_table, text:sub(transformed_index + 1, -1))
+ local transformed_text = table.concat(transformed_text_table, "")
+ local function map_back(index)
+ local mapped_index = index
+ for _, where_and_number_of_bytes_removed in ipairs(numbers_of_bytes_removed) do
+ local where, number_of_bytes_removed = table.unpack(where_and_number_of_bytes_removed)
+ if mapped_index > where then
+ mapped_index = mapped_index + number_of_bytes_removed
+ else
+ break
+ end
+ end
+ assert(mapped_index > 0)
+ assert(mapped_index <= #text + 1)
+ if mapped_index <= #text then
+      assert(transformed_text:sub(index, index) == text:sub(mapped_index, mapped_index))
+ end
+ return mapped_index
end
- return options[key]
+ return transformed_text, map_back
end
-- Preprocess the content and register any issues.
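A hypothetical illustration of the mapping contract (the byte counts are invented, not data from the commit): positions at or before a removed comment map to themselves, while later positions are shifted past the removed bytes.

local transformed_text, map_back = strip_comments(text)
-- Suppose a single 10-byte comment starting at byte 21 of the original text was removed:
assert(map_back(20) == 20)  -- positions before the removed span stay put
assert(map_back(21) == 31)  -- positions after it are shifted by 10 bytes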
@@ -120,10 +65,10 @@ local function preprocessing(issues, content, options)
* (
(
(
- Cp() * linechar^(get_option(options, 'max_line_length') + 1) * Cp() / line_too_long
- + linechar^0
+ Cp() * parsers.linechar^(utils.get_option(options, 'max_line_length') + 1) * Cp() / line_too_long
+ + parsers.linechar^0
)
- * newline
+ * parsers.newline
* Cp()
) / record_line
)^0
@@ -138,7 +83,7 @@ local function preprocessing(issues, content, options)
local function capture_range(range_start, range_end)
range_start, range_end = map_back(range_start), map_back(range_end)
- table.insert(expl_ranges, {range_start, range_end + 1})
+ table.insert(expl_ranges, {range_start, range_end})
end
local function unexpected_pattern(pattern, code, message, test)
@@ -152,7 +97,7 @@ local function preprocessing(issues, content, options)
local num_provides = 0
local Opener = unexpected_pattern(
- provides,
+ parsers.provides,
"e104",
[[multiple delimiters `\ProvidesExpl*` in a single file]],
function()
@@ -160,14 +105,14 @@ local function preprocessing(issues, content, options)
return num_provides > 1
end
)
- local Closer = fail
- if not get_option(options, 'expect_expl3_everywhere') then
+ local Closer = parsers.fail
+ if not utils.get_option(options, 'expect_expl3_everywhere') then
Opener = (
- expl_syntax_on
+ parsers.expl_syntax_on
+ Opener
)
Closer = (
- expl_syntax_off
+ parsers.expl_syntax_off
+ Closer
)
end
@@ -189,11 +134,11 @@ local function preprocessing(issues, content, options)
"unexpected delimiters"
)
+ unexpected_pattern(
- expl3like_material,
+ parsers.expl3like_material,
"e102",
"expl3 material in non-expl3 parts"
)
- + (any - V"Opener")
+ + (parsers.any - V"Opener")
)^0
),
ExplPart = (
@@ -205,10 +150,10 @@ local function preprocessing(issues, content, options)
"w101",
"unexpected delimiters"
)
- + (any - V"Closer")
+ + (parsers.any - V"Closer")
)^0
* Cp()
- * (V"Closer" + eof)
+ * (V"Closer" + parsers.eof)
),
Opener = Opener,
Closer = Closer,
@@ -217,10 +162,10 @@ local function preprocessing(issues, content, options)
-- If no parts were detected, assume that the whole input file is in expl3.
if(#expl_ranges == 0 and #content > 0) then
- table.insert(expl_ranges, {0, #content})
- if not get_option(options, 'expect_expl3_everywhere') then
+ table.insert(expl_ranges, {1, #content + 1})
+ issues:ignore('e102')
+ if not utils.get_option(options, 'expect_expl3_everywhere') then
issues:add('w100', 'no standard delimiters')
- issues:ignore('e102')
end
end
return line_starting_byte_numbers, expl_ranges
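Under the new convention the recorded ranges appear to be one-based with an exclusive end byte, as the whole-file default {1, #content + 1} suggests; a caller could then slice an expl3 part out of the content roughly as follows (a sketch, not code from the commit):

local line_starting_byte_numbers, expl_ranges = preprocessing(issues, content, options)
for _, range in ipairs(expl_ranges) do
  local expl_part = content:sub(range[1], range[2] - 1)  -- the bytes of one expl3 part
end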
diff --git a/Master/texmf-dist/scripts/expltools/explcheck-utils.lua b/Master/texmf-dist/scripts/expltools/explcheck-utils.lua
new file mode 100755
index 00000000000..136e8834be1
--- /dev/null
+++ b/Master/texmf-dist/scripts/expltools/explcheck-utils.lua
@@ -0,0 +1,15 @@
+-- Common functions used by different modules of the static analyzer explcheck.
+
+local config = require("explcheck-config")
+
+-- Get the value of an option or the default value if unspecified.
+local function get_option(options, key)
+ if options == nil or options[key] == nil then
+ return config[key]
+ end
+ return options[key]
+end
+
+return {
+ get_option = get_option,
+}
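A brief sketch of how other steps consume this helper (the override value below is hypothetical): with no options table, the default from explcheck-config is returned; a caller-supplied table overrides the default for that key only.

local utils = require("explcheck-utils")

local default_length = utils.get_option(nil, "max_line_length")  -- default from explcheck-config
assert(utils.get_option({max_line_length = 100}, "max_line_length") == 100)  -- caller override wins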