Diffstat (limited to 'Master/texmf-dist/tex')
-rw-r--r--   Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua      20
-rw-r--r--   Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua       67
-rw-r--r--   Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua  168
-rw-r--r--   Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua  269
-rw-r--r--   Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua      71
5 files changed, 595 insertions, 0 deletions
diff --git a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua
new file mode 100644
index 00000000000..34581f1a741
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-algos.lua
@@ -0,0 +1,20 @@
+-- lua-uni-algos.lua
+-- Copyright 2020 Marcel Krüger
+--
+-- This work may be distributed and/or modified under the
+-- conditions of the LaTeX Project Public License, either version 1.3
+-- of this license or (at your option) any later version.
+-- The latest version of this license is in
+-- http://www.latex-project.org/lppl.txt
+-- and version 1.3 or later is part of all distributions of LaTeX
+-- version 2005/12/01 or later.
+--
+-- This work has the LPPL maintenance status `maintained'.
+--
+-- The Current Maintainer of this work is Marcel Krüger
+
+return {
+ case = require'lua-uni-case',
+ graphemes = require'lua-uni-graphemes',
+ normalize = require'lua-uni-normalize',
+}
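+-- Usage sketch (illustrative only; the sample strings are not part of the module):
+--   local uni = require'lua-uni-algos'
+--   print(uni.case.casefold('Straße', true))   --> strasse  (full case folding)
+--   print(uni.normalize.NFC('e\204\129'))      --> é        (U+0065 U+0301 composed to U+00E9)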
diff --git a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua
new file mode 100644
index 00000000000..90142586964
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-case.lua
@@ -0,0 +1,67 @@
+-- lua-uni-case.lua
+-- Copyright 2020 Marcel Krüger
+--
+-- This work may be distributed and/or modified under the
+-- conditions of the LaTeX Project Public License, either version 1.3
+-- of this license or (at your option) any later version.
+-- The latest version of this license is in
+-- http://www.latex-project.org/lppl.txt
+-- and version 1.3 or later is part of all distributions of LaTeX
+-- version 2005/12/01 or later.
+--
+-- This work has the LPPL maintenance status `maintained'.
+--
+-- The Current Maintainer of this work is Marcel Krüger
+
+local unpack = table.unpack
+local move = table.move
+local codes = utf8.codes
+local utf8char = utf8.char
+
+local empty = {}
+local result = {}
+
+local casefold, casefold_lookup do
+ local p = require'lua-uni-parse'
+ local l = lpeg or require'lpeg'
+
+ local data = p.parse_file('CaseFolding', l.Cf(
+ l.Ct(l.Cg(l.Ct'', 'C') * l.Cg(l.Ct'', 'F') * l.Cg(l.Ct'', 'S') * l.Cg(l.Ct'', 'T'))
+ * (l.Cg(p.fields(p.codepoint, l.C(1), l.Ct(p.codepoint * (' ' * p.codepoint)^0), true)) + p.eol)^0
+ * -1
+ , function(t, base, class, mapping)
+ t[class][base] = mapping
+ return t
+ end))
+ local C, F, S, T = data.C, data.F, data.S, data.T
+ data = nil
+
+ function casefold_lookup(c, full, special)
+ return (special and T[c]) or C[c] or (full and F or S)[c]
+ end
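+  -- Fold a whole UTF-8 string: the Turkic (T) mappings are consulted first when
+  -- "special" is set, then the common (C) mappings, then the full (F) or
+  -- simple (S) foldings depending on "full".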
+ function casefold(s, full, special)
+ local first = special and T or empty
+ local second = C
+ local third = full and F or S
+ local result = result
+ for i = #result, 1, -1 do result[i] = nil end
+ local i = 1
+ for _, c in codes(s) do
+ local datum = first[c] or second[c] or third[c]
+ if datum then
+ local l = #datum
+ move(datum, 1, l, i, result)
+ i = i + l
+ else
+ result[i] = c
+ i = i + 1
+ end
+ end
+ return utf8char(unpack(result))
+ end
+end
+
+return {
+ casefold = casefold,
+ casefold_lookup = casefold_lookup,
+}
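+-- Usage sketch (illustrative; the sample words and expected results follow CaseFolding.txt):
+--   local case = require'lua-uni-case'
+--   case.casefold('HEẞE', true)            --> 'hesse'    (full folding: U+1E9E -> "ss")
+--   case.casefold('İstanbul', true, true)  --> 'istanbul' (Turkic mapping: U+0130 -> "i")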
diff --git a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua
new file mode 100644
index 00000000000..5c7efc28c17
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-graphemes.lua
@@ -0,0 +1,168 @@
+-- lua-uni-graphemes.lua
+-- Copyright 2020 Marcel Krüger
+--
+-- This work may be distributed and/or modified under the
+-- conditions of the LaTeX Project Public License, either version 1.3
+-- of this license or (at your option) any later version.
+-- The latest version of this license is in
+-- http://www.latex-project.org/lppl.txt
+-- and version 1.3 or later is part of all distributions of LaTeX
+-- version 2005/12/01 or later.
+--
+-- This work has the LPPL maintenance status `maintained'.
+--
+-- The Current Maintainer of this work is Marcel Krüger
+
+local property do
+ local p = require'lua-uni-parse'
+ local l = lpeg or require'lpeg'
+
+ property = p.parse_file('emoji-data',
+ l.Cg(p.fields(p.codepoint_range, l.C'Extended_Pictographic')) + p.ignore_line,
+ p.multiset)
+
+ property = p.parse_file('GraphemeBreakProperty', l.Cf(
+ l.Carg(1)
+ * (l.Cg(p.fields(p.codepoint_range, l.C(l.R('az', 'AZ', '__')^1))) + p.ignore_line)^0
+ * -1, p.multiset),
+ nil,
+ property)
+ if not property then
+ error[[Break Property matching failed]]
+ end
+end
+
+local controls = { CR = true, LF = true, Control = true, }
+local precore_lookup = {
+ Prepend = "PRECORE",
+ L = "L",
+ V = "V",
+ LV = "V",
+ LVT = "T",
+ T = "T",
+ Regional_Indicator = "RI",
+ Extended_Pictographic = "POST_PICTO",
+}
+local l_lookup = {
+ L = "L",
+ V = "V",
+ LV = "V",
+ LVT = "T",
+}
+local postcore_map = { Extend = true, ZWJ = true, SpacingMark = true, }
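+-- Grapheme cluster state machine (UAX #29): each state maps the break property of
+-- the next codepoint to the following state; a second return value of true marks
+-- that codepoint as the start of a new cluster.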
+local state_map
+state_map = {
+ START = function(prop)
+ if prop == 'CR' then
+ return 'CR', true
+ end
+ if prop == 'LF' or prop == 'Control' then
+ return 'START', true
+ end
+ return state_map.PRECORE(prop), true
+ end,
+ PRECORE = function(prop)
+ if controls[prop] then
+ return state_map.START(prop)
+ end
+ return precore_lookup[prop] or 'POSTCORE'
+ end,
+ POSTCORE = function(prop)
+ if postcore_map[prop] then
+ return 'POSTCORE'
+ end
+ return state_map.START(prop)
+ end,
+ RI = function(prop)
+ if prop == 'Regional_Indicator' then
+ return 'POSTCORE'
+ end
+ return state_map.POSTCORE(prop)
+ end,
+ PRE_PICTO = function(prop)
+ if prop == "Extended_Pictographic" then
+ return "POST_PICTO"
+ end
+ return state_map.POSTCORE(prop)
+ end,
+ POST_PICTO = function(prop)
+ if prop == "Extend" then
+ return "POST_PICTO"
+ end
+ if prop == "ZWJ" then
+ return "PRE_PICTO"
+ end
+ return state_map.POSTCORE(prop)
+ end,
+ L = function(prop)
+ local nextstate = l_lookup[prop]
+ if nextstate then
+ return nextstate
+ end
+ return state_map.POSTCORE(prop)
+ end,
+ V = function(prop)
+ if prop == 'V' then
+ return 'V'
+ end
+ return state_map.T(prop)
+ end,
+ T = function(prop)
+ if prop == 'T' then
+ return 'T'
+ end
+ return state_map.POSTCORE(prop)
+ end,
+ CR = function(prop)
+ if prop == 'LF' then
+ return 'START'
+ else
+ return state_map.START(prop)
+ end
+ end,
+}
+
+-- The value of "state" is considered internal and should not be relied upon.
+-- Pass it back into the function unchanged, or pass nil; nil should only be
+-- passed when the codepoint being passed in starts a new cluster.
+local function read_codepoint(cp, state)
+ local new_cluster
+ state, new_cluster = state_map[state or 'START'](property[cp])
+ return new_cluster, state
+end
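+-- Usage sketch (illustrative): thread the state through successive codepoints.
+--   local new_cluster, state
+--   for _, cp in utf8.codes(s) do
+--     new_cluster, state = read_codepoint(cp, state)
+--     -- new_cluster is true iff cp starts a new grapheme cluster
+--   end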
+
+-- A Lua iterator over a string which reports only the beginning of every grapheme cluster
+local function graphemes_start(str)
+ local nextcode, str, i = utf8.codes(str)
+ local state = "START"
+ return function()
+ local new_cluster, code
+ repeat
+ i, code = nextcode(str, i)
+ if not i then return end
+ new_cluster, state = read_codepoint(code, state)
+ until new_cluster
+ return i, code
+ end
+end
+-- A more useful iterator: returns the byte range of the grapheme cluster (last byte first, then first byte) followed by the cluster itself as a string
+local function graphemes(str)
+ local iter = graphemes_start(str)
+ return function(_, cur)
+ if cur == #str then return end
+ local new = iter()
+ if not new then return #str, cur + 1, str:sub(cur + 1) end
+ return new - 1, cur + 1, str:sub(cur + 1, new - 1)
+ end, nil, iter() - 1
+end
+return {
+ read_codepoint = read_codepoint,
+ graphemes_start = graphemes_start,
+ graphemes = graphemes,
+}
+--[[
+for i, c in graphemes_start'äbcdef' do
+ print(i, utf8.char(c))
+end
+for i, j, s in graphemes'Z͑ͫ̓ͪ̂ͫ̽͏̴̙̤̞͉͚̯̞̠͍A̴̵̜̰͔ͫ͗͢L̠ͨͧͩ͘G̴̻͈͍͔̹̑͗̎̅͛́Ǫ̵̹̻̝̳͂̌̌͘!͖̬̰̙̗̿̋ͥͥ̂ͣ̐́́͜͞' do
+ print(j, i, s)
+end
+]]
diff --git a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
new file mode 100644
index 00000000000..dc1356568cb
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
@@ -0,0 +1,269 @@
+-- lua-uni-normalize.lua
+-- Copyright 2020 Marcel Krüger
+--
+-- This work may be distributed and/or modified under the
+-- conditions of the LaTeX Project Public License, either version 1.3
+-- of this license or (at your option) any later version.
+-- The latest version of this license is in
+-- http://www.latex-project.org/lppl.txt
+-- and version 1.3 or later is part of all distributions of LaTeX
+-- version 2005/12/01 or later.
+--
+-- This work has the LPPL maintenance status `maintained'.
+--
+-- The Current Maintainer of this work is Marcel Krüger
+
+-- Provide all four kinds of Unicode normalization
+
+local newtable = lua.newtable
+local move = table.move
+local char = utf8.char
+local codes = utf8.codes
+local unpack = table.unpack
+
+kpse.set_program_name'kpsewhich'
+local ccc, composition_mapping, decomposition_mapping, compatibility_mapping do
+ local function doubleset(ts, key, v1, kind, v2)
+ ts[1][key] = v1
+ ts[3][key] = v2
+ if not kind then
+ ts[2][key] = v2
+ end
+ return ts
+ end
+ local p = require'lua-uni-parse'
+ local l = lpeg
+ local Cnil = l.Cc(nil)
+ local letter = lpeg.R('AZ', 'az')
+ ccc, decomposition_mapping, compatibility_mapping
+ = unpack(p.parse_file('UnicodeData', l.Cf(
+ l.Ct(l.Ct'' * l.Ct'' * l.Ct'') * (
+ l.Cg(p.fields(p.codepoint,
+ p.ignore_field,
+ p.ignore_field,
+ '0' * Cnil + p.number,
+ p.ignore_field,
+ ('<' * l.C(letter^1) * '> ' + Cnil)
+ * l.Ct(p.codepoint * (' ' * p.codepoint)^0)^-1,
+ p.ignore_line)) + p.eol
+ )^0 * -1, doubleset)))
+
+ composition_mapping = {}
+ local composition_exclusions = { [0x00958] = true, [0x00959] = true,
+ [0x0095A] = true, [0x0095B] = true, [0x0095C] = true, [0x0095D] = true,
+ [0x0095E] = true, [0x0095F] = true, [0x009DC] = true, [0x009DD] = true,
+ [0x009DF] = true, [0x00A33] = true, [0x00A36] = true, [0x00A59] = true,
+ [0x00A5A] = true, [0x00A5B] = true, [0x00A5E] = true, [0x00B5C] = true,
+ [0x00B5D] = true, [0x00F43] = true, [0x00F4D] = true, [0x00F52] = true,
+ [0x00F57] = true, [0x00F5C] = true, [0x00F69] = true, [0x00F76] = true,
+ [0x00F78] = true, [0x00F93] = true, [0x00F9D] = true, [0x00FA2] = true,
+ [0x00FA7] = true, [0x00FAC] = true, [0x00FB9] = true, [0x0FB1D] = true,
+ [0x0FB1F] = true, [0x0FB2A] = true, [0x0FB2B] = true, [0x0FB2C] = true,
+ [0x0FB2D] = true, [0x0FB2E] = true, [0x0FB2F] = true, [0x0FB30] = true,
+ [0x0FB31] = true, [0x0FB32] = true, [0x0FB33] = true, [0x0FB34] = true,
+ [0x0FB35] = true, [0x0FB36] = true, [0x0FB38] = true, [0x0FB39] = true,
+ [0x0FB3A] = true, [0x0FB3B] = true, [0x0FB3C] = true, [0x0FB3E] = true,
+ [0x0FB40] = true, [0x0FB41] = true, [0x0FB43] = true, [0x0FB44] = true,
+ [0x0FB46] = true, [0x0FB47] = true, [0x0FB48] = true, [0x0FB49] = true,
+ [0x0FB4A] = true, [0x0FB4B] = true, [0x0FB4C] = true, [0x0FB4D] = true,
+ [0x0FB4E] = true,
+ [0x02ADC] = true, [0x1D15E] = true, [0x1D15F] = true, [0x1D160] = true,
+ [0x1D161] = true, [0x1D162] = true, [0x1D163] = true, [0x1D164] = true,
+ [0x1D1BB] = true, [0x1D1BC] = true, [0x1D1BD] = true, [0x1D1BE] = true,
+ [0x1D1BF] = true, [0x1D1C0] = true,
+ }
+
+ for cp, decomp in next, decomposition_mapping do
+ if #decomp > 1 and not (composition_exclusions[cp] or ccc[decomp[1]]) then
+ local mapping = composition_mapping[decomp[1]]
+ if not mapping then
+ mapping = {}
+ composition_mapping[decomp[1]] = mapping
+ end
+ mapping[decomp[2]] = cp
+ end
+ end
+
+ local function fixup_decomp(decomp)
+ local first = decomp[1]
+ local first_decomp = decomposition_mapping[first]
+ if not first_decomp then return false end
+ if fixup_decomp(first_decomp) then
+ print('nested', first)
+ end
+ move(decomp, 2, #decomp, #first_decomp + 1)
+ move(first_decomp, 1, #first_decomp, 1, decomp)
+ return true
+ end
+ -- Fixup stage
+ for cp, decomp in next, decomposition_mapping do
+ if fixup_decomp(decomp) then
+ -- print(':(', cp)
+ end
+ end
+
+ -- NFKD edition
+ local DEBUG = false
+ local function fixup_decomp(orig, decomp)
+ local work
+ local shared = decomposition_mapping[orig] == decomp
+ local j = 0
+ for i = 1, #decomp do
+ local cp = decomp[i]
+ local cp_decomp = compatibility_mapping[cp]
+ if cp_decomp then
+ if shared then
+ local old = decomp
+ decomp = {}
+ compatibility_mapping[orig] = decomp
+ move(old, 1, #old, 1, decomp)
+ end
+ decomp[i] = cp_decomp
+ j = j + #cp_decomp
+ work = true
+ else
+ j = j + 1
+ end
+ end
+ if not work then return decomp end
+ for i = #decomp, 1, -1 do
+ local v = decomp[i]
+ if type(v) == 'number' then
+ decomp[j] = v
+ j = j - 1
+ else
+ local count = #v
+ move(v, 1, count, j - count + 1, decomp)
+ j = j - count
+ end
+ end
+ assert(j == 0)
+ return decomp
+ end
+ -- Fixup stage
+ for cp, decomp in next, compatibility_mapping do
+ fixup_decomp(cp, decomp)
+ end
+end
+
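+-- Canonical Ordering Algorithm: stable-sort each run of combining marks
+-- (codepoints with a non-zero Canonical_Combining_Class) into non-decreasing
+-- ccc order, leaving starters in place.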
+local function ccc_reorder(codepoints, i, j, k)
+ if k >= j then return end
+ local first = codepoints[k]
+ local first_ccc = ccc[first]
+ if not first_ccc then
+ return ccc_reorder(codepoints, k+1, j, k+1)
+ end
+ local new_pos = k
+ local cur_ccc
+ repeat
+ new_pos = new_pos + 1
+ if new_pos > j then break end
+ local cur = codepoints[new_pos]
+ cur_ccc = ccc[cur]
+ until (not cur_ccc) or (cur_ccc >= first_ccc)
+ new_pos = new_pos - 1
+ if new_pos == k then
+ return ccc_reorder(codepoints, i, j, k+1)
+ end
+ move(codepoints, k+1, new_pos, k)
+ codepoints[new_pos] = first
+ return ccc_reorder(codepoints, i, j, k == i and i or k-1)
+end
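+-- Decompose a UTF-8 string into a table of codepoints using the given mapping,
+-- with algorithmic decomposition of Hangul syllables U+AC00..U+D7A3 into their
+-- L, V and optional T jamo, then apply canonical reordering.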
+local function to_nfd_table(s, decomposition_mapping)
+ local new_codepoints = newtable(#s, 0)
+ local j = 1
+ for _, c in codes(s) do
+ local decomposed = decomposition_mapping[c]
+ if decomposed then
+ move(decomposed, 1, #decomposed, j, new_codepoints)
+ j = j + #decomposed
+ elseif c >= 0xAC00 and c <= 0xD7A3 then
+ c = c - 0xAC00
+ local tIndex = c % 28
+ c = c // 28
+ local vIndex = c % 21
+ local lIndex = c // 21
+ new_codepoints[j] = 0x1100 + lIndex
+ new_codepoints[j+1] = 0x1161 + vIndex
+ if tIndex == 0 then
+ j = j + 2
+ else
+ new_codepoints[j+2] = 0x11A7 + tIndex
+ j = j + 3
+ end
+ else
+ new_codepoints[j] = c
+ j = j + 1
+ end
+ end
+ ccc_reorder(new_codepoints, 1, #new_codepoints, 1)
+ return new_codepoints
+end
+local function to_nfd(s)
+ return char(unpack(to_nfd_table(s, decomposition_mapping)))
+end
+local function to_nfkd(s)
+ return char(unpack(to_nfd_table(s, compatibility_mapping)))
+end
+local function to_nfc_generic(s, decomposition_mapping)
+ local codepoints = to_nfd_table(s, decomposition_mapping)
+ local starter, lookup, last_ccc, lvt
+ local j = 1
+ for i, c in ipairs(codepoints) do
+ local cur_ccc = ccc[c]
+ if lookup then
+ if (cur_ccc == nil) == (cur_ccc == last_ccc) then -- unblocked
+ local composed = lookup[c]
+ if composed then
+ codepoints[starter] = composed
+ lookup = composition_mapping[composed]
+ goto CONTINUE
+ end
+ end
+ elseif lvt then
+ if lvt == 1 then
+ if c >= 0x1161 and c <= 0x11A7 then
+ lvt = 2
+ codepoints[starter] = ((codepoints[starter] - 0x1100) * 21 + c - 0x1161) * 28 + 0xAC00
+ goto CONTINUE
+ end
+ else -- if lvt == 2 then
+ if c >= 0x11A8 and c <= 0x11C2 then
+ lvt = nil
+ codepoints[starter] = codepoints[starter] + c - 0x11A7
+ goto CONTINUE
+ end
+ end
+ end
+ codepoints[j] = c
+ lvt = nil
+ if not cur_ccc then
+ starter = j
+ lookup = composition_mapping[c]
+ if not lookup and c >= 0x1100 and c <= 0x1112 then
+ lvt = 1
+ end
+ end
+ j = j + 1
+ last_ccc = cur_ccc
+ ::CONTINUE::
+ end
+ for i = j,#codepoints do codepoints[i] = nil end
+ return char(unpack(codepoints))
+end
+local function to_nfc(s)
+ return to_nfc_generic(s, decomposition_mapping)
+end
+local function to_nfkc(s)
+ return to_nfc_generic(s, compatibility_mapping)
+end
+
+return {
+ NFD = to_nfd,
+ NFC = to_nfc,
+ NFKD = to_nfkd,
+ NFKC = to_nfkc,
+}
+-- print(require'inspect'{to_nfd{0x1E0A}, to_nfc{0x1E0A}})
+
+-- print(require'inspect'{to_nfd{0x1100, 0x1100, 0x1161, 0x11A8}, to_nfc{0x1100, 0x1100, 0x1161, 0x11A8}})
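+-- Usage sketch (illustrative; expected results follow the Unicode normalization forms):
+--   local normalize = require'lua-uni-normalize'
+--   normalize.NFC('e\204\129')       --> '\195\169'  (e + COMBINING ACUTE -> é)
+--   normalize.NFKC('\239\172\129')   --> 'fi'        (U+FB01 LATIN SMALL LIGATURE FI)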
diff --git a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua
new file mode 100644
index 00000000000..a264a2573e3
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-parse.lua
@@ -0,0 +1,71 @@
+-- lua-uni-parse.lua
+-- Copyright 2020 Marcel Krüger
+--
+-- This work may be distributed and/or modified under the
+-- conditions of the LaTeX Project Public License, either version 1.3
+-- of this license or (at your option) any later version.
+-- The latest version of this license is in
+-- http://www.latex-project.org/lppl.txt
+-- and version 1.3 or later is part of all distributions of LaTeX
+-- version 2005/12/01 or later.
+--
+-- This work has the LPPL maintenance status `maintained'.
+--
+-- The Current Maintainer of this work is Marcel Krüger
+
+-- Just a simple helper module to make UCD parsing more readable
+
+local lpeg = lpeg or require'lpeg'
+local R = lpeg.R
+local tonumber = tonumber
+
+local codepoint = lpeg.R('09', 'AF')^4 / function(c) return tonumber(c, 16) end
+local sep = lpeg.P' '^0 * ';' * lpeg.P' '^0
+local codepoint_range = codepoint * ('..' * codepoint + lpeg.Cc(false))
+local ignore_line = (1-lpeg.P'\n')^0 * '\n'
+local eol = lpeg.S' \t'^0 * ('#' * ignore_line + '\n')
+local ignored = (1-lpeg.S';#\n')^0
+local number = lpeg.R'09'^1 / tonumber
+
+local function fields(first, ...)
+ if first == ignore_line then
+ assert(select('#', ...) == 0)
+ return ignore_line
+ end
+ local tail = select('#', ...) == 0 and eol or sep * fields(...)
+ return first * tail
+end
+
+local function multiset(table, key1, key2, value)
+ for key = key1,(key2 or key1) do
+ table[key] = value
+ end
+ return table
+end
+
+local function parse_uni_file(filename, patt, func, ...)
+ if func then
+ return parse_uni_file(filename, lpeg.Cf(lpeg.Ct'' * patt^0 * -1, func), nil, ...)
+ end
+ local resolved = kpse.find_file(filename .. '.txt')
+ if not resolved then
+ error(string.format("Unable to find Unicode datafile %q", filename))
+ end
+ local f = assert(io.open(resolved))
+ local data = f:read'*a'
+ f:close()
+ return lpeg.match(patt, data, 1, ...)
+end
+
+return {
+ codepoint = codepoint,
+ codepoint_range = codepoint_range,
+ ignore_line = ignore_line,
+ ignore_field = ignored,
+ eol = eol,
+ sep = sep,
+ number = number,
+ fields = fields,
+ multiset = multiset,
+ parse_file = parse_uni_file,
+}
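+-- Usage sketch (illustrative; 'Scripts' is only an example of a UCD file with
+-- "range ; value" lines and is assumed to be findable through kpathsea):
+--   local p = require'lua-uni-parse'
+--   local l = lpeg
+--   local scripts = p.parse_file('Scripts',
+--     l.Cg(p.fields(p.codepoint_range, l.C(l.R('az', 'AZ', '__')^1))) + p.ignore_line,
+--     p.multiset)
+--   -- scripts[cp] then maps each listed codepoint to its script name.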