path: root/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
author    Karl Berry <karl@freefriends.org>    2021-08-08 20:55:17 +0000
committer Karl Berry <karl@freefriends.org>    2021-08-08 20:55:17 +0000
commit    f03a7a678fbbe006c2272f28b48465f049327d40 (patch)
tree      d65edf6c8010b255ea5163499dd2447c5215fbe4 /Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
parent    a17b01b849389432a2351e5fa26f64a77b87550a (diff)
lua-uni-algos (8aug21)
git-svn-id: svn://tug.org/texlive/trunk@60194 c570f23f-e606-0410-a88d-b1316a301751
Diffstat (limited to 'Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua')
-rw-r--r--  Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua  358
1 file changed, 272 insertions, 86 deletions
diff --git a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
index e2c247202d7..d54b82032bf 100644
--- a/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
+++ b/Master/texmf-dist/tex/luatex/lua-uni-algos/lua-uni-normalize.lua
@@ -1,5 +1,5 @@
-- lua-uni-normalize.lua
--- Copyright 2020 Marcel Krüger
+-- Copyright 2020--2021 Marcel Krüger
--
-- This work may be distributed and/or modified under the
-- conditions of the LaTeX Project Public License, either version 1.3
@@ -21,8 +21,10 @@ local char = utf8.char
local codes = utf8.codes
local unpack = table.unpack
-kpse.set_program_name'kpsewhich'
-local ccc, composition_mapping, decomposition_mapping, compatibility_mapping do
+if tex.initialize then
+ kpse.set_program_name'kpsewhich'
+end
+local ccc, composition_mapping, decomposition_mapping, compatibility_mapping, nfc_qc do
local function doubleset(ts, key, v1, kind, v2)
ts[1][key] = v1
ts[3][key] = v2
@@ -73,6 +75,30 @@ local ccc, composition_mapping, decomposition_mapping, compatibility_mapping do
[0x1D1BF] = true, [0x1D1C0] = true,
}
+ -- We map for NFC_QC:
+ -- No -> false
+ -- Maybe -> true
+ -- Yes -> nil
+ -- since Yes should be the default.
+ nfc_qc = {}
+ for cp, decomp in next, decomposition_mapping do
+ if ccc[cp] or ccc[decomp[1]] then
+ nfc_qc[cp] = false
+ elseif #decomp == 1 then
+ nfc_qc[cp] = false
+ elseif composition_exclusions[cp] then
+ nfc_qc[cp] = false
+ else
+ nfc_qc[decomp[2]] = true
+ end
+ end
+ for i=0x1161, 0x1175 do
+ nfc_qc[i] = true
+ end
+ for i=0x11A8, 0x11C2 do
+ nfc_qc[i] = true
+ end
+
for cp, decomp in next, decomposition_mapping do
if #decomp > 1 and not (composition_exclusions[cp] or ccc[decomp[1]]) then
local mapping = composition_mapping[decomp[1]]
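The table built above packs the three NFC_Quick_Check values into one lookup: false for No, true for Maybe, and absence (nil) for the default Yes. A minimal sketch of reading that encoding back into the usual property names (the helper is illustrative only, not part of the module):

  -- illustrative only: translate the packed nfc_qc value back into Yes/No/Maybe
  local function nfc_quick_check(nfc_qc, cp)
    local qc = nfc_qc[cp]
    if qc == nil then return 'Yes'        -- default: never blocks NFC
    elseif qc == false then return 'No'   -- always decomposed and recomposed
    else return 'Maybe' end               -- depends on the surrounding characters
  end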
@@ -144,6 +170,25 @@ local ccc, composition_mapping, decomposition_mapping, compatibility_mapping do
for cp, decomp in next, compatibility_mapping do
fixup_decomp(cp, decomp)
end
+
+ --[[ To verify that nfc_qc is correctly generated
+ local ref_nfc_qc = p.parse_file('DerivedNormalizationProps', l.Cf(
+ l.Ct'' * (
+ l.Cg(p.fields(p.codepoint_range,
+ 'NFC_QC',
+ 'N' * l.Cc(false) + 'M' * l.Cc(true))) + p.ignore_line
+ )^0 * -1, p.multiset))
+ for k,v in next, ref_nfc_qc do
+ if nfc_qc[k] ~= v then
+ print('MISMATCH1', k, v, nfc_qc[k])
+ end
+ end
+ for k,v in next, nfc_qc do
+ if ref_nfc_qc[k] ~= v then
+ print('MISMATCH2', k, v)
+ end
+ end
+ ]]
end
local function ccc_reorder(codepoints, i, j, k)
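The commented-out block above cross-checks the derived nfc_qc table against the Unicode data file DerivedNormalizationProps.txt via the lua-uni-parse helpers (p and l). For reference, the relevant lines of that file look like "0340..0341 ; NFC_QC; N # ...". A rough plain-Lua sketch of the same check, independent of the lua-uni-parse API and assuming the file's path is known, might read:

  -- illustrative only: read NFC_QC assignments straight from the UCD file
  local function read_nfc_qc(path)
    local qc = {}
    for line in io.lines(path) do
      local first, last, value =
        line:match('^(%x+)%.?%.?(%x*)%s*;%s*NFC_QC%s*;%s*([NM])')
      if first then
        local from = tonumber(first, 16)
        local to = last ~= '' and tonumber(last, 16) or from
        for cp = from, to do qc[cp] = (value == 'M') end  -- N -> false, M -> true
      end
    end
    return qc
  end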
@@ -270,81 +315,111 @@ local function to_nfkc(s)
return to_nfc_generic(s, compatibility_mapping)
end
+if tex.initialize then
+ return {
+ NFD = to_nfd,
+ NFC = to_nfc,
+ NFKD = to_nfkd,
+ NFKC = to_nfkc,
+ }
+end
+
+local direct = node.direct
+local node_new = direct.new
+local node_copy = direct.copy
+local is_char = direct.is_char
+local setchar = direct.setchar
+local insert_after = direct.insert_after
+local insert_before = direct.insert_before
+local getnext = direct.getnext
+local remove = direct.remove
+local free = direct.free
+local getattrlist = direct.getattributelist
+local getprev = direct.getprev
+local setprev = direct.setprev
+local getboth = direct.getboth
+local setlink = direct.setlink
+
-- allowed_characters only works reliably if it's closed under canonical decomposition mappings
-- but it should fail in reasonable ways as long as it's at least closed under full canonical decompositions
+--
+-- This could be adapted to NFKC as above except that we would either need to handle Hangul syllables
+-- while iterating over starter_decomposition or adapt ~5 entries in compatibility_mapping to not decompose the syllables.
+-- We don't do this currently since I don't see a use case for NFKC-normalized nodes.
local function nodes_to_nfc(head, f, allowed_characters, preserve_attr)
if not head then return head end
- local tmp_node = node.new'temp'
+ local tmp_node = node_new'temp'
-- This is more complicated since we want to ensure that nodes (including their attributes and properties) are preserved whenever possible
--
-- We use three passes:
- -- 1. Decompose composition exclusions etc.
+ -- 1&2. Decompose everything with NFC_Quick_Check == No and reorder marks
+ local last_ccc
local n = head
+ local prev = getprev(head)
+ setlink(tmp_node, head)
+ local require_work
while n do
- local char = node.is_char(n, f)
+ local char = is_char(n, f)
if char then
- local decomposed = decomposition_mapping[char]
- if decomposed then
- local compose_lookup = composition_mapping[decomposed[1]]
- if not (compose_lookup and compose_lookup[decomposed[2]]) then
- local available = true
- if allowed_characters then
- -- This is probably buggy for werd fonts
- for i=1, #decomposed do
- if not allowed_characters[decomposed[i]] then
- available = false
- break
- end
+ local qc = nfc_qc[char]
+ if qc == false then
+ local decomposed = decomposition_mapping[char]
+ local available = true
+ if allowed_characters then
+ -- This is probably buggy for weird fonts
+ for i=1, #decomposed do
+ if not allowed_characters[decomposed[i]] then
+ available = false
+ break
+ end
end
- if available then
- -- Here we never want to compose again, so we can decompose directly
- n.char = decomposed[1]
- for i=2, #decomposed do
- local nn = node.copy(n)
- nn.char = decomposed[i]
- node.insert_after(head, n, nn)
- n = nn
- end
+ end
+ if available then
+ -- Here we never want to compose again, so we can decompose directly
+ local n = n
+ char = decomposed[1]
+ qc = nfc_qc[char]
+ setchar(n, char)
+ for i=2, #decomposed do
+ local nn = node_copy(n)
+ setchar(nn, decomposed[i])
+ insert_after(head, n, nn)
+ n = nn
end
end
end
- end
- n = n.next
- end
- -- 2. Reorder marks
- local last_ccc
- n = head
- local prev = head.prev
- tmp_node.next, head.prev = head, tmp_node
- while n do
- local char = node.is_char(n, f)
- if char then
+ -- Now reorder marks. The goal here is to reduce the overhead
+ -- in the common case that no reordering is needed
local this_ccc = ccc[char]
if last_ccc and this_ccc and last_ccc > this_ccc then
local nn = n
while nn ~= tmp_node do
- nn = nn.prev
- local nn_char = node.is_char(nn, f)
+ nn = getprev(nn)
+ local nn_char = is_char(nn, f)
if not nn_char then break end
local nn_ccc = ccc[nn_char]
if not nn_ccc or nn_ccc <= this_ccc then break end
end
- local before, after = n.prev, n.next
- node.insert_after(head, nn, n)
- before.next = after
- if after then after.prev = before end
+ local before, after = getboth(n)
+ insert_after(head, nn, n)
+ setlink(before, after)
n = after
else
- n = n.next
+ n = getnext(n)
last_ccc = this_ccc
end
+ require_work = require_work or qc
else
- n = n.next
+ n = getnext(n)
last_ccc = nil
end
end
- head, head.prev = tmp_node.next, prev
+ head = getnext(tmp_node)
+ setprev(head, prev)
+ if not require_work then
+ free(tmp_node)
+ return head
+ end
-- 3. The rest: Maybe decompose and then compose again
local starter_n, starter, lookup
local starter_decomposition
@@ -352,20 +427,21 @@ local function nodes_to_nfc(head, f, allowed_characters, preserve_attr)
local i -- index into starter_decomposition
local i_ccc
n = head
- node.insert_after(head, nil, tmp_node)
+ insert_after(head, nil, tmp_node)
repeat
- local char = node.is_char(n, f)
+ local char = is_char(n, f)
local this_ccc = ccc[char] or 300
+ local is_composed -- Did we generate char through composition?
while i and i_ccc <= this_ccc do
local new_starter = lookup and lookup[starter_decomposition[i]]
if new_starter and (not allowed_characters or allowed_characters[new_starter]) then
starter = new_starter
- starter_n.char = starter
+ setchar(starter_n, starter)
lookup = composition_mapping[starter]
else
- local nn = node.copy(starter_n)
- nn.char = starter_decomposition[i]
- node.insert_before(head, n, nn)
+ local nn = node_copy(starter_n)
+ setchar(nn, starter_decomposition[i])
+ insert_before(head, n, nn)
last_ccc = i_ccc
end
i = i + 1
@@ -379,13 +455,14 @@ local function nodes_to_nfc(head, f, allowed_characters, preserve_attr)
if char then
if lookup and (this_ccc == 300) == (this_ccc == last_ccc) then
local new_starter = lookup[char]
- if new_starter and (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or starter_n.attr == n.attr) then
- local last = n.prev
- node.remove(head, n)
- node.free(n)
+ if new_starter and (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or getattrlist(starter_n) == getattrlist(n)) then
+ local last = getprev(n)
+ remove(head, n)
+ free(n)
n = last
starter = new_starter
- starter_n.char, char = starter, starter
+ setchar(starter_n, starter)
+ char, is_composed = starter, true
lookup = composition_mapping[starter]
else
last_ccc = this_ccc
@@ -394,21 +471,23 @@ local function nodes_to_nfc(head, f, allowed_characters, preserve_attr)
elseif not lookup and this_ccc == 300 and last_ccc == 300 then
if starter >= 0x1100 and starter <= 0x1112 and char >= 0x1161 and char <= 0x1175 then -- L + V -> LV
local new_starter = ((starter - 0x1100) * 21 + char - 0x1161) * 28 + 0xAC00
- if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or starter_n.attr == n.attr) then
- node.remove(head, n)
- node.free(n)
- starter = starter
- starter_n.char, char = starter, starter
+ if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or getattrlist(starter_n) == getattrlist(n)) then
+ remove(head, n)
+ free(n)
+ starter = new_starter
+ setchar(starter_n, starter)
+ char, is_composed = starter, true
lookup = composition_mapping[starter]
n = starter_n
end
elseif char >= 0x11A8 and char <= 0x11C2 and starter >= 0xAC00 and starter <= 0xD7A3 and (starter-0xAC00) % 28 == 0 then -- LV + T -> LVT
local new_starter = starter + char - 0x11A7
- if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or starter_n.attr == n.attr) then
- node.remove(head, n)
- node.free(n)
+ if (not allowed_characters or allowed_characters[new_starter]) and (not preserve_attr or getattrlist(starter_n) == getattrlist(n)) then
+ remove(head, n)
+ free(n)
starter = new_starter
- starter_n.char, char = starter, starter
+ setchar(starter_n, starter)
+ char, is_composed = starter, true
lookup = composition_mapping[starter]
n = starter_n
end
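The two Hangul branches above implement the algorithmic composition from the Unicode core specification: L + V gives ((L - 0x1100) * 21 + (V - 0x1161)) * 28 + 0xAC00, and an LV syllable plus a trailing consonant T gives LV + (T - 0x11A7). A worked check of both formulas (not part of the patch, values verified by hand):

  -- U+1100 (choseong kiyeok) + U+1161 (jungseong a) -> U+AC00 (가)
  assert(((0x1100 - 0x1100) * 21 + 0x1161 - 0x1161) * 28 + 0xAC00 == 0xAC00)
  -- U+AC00 (가) + U+11A8 (jongseong kiyeok) -> U+AC01 (각)
  assert(0xAC00 + 0x11A8 - 0x11A7 == 0xAC01)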
@@ -418,38 +497,141 @@ local function nodes_to_nfc(head, f, allowed_characters, preserve_attr)
end
if this_ccc == 300 then
starter_n = n
- starter_decomposition = decomposition_mapping[char]
- if allowed_characters and starter_decomposition then
- for i=1, #starter_decomposition do
- if not allowed_characters[starter_decomposition[i]] then
- starter_decomposition = nil
- break
+ if is_composed then -- If we just composed starter, we don't want to decompose it again
+ starter = char
+ else
+ starter_decomposition = decomposition_mapping[char]
+ if allowed_characters and starter_decomposition then
+ for i=1, #starter_decomposition do
+ if not allowed_characters[starter_decomposition[i]] then
+ starter_decomposition = nil
+ break
+ end
end
end
+ starter = starter_decomposition and starter_decomposition[1] or char
+ setchar(starter_n, starter)
+ if starter_decomposition then
+ i, i_ccc = 2, ccc[starter_decomposition[2]] or 300
+ else
+ i, i_ccc = nil
+ end
end
- starter = starter_decomposition and starter_decomposition[1] or char
- starter_n.char = starter
lookup = composition_mapping[starter]
- if starter_decomposition then
- i, i_ccc = 2, ccc[starter_decomposition[2]] or 300
- else
- i, i_ccc = nil
- end
end
else
starter, lookup, last_ccc, last_decomposition, i, i_ccc = nil
end
if n == tmp_node then
- node.remove(head, tmp_node)
+ remove(head, tmp_node)
break
end
- n = n.next
+ n = getnext(n)
until false
- node.free(tmp_node)
+ free(tmp_node)
return head
end
-local todirect, tonode = node.direct.todirect, node.direct.tonode
+-- This is almost the same as the first loop from nodes_to_nfc, except that it skips the NFC_QC check and decomposes everything.
+-- Also we have to decompose Hangul syllables.
+-- No preserve_attr parameter since we never compose.
+local function nodes_to_nfd_generic(decomposition_mapping, head, f, allowed_characters)
+ if not head then return head end
+ local tmp_node = node_new'temp'
+ -- This is more complicated since we want to ensure that nodes (including their attributes and properties) are preserved whenever possible
+ --
+ -- We use three passes:
+ -- 1&2. Decompose everything with NFC_Quick_Check == No and reorder marks
+ local last_ccc
+ local n = head
+ local prev = getprev(head)
+ setlink(tmp_node, head)
+ while n do
+ local char = is_char(n, f)
+ if char then
+ local decomposed = decomposition_mapping[char]
+ if decomposed then
+ local available = true
+ if allowed_characters then
+ -- This is probably buggy for weird fonts
+ for i=1, #decomposed do
+ if not allowed_characters[decomposed[i]] then
+ available = false
+ break
+ end
+ end
+ end
+ if available then
+ local n = n
+ char = decomposed[1]
+ setchar(n, char)
+ for i=2, #decomposed do
+ local nn = node_copy(n)
+ setchar(nn, decomposed[i])
+ insert_after(head, n, nn)
+ n = nn
+ end
+ end
+ elseif char >= 0xAC00 and char <= 0xD7A3 then -- Hangul clusters. In this case we update n since we never need to reorder them anyway
+ local c = char - 0xAC00
+ local t = 0x11A7 + c % 28
+ c = c // 28
+ local l = 0x1100 + c // 21
+ local v = 0x1161 + c % 21
+ if not allowed_characters or (allowed_characters[l] and allowed_characters[v] and (t == 0x11A7 or allowed_characters[t])) then
+ setchar(n, l)
+ local nn = node_copy(n)
+ setchar(nn, v)
+ insert_after(head, n, nn)
+ n = nn
+ char = v
+ if t ~= 0x11A7 then
+ nn = node_copy(n)
+ setchar(nn, t)
+ insert_after(head, n, nn)
+ n = nn
+ char = t
+ end
+ end
+ end
+ -- Now reorder marks. The goal here is to reduce the overhead
+ -- in the common case that no reordering is needed
+ local this_ccc = ccc[char]
+ if last_ccc and this_ccc and last_ccc > this_ccc then
+ local nn = n
+ while nn ~= tmp_node do
+ nn = getprev(nn)
+ local nn_char = is_char(nn, f)
+ if not nn_char then break end
+ local nn_ccc = ccc[nn_char]
+ if not nn_ccc or nn_ccc <= this_ccc then break end
+ end
+ local before, after = getboth(n)
+ insert_after(head, nn, n)
+ setlink(before, after)
+ n = after
+ else
+ n = getnext(n)
+ last_ccc = this_ccc
+ end
+ else
+ n = getnext(n)
+ last_ccc = nil
+ end
+ end
+ head = getnext(tmp_node)
+ setprev(head, prev)
+ free(tmp_node)
+ return head
+end
+local function nodes_to_nfd(head, f, allowed_characters)
+ return nodes_to_nfd_generic(decomposition_mapping, head, f, allowed_characters)
+end
+local function nodes_to_nfkd(head, f, allowed_characters)
+ return nodes_to_nfd_generic(compatibility_mapping, head, f, allowed_characters)
+end
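The Hangul branch in nodes_to_nfd_generic inverts that composition arithmetic: for a syllable S, the trailing consonant is 0x11A7 + (S - 0xAC00) % 28, and the leading consonant and vowel follow from the quotient. A worked example for U+D55C (한), which decomposes to U+1112, U+1161, U+11AB (not part of the patch, just checking the formulas):

  local c = 0xD55C - 0xAC00   -- 10588
  local t = 0x11A7 + c % 28   -- 0x11AB, trailing nieun
  c = c // 28                 -- 378
  local l = 0x1100 + c // 21  -- 0x1112, leading hieuh
  local v = 0x1161 + c % 21   -- 0x1161, vowel a
  assert(l == 0x1112 and v == 0x1161 and t == 0x11AB)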
+
+local todirect, tonode = direct.todirect, direct.tonode
return {
NFD = to_nfd,
@@ -457,10 +639,14 @@ return {
NFKD = to_nfkd,
NFKC = to_nfkc,
node = {
- NFC = nodes_to_nfc,
+ NFC = function(head, ...) return tonode(nodes_to_nfc(todirect(head), ...)) end,
+ NFD = function(head, ...) return tonode(nodes_to_nfd(todirect(head), ...)) end,
+ NFKD = function(head, ...) return tonode(nodes_to_nfkd(todirect(head), ...)) end,
},
direct = {
- NFC = function(head, ...) return todirect(nodes_to_nfc(tonode(head), ...)) end,
+ NFC = nodes_to_nfc,
+ NFD = nodes_to_nfd,
+ NFKD = nodes_to_nfkd,
},
}
-- print(require'inspect'{to_nfd{0x1E0A}, to_nfc{0x1E0A}})
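For context, a hedged usage sketch of the table returned above. The helper name nfc_nodes_for_font and the font id fid are assumptions for illustration, not something this file prescribes; the plain functions take and return UTF-8 strings, while the node and direct variants take a node list head, a font id, an optional allowed-character set, and (for NFC) a preserve-attributes flag:

  local normalize = require'lua-uni-normalize'

  -- string interface
  print(normalize.NFC('D\u{0307}'))  -- U+1E0A, D with dot above
  print(normalize.NFD('\u{1E0A}'))   -- 'D' followed by U+0307

  -- node interface: restrict the result to characters the font provides;
  -- per the comment in the file, the set should also be closed under
  -- canonical decompositions for the guarantee to hold
  local function nfc_nodes_for_font(head, fid)
    local allowed = {}
    for cp in pairs(font.getfont(fid).characters) do
      allowed[cp] = true
    end
    return normalize.node.NFC(head, fid, allowed, true)
  end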