Diffstat (limited to 'Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua')
-rw-r--r--    Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua    476
1 file changed, 292 insertions, 184 deletions
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua
index 6900b844591..29c56cd0012 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua
@@ -11,6 +11,7 @@ local utf = unicode.utf8
 local concat, getn, utfbyte = table.concat, table.getn, utf.byte
 local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
 local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
 
 local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
 local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
@@ -18,14 +19,10 @@ local trace_features = false trackers.register("otf.features", function(v
 local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
 local trace_sequences = false trackers.register("otf.sequences", function(v) trace_sequences = v end)
 local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
-local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
 local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
 
 --~ trackers.enable("otf.loading")
 
-local zwnj = 0x200C
-local zwj  = 0x200D
-
 --[[ldx--
 <p>The fontforge table has organized lookups in a certain way. A first implementation
 of this code was organized featurewise: information related to features was
@@ -83,13 +80,16 @@ otf.features.default = otf.features.default or { }
 otf.enhancers = otf.enhancers or { }
 otf.glists = { "gsub", "gpos" }
 
-otf.version = 2.636 -- beware: also sync font-mis.lua
+otf.version = 2.645 -- beware: also sync font-mis.lua
 otf.pack = true -- beware: also sync font-mis.lua
 otf.syncspace = true
 otf.notdef = false
 otf.cache = containers.define("fonts", "otf", otf.version, true)
 otf.cleanup_aat = false -- only context
 
+local wildcard = "*"
+local default  = "dflt"
+
 --[[ldx--
 <p>We start with a lot of tables and related functions.</p>
 --ldx]]--
@@ -178,7 +178,7 @@ otf.tables.valid_fields = {
 
 local function load_featurefile(ff,featurefile)
     if featurefile then
-        featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea')) -- "FONTFEATURES"
+        featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
         if featurefile and featurefile ~= "" then
             if trace_loading then
                 logs.report("load otf", "featurefile: %s", featurefile)
@@ -192,7 +192,7 @@ function otf.enhance(name,data,filename,verbose)
     local enhancer = otf.enhancers[name]
     if enhancer then
         if (verbose ~= nil and verbose) or trace_loading then
-            logs.report("load otf","enhance: %s",name)
+            logs.report("load otf","enhance: %s (%s)",name,filename)
         end
         enhancer(data,filename)
     end
@@ -207,6 +207,7 @@ local enhancers = {
     "reorganize mark classes",
     "reorganize kerns", -- moved here
     "flatten glyph lookups", "flatten anchor tables", "flatten feature tables",
+    "simplify glyph lookups", -- some saving
    "prepare luatex tables",
     "analyse features", "rehash features",
     "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables",
@@ -214,6 +215,7 @@ local enhancers = {
     "share widths",
     "strip not needed data",
     "migrate metadata",
+    "check math parameters",
 }
 
 function otf.load(filename,format,sub,featurefile)
@@ -238,9 +240,15 @@ function otf.load(filename,format,sub,featurefile)
             ff, messages = fontloader.open(filename)
         end
         if trace_loading and messages and #messages > 0 then
-            for m=1,#messages do
-                logs.report("load otf","warning: %s",messages[m])
+            if type(messages) == "string" then
+                logs.report("load otf","warning: %s",messages)
+            else
+                for m=1,#messages do
+                    logs.report("load otf","warning: %s",tostring(messages[m]))
+                end
             end
+        else
+            logs.report("load otf","font loaded okay")
         end
         if ff then
             load_featurefile(ff,featurefile)
@@ -251,6 +259,7 @@ function otf.load(filename,format,sub,featurefile)
             logs.report("load otf","enhancing ...")
             for e=1,#enhancers do
                 otf.enhance(enhancers[e],data,filename)
+                io.flush() -- we want instant messages
             end
             if otf.pack and not fonts.verbose then
                 otf.enhance("pack",data,filename)
@@ -494,142 +503,7 @@ otf.enhancers["analyse marks"] = function(data,filename)
     end
 end
 
-local separator = lpeg.S("_.")
-local other = lpeg.C((1 - separator)^1)
-local ligsplitter = lpeg.Ct(other * (separator * other)^0)
-
---~ print(table.serialize(ligsplitter:match("this")))
---~ print(table.serialize(ligsplitter:match("this.that")))
---~ print(table.serialize(ligsplitter:match("japan1.123")))
---~ print(table.serialize(ligsplitter:match("such_so_more")))
---~ print(table.serialize(ligsplitter:match("such_so_more.that")))
-
-otf.enhancers["analyse unicodes"] = function(data,filename)
-    local tounicode16, tounicode16sequence = fonts.map.tounicode16, fonts.map.tounicode16sequence
-    local unicodes = data.luatex.unicodes
-    -- we need to move this code
-    unicodes['space'] = unicodes['space'] or 32 -- handly later on
-    unicodes['hyphen'] = unicodes['hyphen'] or 45 -- handly later on
-    unicodes['zwj'] = unicodes['zwj'] or zwj -- handly later on
-    unicodes['zwnj'] = unicodes['zwnj'] or zwnj -- handly later on
-    -- the tounicode mapping is sparse and only needed for alternatives
-    local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
-    data.luatex.tounicode, data.luatex.originals = tounicode, originals
-    local lumunic, uparser, oparser
-    if false then -- will become an option
-        lumunic = fonts.map.load_lum_table(filename)
-        lumunic = lumunic and lumunic.tounicode
-    end
-    local cidinfo, cidnames, cidcodes = data.cidinfo
-    local usedmap = cidinfo and cidinfo.usedname
-    usedmap = usedmap and fonts.cid.map[usedmap]
-    if usedmap then
-        oparser = usedmap and fonts.map.make_name_parser(cidinfo.ordering)
-        cidnames = usedmap.names
-        cidcodes = usedmap.unicodes
-    end
-    uparser = fonts.map.make_name_parser()
-    local aglmap = fonts.map and fonts.map.agl_to_unicode
-    for index, glyph in next, data.glyphs do
-        local name, unic = glyph.name, glyph.unicode or -1 -- play safe
-        if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
-            local unicode = lumunic and lumunic[name]
-            if unicode then
-                originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
-            end
-            -- cidmap heuristics, beware, there is no guarantee for a match unless
-            -- the chain resolves
-            if (not unicode) and usedmap then
-                local foundindex = oparser:match(name)
-                if foundindex then
-                    unicode = cidcodes[foundindex] -- name to number
-                    if not unicode then
-                        local reference = cidnames[foundindex] -- number to name
-                        if reference then
-                            local foundindex = oparser:match(reference)
-                            if foundindex then
-                                unicode = cidcodes[foundindex]
-                                if unicode then
-                                    originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
-                                end
-                            end
-                            if not unicode then
-                                local foundcodes, multiple = uparser:match(reference)
-                                if foundcodes then
-                                    if multiple then
-                                        originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
-                                    else
-                                        originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
-                                    end
-                                end
-                            end
-                        end
-                    end
-                end
-            end
-            -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
-            if not unicode then
-                local split = ligsplitter:match(name)
-                local nplit = (split and #split) or 0
-                if nplit == 0 then
-                    -- skip
-                elseif nplit == 1 then
-                    local base = split[1]
-                    unicode = unicodes[base] or (agl and agl[base])
-                    if unicode then
-                        if type(unicode) == "table" then
-                            unicode = unicode[1]
-                        end
-                        originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
-                    end
-                else
-                    local t = { }
-                    for l=1,nplit do
-                        local base = split[l]
-                        local u = unicodes[base] or (agl and agl[base])
-                        if not u then
-                            break
-                        elseif type(u) == "table" then
-                            t[#t+1] = u[1]
-                        else
-                            t[#t+1] = u
-                        end
-                    end
-                    if #t > 0 then -- done then
-                        originals[index], tounicode[index], nl, unicode = t, tounicode16sequence(t), nl + 1, true
-                    end
-                end
-            end
-            -- last resort
-            if not unicode then
-                local foundcodes, multiple = uparser:match(name)
-                if foundcodes then
-                    if multiple then
-                        originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
-                    else
-                        originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
-                    end
-                end
-            end
-            if not unicode then
-                originals[index], tounicode[index] = 0xFFFD, "FFFD"
-            end
-        end
-    end
-    if trace_unimapping then
-        for index, glyph in table.sortedpairs(data.glyphs) do
-            local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
-            if toun then
-                logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
-            else
-                logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
-            end
-        end
-    end
-    if trace_loading and (ns > 0 or nl > 0) then
-        logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
-    end
-end
+otf.enhancers["analyse unicodes"] = fonts.map.add_to_unicode
 
 otf.enhancers["analyse subtables"] = function(data,filename)
     data.luatex = data.luatex or { }
@@ -986,6 +860,129 @@ end
 
 -- kern: ttf has a table with kerns
 
+--~ otf.enhancers["reorganize kerns"] = function(data,filename)
+--~     local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
+--~     local mkdone = false
+--~     for index, glyph in next, data.glyphs do
+--~         if glyph.kerns then
+--~             local mykerns = { }
+--~             for k,v in next, glyph.kerns do
+--~                 local vc, vo, vl = v.char, v.off, v.lookup
+--~                 if vc and vo and vl then -- brrr, wrong! we miss the non unicode ones
+--~                     local uvc = unicodes[vc]
+--~                     if not uvc then
+--~                         if trace_loading then
+--~                             logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
+--~                         end
+--~                     else
+--~                         if type(vl) ~= "table" then
+--~                             vl = { vl }
+--~                         end
+--~                         for l=1,#vl do
+--~                             local vll = vl[l]
+--~                             local mkl = mykerns[vll]
+--~                             if not mkl then
+--~                                 mkl = { }
+--~                                 mykerns[vll] = mkl
+--~                             end
+--~                             if type(uvc) == "table" then
+--~                                 for u=1,#uvc do
+--~                                     mkl[uvc[u]] = vo
+--~                                 end
+--~                             else
+--~                                 mkl[uvc] = vo
+--~                             end
+--~                         end
+--~                     end
+--~                 end
+--~             end
+--~             glyph.mykerns = mykerns
+--~             glyph.kerns = nil -- saves space and time
+--~             mkdone = true
+--~         end
+--~     end
+--~     if trace_loading and mkdone then
+--~         logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
+--~     end
+--~     if data.kerns then
+--~         if trace_loading then
+--~             logs.report("load otf", "removing global 'kern' table")
+--~         end
+--~         data.kerns = nil
+--~     end
+--~     local dgpos = data.gpos
+--~     if dgpos then
+--~         for gp=1,#dgpos do
+--~             local gpos = dgpos[gp]
+--~             local subtables = gpos.subtables
+--~             if subtables then
+--~                 for s=1,#subtables do
+--~                     local subtable = subtables[s]
+--~                     local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
+--~                     if kernclass then -- the next one is quite slow
+--~                         for k=1,#kernclass do
+--~                             local kcl = kernclass[k]
+--~                             local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
+--~                             if type(lookups) ~= "table" then
+--~                                 lookups = { lookups }
+--~                             end
+--~                             for l=1,#lookups do
+--~                                 local lookup = lookups[l]
+--~                                 -- weird, as maxfirst and maxseconds can have holes
+--~                                 local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+--~                                 if trace_loading then
+--~                                     logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
+--~                                 end
+--~                                 for fk, fv in next, firsts do
+--~                                     for first in gmatch(fv,"[^ ]+") do
+--~                                         local first_unicode = unicodes[first]
+--~                                         if type(first_unicode) == "number" then
+--~                                             first_unicode = { first_unicode }
+--~                                         end
+--~                                         for f=1,#first_unicode do
+--~                                             local glyph = glyphs[mapmap[first_unicode[f]]]
+--~                                             if glyph then
+--~                                                 local mykerns = glyph.mykerns
+--~                                                 if not mykerns then
+--~                                                     mykerns = { } -- unicode indexed !
+--~                                                     glyph.mykerns = mykerns
+--~                                                 end
+--~                                                 local lookupkerns = mykerns[lookup]
+--~                                                 if not lookupkerns then
+--~                                                     lookupkerns = { }
+--~                                                     mykerns[lookup] = lookupkerns
+--~                                                 end
+--~                                                 for sk, sv in next, seconds do
+--~                                                     local offset = offsets[(fk-1) * maxseconds + sk]
+--~                                                     --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
+--~                                                     for second in gmatch(sv,"[^ ]+") do
+--~                                                         local second_unicode = unicodes[second]
+--~                                                         if type(second_unicode) == "number" then
+--~                                                             lookupkerns[second_unicode] = offset
+--~                                                         else
+--~                                                             for s=1,#second_unicode do
+--~                                                                 lookupkerns[second_unicode[s]] = offset
+--~                                                             end
+--~                                                         end
+--~                                                     end
+--~                                                 end
+--~                                             elseif trace_loading then
+--~                                                 logs.report("load otf", "no glyph data for U+%04X", first_unicode[f])
+--~                                             end
+--~                                         end
+--~                                     end
+--~                                 end
+--~                             end
+--~                         end
+--~                         subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables."
+--~                         subtable.kernclass = { }
+--~                     end
+--~                 end
+--~             end
+--~         end
+--~     end
+--~ end
+
 otf.enhancers["reorganize kerns"] = function(data,filename)
     local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
     local mkdone = false
@@ -1038,6 +1035,9 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
     end
     local dgpos = data.gpos
     if dgpos then
+        local separator = lpeg.P(" ")
+        local other = ((1 - separator)^0) / unicodes
+        local splitter = lpeg.Ct(other * (separator * other)^0)
         for gp=1,#dgpos do
             local gpos = dgpos[gp]
             local subtables = gpos.subtables
@@ -1045,54 +1045,75 @@ otf.enhancers["reorganize kerns"] = function(data,filename)
                 for s=1,#subtables do
                     local subtable = subtables[s]
                     local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
-                    if kernclass then
+                    if kernclass then -- the next one is quite slow
                         for k=1,#kernclass do
                            local kcl = kernclass[k]
                             local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
                             if type(lookups) ~= "table" then
                                 lookups = { lookups }
                             end
+                            local split = { }
                            for l=1,#lookups do
                                 local lookup = lookups[l]
+                                -- weird, as maxfirst and maxseconds can have holes, first seems to be indexed, seconds starts at 2
                                 local maxfirsts, maxseconds = getn(firsts), getn(seconds)
+                                for _, s in next, firsts do
+                                    split[s] = split[s] or lpegmatch(splitter,s)
+                                end
+                                for _, s in next, seconds do
+                                    split[s] = split[s] or lpegmatch(splitter,s)
+                                end
                                 if trace_loading then
                                     logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds)
                                 end
-                                for fk, fv in next, firsts do
-                                    for first in gmatch(fv,"[^ ]+") do
-                                        local first_unicode = unicodes[first]
-                                        if type(first_unicode) == "number" then
-                                            first_unicode = { first_unicode }
+                                local function do_it(fk,first_unicode)
+                                    local glyph = glyphs[mapmap[first_unicode]]
+                                    if glyph then
+                                        local mykerns = glyph.mykerns
+                                        if not mykerns then
+                                            mykerns = { } -- unicode indexed !
+                                            glyph.mykerns = mykerns
                                         end
-                                        for f=1,#first_unicode do
-                                            local glyph = glyphs[mapmap[first_unicode[f]]]
-                                            if glyph then
-                                                local mykerns = glyph.mykerns
-                                                if not mykerns then
-                                                    mykerns = { } -- unicode indexed !
-                                                    glyph.mykerns = mykerns
-                                                end
-                                                local lookupkerns = mykerns[lookup]
-                                                if not lookupkerns then
-                                                    lookupkerns = { }
-                                                    mykerns[lookup] = lookupkerns
-                                                end
-                                                for sk, sv in next, seconds do
-                                                    local offset = offsets[(fk-1) * maxseconds + sk]
-                                                    --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
-                                                    for second in gmatch(sv,"[^ ]+") do
-                                                        local second_unicode = unicodes[second]
-                                                        if type(second_unicode) == "number" then
-                                                            lookupkerns[second_unicode] = offset
-                                                        else
-                                                            for s=1,#second_unicode do
-                                                                lookupkerns[second_unicode[s]] = offset
+                                        local lookupkerns = mykerns[lookup]
+                                        if not lookupkerns then
+                                            lookupkerns = { }
+                                            mykerns[lookup] = lookupkerns
+                                        end
+                                        local baseoffset = (fk-1) * maxseconds
+                                        for sk=2,maxseconds do
+                                            local sv = seconds[sk]
+                                            local offset = offsets[baseoffset + sk]
+                                            --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
+                                            local splt = split[sv]
+                                            if splt then
+                                                for i=1,#splt do
+                                                    local second_unicode = splt[i]
+                                                    if tonumber(second_unicode) then
+                                                        lookupkerns[second_unicode] = offset
+                                                    else
+                                                        for s=1,#second_unicode do
+                                                            lookupkerns[second_unicode[s]] = offset
                                                         end
                                                     end
                                                 end
-                                            end
-                                        elseif trace_loading then
-                                            logs.report("load otf", "no glyph data for U+%04X", first_unicode[f])
+                                            end
+                                        end
+                                    elseif trace_loading then
+                                        logs.report("load otf", "no glyph data for U+%04X", first_unicode)
+                                    end
+                                end
+                                for fk=1,#firsts do
+                                    local fv = firsts[fk]
+                                    local splt = split[fv]
+                                    if splt then
+                                        for i=1,#splt do
+                                            local first_unicode = splt[i]
+                                            if tonumber(first_unicode) then
+                                                do_it(fk,first_unicode)
+                                            else
+                                                for f=1,#first_unicode do
+                                                    do_it(fk,first_unicode[f])
+                                                end
                                             end
                                         end
                                     end
@@ -1166,10 +1187,31 @@ otf.enhancers["migrate metadata"] = function(data,filename)
     metadata.charwidth = pfminfo and pfminfo.avgwidth
 end
 
+local private_math_parameters = {
+    "FractionDelimiterSize",
+    "FractionDelimiterDisplayStyleSize",
+}
+
+otf.enhancers["check math parameters"] = function(data,filename)
+    local mathdata = data.metadata.math
+    if mathdata then
+        for m=1,#private_math_parameters do
+            local pmp = private_math_parameters[m]
+            if not mathdata[pmp] then
+                if trace_loading then
+                    logs.report("load otf", "setting math parameter '%s' to 0", pmp)
+                end
+                mathdata[pmp] = 0
+            end
+        end
+    end
+end
+
 otf.enhancers["flatten glyph lookups"] = function(data,filename)
     for k, v in next, data.glyphs do
-        if v.lookups then
-            for kk, vv in next, v.lookups do
+        local lookups = v.lookups
+        if lookups then
+            for kk, vv in next, lookups do
                 for kkk=1,#vv do
                     local vvv = vv[kkk]
                     local s = vvv.specification
@@ -1219,6 +1261,31 @@ otf.enhancers["flatten glyph lookups"] = function(data,filename)
     end
 end
 
+otf.enhancers["simplify glyph lookups"] = function(data,filename)
+    for k, v in next, data.glyphs do
+        local lookups = v.lookups
+        if lookups then
+            local slookups, mlookups
+            for kk, vv in next, lookups do
+                if #vv == 1 then
+                    if not slookups then
+                        slookups = { }
+                        v.slookups = slookups
+                    end
+                    slookups[kk] = vv[1]
+                else
+                    if not mlookups then
+                        mlookups = { }
+                        v.mlookups = mlookups
+                    end
+                    mlookups[kk] = vv
+                end
+            end
+            v.lookups = nil
+        end
+    end
+end
+
 otf.enhancers["flatten anchor tables"] = function(data,filename)
     for k, v in next, data.glyphs do
         if v.anchors then
@@ -1459,7 +1526,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
                 local variants = m.horiz_variants
                 if variants then
                     local c = char
-                    for n in variants:gmatch("[^ ]+") do
+                    for n in gmatch(variants,"[^ ]+") do
                         local un = unicodes[n]
                         if un and u ~= un then
                             c.next = un
@@ -1471,7 +1538,7 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
                 local variants = m.vert_variants
                 if variants then
                     local c = char
-                    for n in variants:gmatch("[^ ]+") do
+                    for n in gmatch(variants,"[^ ]+") do
                        local un = unicodes[n]
                         if un and u ~= un then
                             c.next = un
@@ -1505,8 +1572,6 @@ function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder th
         tfm.cidinfo = data.cidinfo
         tfm.cidinfo.registry = tfm.cidinfo.registry or ""
         tfm.type = "real"
-        tfm.stretch = 0 -- stretch
-        tfm.slant = 0 -- slant
         tfm.direction = 0
         tfm.boundarychar_label = 0
         tfm.boundarychar = 65536
@@ -1615,7 +1680,7 @@ function tfm.read_from_open_type(specification)
             end
         end
     end
-    tfmtable = tfm.scale(tfmtable,s)
+    tfmtable = tfm.scale(tfmtable,s,specification.relativeid)
    -- here we resolve the name; file can be relocated, so this info is not in the cache
     local filename = (otfdata and otfdata.luatex and otfdata.luatex.filename) or specification.filename
     if not filename then
@@ -1635,9 +1700,52 @@ function tfm.read_from_open_type(specification)
             tfmtable.format = specification.format
         end
         tfmtable.name = tfmtable.filename or tfmtable.fullname or tfmtable.fontname
+        if tfm.fontname_mode == "specification" then
+            -- not to be used in context !
+            local specname = specification.specification
+            if specname then
+                tfmtable.name = specname
+                if trace_defining then
+                    logs.report("define font","overloaded fontname: '%s'",specname)
+                end
+            end
+        end
         end
         fonts.logger.save(tfmtable,file.extname(specification.filename),specification)
     end
--~ print(tfmtable.fullname)
     return tfmtable
 end
+
+-- helpers
+
+function otf.collect_lookups(otfdata,kind,script,language)
+    -- maybe store this in the font
+    local sequences = otfdata.luatex.sequences
+    if sequences then
+        local featuremap, featurelist = { }, { }
+        for s=1,#sequences do
+            local sequence = sequences[s]
+            local features = sequence.features
+            features = features and features[kind]
+            features = features and (features[script] or features[default] or features[wildcard])
+            features = features and (features[language] or features[default] or features[wildcard])
+            if features then
+                local subtables = sequence.subtables
+                if subtables then
+                    for s=1,#subtables do
+                        local ss = subtables[s]
+                        if not featuremap[s] then
+                            featuremap[ss] = true
+                            featurelist[#featurelist+1] = ss
+                        end
+                    end
+                end
+            end
+        end
+        if #featurelist > 0 then
+            return featuremap, featurelist
+        end
+    end
+    return nil, nil
+end
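
Note on the new helper: otf.collect_lookups, added at the end of this revision, walks the font's luatex.sequences and returns both a set (featuremap) and an ordered list (featurelist) of the subtable names that enable a given feature for a script/language pair, falling back to "dflt" and the "*" wildcard. A minimal usage sketch in Lua follows; the "liga"/"latn"/"dflt" values and the apply_ligatures wrapper are made-up examples for illustration, not part of this commit, and otfdata is assumed to be a table as produced by otf.load above.

    -- illustrative sketch only; assumes the module environment of this file
    local function apply_ligatures(otfdata)
        -- collect the gsub/gpos subtables that enable 'liga' for latn/dflt
        local featuremap, featurelist = otf.collect_lookups(otfdata,"liga","latn","dflt")
        if not featurelist then
            return -- no lookup enables this feature for the given script/language
        end
        for i=1,#featurelist do
            local subtable = featurelist[i]
            -- featurelist preserves subtable order; featuremap gives constant-time
            -- membership tests; a real consumer would now process the glyph
            -- lookups that reference this subtable name
        end
    end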