Diffstat (limited to 'Master/texmf-dist/tex/luatex')
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/font-age.lua | 4302
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua | 451
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua | 308
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua | 95
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-blacklist.cnf (renamed from Master/texmf-dist/tex/luatex/luaotfload/otfl-blacklist.cnf) | 6
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua | 307
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua | 1540
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua | 747
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua | 68
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua | 97
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua | 28
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua | 272
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua | 33
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua | 38
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-lib-dir.lua | 449
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua | 32
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-merged.lua | 11903
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua | 84
-rwxr-xr-x  Master/texmf-dist/tex/luatex/luaotfload/luaotfload-tool.lua | 454
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload.lua | 404
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty | 17
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-data-con.lua | 132
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-cid.lua | 147
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-clr.lua | 170
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-def.lua | 662
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-dum.lua | 400
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ini.lua | 96
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-map.lua | 370
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-nms.lua | 688
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ota.lua | 287
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otb.lua | 373
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otc.lua | 217
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otd.lua | 79
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua | 1787
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-oti.lua | 57
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otn.lua | 2688
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ott.lua | 955
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-tfm.lua | 741
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-font-xtx.lua | 229
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-dum.lua | 185
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-ovr.lua | 46
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-node-dum.lua | 127
-rw-r--r--  Master/texmf-dist/tex/luatex/luaotfload/otfl-node-inj.lua | 443
43 files changed, 21514 insertions, 11000 deletions
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/font-age.lua b/Master/texmf-dist/tex/luatex/luaotfload/font-age.lua
new file mode 100644
index 00000000000..3a7098e0ffd
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/font-age.lua
@@ -0,0 +1,4302 @@
+if not modules then modules = { } end modules ["font-age"] = {
+ version = 2.200,
+ comment = "part of the luaotfload package",
+ author = "luaotfload team / mkglyphlist",
+ copyright = "derived from http://partners.adobe.com/public/developer/en/opentype/glyphlist.txt",
+ original = "Adobe Glyph List, version 2.0, September 20, 2002",
+ dataonly = true,
+}
+
+if context then
+ logs.report("fatal error","this module is not for context")
+ os.exit(-1)
+end
+
+--[[doc--
+Everything below has been autogenerated. Run mkglyphlist to rebuild
+font-age.lua.
+--doc]]--
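+
+--[[doc--
+Usage sketch (editorial note, not generated content): the table below maps
+Adobe Glyph List names to Unicode code points, so a caller can resolve a
+PostScript glyph name roughly as follows. The dofile-based loading is an
+assumption for illustration only; luaotfload presumably wires this file in
+through its own loader.
+
+    local agl = dofile ("font-age.lua")
+    print (agl["AE"])     --> 198
+    print (agl["Aacute"]) --> 193
+--doc]]--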
+
+return {
+ ["A"]=65,
+ ["AE"]=198,
+ ["AEacute"]=508,
+ ["AEmacron"]=482,
+ ["AEsmall"]=63462,
+ ["Aacute"]=193,
+ ["Aacutesmall"]=63457,
+ ["Abreve"]=258,
+ ["Abreveacute"]=7854,
+ ["Abrevecyrillic"]=1232,
+ ["Abrevedotbelow"]=7862,
+ ["Abrevegrave"]=7856,
+ ["Abrevehookabove"]=7858,
+ ["Abrevetilde"]=7860,
+ ["Acaron"]=461,
+ ["Acircle"]=9398,
+ ["Acircumflex"]=194,
+ ["Acircumflexacute"]=7844,
+ ["Acircumflexdotbelow"]=7852,
+ ["Acircumflexgrave"]=7846,
+ ["Acircumflexhookabove"]=7848,
+ ["Acircumflexsmall"]=63458,
+ ["Acircumflextilde"]=7850,
+ ["Acute"]=63177,
+ ["Acutesmall"]=63412,
+ ["Acyrillic"]=1040,
+ ["Adblgrave"]=512,
+ ["Adieresis"]=196,
+ ["Adieresiscyrillic"]=1234,
+ ["Adieresismacron"]=478,
+ ["Adieresissmall"]=63460,
+ ["Adotbelow"]=7840,
+ ["Adotmacron"]=480,
+ ["Agrave"]=192,
+ ["Agravesmall"]=63456,
+ ["Ahookabove"]=7842,
+ ["Aiecyrillic"]=1236,
+ ["Ainvertedbreve"]=514,
+ ["Alpha"]=913,
+ ["Alphatonos"]=902,
+ ["Amacron"]=256,
+ ["Amonospace"]=65313,
+ ["Aogonek"]=260,
+ ["Aring"]=197,
+ ["Aringacute"]=506,
+ ["Aringbelow"]=7680,
+ ["Aringsmall"]=63461,
+ ["Asmall"]=63329,
+ ["Atilde"]=195,
+ ["Atildesmall"]=63459,
+ ["Aybarmenian"]=1329,
+ ["B"]=66,
+ ["Bcircle"]=9399,
+ ["Bdotaccent"]=7682,
+ ["Bdotbelow"]=7684,
+ ["Becyrillic"]=1041,
+ ["Benarmenian"]=1330,
+ ["Beta"]=914,
+ ["Bhook"]=385,
+ ["Blinebelow"]=7686,
+ ["Bmonospace"]=65314,
+ ["Brevesmall"]=63220,
+ ["Bsmall"]=63330,
+ ["Btopbar"]=386,
+ ["C"]=67,
+ ["Caarmenian"]=1342,
+ ["Cacute"]=262,
+ ["Caron"]=63178,
+ ["Caronsmall"]=63221,
+ ["Ccaron"]=268,
+ ["Ccedilla"]=199,
+ ["Ccedillaacute"]=7688,
+ ["Ccedillasmall"]=63463,
+ ["Ccircle"]=9400,
+ ["Ccircumflex"]=264,
+ ["Cdot"]=266,
+ ["Cdotaccent"]=266,
+ ["Cedillasmall"]=63416,
+ ["Chaarmenian"]=1353,
+ ["Cheabkhasiancyrillic"]=1212,
+ ["Checyrillic"]=1063,
+ ["Chedescenderabkhasiancyrillic"]=1214,
+ ["Chedescendercyrillic"]=1206,
+ ["Chedieresiscyrillic"]=1268,
+ ["Cheharmenian"]=1347,
+ ["Chekhakassiancyrillic"]=1227,
+ ["Cheverticalstrokecyrillic"]=1208,
+ ["Chi"]=935,
+ ["Chook"]=391,
+ ["Circumflexsmall"]=63222,
+ ["Cmonospace"]=65315,
+ ["Coarmenian"]=1361,
+ ["Csmall"]=63331,
+ ["D"]=68,
+ ["DZ"]=497,
+ ["DZcaron"]=452,
+ ["Daarmenian"]=1332,
+ ["Dafrican"]=393,
+ ["Dcaron"]=270,
+ ["Dcedilla"]=7696,
+ ["Dcircle"]=9401,
+ ["Dcircumflexbelow"]=7698,
+ ["Dcroat"]=272,
+ ["Ddotaccent"]=7690,
+ ["Ddotbelow"]=7692,
+ ["Decyrillic"]=1044,
+ ["Deicoptic"]=1006,
+ ["Delta"]=8710,
+ ["Deltagreek"]=916,
+ ["Dhook"]=394,
+ ["Dieresis"]=63179,
+ ["DieresisAcute"]=63180,
+ ["DieresisGrave"]=63181,
+ ["Dieresissmall"]=63400,
+ ["Digammagreek"]=988,
+ ["Djecyrillic"]=1026,
+ ["Dlinebelow"]=7694,
+ ["Dmonospace"]=65316,
+ ["Dotaccentsmall"]=63223,
+ ["Dslash"]=272,
+ ["Dsmall"]=63332,
+ ["Dtopbar"]=395,
+ ["Dz"]=498,
+ ["Dzcaron"]=453,
+ ["Dzeabkhasiancyrillic"]=1248,
+ ["Dzecyrillic"]=1029,
+ ["Dzhecyrillic"]=1039,
+ ["E"]=69,
+ ["Eacute"]=201,
+ ["Eacutesmall"]=63465,
+ ["Ebreve"]=276,
+ ["Ecaron"]=282,
+ ["Ecedillabreve"]=7708,
+ ["Echarmenian"]=1333,
+ ["Ecircle"]=9402,
+ ["Ecircumflex"]=202,
+ ["Ecircumflexacute"]=7870,
+ ["Ecircumflexbelow"]=7704,
+ ["Ecircumflexdotbelow"]=7878,
+ ["Ecircumflexgrave"]=7872,
+ ["Ecircumflexhookabove"]=7874,
+ ["Ecircumflexsmall"]=63466,
+ ["Ecircumflextilde"]=7876,
+ ["Ecyrillic"]=1028,
+ ["Edblgrave"]=516,
+ ["Edieresis"]=203,
+ ["Edieresissmall"]=63467,
+ ["Edot"]=278,
+ ["Edotaccent"]=278,
+ ["Edotbelow"]=7864,
+ ["Efcyrillic"]=1060,
+ ["Egrave"]=200,
+ ["Egravesmall"]=63464,
+ ["Eharmenian"]=1335,
+ ["Ehookabove"]=7866,
+ ["Eightroman"]=8551,
+ ["Einvertedbreve"]=518,
+ ["Eiotifiedcyrillic"]=1124,
+ ["Elcyrillic"]=1051,
+ ["Elevenroman"]=8554,
+ ["Emacron"]=274,
+ ["Emacronacute"]=7702,
+ ["Emacrongrave"]=7700,
+ ["Emcyrillic"]=1052,
+ ["Emonospace"]=65317,
+ ["Encyrillic"]=1053,
+ ["Endescendercyrillic"]=1186,
+ ["Eng"]=330,
+ ["Enghecyrillic"]=1188,
+ ["Enhookcyrillic"]=1223,
+ ["Eogonek"]=280,
+ ["Eopen"]=400,
+ ["Epsilon"]=917,
+ ["Epsilontonos"]=904,
+ ["Ercyrillic"]=1056,
+ ["Ereversed"]=398,
+ ["Ereversedcyrillic"]=1069,
+ ["Escyrillic"]=1057,
+ ["Esdescendercyrillic"]=1194,
+ ["Esh"]=425,
+ ["Esmall"]=63333,
+ ["Eta"]=919,
+ ["Etarmenian"]=1336,
+ ["Etatonos"]=905,
+ ["Eth"]=208,
+ ["Ethsmall"]=63472,
+ ["Etilde"]=7868,
+ ["Etildebelow"]=7706,
+ ["Euro"]=8364,
+ ["Ezh"]=439,
+ ["Ezhcaron"]=494,
+ ["Ezhreversed"]=440,
+ ["F"]=70,
+ ["Fcircle"]=9403,
+ ["Fdotaccent"]=7710,
+ ["Feharmenian"]=1366,
+ ["Feicoptic"]=996,
+ ["Fhook"]=401,
+ ["Fitacyrillic"]=1138,
+ ["Fiveroman"]=8548,
+ ["Fmonospace"]=65318,
+ ["Fourroman"]=8547,
+ ["Fsmall"]=63334,
+ ["G"]=71,
+ ["GBsquare"]=13191,
+ ["Gacute"]=500,
+ ["Gamma"]=915,
+ ["Gammaafrican"]=404,
+ ["Gangiacoptic"]=1002,
+ ["Gbreve"]=286,
+ ["Gcaron"]=486,
+ ["Gcedilla"]=290,
+ ["Gcircle"]=9404,
+ ["Gcircumflex"]=284,
+ ["Gcommaaccent"]=290,
+ ["Gdot"]=288,
+ ["Gdotaccent"]=288,
+ ["Gecyrillic"]=1043,
+ ["Ghadarmenian"]=1346,
+ ["Ghemiddlehookcyrillic"]=1172,
+ ["Ghestrokecyrillic"]=1170,
+ ["Gheupturncyrillic"]=1168,
+ ["Ghook"]=403,
+ ["Gimarmenian"]=1331,
+ ["Gjecyrillic"]=1027,
+ ["Gmacron"]=7712,
+ ["Gmonospace"]=65319,
+ ["Grave"]=63182,
+ ["Gravesmall"]=63328,
+ ["Gsmall"]=63335,
+ ["Gsmallhook"]=667,
+ ["Gstroke"]=484,
+ ["H"]=72,
+ ["H18533"]=9679,
+ ["H18543"]=9642,
+ ["H18551"]=9643,
+ ["H22073"]=9633,
+ ["HPsquare"]=13259,
+ ["Haabkhasiancyrillic"]=1192,
+ ["Hadescendercyrillic"]=1202,
+ ["Hardsigncyrillic"]=1066,
+ ["Hbar"]=294,
+ ["Hbrevebelow"]=7722,
+ ["Hcedilla"]=7720,
+ ["Hcircle"]=9405,
+ ["Hcircumflex"]=292,
+ ["Hdieresis"]=7718,
+ ["Hdotaccent"]=7714,
+ ["Hdotbelow"]=7716,
+ ["Hmonospace"]=65320,
+ ["Hoarmenian"]=1344,
+ ["Horicoptic"]=1000,
+ ["Hsmall"]=63336,
+ ["Hungarumlaut"]=63183,
+ ["Hungarumlautsmall"]=63224,
+ ["Hzsquare"]=13200,
+ ["I"]=73,
+ ["IAcyrillic"]=1071,
+ ["IJ"]=306,
+ ["IUcyrillic"]=1070,
+ ["Iacute"]=205,
+ ["Iacutesmall"]=63469,
+ ["Ibreve"]=300,
+ ["Icaron"]=463,
+ ["Icircle"]=9406,
+ ["Icircumflex"]=206,
+ ["Icircumflexsmall"]=63470,
+ ["Icyrillic"]=1030,
+ ["Idblgrave"]=520,
+ ["Idieresis"]=207,
+ ["Idieresisacute"]=7726,
+ ["Idieresiscyrillic"]=1252,
+ ["Idieresissmall"]=63471,
+ ["Idot"]=304,
+ ["Idotaccent"]=304,
+ ["Idotbelow"]=7882,
+ ["Iebrevecyrillic"]=1238,
+ ["Iecyrillic"]=1045,
+ ["Ifraktur"]=8465,
+ ["Igrave"]=204,
+ ["Igravesmall"]=63468,
+ ["Ihookabove"]=7880,
+ ["Iicyrillic"]=1048,
+ ["Iinvertedbreve"]=522,
+ ["Iishortcyrillic"]=1049,
+ ["Imacron"]=298,
+ ["Imacroncyrillic"]=1250,
+ ["Imonospace"]=65321,
+ ["Iniarmenian"]=1339,
+ ["Iocyrillic"]=1025,
+ ["Iogonek"]=302,
+ ["Iota"]=921,
+ ["Iotaafrican"]=406,
+ ["Iotadieresis"]=938,
+ ["Iotatonos"]=906,
+ ["Ismall"]=63337,
+ ["Istroke"]=407,
+ ["Itilde"]=296,
+ ["Itildebelow"]=7724,
+ ["Izhitsacyrillic"]=1140,
+ ["Izhitsadblgravecyrillic"]=1142,
+ ["J"]=74,
+ ["Jaarmenian"]=1345,
+ ["Jcircle"]=9407,
+ ["Jcircumflex"]=308,
+ ["Jecyrillic"]=1032,
+ ["Jheharmenian"]=1355,
+ ["Jmonospace"]=65322,
+ ["Jsmall"]=63338,
+ ["K"]=75,
+ ["KBsquare"]=13189,
+ ["KKsquare"]=13261,
+ ["Kabashkircyrillic"]=1184,
+ ["Kacute"]=7728,
+ ["Kacyrillic"]=1050,
+ ["Kadescendercyrillic"]=1178,
+ ["Kahookcyrillic"]=1219,
+ ["Kappa"]=922,
+ ["Kastrokecyrillic"]=1182,
+ ["Kaverticalstrokecyrillic"]=1180,
+ ["Kcaron"]=488,
+ ["Kcedilla"]=310,
+ ["Kcircle"]=9408,
+ ["Kcommaaccent"]=310,
+ ["Kdotbelow"]=7730,
+ ["Keharmenian"]=1364,
+ ["Kenarmenian"]=1343,
+ ["Khacyrillic"]=1061,
+ ["Kheicoptic"]=998,
+ ["Khook"]=408,
+ ["Kjecyrillic"]=1036,
+ ["Klinebelow"]=7732,
+ ["Kmonospace"]=65323,
+ ["Koppacyrillic"]=1152,
+ ["Koppagreek"]=990,
+ ["Ksicyrillic"]=1134,
+ ["Ksmall"]=63339,
+ ["L"]=76,
+ ["LJ"]=455,
+ ["LL"]=63167,
+ ["Lacute"]=313,
+ ["Lambda"]=923,
+ ["Lcaron"]=317,
+ ["Lcedilla"]=315,
+ ["Lcircle"]=9409,
+ ["Lcircumflexbelow"]=7740,
+ ["Lcommaaccent"]=315,
+ ["Ldot"]=319,
+ ["Ldotaccent"]=319,
+ ["Ldotbelow"]=7734,
+ ["Ldotbelowmacron"]=7736,
+ ["Liwnarmenian"]=1340,
+ ["Lj"]=456,
+ ["Ljecyrillic"]=1033,
+ ["Llinebelow"]=7738,
+ ["Lmonospace"]=65324,
+ ["Lslash"]=321,
+ ["Lslashsmall"]=63225,
+ ["Lsmall"]=63340,
+ ["M"]=77,
+ ["MBsquare"]=13190,
+ ["Macron"]=63184,
+ ["Macronsmall"]=63407,
+ ["Macute"]=7742,
+ ["Mcircle"]=9410,
+ ["Mdotaccent"]=7744,
+ ["Mdotbelow"]=7746,
+ ["Menarmenian"]=1348,
+ ["Mmonospace"]=65325,
+ ["Msmall"]=63341,
+ ["Mturned"]=412,
+ ["Mu"]=924,
+ ["N"]=78,
+ ["NJ"]=458,
+ ["Nacute"]=323,
+ ["Ncaron"]=327,
+ ["Ncedilla"]=325,
+ ["Ncircle"]=9411,
+ ["Ncircumflexbelow"]=7754,
+ ["Ncommaaccent"]=325,
+ ["Ndotaccent"]=7748,
+ ["Ndotbelow"]=7750,
+ ["Nhookleft"]=413,
+ ["Nineroman"]=8552,
+ ["Nj"]=459,
+ ["Njecyrillic"]=1034,
+ ["Nlinebelow"]=7752,
+ ["Nmonospace"]=65326,
+ ["Nowarmenian"]=1350,
+ ["Nsmall"]=63342,
+ ["Ntilde"]=209,
+ ["Ntildesmall"]=63473,
+ ["Nu"]=925,
+ ["O"]=79,
+ ["OE"]=338,
+ ["OEsmall"]=63226,
+ ["Oacute"]=211,
+ ["Oacutesmall"]=63475,
+ ["Obarredcyrillic"]=1256,
+ ["Obarreddieresiscyrillic"]=1258,
+ ["Obreve"]=334,
+ ["Ocaron"]=465,
+ ["Ocenteredtilde"]=415,
+ ["Ocircle"]=9412,
+ ["Ocircumflex"]=212,
+ ["Ocircumflexacute"]=7888,
+ ["Ocircumflexdotbelow"]=7896,
+ ["Ocircumflexgrave"]=7890,
+ ["Ocircumflexhookabove"]=7892,
+ ["Ocircumflexsmall"]=63476,
+ ["Ocircumflextilde"]=7894,
+ ["Ocyrillic"]=1054,
+ ["Odblacute"]=336,
+ ["Odblgrave"]=524,
+ ["Odieresis"]=214,
+ ["Odieresiscyrillic"]=1254,
+ ["Odieresissmall"]=63478,
+ ["Odotbelow"]=7884,
+ ["Ogoneksmall"]=63227,
+ ["Ograve"]=210,
+ ["Ogravesmall"]=63474,
+ ["Oharmenian"]=1365,
+ ["Ohm"]=8486,
+ ["Ohookabove"]=7886,
+ ["Ohorn"]=416,
+ ["Ohornacute"]=7898,
+ ["Ohorndotbelow"]=7906,
+ ["Ohorngrave"]=7900,
+ ["Ohornhookabove"]=7902,
+ ["Ohorntilde"]=7904,
+ ["Ohungarumlaut"]=336,
+ ["Oi"]=418,
+ ["Oinvertedbreve"]=526,
+ ["Omacron"]=332,
+ ["Omacronacute"]=7762,
+ ["Omacrongrave"]=7760,
+ ["Omega"]=8486,
+ ["Omegacyrillic"]=1120,
+ ["Omegagreek"]=937,
+ ["Omegaroundcyrillic"]=1146,
+ ["Omegatitlocyrillic"]=1148,
+ ["Omegatonos"]=911,
+ ["Omicron"]=927,
+ ["Omicrontonos"]=908,
+ ["Omonospace"]=65327,
+ ["Oneroman"]=8544,
+ ["Oogonek"]=490,
+ ["Oogonekmacron"]=492,
+ ["Oopen"]=390,
+ ["Oslash"]=216,
+ ["Oslashacute"]=510,
+ ["Oslashsmall"]=63480,
+ ["Osmall"]=63343,
+ ["Ostrokeacute"]=510,
+ ["Otcyrillic"]=1150,
+ ["Otilde"]=213,
+ ["Otildeacute"]=7756,
+ ["Otildedieresis"]=7758,
+ ["Otildesmall"]=63477,
+ ["P"]=80,
+ ["Pacute"]=7764,
+ ["Pcircle"]=9413,
+ ["Pdotaccent"]=7766,
+ ["Pecyrillic"]=1055,
+ ["Peharmenian"]=1354,
+ ["Pemiddlehookcyrillic"]=1190,
+ ["Phi"]=934,
+ ["Phook"]=420,
+ ["Pi"]=928,
+ ["Piwrarmenian"]=1363,
+ ["Pmonospace"]=65328,
+ ["Psi"]=936,
+ ["Psicyrillic"]=1136,
+ ["Psmall"]=63344,
+ ["Q"]=81,
+ ["Qcircle"]=9414,
+ ["Qmonospace"]=65329,
+ ["Qsmall"]=63345,
+ ["R"]=82,
+ ["Raarmenian"]=1356,
+ ["Racute"]=340,
+ ["Rcaron"]=344,
+ ["Rcedilla"]=342,
+ ["Rcircle"]=9415,
+ ["Rcommaaccent"]=342,
+ ["Rdblgrave"]=528,
+ ["Rdotaccent"]=7768,
+ ["Rdotbelow"]=7770,
+ ["Rdotbelowmacron"]=7772,
+ ["Reharmenian"]=1360,
+ ["Rfraktur"]=8476,
+ ["Rho"]=929,
+ ["Ringsmall"]=63228,
+ ["Rinvertedbreve"]=530,
+ ["Rlinebelow"]=7774,
+ ["Rmonospace"]=65330,
+ ["Rsmall"]=63346,
+ ["Rsmallinverted"]=641,
+ ["Rsmallinvertedsuperior"]=694,
+ ["S"]=83,
+ ["SF010000"]=9484,
+ ["SF020000"]=9492,
+ ["SF030000"]=9488,
+ ["SF040000"]=9496,
+ ["SF050000"]=9532,
+ ["SF060000"]=9516,
+ ["SF070000"]=9524,
+ ["SF080000"]=9500,
+ ["SF090000"]=9508,
+ ["SF100000"]=9472,
+ ["SF110000"]=9474,
+ ["SF190000"]=9569,
+ ["SF200000"]=9570,
+ ["SF210000"]=9558,
+ ["SF220000"]=9557,
+ ["SF230000"]=9571,
+ ["SF240000"]=9553,
+ ["SF250000"]=9559,
+ ["SF260000"]=9565,
+ ["SF270000"]=9564,
+ ["SF280000"]=9563,
+ ["SF360000"]=9566,
+ ["SF370000"]=9567,
+ ["SF380000"]=9562,
+ ["SF390000"]=9556,
+ ["SF400000"]=9577,
+ ["SF410000"]=9574,
+ ["SF420000"]=9568,
+ ["SF430000"]=9552,
+ ["SF440000"]=9580,
+ ["SF450000"]=9575,
+ ["SF460000"]=9576,
+ ["SF470000"]=9572,
+ ["SF480000"]=9573,
+ ["SF490000"]=9561,
+ ["SF500000"]=9560,
+ ["SF510000"]=9554,
+ ["SF520000"]=9555,
+ ["SF530000"]=9579,
+ ["SF540000"]=9578,
+ ["Sacute"]=346,
+ ["Sacutedotaccent"]=7780,
+ ["Sampigreek"]=992,
+ ["Scaron"]=352,
+ ["Scarondotaccent"]=7782,
+ ["Scaronsmall"]=63229,
+ ["Scedilla"]=350,
+ ["Schwa"]=399,
+ ["Schwacyrillic"]=1240,
+ ["Schwadieresiscyrillic"]=1242,
+ ["Scircle"]=9416,
+ ["Scircumflex"]=348,
+ ["Scommaaccent"]=536,
+ ["Sdotaccent"]=7776,
+ ["Sdotbelow"]=7778,
+ ["Sdotbelowdotaccent"]=7784,
+ ["Seharmenian"]=1357,
+ ["Sevenroman"]=8550,
+ ["Shaarmenian"]=1351,
+ ["Shacyrillic"]=1064,
+ ["Shchacyrillic"]=1065,
+ ["Sheicoptic"]=994,
+ ["Shhacyrillic"]=1210,
+ ["Shimacoptic"]=1004,
+ ["Sigma"]=931,
+ ["Sixroman"]=8549,
+ ["Smonospace"]=65331,
+ ["Softsigncyrillic"]=1068,
+ ["Ssmall"]=63347,
+ ["Stigmagreek"]=986,
+ ["T"]=84,
+ ["Tau"]=932,
+ ["Tbar"]=358,
+ ["Tcaron"]=356,
+ ["Tcedilla"]=354,
+ ["Tcircle"]=9417,
+ ["Tcircumflexbelow"]=7792,
+ ["Tcommaaccent"]=354,
+ ["Tdotaccent"]=7786,
+ ["Tdotbelow"]=7788,
+ ["Tecyrillic"]=1058,
+ ["Tedescendercyrillic"]=1196,
+ ["Tenroman"]=8553,
+ ["Tetsecyrillic"]=1204,
+ ["Theta"]=920,
+ ["Thook"]=428,
+ ["Thorn"]=222,
+ ["Thornsmall"]=63486,
+ ["Threeroman"]=8546,
+ ["Tildesmall"]=63230,
+ ["Tiwnarmenian"]=1359,
+ ["Tlinebelow"]=7790,
+ ["Tmonospace"]=65332,
+ ["Toarmenian"]=1337,
+ ["Tonefive"]=444,
+ ["Tonesix"]=388,
+ ["Tonetwo"]=423,
+ ["Tretroflexhook"]=430,
+ ["Tsecyrillic"]=1062,
+ ["Tshecyrillic"]=1035,
+ ["Tsmall"]=63348,
+ ["Twelveroman"]=8555,
+ ["Tworoman"]=8545,
+ ["U"]=85,
+ ["Uacute"]=218,
+ ["Uacutesmall"]=63482,
+ ["Ubreve"]=364,
+ ["Ucaron"]=467,
+ ["Ucircle"]=9418,
+ ["Ucircumflex"]=219,
+ ["Ucircumflexbelow"]=7798,
+ ["Ucircumflexsmall"]=63483,
+ ["Ucyrillic"]=1059,
+ ["Udblacute"]=368,
+ ["Udblgrave"]=532,
+ ["Udieresis"]=220,
+ ["Udieresisacute"]=471,
+ ["Udieresisbelow"]=7794,
+ ["Udieresiscaron"]=473,
+ ["Udieresiscyrillic"]=1264,
+ ["Udieresisgrave"]=475,
+ ["Udieresismacron"]=469,
+ ["Udieresissmall"]=63484,
+ ["Udotbelow"]=7908,
+ ["Ugrave"]=217,
+ ["Ugravesmall"]=63481,
+ ["Uhookabove"]=7910,
+ ["Uhorn"]=431,
+ ["Uhornacute"]=7912,
+ ["Uhorndotbelow"]=7920,
+ ["Uhorngrave"]=7914,
+ ["Uhornhookabove"]=7916,
+ ["Uhorntilde"]=7918,
+ ["Uhungarumlaut"]=368,
+ ["Uhungarumlautcyrillic"]=1266,
+ ["Uinvertedbreve"]=534,
+ ["Ukcyrillic"]=1144,
+ ["Umacron"]=362,
+ ["Umacroncyrillic"]=1262,
+ ["Umacrondieresis"]=7802,
+ ["Umonospace"]=65333,
+ ["Uogonek"]=370,
+ ["Upsilon"]=933,
+ ["Upsilon1"]=978,
+ ["Upsilonacutehooksymbolgreek"]=979,
+ ["Upsilonafrican"]=433,
+ ["Upsilondieresis"]=939,
+ ["Upsilondieresishooksymbolgreek"]=980,
+ ["Upsilonhooksymbol"]=978,
+ ["Upsilontonos"]=910,
+ ["Uring"]=366,
+ ["Ushortcyrillic"]=1038,
+ ["Usmall"]=63349,
+ ["Ustraightcyrillic"]=1198,
+ ["Ustraightstrokecyrillic"]=1200,
+ ["Utilde"]=360,
+ ["Utildeacute"]=7800,
+ ["Utildebelow"]=7796,
+ ["V"]=86,
+ ["Vcircle"]=9419,
+ ["Vdotbelow"]=7806,
+ ["Vecyrillic"]=1042,
+ ["Vewarmenian"]=1358,
+ ["Vhook"]=434,
+ ["Vmonospace"]=65334,
+ ["Voarmenian"]=1352,
+ ["Vsmall"]=63350,
+ ["Vtilde"]=7804,
+ ["W"]=87,
+ ["Wacute"]=7810,
+ ["Wcircle"]=9420,
+ ["Wcircumflex"]=372,
+ ["Wdieresis"]=7812,
+ ["Wdotaccent"]=7814,
+ ["Wdotbelow"]=7816,
+ ["Wgrave"]=7808,
+ ["Wmonospace"]=65335,
+ ["Wsmall"]=63351,
+ ["X"]=88,
+ ["Xcircle"]=9421,
+ ["Xdieresis"]=7820,
+ ["Xdotaccent"]=7818,
+ ["Xeharmenian"]=1341,
+ ["Xi"]=926,
+ ["Xmonospace"]=65336,
+ ["Xsmall"]=63352,
+ ["Y"]=89,
+ ["Yacute"]=221,
+ ["Yacutesmall"]=63485,
+ ["Yatcyrillic"]=1122,
+ ["Ycircle"]=9422,
+ ["Ycircumflex"]=374,
+ ["Ydieresis"]=376,
+ ["Ydieresissmall"]=63487,
+ ["Ydotaccent"]=7822,
+ ["Ydotbelow"]=7924,
+ ["Yericyrillic"]=1067,
+ ["Yerudieresiscyrillic"]=1272,
+ ["Ygrave"]=7922,
+ ["Yhook"]=435,
+ ["Yhookabove"]=7926,
+ ["Yiarmenian"]=1349,
+ ["Yicyrillic"]=1031,
+ ["Yiwnarmenian"]=1362,
+ ["Ymonospace"]=65337,
+ ["Ysmall"]=63353,
+ ["Ytilde"]=7928,
+ ["Yusbigcyrillic"]=1130,
+ ["Yusbigiotifiedcyrillic"]=1132,
+ ["Yuslittlecyrillic"]=1126,
+ ["Yuslittleiotifiedcyrillic"]=1128,
+ ["Z"]=90,
+ ["Zaarmenian"]=1334,
+ ["Zacute"]=377,
+ ["Zcaron"]=381,
+ ["Zcaronsmall"]=63231,
+ ["Zcircle"]=9423,
+ ["Zcircumflex"]=7824,
+ ["Zdot"]=379,
+ ["Zdotaccent"]=379,
+ ["Zdotbelow"]=7826,
+ ["Zecyrillic"]=1047,
+ ["Zedescendercyrillic"]=1176,
+ ["Zedieresiscyrillic"]=1246,
+ ["Zeta"]=918,
+ ["Zhearmenian"]=1338,
+ ["Zhebrevecyrillic"]=1217,
+ ["Zhecyrillic"]=1046,
+ ["Zhedescendercyrillic"]=1174,
+ ["Zhedieresiscyrillic"]=1244,
+ ["Zlinebelow"]=7828,
+ ["Zmonospace"]=65338,
+ ["Zsmall"]=63354,
+ ["Zstroke"]=437,
+ ["a"]=97,
+ ["aabengali"]=2438,
+ ["aacute"]=225,
+ ["aadeva"]=2310,
+ ["aagujarati"]=2694,
+ ["aagurmukhi"]=2566,
+ ["aamatragurmukhi"]=2622,
+ ["aarusquare"]=13059,
+ ["aavowelsignbengali"]=2494,
+ ["aavowelsigndeva"]=2366,
+ ["aavowelsigngujarati"]=2750,
+ ["abbreviationmarkarmenian"]=1375,
+ ["abbreviationsigndeva"]=2416,
+ ["abengali"]=2437,
+ ["abopomofo"]=12570,
+ ["abreve"]=259,
+ ["abreveacute"]=7855,
+ ["abrevecyrillic"]=1233,
+ ["abrevedotbelow"]=7863,
+ ["abrevegrave"]=7857,
+ ["abrevehookabove"]=7859,
+ ["abrevetilde"]=7861,
+ ["acaron"]=462,
+ ["acircle"]=9424,
+ ["acircumflex"]=226,
+ ["acircumflexacute"]=7845,
+ ["acircumflexdotbelow"]=7853,
+ ["acircumflexgrave"]=7847,
+ ["acircumflexhookabove"]=7849,
+ ["acircumflextilde"]=7851,
+ ["acute"]=180,
+ ["acutebelowcmb"]=791,
+ ["acutecmb"]=769,
+ ["acutecomb"]=769,
+ ["acutedeva"]=2388,
+ ["acutelowmod"]=719,
+ ["acutetonecmb"]=833,
+ ["acyrillic"]=1072,
+ ["adblgrave"]=513,
+ ["addakgurmukhi"]=2673,
+ ["adeva"]=2309,
+ ["adieresis"]=228,
+ ["adieresiscyrillic"]=1235,
+ ["adieresismacron"]=479,
+ ["adotbelow"]=7841,
+ ["adotmacron"]=481,
+ ["ae"]=230,
+ ["aeacute"]=509,
+ ["aekorean"]=12624,
+ ["aemacron"]=483,
+ ["afii00208"]=8213,
+ ["afii08941"]=8356,
+ ["afii10017"]=1040,
+ ["afii10018"]=1041,
+ ["afii10019"]=1042,
+ ["afii10020"]=1043,
+ ["afii10021"]=1044,
+ ["afii10022"]=1045,
+ ["afii10023"]=1025,
+ ["afii10024"]=1046,
+ ["afii10025"]=1047,
+ ["afii10026"]=1048,
+ ["afii10027"]=1049,
+ ["afii10028"]=1050,
+ ["afii10029"]=1051,
+ ["afii10030"]=1052,
+ ["afii10031"]=1053,
+ ["afii10032"]=1054,
+ ["afii10033"]=1055,
+ ["afii10034"]=1056,
+ ["afii10035"]=1057,
+ ["afii10036"]=1058,
+ ["afii10037"]=1059,
+ ["afii10038"]=1060,
+ ["afii10039"]=1061,
+ ["afii10040"]=1062,
+ ["afii10041"]=1063,
+ ["afii10042"]=1064,
+ ["afii10043"]=1065,
+ ["afii10044"]=1066,
+ ["afii10045"]=1067,
+ ["afii10046"]=1068,
+ ["afii10047"]=1069,
+ ["afii10048"]=1070,
+ ["afii10049"]=1071,
+ ["afii10050"]=1168,
+ ["afii10051"]=1026,
+ ["afii10052"]=1027,
+ ["afii10053"]=1028,
+ ["afii10054"]=1029,
+ ["afii10055"]=1030,
+ ["afii10056"]=1031,
+ ["afii10057"]=1032,
+ ["afii10058"]=1033,
+ ["afii10059"]=1034,
+ ["afii10060"]=1035,
+ ["afii10061"]=1036,
+ ["afii10062"]=1038,
+ ["afii10063"]=63172,
+ ["afii10064"]=63173,
+ ["afii10065"]=1072,
+ ["afii10066"]=1073,
+ ["afii10067"]=1074,
+ ["afii10068"]=1075,
+ ["afii10069"]=1076,
+ ["afii10070"]=1077,
+ ["afii10071"]=1105,
+ ["afii10072"]=1078,
+ ["afii10073"]=1079,
+ ["afii10074"]=1080,
+ ["afii10075"]=1081,
+ ["afii10076"]=1082,
+ ["afii10077"]=1083,
+ ["afii10078"]=1084,
+ ["afii10079"]=1085,
+ ["afii10080"]=1086,
+ ["afii10081"]=1087,
+ ["afii10082"]=1088,
+ ["afii10083"]=1089,
+ ["afii10084"]=1090,
+ ["afii10085"]=1091,
+ ["afii10086"]=1092,
+ ["afii10087"]=1093,
+ ["afii10088"]=1094,
+ ["afii10089"]=1095,
+ ["afii10090"]=1096,
+ ["afii10091"]=1097,
+ ["afii10092"]=1098,
+ ["afii10093"]=1099,
+ ["afii10094"]=1100,
+ ["afii10095"]=1101,
+ ["afii10096"]=1102,
+ ["afii10097"]=1103,
+ ["afii10098"]=1169,
+ ["afii10099"]=1106,
+ ["afii10100"]=1107,
+ ["afii10101"]=1108,
+ ["afii10102"]=1109,
+ ["afii10103"]=1110,
+ ["afii10104"]=1111,
+ ["afii10105"]=1112,
+ ["afii10106"]=1113,
+ ["afii10107"]=1114,
+ ["afii10108"]=1115,
+ ["afii10109"]=1116,
+ ["afii10110"]=1118,
+ ["afii10145"]=1039,
+ ["afii10146"]=1122,
+ ["afii10147"]=1138,
+ ["afii10148"]=1140,
+ ["afii10192"]=63174,
+ ["afii10193"]=1119,
+ ["afii10194"]=1123,
+ ["afii10195"]=1139,
+ ["afii10196"]=1141,
+ ["afii10831"]=63175,
+ ["afii10832"]=63176,
+ ["afii10846"]=1241,
+ ["afii299"]=8206,
+ ["afii300"]=8207,
+ ["afii301"]=8205,
+ ["afii57381"]=1642,
+ ["afii57388"]=1548,
+ ["afii57392"]=1632,
+ ["afii57393"]=1633,
+ ["afii57394"]=1634,
+ ["afii57395"]=1635,
+ ["afii57396"]=1636,
+ ["afii57397"]=1637,
+ ["afii57398"]=1638,
+ ["afii57399"]=1639,
+ ["afii57400"]=1640,
+ ["afii57401"]=1641,
+ ["afii57403"]=1563,
+ ["afii57407"]=1567,
+ ["afii57409"]=1569,
+ ["afii57410"]=1570,
+ ["afii57411"]=1571,
+ ["afii57412"]=1572,
+ ["afii57413"]=1573,
+ ["afii57414"]=1574,
+ ["afii57415"]=1575,
+ ["afii57416"]=1576,
+ ["afii57417"]=1577,
+ ["afii57418"]=1578,
+ ["afii57419"]=1579,
+ ["afii57420"]=1580,
+ ["afii57421"]=1581,
+ ["afii57422"]=1582,
+ ["afii57423"]=1583,
+ ["afii57424"]=1584,
+ ["afii57425"]=1585,
+ ["afii57426"]=1586,
+ ["afii57427"]=1587,
+ ["afii57428"]=1588,
+ ["afii57429"]=1589,
+ ["afii57430"]=1590,
+ ["afii57431"]=1591,
+ ["afii57432"]=1592,
+ ["afii57433"]=1593,
+ ["afii57434"]=1594,
+ ["afii57440"]=1600,
+ ["afii57441"]=1601,
+ ["afii57442"]=1602,
+ ["afii57443"]=1603,
+ ["afii57444"]=1604,
+ ["afii57445"]=1605,
+ ["afii57446"]=1606,
+ ["afii57448"]=1608,
+ ["afii57449"]=1609,
+ ["afii57450"]=1610,
+ ["afii57451"]=1611,
+ ["afii57452"]=1612,
+ ["afii57453"]=1613,
+ ["afii57454"]=1614,
+ ["afii57455"]=1615,
+ ["afii57456"]=1616,
+ ["afii57457"]=1617,
+ ["afii57458"]=1618,
+ ["afii57470"]=1607,
+ ["afii57505"]=1700,
+ ["afii57506"]=1662,
+ ["afii57507"]=1670,
+ ["afii57508"]=1688,
+ ["afii57509"]=1711,
+ ["afii57511"]=1657,
+ ["afii57512"]=1672,
+ ["afii57513"]=1681,
+ ["afii57514"]=1722,
+ ["afii57519"]=1746,
+ ["afii57534"]=1749,
+ ["afii57636"]=8362,
+ ["afii57645"]=1470,
+ ["afii57658"]=1475,
+ ["afii57664"]=1488,
+ ["afii57665"]=1489,
+ ["afii57666"]=1490,
+ ["afii57667"]=1491,
+ ["afii57668"]=1492,
+ ["afii57669"]=1493,
+ ["afii57670"]=1494,
+ ["afii57671"]=1495,
+ ["afii57672"]=1496,
+ ["afii57673"]=1497,
+ ["afii57674"]=1498,
+ ["afii57675"]=1499,
+ ["afii57676"]=1500,
+ ["afii57677"]=1501,
+ ["afii57678"]=1502,
+ ["afii57679"]=1503,
+ ["afii57680"]=1504,
+ ["afii57681"]=1505,
+ ["afii57682"]=1506,
+ ["afii57683"]=1507,
+ ["afii57684"]=1508,
+ ["afii57685"]=1509,
+ ["afii57686"]=1510,
+ ["afii57687"]=1511,
+ ["afii57688"]=1512,
+ ["afii57689"]=1513,
+ ["afii57690"]=1514,
+ ["afii57694"]=64298,
+ ["afii57695"]=64299,
+ ["afii57700"]=64331,
+ ["afii57705"]=64287,
+ ["afii57716"]=1520,
+ ["afii57717"]=1521,
+ ["afii57718"]=1522,
+ ["afii57723"]=64309,
+ ["afii57793"]=1460,
+ ["afii57794"]=1461,
+ ["afii57795"]=1462,
+ ["afii57796"]=1467,
+ ["afii57797"]=1464,
+ ["afii57798"]=1463,
+ ["afii57799"]=1456,
+ ["afii57800"]=1458,
+ ["afii57801"]=1457,
+ ["afii57802"]=1459,
+ ["afii57803"]=1474,
+ ["afii57804"]=1473,
+ ["afii57806"]=1465,
+ ["afii57807"]=1468,
+ ["afii57839"]=1469,
+ ["afii57841"]=1471,
+ ["afii57842"]=1472,
+ ["afii57929"]=700,
+ ["afii61248"]=8453,
+ ["afii61289"]=8467,
+ ["afii61352"]=8470,
+ ["afii61573"]=8236,
+ ["afii61574"]=8237,
+ ["afii61575"]=8238,
+ ["afii61664"]=8204,
+ ["afii63167"]=1645,
+ ["afii64937"]=701,
+ ["agrave"]=224,
+ ["agujarati"]=2693,
+ ["agurmukhi"]=2565,
+ ["ahiragana"]=12354,
+ ["ahookabove"]=7843,
+ ["aibengali"]=2448,
+ ["aibopomofo"]=12574,
+ ["aideva"]=2320,
+ ["aiecyrillic"]=1237,
+ ["aigujarati"]=2704,
+ ["aigurmukhi"]=2576,
+ ["aimatragurmukhi"]=2632,
+ ["ainarabic"]=1593,
+ ["ainfinalarabic"]=65226,
+ ["aininitialarabic"]=65227,
+ ["ainmedialarabic"]=65228,
+ ["ainvertedbreve"]=515,
+ ["aivowelsignbengali"]=2504,
+ ["aivowelsigndeva"]=2376,
+ ["aivowelsigngujarati"]=2760,
+ ["akatakana"]=12450,
+ ["akatakanahalfwidth"]=65393,
+ ["akorean"]=12623,
+ ["alef"]=1488,
+ ["alefarabic"]=1575,
+ ["alefdageshhebrew"]=64304,
+ ["aleffinalarabic"]=65166,
+ ["alefhamzaabovearabic"]=1571,
+ ["alefhamzaabovefinalarabic"]=65156,
+ ["alefhamzabelowarabic"]=1573,
+ ["alefhamzabelowfinalarabic"]=65160,
+ ["alefhebrew"]=1488,
+ ["aleflamedhebrew"]=64335,
+ ["alefmaddaabovearabic"]=1570,
+ ["alefmaddaabovefinalarabic"]=65154,
+ ["alefmaksuraarabic"]=1609,
+ ["alefmaksurafinalarabic"]=65264,
+ ["alefmaksurainitialarabic"]=65267,
+ ["alefmaksuramedialarabic"]=65268,
+ ["alefpatahhebrew"]=64302,
+ ["alefqamatshebrew"]=64303,
+ ["aleph"]=8501,
+ ["allequal"]=8780,
+ ["alpha"]=945,
+ ["alphatonos"]=940,
+ ["amacron"]=257,
+ ["amonospace"]=65345,
+ ["ampersand"]=38,
+ ["ampersandmonospace"]=65286,
+ ["ampersandsmall"]=63270,
+ ["amsquare"]=13250,
+ ["anbopomofo"]=12578,
+ ["angbopomofo"]=12580,
+ ["angkhankhuthai"]=3674,
+ ["angle"]=8736,
+ ["anglebracketleft"]=12296,
+ ["anglebracketleftvertical"]=65087,
+ ["anglebracketright"]=12297,
+ ["anglebracketrightvertical"]=65088,
+ ["angleleft"]=9001,
+ ["angleright"]=9002,
+ ["angstrom"]=8491,
+ ["anoteleia"]=903,
+ ["anudattadeva"]=2386,
+ ["anusvarabengali"]=2434,
+ ["anusvaradeva"]=2306,
+ ["anusvaragujarati"]=2690,
+ ["aogonek"]=261,
+ ["apaatosquare"]=13056,
+ ["aparen"]=9372,
+ ["apostrophearmenian"]=1370,
+ ["apostrophemod"]=700,
+ ["apple"]=63743,
+ ["approaches"]=8784,
+ ["approxequal"]=8776,
+ ["approxequalorimage"]=8786,
+ ["approximatelyequal"]=8773,
+ ["araeaekorean"]=12686,
+ ["araeakorean"]=12685,
+ ["arc"]=8978,
+ ["arighthalfring"]=7834,
+ ["aring"]=229,
+ ["aringacute"]=507,
+ ["aringbelow"]=7681,
+ ["arrowboth"]=8596,
+ ["arrowdashdown"]=8675,
+ ["arrowdashleft"]=8672,
+ ["arrowdashright"]=8674,
+ ["arrowdashup"]=8673,
+ ["arrowdblboth"]=8660,
+ ["arrowdbldown"]=8659,
+ ["arrowdblleft"]=8656,
+ ["arrowdblright"]=8658,
+ ["arrowdblup"]=8657,
+ ["arrowdown"]=8595,
+ ["arrowdownleft"]=8601,
+ ["arrowdownright"]=8600,
+ ["arrowdownwhite"]=8681,
+ ["arrowheaddownmod"]=709,
+ ["arrowheadleftmod"]=706,
+ ["arrowheadrightmod"]=707,
+ ["arrowheadupmod"]=708,
+ ["arrowhorizex"]=63719,
+ ["arrowleft"]=8592,
+ ["arrowleftdbl"]=8656,
+ ["arrowleftdblstroke"]=8653,
+ ["arrowleftoverright"]=8646,
+ ["arrowleftwhite"]=8678,
+ ["arrowright"]=8594,
+ ["arrowrightdblstroke"]=8655,
+ ["arrowrightheavy"]=10142,
+ ["arrowrightoverleft"]=8644,
+ ["arrowrightwhite"]=8680,
+ ["arrowtableft"]=8676,
+ ["arrowtabright"]=8677,
+ ["arrowup"]=8593,
+ ["arrowupdn"]=8597,
+ ["arrowupdnbse"]=8616,
+ ["arrowupdownbase"]=8616,
+ ["arrowupleft"]=8598,
+ ["arrowupleftofdown"]=8645,
+ ["arrowupright"]=8599,
+ ["arrowupwhite"]=8679,
+ ["arrowvertex"]=63718,
+ ["asciicircum"]=94,
+ ["asciicircummonospace"]=65342,
+ ["asciitilde"]=126,
+ ["asciitildemonospace"]=65374,
+ ["ascript"]=593,
+ ["ascriptturned"]=594,
+ ["asmallhiragana"]=12353,
+ ["asmallkatakana"]=12449,
+ ["asmallkatakanahalfwidth"]=65383,
+ ["asterisk"]=42,
+ ["asteriskaltonearabic"]=1645,
+ ["asteriskarabic"]=1645,
+ ["asteriskmath"]=8727,
+ ["asteriskmonospace"]=65290,
+ ["asterisksmall"]=65121,
+ ["asterism"]=8258,
+ ["asuperior"]=63209,
+ ["asymptoticallyequal"]=8771,
+ ["at"]=64,
+ ["atilde"]=227,
+ ["atmonospace"]=65312,
+ ["atsmall"]=65131,
+ ["aturned"]=592,
+ ["aubengali"]=2452,
+ ["aubopomofo"]=12576,
+ ["audeva"]=2324,
+ ["augujarati"]=2708,
+ ["augurmukhi"]=2580,
+ ["aulengthmarkbengali"]=2519,
+ ["aumatragurmukhi"]=2636,
+ ["auvowelsignbengali"]=2508,
+ ["auvowelsigndeva"]=2380,
+ ["auvowelsigngujarati"]=2764,
+ ["avagrahadeva"]=2365,
+ ["aybarmenian"]=1377,
+ ["ayin"]=1506,
+ ["ayinaltonehebrew"]=64288,
+ ["ayinhebrew"]=1506,
+ ["b"]=98,
+ ["babengali"]=2476,
+ ["backslash"]=92,
+ ["backslashmonospace"]=65340,
+ ["badeva"]=2348,
+ ["bagujarati"]=2732,
+ ["bagurmukhi"]=2604,
+ ["bahiragana"]=12400,
+ ["bahtthai"]=3647,
+ ["bakatakana"]=12496,
+ ["bar"]=124,
+ ["barmonospace"]=65372,
+ ["bbopomofo"]=12549,
+ ["bcircle"]=9425,
+ ["bdotaccent"]=7683,
+ ["bdotbelow"]=7685,
+ ["beamedsixteenthnotes"]=9836,
+ ["because"]=8757,
+ ["becyrillic"]=1073,
+ ["beharabic"]=1576,
+ ["behfinalarabic"]=65168,
+ ["behinitialarabic"]=65169,
+ ["behiragana"]=12409,
+ ["behmedialarabic"]=65170,
+ ["behmeeminitialarabic"]=64671,
+ ["behmeemisolatedarabic"]=64520,
+ ["behnoonfinalarabic"]=64621,
+ ["bekatakana"]=12505,
+ ["benarmenian"]=1378,
+ ["bet"]=1489,
+ ["beta"]=946,
+ ["betasymbolgreek"]=976,
+ ["betdagesh"]=64305,
+ ["betdageshhebrew"]=64305,
+ ["bethebrew"]=1489,
+ ["betrafehebrew"]=64332,
+ ["bhabengali"]=2477,
+ ["bhadeva"]=2349,
+ ["bhagujarati"]=2733,
+ ["bhagurmukhi"]=2605,
+ ["bhook"]=595,
+ ["bihiragana"]=12403,
+ ["bikatakana"]=12499,
+ ["bilabialclick"]=664,
+ ["bindigurmukhi"]=2562,
+ ["birusquare"]=13105,
+ ["blackcircle"]=9679,
+ ["blackdiamond"]=9670,
+ ["blackdownpointingtriangle"]=9660,
+ ["blackleftpointingpointer"]=9668,
+ ["blackleftpointingtriangle"]=9664,
+ ["blacklenticularbracketleft"]=12304,
+ ["blacklenticularbracketleftvertical"]=65083,
+ ["blacklenticularbracketright"]=12305,
+ ["blacklenticularbracketrightvertical"]=65084,
+ ["blacklowerlefttriangle"]=9699,
+ ["blacklowerrighttriangle"]=9698,
+ ["blackrectangle"]=9644,
+ ["blackrightpointingpointer"]=9658,
+ ["blackrightpointingtriangle"]=9654,
+ ["blacksmallsquare"]=9642,
+ ["blacksmilingface"]=9787,
+ ["blacksquare"]=9632,
+ ["blackstar"]=9733,
+ ["blackupperlefttriangle"]=9700,
+ ["blackupperrighttriangle"]=9701,
+ ["blackuppointingsmalltriangle"]=9652,
+ ["blackuppointingtriangle"]=9650,
+ ["blank"]=9251,
+ ["blinebelow"]=7687,
+ ["block"]=9608,
+ ["bmonospace"]=65346,
+ ["bobaimaithai"]=3610,
+ ["bohiragana"]=12412,
+ ["bokatakana"]=12508,
+ ["bparen"]=9373,
+ ["bqsquare"]=13251,
+ ["braceex"]=63732,
+ ["braceleft"]=123,
+ ["braceleftbt"]=63731,
+ ["braceleftmid"]=63730,
+ ["braceleftmonospace"]=65371,
+ ["braceleftsmall"]=65115,
+ ["bracelefttp"]=63729,
+ ["braceleftvertical"]=65079,
+ ["braceright"]=125,
+ ["bracerightbt"]=63742,
+ ["bracerightmid"]=63741,
+ ["bracerightmonospace"]=65373,
+ ["bracerightsmall"]=65116,
+ ["bracerighttp"]=63740,
+ ["bracerightvertical"]=65080,
+ ["bracketleft"]=91,
+ ["bracketleftbt"]=63728,
+ ["bracketleftex"]=63727,
+ ["bracketleftmonospace"]=65339,
+ ["bracketlefttp"]=63726,
+ ["bracketright"]=93,
+ ["bracketrightbt"]=63739,
+ ["bracketrightex"]=63738,
+ ["bracketrightmonospace"]=65341,
+ ["bracketrighttp"]=63737,
+ ["breve"]=728,
+ ["brevebelowcmb"]=814,
+ ["brevecmb"]=774,
+ ["breveinvertedbelowcmb"]=815,
+ ["breveinvertedcmb"]=785,
+ ["breveinverteddoublecmb"]=865,
+ ["bridgebelowcmb"]=810,
+ ["bridgeinvertedbelowcmb"]=826,
+ ["brokenbar"]=166,
+ ["bstroke"]=384,
+ ["bsuperior"]=63210,
+ ["btopbar"]=387,
+ ["buhiragana"]=12406,
+ ["bukatakana"]=12502,
+ ["bullet"]=8226,
+ ["bulletinverse"]=9688,
+ ["bulletoperator"]=8729,
+ ["bullseye"]=9678,
+ ["c"]=99,
+ ["caarmenian"]=1390,
+ ["cabengali"]=2458,
+ ["cacute"]=263,
+ ["cadeva"]=2330,
+ ["cagujarati"]=2714,
+ ["cagurmukhi"]=2586,
+ ["calsquare"]=13192,
+ ["candrabindubengali"]=2433,
+ ["candrabinducmb"]=784,
+ ["candrabindudeva"]=2305,
+ ["candrabindugujarati"]=2689,
+ ["capslock"]=8682,
+ ["careof"]=8453,
+ ["caron"]=711,
+ ["caronbelowcmb"]=812,
+ ["caroncmb"]=780,
+ ["carriagereturn"]=8629,
+ ["cbopomofo"]=12568,
+ ["ccaron"]=269,
+ ["ccedilla"]=231,
+ ["ccedillaacute"]=7689,
+ ["ccircle"]=9426,
+ ["ccircumflex"]=265,
+ ["ccurl"]=597,
+ ["cdot"]=267,
+ ["cdotaccent"]=267,
+ ["cdsquare"]=13253,
+ ["cedilla"]=184,
+ ["cedillacmb"]=807,
+ ["cent"]=162,
+ ["centigrade"]=8451,
+ ["centinferior"]=63199,
+ ["centmonospace"]=65504,
+ ["centoldstyle"]=63394,
+ ["centsuperior"]=63200,
+ ["chaarmenian"]=1401,
+ ["chabengali"]=2459,
+ ["chadeva"]=2331,
+ ["chagujarati"]=2715,
+ ["chagurmukhi"]=2587,
+ ["chbopomofo"]=12564,
+ ["cheabkhasiancyrillic"]=1213,
+ ["checkmark"]=10003,
+ ["checyrillic"]=1095,
+ ["chedescenderabkhasiancyrillic"]=1215,
+ ["chedescendercyrillic"]=1207,
+ ["chedieresiscyrillic"]=1269,
+ ["cheharmenian"]=1395,
+ ["chekhakassiancyrillic"]=1228,
+ ["cheverticalstrokecyrillic"]=1209,
+ ["chi"]=967,
+ ["chieuchacirclekorean"]=12919,
+ ["chieuchaparenkorean"]=12823,
+ ["chieuchcirclekorean"]=12905,
+ ["chieuchkorean"]=12618,
+ ["chieuchparenkorean"]=12809,
+ ["chochangthai"]=3594,
+ ["chochanthai"]=3592,
+ ["chochingthai"]=3593,
+ ["chochoethai"]=3596,
+ ["chook"]=392,
+ ["cieucacirclekorean"]=12918,
+ ["cieucaparenkorean"]=12822,
+ ["cieuccirclekorean"]=12904,
+ ["cieuckorean"]=12616,
+ ["cieucparenkorean"]=12808,
+ ["cieucuparenkorean"]=12828,
+ ["circle"]=9675,
+ ["circlemultiply"]=8855,
+ ["circleot"]=8857,
+ ["circleplus"]=8853,
+ ["circlepostalmark"]=12342,
+ ["circlewithlefthalfblack"]=9680,
+ ["circlewithrighthalfblack"]=9681,
+ ["circumflex"]=710,
+ ["circumflexbelowcmb"]=813,
+ ["circumflexcmb"]=770,
+ ["clear"]=8999,
+ ["clickalveolar"]=450,
+ ["clickdental"]=448,
+ ["clicklateral"]=449,
+ ["clickretroflex"]=451,
+ ["club"]=9827,
+ ["clubsuitblack"]=9827,
+ ["clubsuitwhite"]=9831,
+ ["cmcubedsquare"]=13220,
+ ["cmonospace"]=65347,
+ ["cmsquaredsquare"]=13216,
+ ["coarmenian"]=1409,
+ ["colon"]=58,
+ ["colonmonetary"]=8353,
+ ["colonmonospace"]=65306,
+ ["colonsign"]=8353,
+ ["colonsmall"]=65109,
+ ["colontriangularhalfmod"]=721,
+ ["colontriangularmod"]=720,
+ ["comma"]=44,
+ ["commaabovecmb"]=787,
+ ["commaaboverightcmb"]=789,
+ ["commaaccent"]=63171,
+ ["commaarabic"]=1548,
+ ["commaarmenian"]=1373,
+ ["commainferior"]=63201,
+ ["commamonospace"]=65292,
+ ["commareversedabovecmb"]=788,
+ ["commareversedmod"]=701,
+ ["commasmall"]=65104,
+ ["commasuperior"]=63202,
+ ["commaturnedabovecmb"]=786,
+ ["commaturnedmod"]=699,
+ ["compass"]=9788,
+ ["congruent"]=8773,
+ ["contourintegral"]=8750,
+ ["control"]=8963,
+ ["controlACK"]=6,
+ ["controlBEL"]=7,
+ ["controlBS"]=8,
+ ["controlCAN"]=24,
+ ["controlCR"]=13,
+ ["controlDC1"]=17,
+ ["controlDC2"]=18,
+ ["controlDC3"]=19,
+ ["controlDC4"]=20,
+ ["controlDEL"]=127,
+ ["controlDLE"]=16,
+ ["controlEM"]=25,
+ ["controlENQ"]=5,
+ ["controlEOT"]=4,
+ ["controlESC"]=27,
+ ["controlETB"]=23,
+ ["controlETX"]=3,
+ ["controlFF"]=12,
+ ["controlFS"]=28,
+ ["controlGS"]=29,
+ ["controlHT"]=9,
+ ["controlLF"]=10,
+ ["controlNAK"]=21,
+ ["controlRS"]=30,
+ ["controlSI"]=15,
+ ["controlSO"]=14,
+ ["controlSOT"]=2,
+ ["controlSTX"]=1,
+ ["controlSUB"]=26,
+ ["controlSYN"]=22,
+ ["controlUS"]=31,
+ ["controlVT"]=11,
+ ["copyright"]=169,
+ ["copyrightsans"]=63721,
+ ["copyrightserif"]=63193,
+ ["cornerbracketleft"]=12300,
+ ["cornerbracketlefthalfwidth"]=65378,
+ ["cornerbracketleftvertical"]=65089,
+ ["cornerbracketright"]=12301,
+ ["cornerbracketrighthalfwidth"]=65379,
+ ["cornerbracketrightvertical"]=65090,
+ ["corporationsquare"]=13183,
+ ["cosquare"]=13255,
+ ["coverkgsquare"]=13254,
+ ["cparen"]=9374,
+ ["cruzeiro"]=8354,
+ ["cstretched"]=663,
+ ["curlyand"]=8911,
+ ["curlyor"]=8910,
+ ["currency"]=164,
+ ["cyrBreve"]=63185,
+ ["cyrFlex"]=63186,
+ ["cyrbreve"]=63188,
+ ["cyrflex"]=63189,
+ ["d"]=100,
+ ["daarmenian"]=1380,
+ ["dabengali"]=2470,
+ ["dadarabic"]=1590,
+ ["dadeva"]=2342,
+ ["dadfinalarabic"]=65214,
+ ["dadinitialarabic"]=65215,
+ ["dadmedialarabic"]=65216,
+ ["dagesh"]=1468,
+ ["dageshhebrew"]=1468,
+ ["dagger"]=8224,
+ ["daggerdbl"]=8225,
+ ["dagujarati"]=2726,
+ ["dagurmukhi"]=2598,
+ ["dahiragana"]=12384,
+ ["dakatakana"]=12480,
+ ["dalarabic"]=1583,
+ ["dalet"]=1491,
+ ["daletdagesh"]=64307,
+ ["daletdageshhebrew"]=64307,
+ ["dalethatafpatah"]=1491,
+ ["dalethatafpatahhebrew"]=1491,
+ ["dalethatafsegol"]=1491,
+ ["dalethatafsegolhebrew"]=1491,
+ ["dalethebrew"]=1491,
+ ["dalethiriq"]=1491,
+ ["dalethiriqhebrew"]=1491,
+ ["daletholam"]=1491,
+ ["daletholamhebrew"]=1491,
+ ["daletpatah"]=1491,
+ ["daletpatahhebrew"]=1491,
+ ["daletqamats"]=1491,
+ ["daletqamatshebrew"]=1491,
+ ["daletqubuts"]=1491,
+ ["daletqubutshebrew"]=1491,
+ ["daletsegol"]=1491,
+ ["daletsegolhebrew"]=1491,
+ ["daletsheva"]=1491,
+ ["daletshevahebrew"]=1491,
+ ["dalettsere"]=1491,
+ ["dalettserehebrew"]=1491,
+ ["dalfinalarabic"]=65194,
+ ["dammaarabic"]=1615,
+ ["dammalowarabic"]=1615,
+ ["dammatanaltonearabic"]=1612,
+ ["dammatanarabic"]=1612,
+ ["danda"]=2404,
+ ["dargahebrew"]=1447,
+ ["dargalefthebrew"]=1447,
+ ["dasiapneumatacyrilliccmb"]=1157,
+ ["dblGrave"]=63187,
+ ["dblanglebracketleft"]=12298,
+ ["dblanglebracketleftvertical"]=65085,
+ ["dblanglebracketright"]=12299,
+ ["dblanglebracketrightvertical"]=65086,
+ ["dblarchinvertedbelowcmb"]=811,
+ ["dblarrowleft"]=8660,
+ ["dblarrowright"]=8658,
+ ["dbldanda"]=2405,
+ ["dblgrave"]=63190,
+ ["dblgravecmb"]=783,
+ ["dblintegral"]=8748,
+ ["dbllowline"]=8215,
+ ["dbllowlinecmb"]=819,
+ ["dbloverlinecmb"]=831,
+ ["dblprimemod"]=698,
+ ["dblverticalbar"]=8214,
+ ["dblverticallineabovecmb"]=782,
+ ["dbopomofo"]=12553,
+ ["dbsquare"]=13256,
+ ["dcaron"]=271,
+ ["dcedilla"]=7697,
+ ["dcircle"]=9427,
+ ["dcircumflexbelow"]=7699,
+ ["dcroat"]=273,
+ ["ddabengali"]=2465,
+ ["ddadeva"]=2337,
+ ["ddagujarati"]=2721,
+ ["ddagurmukhi"]=2593,
+ ["ddalarabic"]=1672,
+ ["ddalfinalarabic"]=64393,
+ ["dddhadeva"]=2396,
+ ["ddhabengali"]=2466,
+ ["ddhadeva"]=2338,
+ ["ddhagujarati"]=2722,
+ ["ddhagurmukhi"]=2594,
+ ["ddotaccent"]=7691,
+ ["ddotbelow"]=7693,
+ ["decimalseparatorarabic"]=1643,
+ ["decimalseparatorpersian"]=1643,
+ ["decyrillic"]=1076,
+ ["degree"]=176,
+ ["dehihebrew"]=1453,
+ ["dehiragana"]=12391,
+ ["deicoptic"]=1007,
+ ["dekatakana"]=12487,
+ ["deleteleft"]=9003,
+ ["deleteright"]=8998,
+ ["delta"]=948,
+ ["deltaturned"]=397,
+ ["denominatorminusonenumeratorbengali"]=2552,
+ ["dezh"]=676,
+ ["dhabengali"]=2471,
+ ["dhadeva"]=2343,
+ ["dhagujarati"]=2727,
+ ["dhagurmukhi"]=2599,
+ ["dhook"]=599,
+ ["dialytikatonos"]=901,
+ ["dialytikatonoscmb"]=836,
+ ["diamond"]=9830,
+ ["diamondsuitwhite"]=9826,
+ ["dieresis"]=168,
+ ["dieresisacute"]=63191,
+ ["dieresisbelowcmb"]=804,
+ ["dieresiscmb"]=776,
+ ["dieresisgrave"]=63192,
+ ["dieresistonos"]=901,
+ ["dihiragana"]=12386,
+ ["dikatakana"]=12482,
+ ["dittomark"]=12291,
+ ["divide"]=247,
+ ["divides"]=8739,
+ ["divisionslash"]=8725,
+ ["djecyrillic"]=1106,
+ ["dkshade"]=9619,
+ ["dlinebelow"]=7695,
+ ["dlsquare"]=13207,
+ ["dmacron"]=273,
+ ["dmonospace"]=65348,
+ ["dnblock"]=9604,
+ ["dochadathai"]=3598,
+ ["dodekthai"]=3604,
+ ["dohiragana"]=12393,
+ ["dokatakana"]=12489,
+ ["dollar"]=36,
+ ["dollarinferior"]=63203,
+ ["dollarmonospace"]=65284,
+ ["dollaroldstyle"]=63268,
+ ["dollarsmall"]=65129,
+ ["dollarsuperior"]=63204,
+ ["dong"]=8363,
+ ["dorusquare"]=13094,
+ ["dotaccent"]=729,
+ ["dotaccentcmb"]=775,
+ ["dotbelowcmb"]=803,
+ ["dotbelowcomb"]=803,
+ ["dotkatakana"]=12539,
+ ["dotlessi"]=305,
+ ["dotlessj"]=63166,
+ ["dotlessjstrokehook"]=644,
+ ["dotmath"]=8901,
+ ["dottedcircle"]=9676,
+ ["doubleyodpatah"]=64287,
+ ["doubleyodpatahhebrew"]=64287,
+ ["downtackbelowcmb"]=798,
+ ["downtackmod"]=725,
+ ["dparen"]=9375,
+ ["dsuperior"]=63211,
+ ["dtail"]=598,
+ ["dtopbar"]=396,
+ ["duhiragana"]=12389,
+ ["dukatakana"]=12485,
+ ["dz"]=499,
+ ["dzaltone"]=675,
+ ["dzcaron"]=454,
+ ["dzcurl"]=677,
+ ["dzeabkhasiancyrillic"]=1249,
+ ["dzecyrillic"]=1109,
+ ["dzhecyrillic"]=1119,
+ ["e"]=101,
+ ["eacute"]=233,
+ ["earth"]=9793,
+ ["ebengali"]=2447,
+ ["ebopomofo"]=12572,
+ ["ebreve"]=277,
+ ["ecandradeva"]=2317,
+ ["ecandragujarati"]=2701,
+ ["ecandravowelsigndeva"]=2373,
+ ["ecandravowelsigngujarati"]=2757,
+ ["ecaron"]=283,
+ ["ecedillabreve"]=7709,
+ ["echarmenian"]=1381,
+ ["echyiwnarmenian"]=1415,
+ ["ecircle"]=9428,
+ ["ecircumflex"]=234,
+ ["ecircumflexacute"]=7871,
+ ["ecircumflexbelow"]=7705,
+ ["ecircumflexdotbelow"]=7879,
+ ["ecircumflexgrave"]=7873,
+ ["ecircumflexhookabove"]=7875,
+ ["ecircumflextilde"]=7877,
+ ["ecyrillic"]=1108,
+ ["edblgrave"]=517,
+ ["edeva"]=2319,
+ ["edieresis"]=235,
+ ["edot"]=279,
+ ["edotaccent"]=279,
+ ["edotbelow"]=7865,
+ ["eegurmukhi"]=2575,
+ ["eematragurmukhi"]=2631,
+ ["efcyrillic"]=1092,
+ ["egrave"]=232,
+ ["egujarati"]=2703,
+ ["eharmenian"]=1383,
+ ["ehbopomofo"]=12573,
+ ["ehiragana"]=12360,
+ ["ehookabove"]=7867,
+ ["eibopomofo"]=12575,
+ ["eight"]=56,
+ ["eightarabic"]=1640,
+ ["eightbengali"]=2542,
+ ["eightcircle"]=9319,
+ ["eightcircleinversesansserif"]=10129,
+ ["eightdeva"]=2414,
+ ["eighteencircle"]=9329,
+ ["eighteenparen"]=9349,
+ ["eighteenperiod"]=9369,
+ ["eightgujarati"]=2798,
+ ["eightgurmukhi"]=2670,
+ ["eighthackarabic"]=1640,
+ ["eighthangzhou"]=12328,
+ ["eighthnotebeamed"]=9835,
+ ["eightideographicparen"]=12839,
+ ["eightinferior"]=8328,
+ ["eightmonospace"]=65304,
+ ["eightoldstyle"]=63288,
+ ["eightparen"]=9339,
+ ["eightperiod"]=9359,
+ ["eightpersian"]=1784,
+ ["eightroman"]=8567,
+ ["eightsuperior"]=8312,
+ ["eightthai"]=3672,
+ ["einvertedbreve"]=519,
+ ["eiotifiedcyrillic"]=1125,
+ ["ekatakana"]=12456,
+ ["ekatakanahalfwidth"]=65396,
+ ["ekonkargurmukhi"]=2676,
+ ["ekorean"]=12628,
+ ["elcyrillic"]=1083,
+ ["element"]=8712,
+ ["elevencircle"]=9322,
+ ["elevenparen"]=9342,
+ ["elevenperiod"]=9362,
+ ["elevenroman"]=8570,
+ ["ellipsis"]=8230,
+ ["ellipsisvertical"]=8942,
+ ["emacron"]=275,
+ ["emacronacute"]=7703,
+ ["emacrongrave"]=7701,
+ ["emcyrillic"]=1084,
+ ["emdash"]=8212,
+ ["emdashvertical"]=65073,
+ ["emonospace"]=65349,
+ ["emphasismarkarmenian"]=1371,
+ ["emptyset"]=8709,
+ ["enbopomofo"]=12579,
+ ["encyrillic"]=1085,
+ ["endash"]=8211,
+ ["endashvertical"]=65074,
+ ["endescendercyrillic"]=1187,
+ ["eng"]=331,
+ ["engbopomofo"]=12581,
+ ["enghecyrillic"]=1189,
+ ["enhookcyrillic"]=1224,
+ ["enspace"]=8194,
+ ["eogonek"]=281,
+ ["eokorean"]=12627,
+ ["eopen"]=603,
+ ["eopenclosed"]=666,
+ ["eopenreversed"]=604,
+ ["eopenreversedclosed"]=606,
+ ["eopenreversedhook"]=605,
+ ["eparen"]=9376,
+ ["epsilon"]=949,
+ ["epsilontonos"]=941,
+ ["equal"]=61,
+ ["equalmonospace"]=65309,
+ ["equalsmall"]=65126,
+ ["equalsuperior"]=8316,
+ ["equivalence"]=8801,
+ ["erbopomofo"]=12582,
+ ["ercyrillic"]=1088,
+ ["ereversed"]=600,
+ ["ereversedcyrillic"]=1101,
+ ["escyrillic"]=1089,
+ ["esdescendercyrillic"]=1195,
+ ["esh"]=643,
+ ["eshcurl"]=646,
+ ["eshortdeva"]=2318,
+ ["eshortvowelsigndeva"]=2374,
+ ["eshreversedloop"]=426,
+ ["eshsquatreversed"]=645,
+ ["esmallhiragana"]=12359,
+ ["esmallkatakana"]=12455,
+ ["esmallkatakanahalfwidth"]=65386,
+ ["estimated"]=8494,
+ ["esuperior"]=63212,
+ ["eta"]=951,
+ ["etarmenian"]=1384,
+ ["etatonos"]=942,
+ ["eth"]=240,
+ ["etilde"]=7869,
+ ["etildebelow"]=7707,
+ ["etnahtafoukhhebrew"]=1425,
+ ["etnahtafoukhlefthebrew"]=1425,
+ ["etnahtahebrew"]=1425,
+ ["etnahtalefthebrew"]=1425,
+ ["eturned"]=477,
+ ["eukorean"]=12641,
+ ["euro"]=8364,
+ ["evowelsignbengali"]=2503,
+ ["evowelsigndeva"]=2375,
+ ["evowelsigngujarati"]=2759,
+ ["exclam"]=33,
+ ["exclamarmenian"]=1372,
+ ["exclamdbl"]=8252,
+ ["exclamdown"]=161,
+ ["exclamdownsmall"]=63393,
+ ["exclammonospace"]=65281,
+ ["exclamsmall"]=63265,
+ ["existential"]=8707,
+ ["ezh"]=658,
+ ["ezhcaron"]=495,
+ ["ezhcurl"]=659,
+ ["ezhreversed"]=441,
+ ["ezhtail"]=442,
+ ["f"]=102,
+ ["fadeva"]=2398,
+ ["fagurmukhi"]=2654,
+ ["fahrenheit"]=8457,
+ ["fathaarabic"]=1614,
+ ["fathalowarabic"]=1614,
+ ["fathatanarabic"]=1611,
+ ["fbopomofo"]=12552,
+ ["fcircle"]=9429,
+ ["fdotaccent"]=7711,
+ ["feharabic"]=1601,
+ ["feharmenian"]=1414,
+ ["fehfinalarabic"]=65234,
+ ["fehinitialarabic"]=65235,
+ ["fehmedialarabic"]=65236,
+ ["feicoptic"]=997,
+ ["female"]=9792,
+ ["ff"]=64256,
+ ["ffi"]=64259,
+ ["ffl"]=64260,
+ ["fi"]=64257,
+ ["fifteencircle"]=9326,
+ ["fifteenparen"]=9346,
+ ["fifteenperiod"]=9366,
+ ["figuredash"]=8210,
+ ["filledbox"]=9632,
+ ["filledrect"]=9644,
+ ["finalkaf"]=1498,
+ ["finalkafdagesh"]=64314,
+ ["finalkafdageshhebrew"]=64314,
+ ["finalkafhebrew"]=1498,
+ ["finalkafqamats"]=1498,
+ ["finalkafqamatshebrew"]=1498,
+ ["finalkafsheva"]=1498,
+ ["finalkafshevahebrew"]=1498,
+ ["finalmem"]=1501,
+ ["finalmemhebrew"]=1501,
+ ["finalnun"]=1503,
+ ["finalnunhebrew"]=1503,
+ ["finalpe"]=1507,
+ ["finalpehebrew"]=1507,
+ ["finaltsadi"]=1509,
+ ["finaltsadihebrew"]=1509,
+ ["firsttonechinese"]=713,
+ ["fisheye"]=9673,
+ ["fitacyrillic"]=1139,
+ ["five"]=53,
+ ["fivearabic"]=1637,
+ ["fivebengali"]=2539,
+ ["fivecircle"]=9316,
+ ["fivecircleinversesansserif"]=10126,
+ ["fivedeva"]=2411,
+ ["fiveeighths"]=8541,
+ ["fivegujarati"]=2795,
+ ["fivegurmukhi"]=2667,
+ ["fivehackarabic"]=1637,
+ ["fivehangzhou"]=12325,
+ ["fiveideographicparen"]=12836,
+ ["fiveinferior"]=8325,
+ ["fivemonospace"]=65301,
+ ["fiveoldstyle"]=63285,
+ ["fiveparen"]=9336,
+ ["fiveperiod"]=9356,
+ ["fivepersian"]=1781,
+ ["fiveroman"]=8564,
+ ["fivesuperior"]=8309,
+ ["fivethai"]=3669,
+ ["fl"]=64258,
+ ["florin"]=402,
+ ["fmonospace"]=65350,
+ ["fmsquare"]=13209,
+ ["fofanthai"]=3615,
+ ["fofathai"]=3613,
+ ["fongmanthai"]=3663,
+ ["forall"]=8704,
+ ["four"]=52,
+ ["fourarabic"]=1636,
+ ["fourbengali"]=2538,
+ ["fourcircle"]=9315,
+ ["fourcircleinversesansserif"]=10125,
+ ["fourdeva"]=2410,
+ ["fourgujarati"]=2794,
+ ["fourgurmukhi"]=2666,
+ ["fourhackarabic"]=1636,
+ ["fourhangzhou"]=12324,
+ ["fourideographicparen"]=12835,
+ ["fourinferior"]=8324,
+ ["fourmonospace"]=65300,
+ ["fournumeratorbengali"]=2551,
+ ["fouroldstyle"]=63284,
+ ["fourparen"]=9335,
+ ["fourperiod"]=9355,
+ ["fourpersian"]=1780,
+ ["fourroman"]=8563,
+ ["foursuperior"]=8308,
+ ["fourteencircle"]=9325,
+ ["fourteenparen"]=9345,
+ ["fourteenperiod"]=9365,
+ ["fourthai"]=3668,
+ ["fourthtonechinese"]=715,
+ ["fparen"]=9377,
+ ["fraction"]=8260,
+ ["franc"]=8355,
+ ["g"]=103,
+ ["gabengali"]=2455,
+ ["gacute"]=501,
+ ["gadeva"]=2327,
+ ["gafarabic"]=1711,
+ ["gaffinalarabic"]=64403,
+ ["gafinitialarabic"]=64404,
+ ["gafmedialarabic"]=64405,
+ ["gagujarati"]=2711,
+ ["gagurmukhi"]=2583,
+ ["gahiragana"]=12364,
+ ["gakatakana"]=12460,
+ ["gamma"]=947,
+ ["gammalatinsmall"]=611,
+ ["gammasuperior"]=736,
+ ["gangiacoptic"]=1003,
+ ["gbopomofo"]=12557,
+ ["gbreve"]=287,
+ ["gcaron"]=487,
+ ["gcedilla"]=291,
+ ["gcircle"]=9430,
+ ["gcircumflex"]=285,
+ ["gcommaaccent"]=291,
+ ["gdot"]=289,
+ ["gdotaccent"]=289,
+ ["gecyrillic"]=1075,
+ ["gehiragana"]=12370,
+ ["gekatakana"]=12466,
+ ["geometricallyequal"]=8785,
+ ["gereshaccenthebrew"]=1436,
+ ["gereshhebrew"]=1523,
+ ["gereshmuqdamhebrew"]=1437,
+ ["germandbls"]=223,
+ ["gershayimaccenthebrew"]=1438,
+ ["gershayimhebrew"]=1524,
+ ["getamark"]=12307,
+ ["ghabengali"]=2456,
+ ["ghadarmenian"]=1394,
+ ["ghadeva"]=2328,
+ ["ghagujarati"]=2712,
+ ["ghagurmukhi"]=2584,
+ ["ghainarabic"]=1594,
+ ["ghainfinalarabic"]=65230,
+ ["ghaininitialarabic"]=65231,
+ ["ghainmedialarabic"]=65232,
+ ["ghemiddlehookcyrillic"]=1173,
+ ["ghestrokecyrillic"]=1171,
+ ["gheupturncyrillic"]=1169,
+ ["ghhadeva"]=2394,
+ ["ghhagurmukhi"]=2650,
+ ["ghook"]=608,
+ ["ghzsquare"]=13203,
+ ["gihiragana"]=12366,
+ ["gikatakana"]=12462,
+ ["gimarmenian"]=1379,
+ ["gimel"]=1490,
+ ["gimeldagesh"]=64306,
+ ["gimeldageshhebrew"]=64306,
+ ["gimelhebrew"]=1490,
+ ["gjecyrillic"]=1107,
+ ["glottalinvertedstroke"]=446,
+ ["glottalstop"]=660,
+ ["glottalstopinverted"]=662,
+ ["glottalstopmod"]=704,
+ ["glottalstopreversed"]=661,
+ ["glottalstopreversedmod"]=705,
+ ["glottalstopreversedsuperior"]=740,
+ ["glottalstopstroke"]=673,
+ ["glottalstopstrokereversed"]=674,
+ ["gmacron"]=7713,
+ ["gmonospace"]=65351,
+ ["gohiragana"]=12372,
+ ["gokatakana"]=12468,
+ ["gparen"]=9378,
+ ["gpasquare"]=13228,
+ ["gradient"]=8711,
+ ["grave"]=96,
+ ["gravebelowcmb"]=790,
+ ["gravecmb"]=768,
+ ["gravecomb"]=768,
+ ["gravedeva"]=2387,
+ ["gravelowmod"]=718,
+ ["gravemonospace"]=65344,
+ ["gravetonecmb"]=832,
+ ["greater"]=62,
+ ["greaterequal"]=8805,
+ ["greaterequalorless"]=8923,
+ ["greatermonospace"]=65310,
+ ["greaterorequivalent"]=8819,
+ ["greaterorless"]=8823,
+ ["greateroverequal"]=8807,
+ ["greatersmall"]=65125,
+ ["gscript"]=609,
+ ["gstroke"]=485,
+ ["guhiragana"]=12368,
+ ["guillemotleft"]=171,
+ ["guillemotright"]=187,
+ ["guilsinglleft"]=8249,
+ ["guilsinglright"]=8250,
+ ["gukatakana"]=12464,
+ ["guramusquare"]=13080,
+ ["gysquare"]=13257,
+ ["h"]=104,
+ ["haabkhasiancyrillic"]=1193,
+ ["haaltonearabic"]=1729,
+ ["habengali"]=2489,
+ ["hadescendercyrillic"]=1203,
+ ["hadeva"]=2361,
+ ["hagujarati"]=2745,
+ ["hagurmukhi"]=2617,
+ ["haharabic"]=1581,
+ ["hahfinalarabic"]=65186,
+ ["hahinitialarabic"]=65187,
+ ["hahiragana"]=12399,
+ ["hahmedialarabic"]=65188,
+ ["haitusquare"]=13098,
+ ["hakatakana"]=12495,
+ ["hakatakanahalfwidth"]=65418,
+ ["halantgurmukhi"]=2637,
+ ["hamzaarabic"]=1569,
+ ["hamzadammaarabic"]=1569,
+ ["hamzadammatanarabic"]=1569,
+ ["hamzafathaarabic"]=1569,
+ ["hamzafathatanarabic"]=1569,
+ ["hamzalowarabic"]=1569,
+ ["hamzalowkasraarabic"]=1569,
+ ["hamzalowkasratanarabic"]=1569,
+ ["hamzasukunarabic"]=1569,
+ ["hangulfiller"]=12644,
+ ["hardsigncyrillic"]=1098,
+ ["harpoonleftbarbup"]=8636,
+ ["harpoonrightbarbup"]=8640,
+ ["hasquare"]=13258,
+ ["hatafpatah"]=1458,
+ ["hatafpatah16"]=1458,
+ ["hatafpatah23"]=1458,
+ ["hatafpatah2f"]=1458,
+ ["hatafpatahhebrew"]=1458,
+ ["hatafpatahnarrowhebrew"]=1458,
+ ["hatafpatahquarterhebrew"]=1458,
+ ["hatafpatahwidehebrew"]=1458,
+ ["hatafqamats"]=1459,
+ ["hatafqamats1b"]=1459,
+ ["hatafqamats28"]=1459,
+ ["hatafqamats34"]=1459,
+ ["hatafqamatshebrew"]=1459,
+ ["hatafqamatsnarrowhebrew"]=1459,
+ ["hatafqamatsquarterhebrew"]=1459,
+ ["hatafqamatswidehebrew"]=1459,
+ ["hatafsegol"]=1457,
+ ["hatafsegol17"]=1457,
+ ["hatafsegol24"]=1457,
+ ["hatafsegol30"]=1457,
+ ["hatafsegolhebrew"]=1457,
+ ["hatafsegolnarrowhebrew"]=1457,
+ ["hatafsegolquarterhebrew"]=1457,
+ ["hatafsegolwidehebrew"]=1457,
+ ["hbar"]=295,
+ ["hbopomofo"]=12559,
+ ["hbrevebelow"]=7723,
+ ["hcedilla"]=7721,
+ ["hcircle"]=9431,
+ ["hcircumflex"]=293,
+ ["hdieresis"]=7719,
+ ["hdotaccent"]=7715,
+ ["hdotbelow"]=7717,
+ ["he"]=1492,
+ ["heart"]=9829,
+ ["heartsuitblack"]=9829,
+ ["heartsuitwhite"]=9825,
+ ["hedagesh"]=64308,
+ ["hedageshhebrew"]=64308,
+ ["hehaltonearabic"]=1729,
+ ["heharabic"]=1607,
+ ["hehebrew"]=1492,
+ ["hehfinalaltonearabic"]=64423,
+ ["hehfinalalttwoarabic"]=65258,
+ ["hehfinalarabic"]=65258,
+ ["hehhamzaabovefinalarabic"]=64421,
+ ["hehhamzaaboveisolatedarabic"]=64420,
+ ["hehinitialaltonearabic"]=64424,
+ ["hehinitialarabic"]=65259,
+ ["hehiragana"]=12408,
+ ["hehmedialaltonearabic"]=64425,
+ ["hehmedialarabic"]=65260,
+ ["heiseierasquare"]=13179,
+ ["hekatakana"]=12504,
+ ["hekatakanahalfwidth"]=65421,
+ ["hekutaarusquare"]=13110,
+ ["henghook"]=615,
+ ["herutusquare"]=13113,
+ ["het"]=1495,
+ ["hethebrew"]=1495,
+ ["hhook"]=614,
+ ["hhooksuperior"]=689,
+ ["hieuhacirclekorean"]=12923,
+ ["hieuhaparenkorean"]=12827,
+ ["hieuhcirclekorean"]=12909,
+ ["hieuhkorean"]=12622,
+ ["hieuhparenkorean"]=12813,
+ ["hihiragana"]=12402,
+ ["hikatakana"]=12498,
+ ["hikatakanahalfwidth"]=65419,
+ ["hiriq"]=1460,
+ ["hiriq14"]=1460,
+ ["hiriq21"]=1460,
+ ["hiriq2d"]=1460,
+ ["hiriqhebrew"]=1460,
+ ["hiriqnarrowhebrew"]=1460,
+ ["hiriqquarterhebrew"]=1460,
+ ["hiriqwidehebrew"]=1460,
+ ["hlinebelow"]=7830,
+ ["hmonospace"]=65352,
+ ["hoarmenian"]=1392,
+ ["hohipthai"]=3627,
+ ["hohiragana"]=12411,
+ ["hokatakana"]=12507,
+ ["hokatakanahalfwidth"]=65422,
+ ["holam"]=1465,
+ ["holam19"]=1465,
+ ["holam26"]=1465,
+ ["holam32"]=1465,
+ ["holamhebrew"]=1465,
+ ["holamnarrowhebrew"]=1465,
+ ["holamquarterhebrew"]=1465,
+ ["holamwidehebrew"]=1465,
+ ["honokhukthai"]=3630,
+ ["hookabovecomb"]=777,
+ ["hookcmb"]=777,
+ ["hookpalatalizedbelowcmb"]=801,
+ ["hookretroflexbelowcmb"]=802,
+ ["hoonsquare"]=13122,
+ ["horicoptic"]=1001,
+ ["horizontalbar"]=8213,
+ ["horncmb"]=795,
+ ["hotsprings"]=9832,
+ ["house"]=8962,
+ ["hparen"]=9379,
+ ["hsuperior"]=688,
+ ["hturned"]=613,
+ ["huhiragana"]=12405,
+ ["huiitosquare"]=13107,
+ ["hukatakana"]=12501,
+ ["hukatakanahalfwidth"]=65420,
+ ["hungarumlaut"]=733,
+ ["hungarumlautcmb"]=779,
+ ["hv"]=405,
+ ["hyphen"]=45,
+ ["hypheninferior"]=63205,
+ ["hyphenmonospace"]=65293,
+ ["hyphensmall"]=65123,
+ ["hyphensuperior"]=63206,
+ ["hyphentwo"]=8208,
+ ["i"]=105,
+ ["iacute"]=237,
+ ["iacyrillic"]=1103,
+ ["ibengali"]=2439,
+ ["ibopomofo"]=12583,
+ ["ibreve"]=301,
+ ["icaron"]=464,
+ ["icircle"]=9432,
+ ["icircumflex"]=238,
+ ["icyrillic"]=1110,
+ ["idblgrave"]=521,
+ ["ideographearthcircle"]=12943,
+ ["ideographfirecircle"]=12939,
+ ["ideographicallianceparen"]=12863,
+ ["ideographiccallparen"]=12858,
+ ["ideographiccentrecircle"]=12965,
+ ["ideographicclose"]=12294,
+ ["ideographiccomma"]=12289,
+ ["ideographiccommaleft"]=65380,
+ ["ideographiccongratulationparen"]=12855,
+ ["ideographiccorrectcircle"]=12963,
+ ["ideographicearthparen"]=12847,
+ ["ideographicenterpriseparen"]=12861,
+ ["ideographicexcellentcircle"]=12957,
+ ["ideographicfestivalparen"]=12864,
+ ["ideographicfinancialcircle"]=12950,
+ ["ideographicfinancialparen"]=12854,
+ ["ideographicfireparen"]=12843,
+ ["ideographichaveparen"]=12850,
+ ["ideographichighcircle"]=12964,
+ ["ideographiciterationmark"]=12293,
+ ["ideographiclaborcircle"]=12952,
+ ["ideographiclaborparen"]=12856,
+ ["ideographicleftcircle"]=12967,
+ ["ideographiclowcircle"]=12966,
+ ["ideographicmedicinecircle"]=12969,
+ ["ideographicmetalparen"]=12846,
+ ["ideographicmoonparen"]=12842,
+ ["ideographicnameparen"]=12852,
+ ["ideographicperiod"]=12290,
+ ["ideographicprintcircle"]=12958,
+ ["ideographicreachparen"]=12867,
+ ["ideographicrepresentparen"]=12857,
+ ["ideographicresourceparen"]=12862,
+ ["ideographicrightcircle"]=12968,
+ ["ideographicsecretcircle"]=12953,
+ ["ideographicselfparen"]=12866,
+ ["ideographicsocietyparen"]=12851,
+ ["ideographicspace"]=12288,
+ ["ideographicspecialparen"]=12853,
+ ["ideographicstockparen"]=12849,
+ ["ideographicstudyparen"]=12859,
+ ["ideographicsunparen"]=12848,
+ ["ideographicsuperviseparen"]=12860,
+ ["ideographicwaterparen"]=12844,
+ ["ideographicwoodparen"]=12845,
+ ["ideographiczero"]=12295,
+ ["ideographmetalcircle"]=12942,
+ ["ideographmooncircle"]=12938,
+ ["ideographnamecircle"]=12948,
+ ["ideographsuncircle"]=12944,
+ ["ideographwatercircle"]=12940,
+ ["ideographwoodcircle"]=12941,
+ ["ideva"]=2311,
+ ["idieresis"]=239,
+ ["idieresisacute"]=7727,
+ ["idieresiscyrillic"]=1253,
+ ["idotbelow"]=7883,
+ ["iebrevecyrillic"]=1239,
+ ["iecyrillic"]=1077,
+ ["ieungacirclekorean"]=12917,
+ ["ieungaparenkorean"]=12821,
+ ["ieungcirclekorean"]=12903,
+ ["ieungkorean"]=12615,
+ ["ieungparenkorean"]=12807,
+ ["igrave"]=236,
+ ["igujarati"]=2695,
+ ["igurmukhi"]=2567,
+ ["ihiragana"]=12356,
+ ["ihookabove"]=7881,
+ ["iibengali"]=2440,
+ ["iicyrillic"]=1080,
+ ["iideva"]=2312,
+ ["iigujarati"]=2696,
+ ["iigurmukhi"]=2568,
+ ["iimatragurmukhi"]=2624,
+ ["iinvertedbreve"]=523,
+ ["iishortcyrillic"]=1081,
+ ["iivowelsignbengali"]=2496,
+ ["iivowelsigndeva"]=2368,
+ ["iivowelsigngujarati"]=2752,
+ ["ij"]=307,
+ ["ikatakana"]=12452,
+ ["ikatakanahalfwidth"]=65394,
+ ["ikorean"]=12643,
+ ["ilde"]=732,
+ ["iluyhebrew"]=1452,
+ ["imacron"]=299,
+ ["imacroncyrillic"]=1251,
+ ["imageorapproximatelyequal"]=8787,
+ ["imatragurmukhi"]=2623,
+ ["imonospace"]=65353,
+ ["increment"]=8710,
+ ["infinity"]=8734,
+ ["iniarmenian"]=1387,
+ ["integral"]=8747,
+ ["integralbottom"]=8993,
+ ["integralbt"]=8993,
+ ["integralex"]=63733,
+ ["integraltop"]=8992,
+ ["integraltp"]=8992,
+ ["intersection"]=8745,
+ ["intisquare"]=13061,
+ ["invbullet"]=9688,
+ ["invcircle"]=9689,
+ ["invsmileface"]=9787,
+ ["iocyrillic"]=1105,
+ ["iogonek"]=303,
+ ["iota"]=953,
+ ["iotadieresis"]=970,
+ ["iotadieresistonos"]=912,
+ ["iotalatin"]=617,
+ ["iotatonos"]=943,
+ ["iparen"]=9380,
+ ["irigurmukhi"]=2674,
+ ["ismallhiragana"]=12355,
+ ["ismallkatakana"]=12451,
+ ["ismallkatakanahalfwidth"]=65384,
+ ["issharbengali"]=2554,
+ ["istroke"]=616,
+ ["isuperior"]=63213,
+ ["iterationhiragana"]=12445,
+ ["iterationkatakana"]=12541,
+ ["itilde"]=297,
+ ["itildebelow"]=7725,
+ ["iubopomofo"]=12585,
+ ["iucyrillic"]=1102,
+ ["ivowelsignbengali"]=2495,
+ ["ivowelsigndeva"]=2367,
+ ["ivowelsigngujarati"]=2751,
+ ["izhitsacyrillic"]=1141,
+ ["izhitsadblgravecyrillic"]=1143,
+ ["j"]=106,
+ ["jaarmenian"]=1393,
+ ["jabengali"]=2460,
+ ["jadeva"]=2332,
+ ["jagujarati"]=2716,
+ ["jagurmukhi"]=2588,
+ ["jbopomofo"]=12560,
+ ["jcaron"]=496,
+ ["jcircle"]=9433,
+ ["jcircumflex"]=309,
+ ["jcrossedtail"]=669,
+ ["jdotlessstroke"]=607,
+ ["jecyrillic"]=1112,
+ ["jeemarabic"]=1580,
+ ["jeemfinalarabic"]=65182,
+ ["jeeminitialarabic"]=65183,
+ ["jeemmedialarabic"]=65184,
+ ["jeharabic"]=1688,
+ ["jehfinalarabic"]=64395,
+ ["jhabengali"]=2461,
+ ["jhadeva"]=2333,
+ ["jhagujarati"]=2717,
+ ["jhagurmukhi"]=2589,
+ ["jheharmenian"]=1403,
+ ["jis"]=12292,
+ ["jmonospace"]=65354,
+ ["jparen"]=9381,
+ ["jsuperior"]=690,
+ ["k"]=107,
+ ["kabashkircyrillic"]=1185,
+ ["kabengali"]=2453,
+ ["kacute"]=7729,
+ ["kacyrillic"]=1082,
+ ["kadescendercyrillic"]=1179,
+ ["kadeva"]=2325,
+ ["kaf"]=1499,
+ ["kafarabic"]=1603,
+ ["kafdagesh"]=64315,
+ ["kafdageshhebrew"]=64315,
+ ["kaffinalarabic"]=65242,
+ ["kafhebrew"]=1499,
+ ["kafinitialarabic"]=65243,
+ ["kafmedialarabic"]=65244,
+ ["kafrafehebrew"]=64333,
+ ["kagujarati"]=2709,
+ ["kagurmukhi"]=2581,
+ ["kahiragana"]=12363,
+ ["kahookcyrillic"]=1220,
+ ["kakatakana"]=12459,
+ ["kakatakanahalfwidth"]=65398,
+ ["kappa"]=954,
+ ["kappasymbolgreek"]=1008,
+ ["kapyeounmieumkorean"]=12657,
+ ["kapyeounphieuphkorean"]=12676,
+ ["kapyeounpieupkorean"]=12664,
+ ["kapyeounssangpieupkorean"]=12665,
+ ["karoriisquare"]=13069,
+ ["kashidaautoarabic"]=1600,
+ ["kashidaautonosidebearingarabic"]=1600,
+ ["kasmallkatakana"]=12533,
+ ["kasquare"]=13188,
+ ["kasraarabic"]=1616,
+ ["kasratanarabic"]=1613,
+ ["kastrokecyrillic"]=1183,
+ ["katahiraprolongmarkhalfwidth"]=65392,
+ ["kaverticalstrokecyrillic"]=1181,
+ ["kbopomofo"]=12558,
+ ["kcalsquare"]=13193,
+ ["kcaron"]=489,
+ ["kcedilla"]=311,
+ ["kcircle"]=9434,
+ ["kcommaaccent"]=311,
+ ["kdotbelow"]=7731,
+ ["keharmenian"]=1412,
+ ["kehiragana"]=12369,
+ ["kekatakana"]=12465,
+ ["kekatakanahalfwidth"]=65401,
+ ["kenarmenian"]=1391,
+ ["kesmallkatakana"]=12534,
+ ["kgreenlandic"]=312,
+ ["khabengali"]=2454,
+ ["khacyrillic"]=1093,
+ ["khadeva"]=2326,
+ ["khagujarati"]=2710,
+ ["khagurmukhi"]=2582,
+ ["khaharabic"]=1582,
+ ["khahfinalarabic"]=65190,
+ ["khahinitialarabic"]=65191,
+ ["khahmedialarabic"]=65192,
+ ["kheicoptic"]=999,
+ ["khhadeva"]=2393,
+ ["khhagurmukhi"]=2649,
+ ["khieukhacirclekorean"]=12920,
+ ["khieukhaparenkorean"]=12824,
+ ["khieukhcirclekorean"]=12906,
+ ["khieukhkorean"]=12619,
+ ["khieukhparenkorean"]=12810,
+ ["khokhaithai"]=3586,
+ ["khokhonthai"]=3589,
+ ["khokhuatthai"]=3587,
+ ["khokhwaithai"]=3588,
+ ["khomutthai"]=3675,
+ ["khook"]=409,
+ ["khorakhangthai"]=3590,
+ ["khzsquare"]=13201,
+ ["kihiragana"]=12365,
+ ["kikatakana"]=12461,
+ ["kikatakanahalfwidth"]=65399,
+ ["kiroguramusquare"]=13077,
+ ["kiromeetorusquare"]=13078,
+ ["kirosquare"]=13076,
+ ["kiyeokacirclekorean"]=12910,
+ ["kiyeokaparenkorean"]=12814,
+ ["kiyeokcirclekorean"]=12896,
+ ["kiyeokkorean"]=12593,
+ ["kiyeokparenkorean"]=12800,
+ ["kiyeoksioskorean"]=12595,
+ ["kjecyrillic"]=1116,
+ ["klinebelow"]=7733,
+ ["klsquare"]=13208,
+ ["kmcubedsquare"]=13222,
+ ["kmonospace"]=65355,
+ ["kmsquaredsquare"]=13218,
+ ["kohiragana"]=12371,
+ ["kohmsquare"]=13248,
+ ["kokaithai"]=3585,
+ ["kokatakana"]=12467,
+ ["kokatakanahalfwidth"]=65402,
+ ["kooposquare"]=13086,
+ ["koppacyrillic"]=1153,
+ ["koreanstandardsymbol"]=12927,
+ ["koroniscmb"]=835,
+ ["kparen"]=9382,
+ ["kpasquare"]=13226,
+ ["ksicyrillic"]=1135,
+ ["ktsquare"]=13263,
+ ["kturned"]=670,
+ ["kuhiragana"]=12367,
+ ["kukatakana"]=12463,
+ ["kukatakanahalfwidth"]=65400,
+ ["kvsquare"]=13240,
+ ["kwsquare"]=13246,
+ ["l"]=108,
+ ["labengali"]=2482,
+ ["lacute"]=314,
+ ["ladeva"]=2354,
+ ["lagujarati"]=2738,
+ ["lagurmukhi"]=2610,
+ ["lakkhangyaothai"]=3653,
+ ["lamaleffinalarabic"]=65276,
+ ["lamalefhamzaabovefinalarabic"]=65272,
+ ["lamalefhamzaaboveisolatedarabic"]=65271,
+ ["lamalefhamzabelowfinalarabic"]=65274,
+ ["lamalefhamzabelowisolatedarabic"]=65273,
+ ["lamalefisolatedarabic"]=65275,
+ ["lamalefmaddaabovefinalarabic"]=65270,
+ ["lamalefmaddaaboveisolatedarabic"]=65269,
+ ["lamarabic"]=1604,
+ ["lambda"]=955,
+ ["lambdastroke"]=411,
+ ["lamed"]=1500,
+ ["lameddagesh"]=64316,
+ ["lameddageshhebrew"]=64316,
+ ["lamedhebrew"]=1500,
+ ["lamedholam"]=1500,
+ ["lamedholamdagesh"]=1500,
+ ["lamedholamdageshhebrew"]=1500,
+ ["lamedholamhebrew"]=1500,
+ ["lamfinalarabic"]=65246,
+ ["lamhahinitialarabic"]=64714,
+ ["laminitialarabic"]=65247,
+ ["lamjeeminitialarabic"]=64713,
+ ["lamkhahinitialarabic"]=64715,
+ ["lamlamhehisolatedarabic"]=65010,
+ ["lammedialarabic"]=65248,
+ ["lammeemhahinitialarabic"]=64904,
+ ["lammeeminitialarabic"]=64716,
+ ["lammeemjeeminitialarabic"]=65247,
+ ["lammeemkhahinitialarabic"]=65247,
+ ["largecircle"]=9711,
+ ["lbar"]=410,
+ ["lbelt"]=620,
+ ["lbopomofo"]=12556,
+ ["lcaron"]=318,
+ ["lcedilla"]=316,
+ ["lcircle"]=9435,
+ ["lcircumflexbelow"]=7741,
+ ["lcommaaccent"]=316,
+ ["ldot"]=320,
+ ["ldotaccent"]=320,
+ ["ldotbelow"]=7735,
+ ["ldotbelowmacron"]=7737,
+ ["leftangleabovecmb"]=794,
+ ["lefttackbelowcmb"]=792,
+ ["less"]=60,
+ ["lessequal"]=8804,
+ ["lessequalorgreater"]=8922,
+ ["lessmonospace"]=65308,
+ ["lessorequivalent"]=8818,
+ ["lessorgreater"]=8822,
+ ["lessoverequal"]=8806,
+ ["lesssmall"]=65124,
+ ["lezh"]=622,
+ ["lfblock"]=9612,
+ ["lhookretroflex"]=621,
+ ["lira"]=8356,
+ ["liwnarmenian"]=1388,
+ ["lj"]=457,
+ ["ljecyrillic"]=1113,
+ ["ll"]=63168,
+ ["lladeva"]=2355,
+ ["llagujarati"]=2739,
+ ["llinebelow"]=7739,
+ ["llladeva"]=2356,
+ ["llvocalicbengali"]=2529,
+ ["llvocalicdeva"]=2401,
+ ["llvocalicvowelsignbengali"]=2531,
+ ["llvocalicvowelsigndeva"]=2403,
+ ["lmiddletilde"]=619,
+ ["lmonospace"]=65356,
+ ["lmsquare"]=13264,
+ ["lochulathai"]=3628,
+ ["logicaland"]=8743,
+ ["logicalnot"]=172,
+ ["logicalnotreversed"]=8976,
+ ["logicalor"]=8744,
+ ["lolingthai"]=3621,
+ ["longs"]=383,
+ ["lowlinecenterline"]=65102,
+ ["lowlinecmb"]=818,
+ ["lowlinedashed"]=65101,
+ ["lozenge"]=9674,
+ ["lparen"]=9383,
+ ["lslash"]=322,
+ ["lsquare"]=8467,
+ ["lsuperior"]=63214,
+ ["ltshade"]=9617,
+ ["luthai"]=3622,
+ ["lvocalicbengali"]=2444,
+ ["lvocalicdeva"]=2316,
+ ["lvocalicvowelsignbengali"]=2530,
+ ["lvocalicvowelsigndeva"]=2402,
+ ["lxsquare"]=13267,
+ ["m"]=109,
+ ["mabengali"]=2478,
+ ["macron"]=175,
+ ["macronbelowcmb"]=817,
+ ["macroncmb"]=772,
+ ["macronlowmod"]=717,
+ ["macronmonospace"]=65507,
+ ["macute"]=7743,
+ ["madeva"]=2350,
+ ["magujarati"]=2734,
+ ["magurmukhi"]=2606,
+ ["mahapakhhebrew"]=1444,
+ ["mahapakhlefthebrew"]=1444,
+ ["mahiragana"]=12414,
+ ["maichattawalowleftthai"]=63637,
+ ["maichattawalowrightthai"]=63636,
+ ["maichattawathai"]=3659,
+ ["maichattawaupperleftthai"]=63635,
+ ["maieklowleftthai"]=63628,
+ ["maieklowrightthai"]=63627,
+ ["maiekthai"]=3656,
+ ["maiekupperleftthai"]=63626,
+ ["maihanakatleftthai"]=63620,
+ ["maihanakatthai"]=3633,
+ ["maitaikhuleftthai"]=63625,
+ ["maitaikhuthai"]=3655,
+ ["maitholowleftthai"]=63631,
+ ["maitholowrightthai"]=63630,
+ ["maithothai"]=3657,
+ ["maithoupperleftthai"]=63629,
+ ["maitrilowleftthai"]=63634,
+ ["maitrilowrightthai"]=63633,
+ ["maitrithai"]=3658,
+ ["maitriupperleftthai"]=63632,
+ ["maiyamokthai"]=3654,
+ ["makatakana"]=12510,
+ ["makatakanahalfwidth"]=65423,
+ ["male"]=9794,
+ ["mansyonsquare"]=13127,
+ ["maqafhebrew"]=1470,
+ ["mars"]=9794,
+ ["masoracirclehebrew"]=1455,
+ ["masquare"]=13187,
+ ["mbopomofo"]=12551,
+ ["mbsquare"]=13268,
+ ["mcircle"]=9436,
+ ["mcubedsquare"]=13221,
+ ["mdotaccent"]=7745,
+ ["mdotbelow"]=7747,
+ ["meemarabic"]=1605,
+ ["meemfinalarabic"]=65250,
+ ["meeminitialarabic"]=65251,
+ ["meemmedialarabic"]=65252,
+ ["meemmeeminitialarabic"]=64721,
+ ["meemmeemisolatedarabic"]=64584,
+ ["meetorusquare"]=13133,
+ ["mehiragana"]=12417,
+ ["meizierasquare"]=13182,
+ ["mekatakana"]=12513,
+ ["mekatakanahalfwidth"]=65426,
+ ["mem"]=1502,
+ ["memdagesh"]=64318,
+ ["memdageshhebrew"]=64318,
+ ["memhebrew"]=1502,
+ ["menarmenian"]=1396,
+ ["merkhahebrew"]=1445,
+ ["merkhakefulahebrew"]=1446,
+ ["merkhakefulalefthebrew"]=1446,
+ ["merkhalefthebrew"]=1445,
+ ["mhook"]=625,
+ ["mhzsquare"]=13202,
+ ["middledotkatakanahalfwidth"]=65381,
+ ["middot"]=183,
+ ["mieumacirclekorean"]=12914,
+ ["mieumaparenkorean"]=12818,
+ ["mieumcirclekorean"]=12900,
+ ["mieumkorean"]=12609,
+ ["mieumpansioskorean"]=12656,
+ ["mieumparenkorean"]=12804,
+ ["mieumpieupkorean"]=12654,
+ ["mieumsioskorean"]=12655,
+ ["mihiragana"]=12415,
+ ["mikatakana"]=12511,
+ ["mikatakanahalfwidth"]=65424,
+ ["minus"]=8722,
+ ["minusbelowcmb"]=800,
+ ["minuscircle"]=8854,
+ ["minusmod"]=727,
+ ["minusplus"]=8723,
+ ["minute"]=8242,
+ ["miribaarusquare"]=13130,
+ ["mirisquare"]=13129,
+ ["mlonglegturned"]=624,
+ ["mlsquare"]=13206,
+ ["mmcubedsquare"]=13219,
+ ["mmonospace"]=65357,
+ ["mmsquaredsquare"]=13215,
+ ["mohiragana"]=12418,
+ ["mohmsquare"]=13249,
+ ["mokatakana"]=12514,
+ ["mokatakanahalfwidth"]=65427,
+ ["molsquare"]=13270,
+ ["momathai"]=3617,
+ ["moverssquare"]=13223,
+ ["moverssquaredsquare"]=13224,
+ ["mparen"]=9384,
+ ["mpasquare"]=13227,
+ ["mssquare"]=13235,
+ ["msuperior"]=63215,
+ ["mturned"]=623,
+ ["mu"]=181,
+ ["mu1"]=181,
+ ["muasquare"]=13186,
+ ["muchgreater"]=8811,
+ ["muchless"]=8810,
+ ["mufsquare"]=13196,
+ ["mugreek"]=956,
+ ["mugsquare"]=13197,
+ ["muhiragana"]=12416,
+ ["mukatakana"]=12512,
+ ["mukatakanahalfwidth"]=65425,
+ ["mulsquare"]=13205,
+ ["multiply"]=215,
+ ["mumsquare"]=13211,
+ ["munahhebrew"]=1443,
+ ["munahlefthebrew"]=1443,
+ ["musicalnote"]=9834,
+ ["musicalnotedbl"]=9835,
+ ["musicflatsign"]=9837,
+ ["musicsharpsign"]=9839,
+ ["mussquare"]=13234,
+ ["muvsquare"]=13238,
+ ["muwsquare"]=13244,
+ ["mvmegasquare"]=13241,
+ ["mvsquare"]=13239,
+ ["mwmegasquare"]=13247,
+ ["mwsquare"]=13245,
+ ["n"]=110,
+ ["nabengali"]=2472,
+ ["nabla"]=8711,
+ ["nacute"]=324,
+ ["nadeva"]=2344,
+ ["nagujarati"]=2728,
+ ["nagurmukhi"]=2600,
+ ["nahiragana"]=12394,
+ ["nakatakana"]=12490,
+ ["nakatakanahalfwidth"]=65413,
+ ["napostrophe"]=329,
+ ["nasquare"]=13185,
+ ["nbopomofo"]=12555,
+ ["nbspace"]=160,
+ ["ncaron"]=328,
+ ["ncedilla"]=326,
+ ["ncircle"]=9437,
+ ["ncircumflexbelow"]=7755,
+ ["ncommaaccent"]=326,
+ ["ndotaccent"]=7749,
+ ["ndotbelow"]=7751,
+ ["nehiragana"]=12397,
+ ["nekatakana"]=12493,
+ ["nekatakanahalfwidth"]=65416,
+ ["newsheqelsign"]=8362,
+ ["nfsquare"]=13195,
+ ["ngabengali"]=2457,
+ ["ngadeva"]=2329,
+ ["ngagujarati"]=2713,
+ ["ngagurmukhi"]=2585,
+ ["ngonguthai"]=3591,
+ ["nhiragana"]=12435,
+ ["nhookleft"]=626,
+ ["nhookretroflex"]=627,
+ ["nieunacirclekorean"]=12911,
+ ["nieunaparenkorean"]=12815,
+ ["nieuncieuckorean"]=12597,
+ ["nieuncirclekorean"]=12897,
+ ["nieunhieuhkorean"]=12598,
+ ["nieunkorean"]=12596,
+ ["nieunpansioskorean"]=12648,
+ ["nieunparenkorean"]=12801,
+ ["nieunsioskorean"]=12647,
+ ["nieuntikeutkorean"]=12646,
+ ["nihiragana"]=12395,
+ ["nikatakana"]=12491,
+ ["nikatakanahalfwidth"]=65414,
+ ["nikhahitleftthai"]=63641,
+ ["nikhahitthai"]=3661,
+ ["nine"]=57,
+ ["ninearabic"]=1641,
+ ["ninebengali"]=2543,
+ ["ninecircle"]=9320,
+ ["ninecircleinversesansserif"]=10130,
+ ["ninedeva"]=2415,
+ ["ninegujarati"]=2799,
+ ["ninegurmukhi"]=2671,
+ ["ninehackarabic"]=1641,
+ ["ninehangzhou"]=12329,
+ ["nineideographicparen"]=12840,
+ ["nineinferior"]=8329,
+ ["ninemonospace"]=65305,
+ ["nineoldstyle"]=63289,
+ ["nineparen"]=9340,
+ ["nineperiod"]=9360,
+ ["ninepersian"]=1785,
+ ["nineroman"]=8568,
+ ["ninesuperior"]=8313,
+ ["nineteencircle"]=9330,
+ ["nineteenparen"]=9350,
+ ["nineteenperiod"]=9370,
+ ["ninethai"]=3673,
+ ["nj"]=460,
+ ["njecyrillic"]=1114,
+ ["nkatakana"]=12531,
+ ["nkatakanahalfwidth"]=65437,
+ ["nlegrightlong"]=414,
+ ["nlinebelow"]=7753,
+ ["nmonospace"]=65358,
+ ["nmsquare"]=13210,
+ ["nnabengali"]=2467,
+ ["nnadeva"]=2339,
+ ["nnagujarati"]=2723,
+ ["nnagurmukhi"]=2595,
+ ["nnnadeva"]=2345,
+ ["nohiragana"]=12398,
+ ["nokatakana"]=12494,
+ ["nokatakanahalfwidth"]=65417,
+ ["nonbreakingspace"]=160,
+ ["nonenthai"]=3603,
+ ["nonuthai"]=3609,
+ ["noonarabic"]=1606,
+ ["noonfinalarabic"]=65254,
+ ["noonghunnaarabic"]=1722,
+ ["noonghunnafinalarabic"]=64415,
+ ["noonhehinitialarabic"]=65255,
+ ["nooninitialarabic"]=65255,
+ ["noonjeeminitialarabic"]=64722,
+ ["noonjeemisolatedarabic"]=64587,
+ ["noonmedialarabic"]=65256,
+ ["noonmeeminitialarabic"]=64725,
+ ["noonmeemisolatedarabic"]=64590,
+ ["noonnoonfinalarabic"]=64653,
+ ["notcontains"]=8716,
+ ["notelement"]=8713,
+ ["notelementof"]=8713,
+ ["notequal"]=8800,
+ ["notgreater"]=8815,
+ ["notgreaternorequal"]=8817,
+ ["notgreaternorless"]=8825,
+ ["notidentical"]=8802,
+ ["notless"]=8814,
+ ["notlessnorequal"]=8816,
+ ["notparallel"]=8742,
+ ["notprecedes"]=8832,
+ ["notsubset"]=8836,
+ ["notsucceeds"]=8833,
+ ["notsuperset"]=8837,
+ ["nowarmenian"]=1398,
+ ["nparen"]=9385,
+ ["nssquare"]=13233,
+ ["nsuperior"]=8319,
+ ["ntilde"]=241,
+ ["nu"]=957,
+ ["nuhiragana"]=12396,
+ ["nukatakana"]=12492,
+ ["nukatakanahalfwidth"]=65415,
+ ["nuktabengali"]=2492,
+ ["nuktadeva"]=2364,
+ ["nuktagujarati"]=2748,
+ ["nuktagurmukhi"]=2620,
+ ["numbersign"]=35,
+ ["numbersignmonospace"]=65283,
+ ["numbersignsmall"]=65119,
+ ["numeralsigngreek"]=884,
+ ["numeralsignlowergreek"]=885,
+ ["numero"]=8470,
+ ["nun"]=1504,
+ ["nundagesh"]=64320,
+ ["nundageshhebrew"]=64320,
+ ["nunhebrew"]=1504,
+ ["nvsquare"]=13237,
+ ["nwsquare"]=13243,
+ ["nyabengali"]=2462,
+ ["nyadeva"]=2334,
+ ["nyagujarati"]=2718,
+ ["nyagurmukhi"]=2590,
+ ["o"]=111,
+ ["oacute"]=243,
+ ["oangthai"]=3629,
+ ["obarred"]=629,
+ ["obarredcyrillic"]=1257,
+ ["obarreddieresiscyrillic"]=1259,
+ ["obengali"]=2451,
+ ["obopomofo"]=12571,
+ ["obreve"]=335,
+ ["ocandradeva"]=2321,
+ ["ocandragujarati"]=2705,
+ ["ocandravowelsigndeva"]=2377,
+ ["ocandravowelsigngujarati"]=2761,
+ ["ocaron"]=466,
+ ["ocircle"]=9438,
+ ["ocircumflex"]=244,
+ ["ocircumflexacute"]=7889,
+ ["ocircumflexdotbelow"]=7897,
+ ["ocircumflexgrave"]=7891,
+ ["ocircumflexhookabove"]=7893,
+ ["ocircumflextilde"]=7895,
+ ["ocyrillic"]=1086,
+ ["odblacute"]=337,
+ ["odblgrave"]=525,
+ ["odeva"]=2323,
+ ["odieresis"]=246,
+ ["odieresiscyrillic"]=1255,
+ ["odotbelow"]=7885,
+ ["oe"]=339,
+ ["oekorean"]=12634,
+ ["ogonek"]=731,
+ ["ogonekcmb"]=808,
+ ["ograve"]=242,
+ ["ogujarati"]=2707,
+ ["oharmenian"]=1413,
+ ["ohiragana"]=12362,
+ ["ohookabove"]=7887,
+ ["ohorn"]=417,
+ ["ohornacute"]=7899,
+ ["ohorndotbelow"]=7907,
+ ["ohorngrave"]=7901,
+ ["ohornhookabove"]=7903,
+ ["ohorntilde"]=7905,
+ ["ohungarumlaut"]=337,
+ ["oi"]=419,
+ ["oinvertedbreve"]=527,
+ ["okatakana"]=12458,
+ ["okatakanahalfwidth"]=65397,
+ ["okorean"]=12631,
+ ["olehebrew"]=1451,
+ ["omacron"]=333,
+ ["omacronacute"]=7763,
+ ["omacrongrave"]=7761,
+ ["omdeva"]=2384,
+ ["omega"]=969,
+ ["omega1"]=982,
+ ["omegacyrillic"]=1121,
+ ["omegalatinclosed"]=631,
+ ["omegaroundcyrillic"]=1147,
+ ["omegatitlocyrillic"]=1149,
+ ["omegatonos"]=974,
+ ["omgujarati"]=2768,
+ ["omicron"]=959,
+ ["omicrontonos"]=972,
+ ["omonospace"]=65359,
+ ["one"]=49,
+ ["onearabic"]=1633,
+ ["onebengali"]=2535,
+ ["onecircle"]=9312,
+ ["onecircleinversesansserif"]=10122,
+ ["onedeva"]=2407,
+ ["onedotenleader"]=8228,
+ ["oneeighth"]=8539,
+ ["onefitted"]=63196,
+ ["onegujarati"]=2791,
+ ["onegurmukhi"]=2663,
+ ["onehackarabic"]=1633,
+ ["onehalf"]=189,
+ ["onehangzhou"]=12321,
+ ["oneideographicparen"]=12832,
+ ["oneinferior"]=8321,
+ ["onemonospace"]=65297,
+ ["onenumeratorbengali"]=2548,
+ ["oneoldstyle"]=63281,
+ ["oneparen"]=9332,
+ ["oneperiod"]=9352,
+ ["onepersian"]=1777,
+ ["onequarter"]=188,
+ ["oneroman"]=8560,
+ ["onesuperior"]=185,
+ ["onethai"]=3665,
+ ["onethird"]=8531,
+ ["oogonek"]=491,
+ ["oogonekmacron"]=493,
+ ["oogurmukhi"]=2579,
+ ["oomatragurmukhi"]=2635,
+ ["oopen"]=596,
+ ["oparen"]=9386,
+ ["openbullet"]=9702,
+ ["option"]=8997,
+ ["ordfeminine"]=170,
+ ["ordmasculine"]=186,
+ ["orthogonal"]=8735,
+ ["oshortdeva"]=2322,
+ ["oshortvowelsigndeva"]=2378,
+ ["oslash"]=248,
+ ["oslashacute"]=511,
+ ["osmallhiragana"]=12361,
+ ["osmallkatakana"]=12457,
+ ["osmallkatakanahalfwidth"]=65387,
+ ["ostrokeacute"]=511,
+ ["osuperior"]=63216,
+ ["otcyrillic"]=1151,
+ ["otilde"]=245,
+ ["otildeacute"]=7757,
+ ["otildedieresis"]=7759,
+ ["oubopomofo"]=12577,
+ ["overline"]=8254,
+ ["overlinecenterline"]=65098,
+ ["overlinecmb"]=773,
+ ["overlinedashed"]=65097,
+ ["overlinedblwavy"]=65100,
+ ["overlinewavy"]=65099,
+ ["overscore"]=175,
+ ["ovowelsignbengali"]=2507,
+ ["ovowelsigndeva"]=2379,
+ ["ovowelsigngujarati"]=2763,
+ ["p"]=112,
+ ["paampssquare"]=13184,
+ ["paasentosquare"]=13099,
+ ["pabengali"]=2474,
+ ["pacute"]=7765,
+ ["padeva"]=2346,
+ ["pagedown"]=8671,
+ ["pageup"]=8670,
+ ["pagujarati"]=2730,
+ ["pagurmukhi"]=2602,
+ ["pahiragana"]=12401,
+ ["paiyannoithai"]=3631,
+ ["pakatakana"]=12497,
+ ["palatalizationcyrilliccmb"]=1156,
+ ["palochkacyrillic"]=1216,
+ ["pansioskorean"]=12671,
+ ["paragraph"]=182,
+ ["parallel"]=8741,
+ ["parenleft"]=40,
+ ["parenleftaltonearabic"]=64830,
+ ["parenleftbt"]=63725,
+ ["parenleftex"]=63724,
+ ["parenleftinferior"]=8333,
+ ["parenleftmonospace"]=65288,
+ ["parenleftsmall"]=65113,
+ ["parenleftsuperior"]=8317,
+ ["parenlefttp"]=63723,
+ ["parenleftvertical"]=65077,
+ ["parenright"]=41,
+ ["parenrightaltonearabic"]=64831,
+ ["parenrightbt"]=63736,
+ ["parenrightex"]=63735,
+ ["parenrightinferior"]=8334,
+ ["parenrightmonospace"]=65289,
+ ["parenrightsmall"]=65114,
+ ["parenrightsuperior"]=8318,
+ ["parenrighttp"]=63734,
+ ["parenrightvertical"]=65078,
+ ["partialdiff"]=8706,
+ ["paseqhebrew"]=1472,
+ ["pashtahebrew"]=1433,
+ ["pasquare"]=13225,
+ ["patah"]=1463,
+ ["patah11"]=1463,
+ ["patah1d"]=1463,
+ ["patah2a"]=1463,
+ ["patahhebrew"]=1463,
+ ["patahnarrowhebrew"]=1463,
+ ["patahquarterhebrew"]=1463,
+ ["patahwidehebrew"]=1463,
+ ["pazerhebrew"]=1441,
+ ["pbopomofo"]=12550,
+ ["pcircle"]=9439,
+ ["pdotaccent"]=7767,
+ ["pe"]=1508,
+ ["pecyrillic"]=1087,
+ ["pedagesh"]=64324,
+ ["pedageshhebrew"]=64324,
+ ["peezisquare"]=13115,
+ ["pefinaldageshhebrew"]=64323,
+ ["peharabic"]=1662,
+ ["peharmenian"]=1402,
+ ["pehebrew"]=1508,
+ ["pehfinalarabic"]=64343,
+ ["pehinitialarabic"]=64344,
+ ["pehiragana"]=12410,
+ ["pehmedialarabic"]=64345,
+ ["pekatakana"]=12506,
+ ["pemiddlehookcyrillic"]=1191,
+ ["perafehebrew"]=64334,
+ ["percent"]=37,
+ ["percentarabic"]=1642,
+ ["percentmonospace"]=65285,
+ ["percentsmall"]=65130,
+ ["period"]=46,
+ ["periodarmenian"]=1417,
+ ["periodcentered"]=183,
+ ["periodhalfwidth"]=65377,
+ ["periodinferior"]=63207,
+ ["periodmonospace"]=65294,
+ ["periodsmall"]=65106,
+ ["periodsuperior"]=63208,
+ ["perispomenigreekcmb"]=834,
+ ["perpendicular"]=8869,
+ ["perthousand"]=8240,
+ ["peseta"]=8359,
+ ["pfsquare"]=13194,
+ ["phabengali"]=2475,
+ ["phadeva"]=2347,
+ ["phagujarati"]=2731,
+ ["phagurmukhi"]=2603,
+ ["phi"]=966,
+ ["phi1"]=981,
+ ["phieuphacirclekorean"]=12922,
+ ["phieuphaparenkorean"]=12826,
+ ["phieuphcirclekorean"]=12908,
+ ["phieuphkorean"]=12621,
+ ["phieuphparenkorean"]=12812,
+ ["philatin"]=632,
+ ["phinthuthai"]=3642,
+ ["phisymbolgreek"]=981,
+ ["phook"]=421,
+ ["phophanthai"]=3614,
+ ["phophungthai"]=3612,
+ ["phosamphaothai"]=3616,
+ ["pi"]=960,
+ ["pieupacirclekorean"]=12915,
+ ["pieupaparenkorean"]=12819,
+ ["pieupcieuckorean"]=12662,
+ ["pieupcirclekorean"]=12901,
+ ["pieupkiyeokkorean"]=12658,
+ ["pieupkorean"]=12610,
+ ["pieupparenkorean"]=12805,
+ ["pieupsioskiyeokkorean"]=12660,
+ ["pieupsioskorean"]=12612,
+ ["pieupsiostikeutkorean"]=12661,
+ ["pieupthieuthkorean"]=12663,
+ ["pieuptikeutkorean"]=12659,
+ ["pihiragana"]=12404,
+ ["pikatakana"]=12500,
+ ["pisymbolgreek"]=982,
+ ["piwrarmenian"]=1411,
+ ["plus"]=43,
+ ["plusbelowcmb"]=799,
+ ["pluscircle"]=8853,
+ ["plusminus"]=177,
+ ["plusmod"]=726,
+ ["plusmonospace"]=65291,
+ ["plussmall"]=65122,
+ ["plussuperior"]=8314,
+ ["pmonospace"]=65360,
+ ["pmsquare"]=13272,
+ ["pohiragana"]=12413,
+ ["pointingindexdownwhite"]=9759,
+ ["pointingindexleftwhite"]=9756,
+ ["pointingindexrightwhite"]=9758,
+ ["pointingindexupwhite"]=9757,
+ ["pokatakana"]=12509,
+ ["poplathai"]=3611,
+ ["postalmark"]=12306,
+ ["postalmarkface"]=12320,
+ ["pparen"]=9387,
+ ["precedes"]=8826,
+ ["prescription"]=8478,
+ ["primemod"]=697,
+ ["primereversed"]=8245,
+ ["product"]=8719,
+ ["projective"]=8965,
+ ["prolongedkana"]=12540,
+ ["propellor"]=8984,
+ ["propersubset"]=8834,
+ ["propersuperset"]=8835,
+ ["proportion"]=8759,
+ ["proportional"]=8733,
+ ["psi"]=968,
+ ["psicyrillic"]=1137,
+ ["psilipneumatacyrilliccmb"]=1158,
+ ["pssquare"]=13232,
+ ["puhiragana"]=12407,
+ ["pukatakana"]=12503,
+ ["pvsquare"]=13236,
+ ["pwsquare"]=13242,
+ ["q"]=113,
+ ["qadeva"]=2392,
+ ["qadmahebrew"]=1448,
+ ["qafarabic"]=1602,
+ ["qaffinalarabic"]=65238,
+ ["qafinitialarabic"]=65239,
+ ["qafmedialarabic"]=65240,
+ ["qamats"]=1464,
+ ["qamats10"]=1464,
+ ["qamats1a"]=1464,
+ ["qamats1c"]=1464,
+ ["qamats27"]=1464,
+ ["qamats29"]=1464,
+ ["qamats33"]=1464,
+ ["qamatsde"]=1464,
+ ["qamatshebrew"]=1464,
+ ["qamatsnarrowhebrew"]=1464,
+ ["qamatsqatanhebrew"]=1464,
+ ["qamatsqatannarrowhebrew"]=1464,
+ ["qamatsqatanquarterhebrew"]=1464,
+ ["qamatsqatanwidehebrew"]=1464,
+ ["qamatsquarterhebrew"]=1464,
+ ["qamatswidehebrew"]=1464,
+ ["qarneyparahebrew"]=1439,
+ ["qbopomofo"]=12561,
+ ["qcircle"]=9440,
+ ["qhook"]=672,
+ ["qmonospace"]=65361,
+ ["qof"]=1511,
+ ["qofdagesh"]=64327,
+ ["qofdageshhebrew"]=64327,
+ ["qofhatafpatah"]=1511,
+ ["qofhatafpatahhebrew"]=1511,
+ ["qofhatafsegol"]=1511,
+ ["qofhatafsegolhebrew"]=1511,
+ ["qofhebrew"]=1511,
+ ["qofhiriq"]=1511,
+ ["qofhiriqhebrew"]=1511,
+ ["qofholam"]=1511,
+ ["qofholamhebrew"]=1511,
+ ["qofpatah"]=1511,
+ ["qofpatahhebrew"]=1511,
+ ["qofqamats"]=1511,
+ ["qofqamatshebrew"]=1511,
+ ["qofqubuts"]=1511,
+ ["qofqubutshebrew"]=1511,
+ ["qofsegol"]=1511,
+ ["qofsegolhebrew"]=1511,
+ ["qofsheva"]=1511,
+ ["qofshevahebrew"]=1511,
+ ["qoftsere"]=1511,
+ ["qoftserehebrew"]=1511,
+ ["qparen"]=9388,
+ ["quarternote"]=9833,
+ ["qubuts"]=1467,
+ ["qubuts18"]=1467,
+ ["qubuts25"]=1467,
+ ["qubuts31"]=1467,
+ ["qubutshebrew"]=1467,
+ ["qubutsnarrowhebrew"]=1467,
+ ["qubutsquarterhebrew"]=1467,
+ ["qubutswidehebrew"]=1467,
+ ["question"]=63,
+ ["questionarabic"]=1567,
+ ["questionarmenian"]=1374,
+ ["questiondown"]=191,
+ ["questiondownsmall"]=63423,
+ ["questiongreek"]=894,
+ ["questionmonospace"]=65311,
+ ["questionsmall"]=63295,
+ ["quotedbl"]=34,
+ ["quotedblbase"]=8222,
+ ["quotedblleft"]=8220,
+ ["quotedblmonospace"]=65282,
+ ["quotedblprime"]=12318,
+ ["quotedblprimereversed"]=12317,
+ ["quotedblright"]=8221,
+ ["quoteleft"]=8216,
+ ["quoteleftreversed"]=8219,
+ ["quotereversed"]=8219,
+ ["quoteright"]=8217,
+ ["quoterightn"]=329,
+ ["quotesinglbase"]=8218,
+ ["quotesingle"]=39,
+ ["quotesinglemonospace"]=65287,
+ ["r"]=114,
+ ["raarmenian"]=1404,
+ ["rabengali"]=2480,
+ ["racute"]=341,
+ ["radeva"]=2352,
+ ["radical"]=8730,
+ ["radicalex"]=63717,
+ ["radoverssquare"]=13230,
+ ["radoverssquaredsquare"]=13231,
+ ["radsquare"]=13229,
+ ["rafe"]=1471,
+ ["rafehebrew"]=1471,
+ ["ragujarati"]=2736,
+ ["ragurmukhi"]=2608,
+ ["rahiragana"]=12425,
+ ["rakatakana"]=12521,
+ ["rakatakanahalfwidth"]=65431,
+ ["ralowerdiagonalbengali"]=2545,
+ ["ramiddlediagonalbengali"]=2544,
+ ["ramshorn"]=612,
+ ["ratio"]=8758,
+ ["rbopomofo"]=12566,
+ ["rcaron"]=345,
+ ["rcedilla"]=343,
+ ["rcircle"]=9441,
+ ["rcommaaccent"]=343,
+ ["rdblgrave"]=529,
+ ["rdotaccent"]=7769,
+ ["rdotbelow"]=7771,
+ ["rdotbelowmacron"]=7773,
+ ["referencemark"]=8251,
+ ["reflexsubset"]=8838,
+ ["reflexsuperset"]=8839,
+ ["registered"]=174,
+ ["registersans"]=63720,
+ ["registerserif"]=63194,
+ ["reharabic"]=1585,
+ ["reharmenian"]=1408,
+ ["rehfinalarabic"]=65198,
+ ["rehiragana"]=12428,
+ ["rehyehaleflamarabic"]=1585,
+ ["rekatakana"]=12524,
+ ["rekatakanahalfwidth"]=65434,
+ ["resh"]=1512,
+ ["reshdageshhebrew"]=64328,
+ ["reshhatafpatah"]=1512,
+ ["reshhatafpatahhebrew"]=1512,
+ ["reshhatafsegol"]=1512,
+ ["reshhatafsegolhebrew"]=1512,
+ ["reshhebrew"]=1512,
+ ["reshhiriq"]=1512,
+ ["reshhiriqhebrew"]=1512,
+ ["reshholam"]=1512,
+ ["reshholamhebrew"]=1512,
+ ["reshpatah"]=1512,
+ ["reshpatahhebrew"]=1512,
+ ["reshqamats"]=1512,
+ ["reshqamatshebrew"]=1512,
+ ["reshqubuts"]=1512,
+ ["reshqubutshebrew"]=1512,
+ ["reshsegol"]=1512,
+ ["reshsegolhebrew"]=1512,
+ ["reshsheva"]=1512,
+ ["reshshevahebrew"]=1512,
+ ["reshtsere"]=1512,
+ ["reshtserehebrew"]=1512,
+ ["reversedtilde"]=8765,
+ ["reviahebrew"]=1431,
+ ["reviamugrashhebrew"]=1431,
+ ["revlogicalnot"]=8976,
+ ["rfishhook"]=638,
+ ["rfishhookreversed"]=639,
+ ["rhabengali"]=2525,
+ ["rhadeva"]=2397,
+ ["rho"]=961,
+ ["rhook"]=637,
+ ["rhookturned"]=635,
+ ["rhookturnedsuperior"]=693,
+ ["rhosymbolgreek"]=1009,
+ ["rhotichookmod"]=734,
+ ["rieulacirclekorean"]=12913,
+ ["rieulaparenkorean"]=12817,
+ ["rieulcirclekorean"]=12899,
+ ["rieulhieuhkorean"]=12608,
+ ["rieulkiyeokkorean"]=12602,
+ ["rieulkiyeoksioskorean"]=12649,
+ ["rieulkorean"]=12601,
+ ["rieulmieumkorean"]=12603,
+ ["rieulpansioskorean"]=12652,
+ ["rieulparenkorean"]=12803,
+ ["rieulphieuphkorean"]=12607,
+ ["rieulpieupkorean"]=12604,
+ ["rieulpieupsioskorean"]=12651,
+ ["rieulsioskorean"]=12605,
+ ["rieulthieuthkorean"]=12606,
+ ["rieultikeutkorean"]=12650,
+ ["rieulyeorinhieuhkorean"]=12653,
+ ["rightangle"]=8735,
+ ["righttackbelowcmb"]=793,
+ ["righttriangle"]=8895,
+ ["rihiragana"]=12426,
+ ["rikatakana"]=12522,
+ ["rikatakanahalfwidth"]=65432,
+ ["ring"]=730,
+ ["ringbelowcmb"]=805,
+ ["ringcmb"]=778,
+ ["ringhalfleft"]=703,
+ ["ringhalfleftarmenian"]=1369,
+ ["ringhalfleftbelowcmb"]=796,
+ ["ringhalfleftcentered"]=723,
+ ["ringhalfright"]=702,
+ ["ringhalfrightbelowcmb"]=825,
+ ["ringhalfrightcentered"]=722,
+ ["rinvertedbreve"]=531,
+ ["rittorusquare"]=13137,
+ ["rlinebelow"]=7775,
+ ["rlongleg"]=636,
+ ["rlonglegturned"]=634,
+ ["rmonospace"]=65362,
+ ["rohiragana"]=12429,
+ ["rokatakana"]=12525,
+ ["rokatakanahalfwidth"]=65435,
+ ["roruathai"]=3619,
+ ["rparen"]=9389,
+ ["rrabengali"]=2524,
+ ["rradeva"]=2353,
+ ["rragurmukhi"]=2652,
+ ["rreharabic"]=1681,
+ ["rrehfinalarabic"]=64397,
+ ["rrvocalicbengali"]=2528,
+ ["rrvocalicdeva"]=2400,
+ ["rrvocalicgujarati"]=2784,
+ ["rrvocalicvowelsignbengali"]=2500,
+ ["rrvocalicvowelsigndeva"]=2372,
+ ["rrvocalicvowelsigngujarati"]=2756,
+ ["rsuperior"]=63217,
+ ["rtblock"]=9616,
+ ["rturned"]=633,
+ ["rturnedsuperior"]=692,
+ ["ruhiragana"]=12427,
+ ["rukatakana"]=12523,
+ ["rukatakanahalfwidth"]=65433,
+ ["rupeemarkbengali"]=2546,
+ ["rupeesignbengali"]=2547,
+ ["rupiah"]=63197,
+ ["ruthai"]=3620,
+ ["rvocalicbengali"]=2443,
+ ["rvocalicdeva"]=2315,
+ ["rvocalicgujarati"]=2699,
+ ["rvocalicvowelsignbengali"]=2499,
+ ["rvocalicvowelsigndeva"]=2371,
+ ["rvocalicvowelsigngujarati"]=2755,
+ ["s"]=115,
+ ["sabengali"]=2488,
+ ["sacute"]=347,
+ ["sacutedotaccent"]=7781,
+ ["sadarabic"]=1589,
+ ["sadeva"]=2360,
+ ["sadfinalarabic"]=65210,
+ ["sadinitialarabic"]=65211,
+ ["sadmedialarabic"]=65212,
+ ["sagujarati"]=2744,
+ ["sagurmukhi"]=2616,
+ ["sahiragana"]=12373,
+ ["sakatakana"]=12469,
+ ["sakatakanahalfwidth"]=65403,
+ ["sallallahoualayhewasallamarabic"]=65018,
+ ["samekh"]=1505,
+ ["samekhdagesh"]=64321,
+ ["samekhdageshhebrew"]=64321,
+ ["samekhhebrew"]=1505,
+ ["saraaathai"]=3634,
+ ["saraaethai"]=3649,
+ ["saraaimaimalaithai"]=3652,
+ ["saraaimaimuanthai"]=3651,
+ ["saraamthai"]=3635,
+ ["saraathai"]=3632,
+ ["saraethai"]=3648,
+ ["saraiileftthai"]=63622,
+ ["saraiithai"]=3637,
+ ["saraileftthai"]=63621,
+ ["saraithai"]=3636,
+ ["saraothai"]=3650,
+ ["saraueeleftthai"]=63624,
+ ["saraueethai"]=3639,
+ ["saraueleftthai"]=63623,
+ ["sarauethai"]=3638,
+ ["sarauthai"]=3640,
+ ["sarauuthai"]=3641,
+ ["sbopomofo"]=12569,
+ ["scaron"]=353,
+ ["scarondotaccent"]=7783,
+ ["scedilla"]=351,
+ ["schwa"]=601,
+ ["schwacyrillic"]=1241,
+ ["schwadieresiscyrillic"]=1243,
+ ["schwahook"]=602,
+ ["scircle"]=9442,
+ ["scircumflex"]=349,
+ ["scommaaccent"]=537,
+ ["sdotaccent"]=7777,
+ ["sdotbelow"]=7779,
+ ["sdotbelowdotaccent"]=7785,
+ ["seagullbelowcmb"]=828,
+ ["second"]=8243,
+ ["secondtonechinese"]=714,
+ ["section"]=167,
+ ["seenarabic"]=1587,
+ ["seenfinalarabic"]=65202,
+ ["seeninitialarabic"]=65203,
+ ["seenmedialarabic"]=65204,
+ ["segol"]=1462,
+ ["segol13"]=1462,
+ ["segol1f"]=1462,
+ ["segol2c"]=1462,
+ ["segolhebrew"]=1462,
+ ["segolnarrowhebrew"]=1462,
+ ["segolquarterhebrew"]=1462,
+ ["segoltahebrew"]=1426,
+ ["segolwidehebrew"]=1462,
+ ["seharmenian"]=1405,
+ ["sehiragana"]=12379,
+ ["sekatakana"]=12475,
+ ["sekatakanahalfwidth"]=65406,
+ ["semicolon"]=59,
+ ["semicolonarabic"]=1563,
+ ["semicolonmonospace"]=65307,
+ ["semicolonsmall"]=65108,
+ ["semivoicedmarkkana"]=12444,
+ ["semivoicedmarkkanahalfwidth"]=65439,
+ ["sentisquare"]=13090,
+ ["sentosquare"]=13091,
+ ["seven"]=55,
+ ["sevenarabic"]=1639,
+ ["sevenbengali"]=2541,
+ ["sevencircle"]=9318,
+ ["sevencircleinversesansserif"]=10128,
+ ["sevendeva"]=2413,
+ ["seveneighths"]=8542,
+ ["sevengujarati"]=2797,
+ ["sevengurmukhi"]=2669,
+ ["sevenhackarabic"]=1639,
+ ["sevenhangzhou"]=12327,
+ ["sevenideographicparen"]=12838,
+ ["seveninferior"]=8327,
+ ["sevenmonospace"]=65303,
+ ["sevenoldstyle"]=63287,
+ ["sevenparen"]=9338,
+ ["sevenperiod"]=9358,
+ ["sevenpersian"]=1783,
+ ["sevenroman"]=8566,
+ ["sevensuperior"]=8311,
+ ["seventeencircle"]=9328,
+ ["seventeenparen"]=9348,
+ ["seventeenperiod"]=9368,
+ ["seventhai"]=3671,
+ ["sfthyphen"]=173,
+ ["shaarmenian"]=1399,
+ ["shabengali"]=2486,
+ ["shacyrillic"]=1096,
+ ["shaddaarabic"]=1617,
+ ["shaddadammaarabic"]=64609,
+ ["shaddadammatanarabic"]=64606,
+ ["shaddafathaarabic"]=64608,
+ ["shaddafathatanarabic"]=1617,
+ ["shaddakasraarabic"]=64610,
+ ["shaddakasratanarabic"]=64607,
+ ["shade"]=9618,
+ ["shadedark"]=9619,
+ ["shadelight"]=9617,
+ ["shademedium"]=9618,
+ ["shadeva"]=2358,
+ ["shagujarati"]=2742,
+ ["shagurmukhi"]=2614,
+ ["shalshelethebrew"]=1427,
+ ["shbopomofo"]=12565,
+ ["shchacyrillic"]=1097,
+ ["sheenarabic"]=1588,
+ ["sheenfinalarabic"]=65206,
+ ["sheeninitialarabic"]=65207,
+ ["sheenmedialarabic"]=65208,
+ ["sheicoptic"]=995,
+ ["sheqel"]=8362,
+ ["sheqelhebrew"]=8362,
+ ["sheva"]=1456,
+ ["sheva115"]=1456,
+ ["sheva15"]=1456,
+ ["sheva22"]=1456,
+ ["sheva2e"]=1456,
+ ["shevahebrew"]=1456,
+ ["shevanarrowhebrew"]=1456,
+ ["shevaquarterhebrew"]=1456,
+ ["shevawidehebrew"]=1456,
+ ["shhacyrillic"]=1211,
+ ["shimacoptic"]=1005,
+ ["shin"]=1513,
+ ["shindagesh"]=64329,
+ ["shindageshhebrew"]=64329,
+ ["shindageshshindot"]=64300,
+ ["shindageshshindothebrew"]=64300,
+ ["shindageshsindot"]=64301,
+ ["shindageshsindothebrew"]=64301,
+ ["shindothebrew"]=1473,
+ ["shinhebrew"]=1513,
+ ["shinshindot"]=64298,
+ ["shinshindothebrew"]=64298,
+ ["shinsindot"]=64299,
+ ["shinsindothebrew"]=64299,
+ ["shook"]=642,
+ ["sigma"]=963,
+ ["sigma1"]=962,
+ ["sigmafinal"]=962,
+ ["sigmalunatesymbolgreek"]=1010,
+ ["sihiragana"]=12375,
+ ["sikatakana"]=12471,
+ ["sikatakanahalfwidth"]=65404,
+ ["siluqhebrew"]=1469,
+ ["siluqlefthebrew"]=1469,
+ ["similar"]=8764,
+ ["sindothebrew"]=1474,
+ ["siosacirclekorean"]=12916,
+ ["siosaparenkorean"]=12820,
+ ["sioscieuckorean"]=12670,
+ ["sioscirclekorean"]=12902,
+ ["sioskiyeokkorean"]=12666,
+ ["sioskorean"]=12613,
+ ["siosnieunkorean"]=12667,
+ ["siosparenkorean"]=12806,
+ ["siospieupkorean"]=12669,
+ ["siostikeutkorean"]=12668,
+ ["six"]=54,
+ ["sixarabic"]=1638,
+ ["sixbengali"]=2540,
+ ["sixcircle"]=9317,
+ ["sixcircleinversesansserif"]=10127,
+ ["sixdeva"]=2412,
+ ["sixgujarati"]=2796,
+ ["sixgurmukhi"]=2668,
+ ["sixhackarabic"]=1638,
+ ["sixhangzhou"]=12326,
+ ["sixideographicparen"]=12837,
+ ["sixinferior"]=8326,
+ ["sixmonospace"]=65302,
+ ["sixoldstyle"]=63286,
+ ["sixparen"]=9337,
+ ["sixperiod"]=9357,
+ ["sixpersian"]=1782,
+ ["sixroman"]=8565,
+ ["sixsuperior"]=8310,
+ ["sixteencircle"]=9327,
+ ["sixteencurrencydenominatorbengali"]=2553,
+ ["sixteenparen"]=9347,
+ ["sixteenperiod"]=9367,
+ ["sixthai"]=3670,
+ ["slash"]=47,
+ ["slashmonospace"]=65295,
+ ["slong"]=383,
+ ["slongdotaccent"]=7835,
+ ["smileface"]=9786,
+ ["smonospace"]=65363,
+ ["sofpasuqhebrew"]=1475,
+ ["softhyphen"]=173,
+ ["softsigncyrillic"]=1100,
+ ["sohiragana"]=12381,
+ ["sokatakana"]=12477,
+ ["sokatakanahalfwidth"]=65407,
+ ["soliduslongoverlaycmb"]=824,
+ ["solidusshortoverlaycmb"]=823,
+ ["sorusithai"]=3625,
+ ["sosalathai"]=3624,
+ ["sosothai"]=3595,
+ ["sosuathai"]=3626,
+ ["space"]=32,
+ ["spacehackarabic"]=32,
+ ["spade"]=9824,
+ ["spadesuitblack"]=9824,
+ ["spadesuitwhite"]=9828,
+ ["sparen"]=9390,
+ ["squarebelowcmb"]=827,
+ ["squarecc"]=13252,
+ ["squarecm"]=13213,
+ ["squarediagonalcrosshatchfill"]=9641,
+ ["squarehorizontalfill"]=9636,
+ ["squarekg"]=13199,
+ ["squarekm"]=13214,
+ ["squarekmcapital"]=13262,
+ ["squareln"]=13265,
+ ["squarelog"]=13266,
+ ["squaremg"]=13198,
+ ["squaremil"]=13269,
+ ["squaremm"]=13212,
+ ["squaremsquared"]=13217,
+ ["squareorthogonalcrosshatchfill"]=9638,
+ ["squareupperlefttolowerrightfill"]=9639,
+ ["squareupperrighttolowerleftfill"]=9640,
+ ["squareverticalfill"]=9637,
+ ["squarewhitewithsmallblack"]=9635,
+ ["srsquare"]=13275,
+ ["ssabengali"]=2487,
+ ["ssadeva"]=2359,
+ ["ssagujarati"]=2743,
+ ["ssangcieuckorean"]=12617,
+ ["ssanghieuhkorean"]=12677,
+ ["ssangieungkorean"]=12672,
+ ["ssangkiyeokkorean"]=12594,
+ ["ssangnieunkorean"]=12645,
+ ["ssangpieupkorean"]=12611,
+ ["ssangsioskorean"]=12614,
+ ["ssangtikeutkorean"]=12600,
+ ["ssuperior"]=63218,
+ ["sterling"]=163,
+ ["sterlingmonospace"]=65505,
+ ["strokelongoverlaycmb"]=822,
+ ["strokeshortoverlaycmb"]=821,
+ ["subset"]=8834,
+ ["subsetnotequal"]=8842,
+ ["subsetorequal"]=8838,
+ ["succeeds"]=8827,
+ ["suchthat"]=8715,
+ ["suhiragana"]=12377,
+ ["sukatakana"]=12473,
+ ["sukatakanahalfwidth"]=65405,
+ ["sukunarabic"]=1618,
+ ["summation"]=8721,
+ ["sun"]=9788,
+ ["superset"]=8835,
+ ["supersetnotequal"]=8843,
+ ["supersetorequal"]=8839,
+ ["svsquare"]=13276,
+ ["syouwaerasquare"]=13180,
+ ["t"]=116,
+ ["tabengali"]=2468,
+ ["tackdown"]=8868,
+ ["tackleft"]=8867,
+ ["tadeva"]=2340,
+ ["tagujarati"]=2724,
+ ["tagurmukhi"]=2596,
+ ["taharabic"]=1591,
+ ["tahfinalarabic"]=65218,
+ ["tahinitialarabic"]=65219,
+ ["tahiragana"]=12383,
+ ["tahmedialarabic"]=65220,
+ ["taisyouerasquare"]=13181,
+ ["takatakana"]=12479,
+ ["takatakanahalfwidth"]=65408,
+ ["tatweelarabic"]=1600,
+ ["tau"]=964,
+ ["tav"]=1514,
+ ["tavdages"]=64330,
+ ["tavdagesh"]=64330,
+ ["tavdageshhebrew"]=64330,
+ ["tavhebrew"]=1514,
+ ["tbar"]=359,
+ ["tbopomofo"]=12554,
+ ["tcaron"]=357,
+ ["tccurl"]=680,
+ ["tcedilla"]=355,
+ ["tcheharabic"]=1670,
+ ["tchehfinalarabic"]=64379,
+ ["tchehinitialarabic"]=64380,
+ ["tchehmedialarabic"]=64381,
+ ["tchehmeeminitialarabic"]=64380,
+ ["tcircle"]=9443,
+ ["tcircumflexbelow"]=7793,
+ ["tcommaaccent"]=355,
+ ["tdieresis"]=7831,
+ ["tdotaccent"]=7787,
+ ["tdotbelow"]=7789,
+ ["tecyrillic"]=1090,
+ ["tedescendercyrillic"]=1197,
+ ["teharabic"]=1578,
+ ["tehfinalarabic"]=65174,
+ ["tehhahinitialarabic"]=64674,
+ ["tehhahisolatedarabic"]=64524,
+ ["tehinitialarabic"]=65175,
+ ["tehiragana"]=12390,
+ ["tehjeeminitialarabic"]=64673,
+ ["tehjeemisolatedarabic"]=64523,
+ ["tehmarbutaarabic"]=1577,
+ ["tehmarbutafinalarabic"]=65172,
+ ["tehmedialarabic"]=65176,
+ ["tehmeeminitialarabic"]=64676,
+ ["tehmeemisolatedarabic"]=64526,
+ ["tehnoonfinalarabic"]=64627,
+ ["tekatakana"]=12486,
+ ["tekatakanahalfwidth"]=65411,
+ ["telephone"]=8481,
+ ["telephoneblack"]=9742,
+ ["telishagedolahebrew"]=1440,
+ ["telishaqetanahebrew"]=1449,
+ ["tencircle"]=9321,
+ ["tenideographicparen"]=12841,
+ ["tenparen"]=9341,
+ ["tenperiod"]=9361,
+ ["tenroman"]=8569,
+ ["tesh"]=679,
+ ["tet"]=1496,
+ ["tetdagesh"]=64312,
+ ["tetdageshhebrew"]=64312,
+ ["tethebrew"]=1496,
+ ["tetsecyrillic"]=1205,
+ ["tevirhebrew"]=1435,
+ ["tevirlefthebrew"]=1435,
+ ["thabengali"]=2469,
+ ["thadeva"]=2341,
+ ["thagujarati"]=2725,
+ ["thagurmukhi"]=2597,
+ ["thalarabic"]=1584,
+ ["thalfinalarabic"]=65196,
+ ["thanthakhatlowleftthai"]=63640,
+ ["thanthakhatlowrightthai"]=63639,
+ ["thanthakhatthai"]=3660,
+ ["thanthakhatupperleftthai"]=63638,
+ ["theharabic"]=1579,
+ ["thehfinalarabic"]=65178,
+ ["thehinitialarabic"]=65179,
+ ["thehmedialarabic"]=65180,
+ ["thereexists"]=8707,
+ ["therefore"]=8756,
+ ["theta"]=952,
+ ["theta1"]=977,
+ ["thetasymbolgreek"]=977,
+ ["thieuthacirclekorean"]=12921,
+ ["thieuthaparenkorean"]=12825,
+ ["thieuthcirclekorean"]=12907,
+ ["thieuthkorean"]=12620,
+ ["thieuthparenkorean"]=12811,
+ ["thirteencircle"]=9324,
+ ["thirteenparen"]=9344,
+ ["thirteenperiod"]=9364,
+ ["thonangmonthothai"]=3601,
+ ["thook"]=429,
+ ["thophuthaothai"]=3602,
+ ["thorn"]=254,
+ ["thothahanthai"]=3607,
+ ["thothanthai"]=3600,
+ ["thothongthai"]=3608,
+ ["thothungthai"]=3606,
+ ["thousandcyrillic"]=1154,
+ ["thousandsseparatorarabic"]=1644,
+ ["thousandsseparatorpersian"]=1644,
+ ["three"]=51,
+ ["threearabic"]=1635,
+ ["threebengali"]=2537,
+ ["threecircle"]=9314,
+ ["threecircleinversesansserif"]=10124,
+ ["threedeva"]=2409,
+ ["threeeighths"]=8540,
+ ["threegujarati"]=2793,
+ ["threegurmukhi"]=2665,
+ ["threehackarabic"]=1635,
+ ["threehangzhou"]=12323,
+ ["threeideographicparen"]=12834,
+ ["threeinferior"]=8323,
+ ["threemonospace"]=65299,
+ ["threenumeratorbengali"]=2550,
+ ["threeoldstyle"]=63283,
+ ["threeparen"]=9334,
+ ["threeperiod"]=9354,
+ ["threepersian"]=1779,
+ ["threequarters"]=190,
+ ["threequartersemdash"]=63198,
+ ["threeroman"]=8562,
+ ["threesuperior"]=179,
+ ["threethai"]=3667,
+ ["thzsquare"]=13204,
+ ["tihiragana"]=12385,
+ ["tikatakana"]=12481,
+ ["tikatakanahalfwidth"]=65409,
+ ["tikeutacirclekorean"]=12912,
+ ["tikeutaparenkorean"]=12816,
+ ["tikeutcirclekorean"]=12898,
+ ["tikeutkorean"]=12599,
+ ["tikeutparenkorean"]=12802,
+ ["tilde"]=732,
+ ["tildebelowcmb"]=816,
+ ["tildecmb"]=771,
+ ["tildecomb"]=771,
+ ["tildedoublecmb"]=864,
+ ["tildeoperator"]=8764,
+ ["tildeoverlaycmb"]=820,
+ ["tildeverticalcmb"]=830,
+ ["timescircle"]=8855,
+ ["tipehahebrew"]=1430,
+ ["tipehalefthebrew"]=1430,
+ ["tippigurmukhi"]=2672,
+ ["titlocyrilliccmb"]=1155,
+ ["tiwnarmenian"]=1407,
+ ["tlinebelow"]=7791,
+ ["tmonospace"]=65364,
+ ["toarmenian"]=1385,
+ ["tohiragana"]=12392,
+ ["tokatakana"]=12488,
+ ["tokatakanahalfwidth"]=65412,
+ ["tonebarextrahighmod"]=741,
+ ["tonebarextralowmod"]=745,
+ ["tonebarhighmod"]=742,
+ ["tonebarlowmod"]=744,
+ ["tonebarmidmod"]=743,
+ ["tonefive"]=445,
+ ["tonesix"]=389,
+ ["tonetwo"]=424,
+ ["tonos"]=900,
+ ["tonsquare"]=13095,
+ ["topatakthai"]=3599,
+ ["tortoiseshellbracketleft"]=12308,
+ ["tortoiseshellbracketleftsmall"]=65117,
+ ["tortoiseshellbracketleftvertical"]=65081,
+ ["tortoiseshellbracketright"]=12309,
+ ["tortoiseshellbracketrightsmall"]=65118,
+ ["tortoiseshellbracketrightvertical"]=65082,
+ ["totaothai"]=3605,
+ ["tpalatalhook"]=427,
+ ["tparen"]=9391,
+ ["trademark"]=8482,
+ ["trademarksans"]=63722,
+ ["trademarkserif"]=63195,
+ ["tretroflexhook"]=648,
+ ["triagdn"]=9660,
+ ["triaglf"]=9668,
+ ["triagrt"]=9658,
+ ["triagup"]=9650,
+ ["ts"]=678,
+ ["tsadi"]=1510,
+ ["tsadidagesh"]=64326,
+ ["tsadidageshhebrew"]=64326,
+ ["tsadihebrew"]=1510,
+ ["tsecyrillic"]=1094,
+ ["tsere"]=1461,
+ ["tsere12"]=1461,
+ ["tsere1e"]=1461,
+ ["tsere2b"]=1461,
+ ["tserehebrew"]=1461,
+ ["tserenarrowhebrew"]=1461,
+ ["tserequarterhebrew"]=1461,
+ ["tserewidehebrew"]=1461,
+ ["tshecyrillic"]=1115,
+ ["tsuperior"]=63219,
+ ["ttabengali"]=2463,
+ ["ttadeva"]=2335,
+ ["ttagujarati"]=2719,
+ ["ttagurmukhi"]=2591,
+ ["tteharabic"]=1657,
+ ["ttehfinalarabic"]=64359,
+ ["ttehinitialarabic"]=64360,
+ ["ttehmedialarabic"]=64361,
+ ["tthabengali"]=2464,
+ ["tthadeva"]=2336,
+ ["tthagujarati"]=2720,
+ ["tthagurmukhi"]=2592,
+ ["tturned"]=647,
+ ["tuhiragana"]=12388,
+ ["tukatakana"]=12484,
+ ["tukatakanahalfwidth"]=65410,
+ ["tusmallhiragana"]=12387,
+ ["tusmallkatakana"]=12483,
+ ["tusmallkatakanahalfwidth"]=65391,
+ ["twelvecircle"]=9323,
+ ["twelveparen"]=9343,
+ ["twelveperiod"]=9363,
+ ["twelveroman"]=8571,
+ ["twentycircle"]=9331,
+ ["twentyhangzhou"]=21316,
+ ["twentyparen"]=9351,
+ ["twentyperiod"]=9371,
+ ["two"]=50,
+ ["twoarabic"]=1634,
+ ["twobengali"]=2536,
+ ["twocircle"]=9313,
+ ["twocircleinversesansserif"]=10123,
+ ["twodeva"]=2408,
+ ["twodotenleader"]=8229,
+ ["twodotleader"]=8229,
+ ["twodotleadervertical"]=65072,
+ ["twogujarati"]=2792,
+ ["twogurmukhi"]=2664,
+ ["twohackarabic"]=1634,
+ ["twohangzhou"]=12322,
+ ["twoideographicparen"]=12833,
+ ["twoinferior"]=8322,
+ ["twomonospace"]=65298,
+ ["twonumeratorbengali"]=2549,
+ ["twooldstyle"]=63282,
+ ["twoparen"]=9333,
+ ["twoperiod"]=9353,
+ ["twopersian"]=1778,
+ ["tworoman"]=8561,
+ ["twostroke"]=443,
+ ["twosuperior"]=178,
+ ["twothai"]=3666,
+ ["twothirds"]=8532,
+ ["u"]=117,
+ ["uacute"]=250,
+ ["ubar"]=649,
+ ["ubengali"]=2441,
+ ["ubopomofo"]=12584,
+ ["ubreve"]=365,
+ ["ucaron"]=468,
+ ["ucircle"]=9444,
+ ["ucircumflex"]=251,
+ ["ucircumflexbelow"]=7799,
+ ["ucyrillic"]=1091,
+ ["udattadeva"]=2385,
+ ["udblacute"]=369,
+ ["udblgrave"]=533,
+ ["udeva"]=2313,
+ ["udieresis"]=252,
+ ["udieresisacute"]=472,
+ ["udieresisbelow"]=7795,
+ ["udieresiscaron"]=474,
+ ["udieresiscyrillic"]=1265,
+ ["udieresisgrave"]=476,
+ ["udieresismacron"]=470,
+ ["udotbelow"]=7909,
+ ["ugrave"]=249,
+ ["ugujarati"]=2697,
+ ["ugurmukhi"]=2569,
+ ["uhiragana"]=12358,
+ ["uhookabove"]=7911,
+ ["uhorn"]=432,
+ ["uhornacute"]=7913,
+ ["uhorndotbelow"]=7921,
+ ["uhorngrave"]=7915,
+ ["uhornhookabove"]=7917,
+ ["uhorntilde"]=7919,
+ ["uhungarumlaut"]=369,
+ ["uhungarumlautcyrillic"]=1267,
+ ["uinvertedbreve"]=535,
+ ["ukatakana"]=12454,
+ ["ukatakanahalfwidth"]=65395,
+ ["ukcyrillic"]=1145,
+ ["ukorean"]=12636,
+ ["umacron"]=363,
+ ["umacroncyrillic"]=1263,
+ ["umacrondieresis"]=7803,
+ ["umatragurmukhi"]=2625,
+ ["umonospace"]=65365,
+ ["underscore"]=95,
+ ["underscoredbl"]=8215,
+ ["underscoremonospace"]=65343,
+ ["underscorevertical"]=65075,
+ ["underscorewavy"]=65103,
+ ["union"]=8746,
+ ["universal"]=8704,
+ ["uogonek"]=371,
+ ["uparen"]=9392,
+ ["upblock"]=9600,
+ ["upperdothebrew"]=1476,
+ ["upsilon"]=965,
+ ["upsilondieresis"]=971,
+ ["upsilondieresistonos"]=944,
+ ["upsilonlatin"]=650,
+ ["upsilontonos"]=973,
+ ["uptackbelowcmb"]=797,
+ ["uptackmod"]=724,
+ ["uragurmukhi"]=2675,
+ ["uring"]=367,
+ ["ushortcyrillic"]=1118,
+ ["usmallhiragana"]=12357,
+ ["usmallkatakana"]=12453,
+ ["usmallkatakanahalfwidth"]=65385,
+ ["ustraightcyrillic"]=1199,
+ ["ustraightstrokecyrillic"]=1201,
+ ["utilde"]=361,
+ ["utildeacute"]=7801,
+ ["utildebelow"]=7797,
+ ["uubengali"]=2442,
+ ["uudeva"]=2314,
+ ["uugujarati"]=2698,
+ ["uugurmukhi"]=2570,
+ ["uumatragurmukhi"]=2626,
+ ["uuvowelsignbengali"]=2498,
+ ["uuvowelsigndeva"]=2370,
+ ["uuvowelsigngujarati"]=2754,
+ ["uvowelsignbengali"]=2497,
+ ["uvowelsigndeva"]=2369,
+ ["uvowelsigngujarati"]=2753,
+ ["v"]=118,
+ ["vadeva"]=2357,
+ ["vagujarati"]=2741,
+ ["vagurmukhi"]=2613,
+ ["vakatakana"]=12535,
+ ["vav"]=1493,
+ ["vavdagesh"]=64309,
+ ["vavdagesh65"]=64309,
+ ["vavdageshhebrew"]=64309,
+ ["vavhebrew"]=1493,
+ ["vavholam"]=64331,
+ ["vavholamhebrew"]=64331,
+ ["vavvavhebrew"]=1520,
+ ["vavyodhebrew"]=1521,
+ ["vcircle"]=9445,
+ ["vdotbelow"]=7807,
+ ["vecyrillic"]=1074,
+ ["veharabic"]=1700,
+ ["vehfinalarabic"]=64363,
+ ["vehinitialarabic"]=64364,
+ ["vehmedialarabic"]=64365,
+ ["vekatakana"]=12537,
+ ["venus"]=9792,
+ ["verticalbar"]=124,
+ ["verticallineabovecmb"]=781,
+ ["verticallinebelowcmb"]=809,
+ ["verticallinelowmod"]=716,
+ ["verticallinemod"]=712,
+ ["vewarmenian"]=1406,
+ ["vhook"]=651,
+ ["vikatakana"]=12536,
+ ["viramabengali"]=2509,
+ ["viramadeva"]=2381,
+ ["viramagujarati"]=2765,
+ ["visargabengali"]=2435,
+ ["visargadeva"]=2307,
+ ["visargagujarati"]=2691,
+ ["vmonospace"]=65366,
+ ["voarmenian"]=1400,
+ ["voicediterationhiragana"]=12446,
+ ["voicediterationkatakana"]=12542,
+ ["voicedmarkkana"]=12443,
+ ["voicedmarkkanahalfwidth"]=65438,
+ ["vokatakana"]=12538,
+ ["vparen"]=9393,
+ ["vtilde"]=7805,
+ ["vturned"]=652,
+ ["vuhiragana"]=12436,
+ ["vukatakana"]=12532,
+ ["w"]=119,
+ ["wacute"]=7811,
+ ["waekorean"]=12633,
+ ["wahiragana"]=12431,
+ ["wakatakana"]=12527,
+ ["wakatakanahalfwidth"]=65436,
+ ["wakorean"]=12632,
+ ["wasmallhiragana"]=12430,
+ ["wasmallkatakana"]=12526,
+ ["wattosquare"]=13143,
+ ["wavedash"]=12316,
+ ["wavyunderscorevertical"]=65076,
+ ["wawarabic"]=1608,
+ ["wawfinalarabic"]=65262,
+ ["wawhamzaabovearabic"]=1572,
+ ["wawhamzaabovefinalarabic"]=65158,
+ ["wbsquare"]=13277,
+ ["wcircle"]=9446,
+ ["wcircumflex"]=373,
+ ["wdieresis"]=7813,
+ ["wdotaccent"]=7815,
+ ["wdotbelow"]=7817,
+ ["wehiragana"]=12433,
+ ["weierstrass"]=8472,
+ ["wekatakana"]=12529,
+ ["wekorean"]=12638,
+ ["weokorean"]=12637,
+ ["wgrave"]=7809,
+ ["whitebullet"]=9702,
+ ["whitecircle"]=9675,
+ ["whitecircleinverse"]=9689,
+ ["whitecornerbracketleft"]=12302,
+ ["whitecornerbracketleftvertical"]=65091,
+ ["whitecornerbracketright"]=12303,
+ ["whitecornerbracketrightvertical"]=65092,
+ ["whitediamond"]=9671,
+ ["whitediamondcontainingblacksmalldiamond"]=9672,
+ ["whitedownpointingsmalltriangle"]=9663,
+ ["whitedownpointingtriangle"]=9661,
+ ["whiteleftpointingsmalltriangle"]=9667,
+ ["whiteleftpointingtriangle"]=9665,
+ ["whitelenticularbracketleft"]=12310,
+ ["whitelenticularbracketright"]=12311,
+ ["whiterightpointingsmalltriangle"]=9657,
+ ["whiterightpointingtriangle"]=9655,
+ ["whitesmallsquare"]=9643,
+ ["whitesmilingface"]=9786,
+ ["whitesquare"]=9633,
+ ["whitestar"]=9734,
+ ["whitetelephone"]=9743,
+ ["whitetortoiseshellbracketleft"]=12312,
+ ["whitetortoiseshellbracketright"]=12313,
+ ["whiteuppointingsmalltriangle"]=9653,
+ ["whiteuppointingtriangle"]=9651,
+ ["wihiragana"]=12432,
+ ["wikatakana"]=12528,
+ ["wikorean"]=12639,
+ ["wmonospace"]=65367,
+ ["wohiragana"]=12434,
+ ["wokatakana"]=12530,
+ ["wokatakanahalfwidth"]=65382,
+ ["won"]=8361,
+ ["wonmonospace"]=65510,
+ ["wowaenthai"]=3623,
+ ["wparen"]=9394,
+ ["wring"]=7832,
+ ["wsuperior"]=695,
+ ["wturned"]=653,
+ ["wynn"]=447,
+ ["x"]=120,
+ ["xabovecmb"]=829,
+ ["xbopomofo"]=12562,
+ ["xcircle"]=9447,
+ ["xdieresis"]=7821,
+ ["xdotaccent"]=7819,
+ ["xeharmenian"]=1389,
+ ["xi"]=958,
+ ["xmonospace"]=65368,
+ ["xparen"]=9395,
+ ["xsuperior"]=739,
+ ["y"]=121,
+ ["yaadosquare"]=13134,
+ ["yabengali"]=2479,
+ ["yacute"]=253,
+ ["yadeva"]=2351,
+ ["yaekorean"]=12626,
+ ["yagujarati"]=2735,
+ ["yagurmukhi"]=2607,
+ ["yahiragana"]=12420,
+ ["yakatakana"]=12516,
+ ["yakatakanahalfwidth"]=65428,
+ ["yakorean"]=12625,
+ ["yamakkanthai"]=3662,
+ ["yasmallhiragana"]=12419,
+ ["yasmallkatakana"]=12515,
+ ["yasmallkatakanahalfwidth"]=65388,
+ ["yatcyrillic"]=1123,
+ ["ycircle"]=9448,
+ ["ycircumflex"]=375,
+ ["ydieresis"]=255,
+ ["ydotaccent"]=7823,
+ ["ydotbelow"]=7925,
+ ["yeharabic"]=1610,
+ ["yehbarreearabic"]=1746,
+ ["yehbarreefinalarabic"]=64431,
+ ["yehfinalarabic"]=65266,
+ ["yehhamzaabovearabic"]=1574,
+ ["yehhamzaabovefinalarabic"]=65162,
+ ["yehhamzaaboveinitialarabic"]=65163,
+ ["yehhamzaabovemedialarabic"]=65164,
+ ["yehinitialarabic"]=65267,
+ ["yehmedialarabic"]=65268,
+ ["yehmeeminitialarabic"]=64733,
+ ["yehmeemisolatedarabic"]=64600,
+ ["yehnoonfinalarabic"]=64660,
+ ["yehthreedotsbelowarabic"]=1745,
+ ["yekorean"]=12630,
+ ["yen"]=165,
+ ["yenmonospace"]=65509,
+ ["yeokorean"]=12629,
+ ["yeorinhieuhkorean"]=12678,
+ ["yerahbenyomohebrew"]=1450,
+ ["yerahbenyomolefthebrew"]=1450,
+ ["yericyrillic"]=1099,
+ ["yerudieresiscyrillic"]=1273,
+ ["yesieungkorean"]=12673,
+ ["yesieungpansioskorean"]=12675,
+ ["yesieungsioskorean"]=12674,
+ ["yetivhebrew"]=1434,
+ ["ygrave"]=7923,
+ ["yhook"]=436,
+ ["yhookabove"]=7927,
+ ["yiarmenian"]=1397,
+ ["yicyrillic"]=1111,
+ ["yikorean"]=12642,
+ ["yinyang"]=9775,
+ ["yiwnarmenian"]=1410,
+ ["ymonospace"]=65369,
+ ["yod"]=1497,
+ ["yoddagesh"]=64313,
+ ["yoddageshhebrew"]=64313,
+ ["yodhebrew"]=1497,
+ ["yodyodhebrew"]=1522,
+ ["yodyodpatahhebrew"]=64287,
+ ["yohiragana"]=12424,
+ ["yoikorean"]=12681,
+ ["yokatakana"]=12520,
+ ["yokatakanahalfwidth"]=65430,
+ ["yokorean"]=12635,
+ ["yosmallhiragana"]=12423,
+ ["yosmallkatakana"]=12519,
+ ["yosmallkatakanahalfwidth"]=65390,
+ ["yotgreek"]=1011,
+ ["yoyaekorean"]=12680,
+ ["yoyakorean"]=12679,
+ ["yoyakthai"]=3618,
+ ["yoyingthai"]=3597,
+ ["yparen"]=9396,
+ ["ypogegrammeni"]=890,
+ ["ypogegrammenigreekcmb"]=837,
+ ["yr"]=422,
+ ["yring"]=7833,
+ ["ysuperior"]=696,
+ ["ytilde"]=7929,
+ ["yturned"]=654,
+ ["yuhiragana"]=12422,
+ ["yuikorean"]=12684,
+ ["yukatakana"]=12518,
+ ["yukatakanahalfwidth"]=65429,
+ ["yukorean"]=12640,
+ ["yusbigcyrillic"]=1131,
+ ["yusbigiotifiedcyrillic"]=1133,
+ ["yuslittlecyrillic"]=1127,
+ ["yuslittleiotifiedcyrillic"]=1129,
+ ["yusmallhiragana"]=12421,
+ ["yusmallkatakana"]=12517,
+ ["yusmallkatakanahalfwidth"]=65389,
+ ["yuyekorean"]=12683,
+ ["yuyeokorean"]=12682,
+ ["yyabengali"]=2527,
+ ["yyadeva"]=2399,
+ ["z"]=122,
+ ["zaarmenian"]=1382,
+ ["zacute"]=378,
+ ["zadeva"]=2395,
+ ["zagurmukhi"]=2651,
+ ["zaharabic"]=1592,
+ ["zahfinalarabic"]=65222,
+ ["zahinitialarabic"]=65223,
+ ["zahiragana"]=12374,
+ ["zahmedialarabic"]=65224,
+ ["zainarabic"]=1586,
+ ["zainfinalarabic"]=65200,
+ ["zakatakana"]=12470,
+ ["zaqefgadolhebrew"]=1429,
+ ["zaqefqatanhebrew"]=1428,
+ ["zarqahebrew"]=1432,
+ ["zayin"]=1494,
+ ["zayindagesh"]=64310,
+ ["zayindageshhebrew"]=64310,
+ ["zayinhebrew"]=1494,
+ ["zbopomofo"]=12567,
+ ["zcaron"]=382,
+ ["zcircle"]=9449,
+ ["zcircumflex"]=7825,
+ ["zcurl"]=657,
+ ["zdot"]=380,
+ ["zdotaccent"]=380,
+ ["zdotbelow"]=7827,
+ ["zecyrillic"]=1079,
+ ["zedescendercyrillic"]=1177,
+ ["zedieresiscyrillic"]=1247,
+ ["zehiragana"]=12380,
+ ["zekatakana"]=12476,
+ ["zero"]=48,
+ ["zeroarabic"]=1632,
+ ["zerobengali"]=2534,
+ ["zerodeva"]=2406,
+ ["zerogujarati"]=2790,
+ ["zerogurmukhi"]=2662,
+ ["zerohackarabic"]=1632,
+ ["zeroinferior"]=8320,
+ ["zeromonospace"]=65296,
+ ["zerooldstyle"]=63280,
+ ["zeropersian"]=1776,
+ ["zerosuperior"]=8304,
+ ["zerothai"]=3664,
+ ["zerowidthjoiner"]=65279,
+ ["zerowidthnonjoiner"]=8204,
+ ["zerowidthspace"]=8203,
+ ["zeta"]=950,
+ ["zhbopomofo"]=12563,
+ ["zhearmenian"]=1386,
+ ["zhebrevecyrillic"]=1218,
+ ["zhecyrillic"]=1078,
+ ["zhedescendercyrillic"]=1175,
+ ["zhedieresiscyrillic"]=1245,
+ ["zihiragana"]=12376,
+ ["zikatakana"]=12472,
+ ["zinorhebrew"]=1454,
+ ["zlinebelow"]=7829,
+ ["zmonospace"]=65370,
+ ["zohiragana"]=12382,
+ ["zokatakana"]=12478,
+ ["zparen"]=9397,
+ ["zretroflexhook"]=656,
+ ["zstroke"]=438,
+ ["zuhiragana"]=12378,
+ ["zukatakana"]=12474,
+}
\ No newline at end of file
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua
new file mode 100644
index 00000000000..5de964d3fc4
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-auxiliary.lua
@@ -0,0 +1,451 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-auxiliary.lua
+-- DESCRIPTION: part of luaotfload
+-- REQUIREMENTS: luaotfload 2.2
+-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
+-- VERSION: 2.2
+-- CREATED: 2013-05-01 14:40:50+0200
+-----------------------------------------------------------------------
+--
+
+--- this file addresses issue #24
+--- https://github.com/lualatex/luaotfload/issues/24#
+
+luaotfload = luaotfload or {}
+luaotfload.aux = luaotfload.aux or { }
+
+config = config or { }
+config.luaotfload = config.luaotfload or { }
+
+local aux = luaotfload.aux
+local log = luaotfload.log
+local identifiers = fonts.hashes.identifiers
+
+local fontid = font.id
+local texsprint = tex.sprint
+
+local utf8 = unicode.utf8
+local stringlower = string.lower
+local stringformat = string.format
+local stringgsub = string.gsub
+local stringbyte = string.byte
+
+-----------------------------------------------------------------------
+--- font patches
+-----------------------------------------------------------------------
+
+--[[doc--
+
+The font object (tfmdata) structure has changed since version 1.x, so
+in case other packages haven’t been updated we put fallbacks in place
+where they’d expect them. Specifically we have in mind:
+
+ · fontspec
+ · unicode-math
+ · microtype (most likely fixed by TL2013)
+
+--doc]]--
+
+--- fontobj -> fontobj
+local add_fontdata_fallbacks = function (fontdata)
+ if type(fontdata) == "table" then
+ local fontparameters = fontdata.parameters
+ local metadata
+ if not fontdata.shared then --- that would be a tfm
+ --- we can’t really catch everything that
+ --- goes wrong; for some reason, fontspec.lua
+ --- just assumes it always gets an otf object,
+ --- so its capheight callback, which does not
+ --- bother to do any checks, will access
+ --- fontdata.shared no matter what ...
+ fontdata.units = fontdata.units_per_em
+ else --- otf
+ metadata = fontdata.shared.rawdata.metadata
+ fontdata.units = fontparameters.units
+ local resources = fontdata.resources
+ fontdata.size = fontparameters.size
+ --- for legacy fontspec.lua and unicode-math.lua
+ fontdata.shared.otfdata = metadata
+ fontdata.shared.otfdata.metadata = metadata --- brr, that’s meta indeed
+ --- for microtype.lua
+ fontdata.shared.otfdata.luatex = {
+ unicodes = resources.unicodes,
+ features = resources.features,
+ }
+ end
+ end
+ return fontdata
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ add_fontdata_fallbacks,
+ "luaotfload.fontdata_fallbacks")
+
+--[[doc--
+
+Additionally, the font registry is expected at fonts.identifiers
+(fontspec) or fonts.ids (microtype), but in the meantime it has been
+migrated to fonts.hashes.identifiers. We’ll make luaotfload satisfy
+those assumptions. (Maybe it’d be more appropriate to use
+font.getfont() since Hans made it a harmless wrapper [1].)
+
+[1] http://www.ntg.nl/pipermail/ntg-context/2013/072166.html
+
+--doc]]--
+
+fonts.identifiers = fonts.hashes.identifiers
+fonts.ids = fonts.hashes.identifiers
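+
+--- Illustrative sketch only (variable names are hypothetical): with the
+--- registry aliases above and the fallback fields installed by
+--- add_fontdata_fallbacks, a legacy consumer still written against the
+--- 1.x conventions could keep doing something like
+---
+---     local f = fonts.ids[font.current()]
+---     if f and f.shared and f.shared.otfdata then
+---       local legacy_unicodes = f.shared.otfdata.luatex.unicodes
+---     end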
+
+--[[doc--
+This sets two dimensions apparently relied upon by the unicode-math
+package.
+--doc]]--
+
+local set_sscale_dimens = function (fontdata)
+ local mathconstants = fontdata.MathConstants
+ local parameters = fontdata.parameters
+ if mathconstants then
+ parameters[10] = mathconstants.ScriptPercentScaleDown or 70
+ parameters[11] = mathconstants.ScriptScriptPercentScaleDown or 50
+ end
+ return fontdata
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ set_sscale_dimens,
+ "luaotfload.aux.set_sscale_dimens")
+
+--- fontobj -> int
+local lookup_units = function (fontdata)
+ local metadata = fontdata.shared and fontdata.shared.rawdata.metadata
+ if metadata and metadata.units_per_em then
+ return metadata.units_per_em
+ elseif fontdata.parameters and fontdata.parameters.units then
+ return fontdata.parameters.units
+ elseif fontdata.units then --- v1.x
+ return fontdata.units
+ end
+ return 1000
+end
+
+--[[doc--
+This callback corrects some values of the Cambria font.
+--doc]]--
+--- fontobj -> unit
+local patch_cambria_domh = function (fontdata)
+ local mathconstants = fontdata.MathConstants
+ if mathconstants and fontdata.psname == "CambriaMath" then
+ --- my test Cambria has 2048
+ local units_per_em = fontdata.units_per_em or lookup_units(fontdata)
+ local sz = fontdata.parameters.size or fontdata.size
+ local mh = 2800 / units_per_em * sz
+ if mathconstants.DisplayOperatorMinHeight < mh then
+ mathconstants.DisplayOperatorMinHeight = mh
+ end
+ end
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ patch_cambria_domh,
+ "luaotfload.aux.patch_cambria_domh")
+
+--[[doc--
+
+Comment from fontspec:
+
+ “Here we patch fonts tfm table to emulate \XeTeX's \cs{fontdimen8},
+ which stores the caps-height of the font. (Cf.\ \cs{fontdimen5} which
+ stores the x-height.)
+
+ Falls back to measuring the glyph if the font doesn't contain the
+ necessary information.
+ This needs to be extended for fonts that don't contain an `X'.”
+
+--doc]]--
+
+local set_capheight = function (fontdata)
+ local shared = fontdata.shared
+ local parameters = fontdata.parameters
+ local capheight
+ if shared then
+ local units_per_em = parameters.units
+ local size = parameters.size
+ local os2_capheight = shared.rawdata.metadata.pfminfo.os2_capheight
+
+ if os2_capheight > 0 then
+ capheight = os2_capheight / units_per_em * size
+ else
+ local X8 = stringbyte"X"
+ if fontdata.characters[X8] then
+ capheight = fontdata.characters[X8].height
+ else
+ capheight = parameters.ascender / units_per_em * size
+ end
+ end
+ else
+ local X8 = stringbyte"X"
+ if fontdata.characters[X8] then
+ capheight = fontdata.characters[X8].height
+ end
+ end
+ if capheight then
+ --- is this legit? afaics there’s nothing else on the
+ --- array part of that table
+ fontdata.parameters[8] = capheight
+ end
+end
+
+luatexbase.add_to_callback(
+ "luaotfload.patch_font",
+ set_capheight,
+ "luaotfload.aux.set_capheight")
+
+-----------------------------------------------------------------------
+--- glyphs
+-----------------------------------------------------------------------
+
+--- int -> int -> bool
+local font_has_glyph = function (font_id, codepoint)
+ local fontdata = fonts.hashes.identifiers[font_id]
+ if fontdata then
+ if fontdata.characters[codepoint] ~= nil then return true end
+ end
+ return false
+end
+
+aux.font_has_glyph = font_has_glyph
+
+--[[doc--
+
+ This one is approximately “name_to_slot” from the microtype package;
+ note that it is all about Adobe Glyph names and glyph slots in the
+ font. The names and values may diverge from actual Unicode.
+
+ http://www.adobe.com/devnet/opentype/archives/glyph.html
+
+--doc]]--
+
+--- int -> string -> (int | false)
+local slot_of_name = function (font_id, glyphname)
+  local fontdata = identifiers[font_id]
+  if fontdata then
+    local unicode = fontdata.resources.unicodes[glyphname]
+    if type(unicode) == "number" then
+      return unicode
+    elseif type(unicode) == "table" then
+      return unicode[1] --- for multiple components
+    end
+  end
+  return false
+end
+
+aux.slot_of_name = slot_of_name
+
+--[[doc--
+
+ Inverse of above; not authoritative as to my knowledge the official
+ inverse of the AGL is the AGLFN. Maybe this whole issue should be
+ dealt with in a separate package that loads char-def.lua and thereby
+ solves the problem for the next couple decades.
+
+ http://partners.adobe.com/public/developer/en/opentype/aglfn13.txt
+
+--doc]]--
+
+local indices
+
+--- int -> (string | false)
+local name_of_slot = function (codepoint)
+ if not indices then --- this will load the glyph list
+ local unicodes = fonts.encodings.agl.unicodes
+ indices = table.swapped(unicodes)
+ end
+ local glyphname = indices[codepoint]
+ if glyphname then
+ return glyphname
+ end
+ return false
+end
+
+aux.name_of_slot = name_of_slot
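+
+--[[doc--
+
+  A minimal usage sketch of the glyph helpers above, assuming a font
+  is already active; the glyph name “adieresis” (U+00E4) is merely an
+  example and need not be present in every font.
+
+--doc]]--
+
+--local id = font.current()
+--if luaotfload.aux.font_has_glyph(id, 0x00E4) then
+--  local slot = luaotfload.aux.slot_of_name(id, "adieresis") --- number or false
+--  local name = luaotfload.aux.name_of_slot(0x00E4)          --- "adieresis" or false
+--end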
+
+-----------------------------------------------------------------------
+--- features / scripts / languages
+-----------------------------------------------------------------------
+--- lots of arrowcode ahead
+
+--[[doc--
+This function, modeled after “check_script()” from fontspec, returns
+true if in the given font, the script “asked_script” is accounted for in at
+least one feature.
+--doc]]--
+
+--- int -> string -> bool
+local provides_script = function (font_id, asked_script)
+ asked_script = stringlower(asked_script)
+ if font_id and font_id > 0 then
+ local fontdata = identifiers[font_id].shared.rawdata
+ if fontdata then
+ local fontname = fontdata.metadata.fontname
+ local features = fontdata.resources.features
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ for feature, data in next, featuredata do
+ if data[asked_script] then
+ log(stringformat(
+ "font no %d (%s) defines feature %s for script %s",
+ font_id, fontname, feature, asked_script))
+ return true
+ end
+ end
+ end
+ log(stringformat(
+ "font no %d (%s) defines no feature for script %s",
+ font_id, fontname, asked_script))
+ end
+ end
+ log(stringformat("no font with id %d", font_id))
+ return false
+end
+
+aux.provides_script = provides_script
+
+--[[doc--
+This function, modeled after “check_language()” from fontspec, returns
+true if in the given font, the language with tag “asked_language” is
+accounted for in the script with tag “asked_script” in at least one
+feature.
+--doc]]--
+
+--- int -> string -> string -> bool
+local provides_language = function (font_id, asked_script, asked_language)
+ asked_script = stringlower(asked_script)
+ asked_language = stringlower(asked_language)
+ if font_id and font_id > 0 then
+ local fontdata = identifiers[font_id].shared.rawdata
+ if fontdata then
+ local fontname = fontdata.metadata.fontname
+ local features = fontdata.resources.features
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ for feature, data in next, featuredata do
+ local scriptdata = data[asked_script]
+ if scriptdata and scriptdata[asked_language] then
+ log(stringformat("font no %d (%s) defines feature %s "
+ .. "for script %s with language %s",
+ font_id, fontname, feature,
+ asked_script, asked_language))
+ return true
+ end
+ end
+ end
+ log(stringformat(
+ "font no %d (%s) defines no feature for script %s with language %s",
+ font_id, fontname, asked_script, asked_language))
+ end
+ end
+ log(stringformat("no font with id %d", font_id))
+ return false
+end
+
+aux.provides_language = provides_language
+
+--[[doc--
+We strip the syntax elements from feature definitions (they shouldn’t
+actually be there in the first place, but who cares ...)
+--doc]]--
+
+local lpeg = require"lpeg"
+local C, P, S = lpeg.C, lpeg.P, lpeg.S
+local lpegmatch = lpeg.match
+
+local sign = S"+-"
+local rhs = P"=" * P(1)^0 * P(-1)
+local strip_garbage = sign^-1 * C((1 - rhs)^1)
+
+--s = "+foo" --> foo
+--ss = "-bar" --> bar
+--sss = "baz" --> baz
+--t = "foo=bar" --> foo
+--tt = "+bar=baz" --> bar
+--ttt = "-baz=true" --> baz
+--
+--print(lpeg.match(strip_garbage, s))
+--print(lpeg.match(strip_garbage, ss))
+--print(lpeg.match(strip_garbage, sss))
+--print(lpeg.match(strip_garbage, t))
+--print(lpeg.match(strip_garbage, tt))
+--print(lpeg.match(strip_garbage, ttt))
+
+--[[doc--
+This function, modeled after “check_feature()” from fontspec, returns
+true if in the given font, the language with tag “asked_language” is
+accounted for in the script with tag “asked_script” in feature
+“asked_feature”.
+--doc]]--
+
+--- int -> string -> string -> string -> bool
+local provides_feature = function (font_id, asked_script,
+ asked_language, asked_feature)
+ asked_script = stringlower(asked_script)
+ asked_language = stringlower(asked_language)
+ asked_feature = lpegmatch(strip_garbage, asked_feature)
+
+ if font_id and font_id > 0 then
+ local fontdata = identifiers[font_id].shared.rawdata
+ if fontdata then
+ local features = fontdata.resources.features
+ local fontname = fontdata.metadata.fontname
+ for method, featuredata in next, features do
+ --- where method: "gpos" | "gsub"
+ local feature = featuredata[asked_feature]
+ if feature then
+ local scriptdata = feature[asked_script]
+ if scriptdata and scriptdata[asked_language] then
+ log(stringformat("font no %d (%s) defines feature %s "
+ .. "for script %s with language %s",
+ font_id, fontname, asked_feature,
+ asked_script, asked_language))
+ return true
+ end
+ end
+ end
+ log(stringformat(
+ "font no %d (%s) does not define feature %s for script %s with language %s",
+ font_id, fontname, asked_feature, asked_script, asked_language))
+ end
+ end
+ log(stringformat("no font with id %d", font_id))
+ return false
+end
+
+aux.provides_feature = provides_feature
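+
+--[[doc--
+
+  A sketch of how the three predicates above might be combined, e.g.
+  from a \directlua call; the tags “latn”, “dflt” and “smcp” are
+  example OpenType tags, not a fixed interface.
+
+--doc]]--
+
+--local id = font.current()
+--if luaotfload.aux.provides_script(id, "latn")
+--  and luaotfload.aux.provides_language(id, "latn", "dflt")
+--  and luaotfload.aux.provides_feature(id, "latn", "dflt", "smcp")
+--then
+--  --- the current font should be able to do small caps for Latin
+--end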
+
+-----------------------------------------------------------------------
+--- font dimensions
+-----------------------------------------------------------------------
+
+--- string -> string -> int
+local get_math_dimension = function (csname, dimenname)
+ local fontdata = identifiers[fontid(csname)]
+ local mathdata = fontdata.mathparameters
+ if mathdata then return mathdata[dimenname] or 0 end
+ return 0
+end
+
+aux.get_math_dimension = get_math_dimension
+
+--- string -> string -> unit
+local sprint_math_dimension = function (csname, dimenname)
+ local dim = get_math_dimension(csname, dimenname)
+ texsprint(luatexbase.catcodetables["latex-package"], dim)
+ texsprint(luatexbase.catcodetables["latex-package"], "sp")
+end
+
+aux.sprint_math_dimension = sprint_math_dimension
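+
+--[[doc--
+
+  Usage sketch: “csname” names a font control sequence and “dimenname”
+  one of the keys of the font’s mathparameters table (only meaningful
+  for OpenType math fonts; otherwise 0 is returned). The control
+  sequence name below is hypothetical:
+
+    \directlua{luaotfload.aux.sprint_math_dimension("tenrm", "AxisHeight")}
+
+  expands to the parameter value followed by “sp”.
+
+--doc]]--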
+
+-- vim:tw=71:sw=2:ts=2:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua
new file mode 100644
index 00000000000..61f3910ebb7
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-gen.lua
@@ -0,0 +1,308 @@
+if not modules then modules = { } end modules ['luat-basics-gen'] = {
+ version = 1.100,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local dummyfunction = function() end
+----- dummyreporter = function(c) return function(...) texio.write_nl(c .. " : " .. string.format(...)) end end
+local dummyreporter = function(c) return function(...) texio.write_nl(c .. " : " .. string.formatters(...)) end end
+
+statistics = {
+ register = dummyfunction,
+ starttiming = dummyfunction,
+ stoptiming = dummyfunction,
+ elapsedtime = nil,
+}
+
+directives = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
+
+trackers = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
+
+experiments = {
+ register = dummyfunction,
+ enable = dummyfunction,
+ disable = dummyfunction,
+}
+
+storage = { -- probably no longer needed
+ register = dummyfunction,
+ shared = { },
+}
+
+logs = {
+ new = dummyreporter,
+ reporter = dummyreporter,
+ messenger = dummyreporter,
+ report = dummyfunction,
+}
+
+callbacks = {
+ register = function(n,f) return callback.register(n,f) end,
+
+}
+
+utilities = {
+ storage = {
+ allocate = function(t) return t or { } end,
+ mark = function(t) return t or { } end,
+ },
+}
+
+characters = characters or {
+ data = { }
+}
+
+-- we need to cheat a bit here
+
+texconfig.kpse_init = true
+
+resolvers = resolvers or { } -- no fancy file helpers used
+
+local remapper = {
+ otf = "opentype fonts",
+ ttf = "truetype fonts",
+ ttc = "truetype fonts",
+ dfont = "truetype fonts", -- "truetype dictionary",
+ cid = "cid maps",
+ cidmap = "cid maps",
+ fea = "font feature files",
+ pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+ pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+}
+
+function resolvers.findfile(name,fileformat)
+ name = string.gsub(name,"\\","/")
+ if not fileformat or fileformat == "" then
+ fileformat = file.suffix(name)
+ if fileformat == "" then
+ fileformat = "tex"
+ end
+ end
+ fileformat = string.lower(fileformat)
+ fileformat = remapper[fileformat] or fileformat
+ local found = kpse.find_file(name,fileformat)
+ if not found or found == "" then
+ found = kpse.find_file(name,"other text files")
+ end
+ return found
+end
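+
+-- Illustration of the remapping above (the file name is just an example):
+-- a request like resolvers.findfile("lmroman10-regular.otf") derives the
+-- suffix "otf", remaps it to the kpse format "opentype fonts", and returns
+-- whatever kpse.find_file() locates, falling back to "other text files".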
+
+-- function resolvers.findbinfile(name,fileformat)
+-- if not fileformat or fileformat == "" then
+-- fileformat = file.suffix(name)
+-- end
+-- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
+-- end
+
+resolvers.findbinfile = resolvers.findfile
+
+function resolvers.resolve(s)
+ return s
+end
+
+function resolvers.unresolve(s)
+ return s
+end
+
+-- Caches ... I will make a real stupid version some day when I'm in the
+-- mood. After all, the generic code does not need the more advanced
+-- ConTeXt features. Cached data is not shared between ConTeXt and other
+-- usage as I don't want any dependency at all. Also, ConTeXt might have
+-- different needs and tricks added.
+
+--~ containers.usecache = true
+
+caches = { }
+
+local writable = nil
+local readables = { }
+local usingjit = jit
+
+if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
+ caches.namespace = 'generic'
+end
+
+do
+
+ local cachepaths = kpse.expand_path('$TEXMFCACHE') or ""
+
+ if cachepaths == "" then
+ cachepaths = kpse.expand_path('$TEXMFVAR')
+ end
+
+ if cachepaths == "" then
+ cachepaths = kpse.expand_path('$VARTEXMF')
+ end
+
+ if cachepaths == "" then
+ cachepaths = "."
+ end
+
+ cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
+
+ for i=1,#cachepaths do
+ if file.is_writable(cachepaths[i]) then
+ writable = file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable = file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+
+ if not writable then
+        texio.write_nl("quitting: fix your writable cache path")
+ os.exit()
+ elseif #readables == 0 then
+        texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables == 1 and readables[1] == writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
+ end
+
+end
+
+function caches.getwritablepath(category,subcategory)
+ local path = file.join(writable,category)
+ lfs.mkdir(path)
+ path = file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+
+function caches.getreadablepaths(category,subcategory)
+ local t = { }
+ for i=1,#readables do
+ t[i] = file.join(readables[i],category,subcategory)
+ end
+ return t
+end
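+
+-- For illustration, with the writable root determined above (the
+-- subcategory "index" is a made-up example):
+--
+--   caches.getwritablepath("names", "index")
+--     --> <cache root>/luatex-cache/generic/names/index
+--   caches.getreadablepaths("names", "index")
+--     --> list of the corresponding directories below each readable root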
+
+local function makefullname(path,name)
+ if path and path ~= "" then
+ name = "temp-" .. name -- clash prevention
+ return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),usingjit and "lub" or "luc")
+ end
+end
+
+function caches.is_writable(path,name)
+ local fullname = makefullname(path,name)
+ return fullname and file.is_writable(fullname)
+end
+
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local data = false
+ local luaname, lucname = makefullname(paths[i],name)
+ if lucname and lfs.isfile(lucname) then -- maybe also check for size
+ texio.write(string.format("(load luc: %s)",lucname))
+ data = loadfile(lucname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data = loadfile(luaname)
+ if data then
+ data = data()
+ end
+ if data then
+ return data
+ end
+ end
+ end
+end
+
+function caches.savedata(path,name,data)
+ local luaname, lucname = makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true,{ reduce = true })
+ if lucname and type(caches.compile) == "function" then
+ os.remove(lucname) -- better be safe
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+
+-- According to KH os.execute is not permitted in plain/latex so there is
+-- no reason to use the normal context way. So the method here is slightly
+-- different from the one we have in context. We also use different suffixes
+-- as we don't want any clashes (sharing cache files is not that handy as
+-- context moves on faster.)
+--
+-- Beware: serialization might fail on large files (so maybe we should pcall
+-- this) in which case one should limit the method to luac and enable support
+-- for execution.
+
+-- function caches.compile(data,luaname,lucname)
+-- local d = io.loaddata(luaname)
+-- if not d or d == "" then
+-- d = table.serialize(data,true) -- slow
+-- end
+-- if d and d ~= "" then
+-- local f = io.open(lucname,'w')
+-- if f then
+-- local s = loadstring(d)
+-- if s then
+-- f:write(string.dump(s,true))
+-- end
+-- f:close()
+-- end
+-- end
+-- end
+
+function caches.compile(data,luaname,lucname)
+ local d = io.loaddata(luaname)
+ if not d or d == "" then
+ d = table.serialize(data,true) -- slow
+ end
+ if d and d ~= "" then
+ local f = io.open(lucname,'w')
+ if f then
+ local s = loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
+ end
+ f:close()
+ end
+ end
+end
+
+--
+
+function table.setmetatableindex(t,f)
+ setmetatable(t,{ __index = f })
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua
new file mode 100644
index 00000000000..151d98a8f31
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-basics-nod.lua
@@ -0,0 +1,95 @@
+if not modules then modules = { } end modules ['luatex-fonts-nod'] = {
+ version = 1.001,
+ comment = "companion to luatex-fonts.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+-- Don't depend on code here as it is only needed to complement the
+-- font handler code.
+
+-- Attributes:
+
+if tex.attribute[0] ~= 0 then
+
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+
+ tex.attribute[0] = 0 -- else no features
+
+end
+
+attributes = { }
+attributes.unsetvalue = -0x7FFFFFFF
+
+local numbers, last = { }, 127
+
+function attributes.private(name)
+ local number = numbers[name]
+ if not number then
+ if last < 255 then
+ last = last + 1
+ end
+ number = last
+ numbers[name] = number
+ end
+ return number
+end
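+
+-- Illustration: allocation starts above 127 and is capped at 255, and
+-- repeated requests for the same (hypothetical) name yield the same slot:
+--
+--   attributes.private("myattribute") --> 128 (first allocation)
+--   attributes.private("myattribute") --> 128 (cached)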
+
+-- Nodes:
+
+nodes = { }
+nodes.pool = { }
+nodes.handlers = { }
+
+local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
+local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
+local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
+
+nodes.nodecodes = nodecodes
+nodes.whatcodes = whatcodes
+nodes.whatsitcodes = whatcodes
+nodes.glyphcodes = glyphcodes
+
+local free_node = node.free
+local remove_node = node.remove
+local new_node = node.new
+
+nodes.handlers.protectglyphs = node.protect_glyphs
+nodes.handlers.unprotectglyphs = node.unprotect_glyphs
+
+function nodes.remove(head, current, free_too)
+ local t = current
+ head, current = remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t = nil
+ else
+ t.next, t.prev = nil, nil
+ end
+ end
+ return head, current, t
+end
+
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+
+nodes.before = node.insert_before
+nodes.after = node.insert_after
+
+function nodes.pool.kern(k)
+ local n = new_node("kern",1)
+ n.kern = k
+ return n
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-blacklist.cnf b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-blacklist.cnf
index f207c49b29d..943d3473284 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-blacklist.cnf
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-blacklist.cnf
@@ -1,5 +1,5 @@
-% Tackes ages to load
-LastResort.ttf % a MacOSX font, but also available for free from unicode.org
-% Segfaults with LuaTeX 0.76
+% Takes ages to load
+LastResort.ttf % a MacOSX font, but also available for free from unicode.org
+% Segfaults under LuaTeX 0.76
lingoes.ttf
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua
new file mode 100644
index 00000000000..ec076c2f0ed
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-colors.lua
@@ -0,0 +1,307 @@
+if not modules then modules = { } end modules ['luaotfload-colors'] = {
+ version = 2.200,
+ comment = "companion to luaotfload.lua (font color)",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2"
+}
+
+--[[doc--
+buggy coloring with the pre_output_filter when expansion is enabled
+ · tfmdata for different expansion values is split over different objects
+ · in ``initializeexpansion()``, chr.expansion_factor is set, and only
+ those characters that have it are affected
+    · in constructors.scale: commenting out the assignment
+      ``chr.expansion_factor = ve*1000`` makes the bug vanish
+--doc]]--
+
+
+local color_callback = config.luaotfload.color_callback
+if not color_callback then
+ --- maybe this would be better as a method: "early" | "late"
+ color_callback = "pre_linebreak_filter"
+-- color_callback = "pre_output_filter" --- old behavior, breaks expansion
+end
+
+
+local newnode = node.new
+local nodetype = node.id
+local traverse_nodes = node.traverse
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local stringformat = string.format
+local stringgsub = string.gsub
+local stringfind = string.find
+local stringsub = string.sub
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local identifiers = fonts.hashes.identifiers
+local registerotffeature = otffeatures.register
+
+local add_color_callback --[[ this used to be a global‽ ]]
+
+--[[doc--
+This converts a single octet into a decimal with three digits of
+precision. The optional second argument limits precision to a single
+digit.
+--doc]]--
+
+--- string -> bool? -> string
+local hex_to_dec = function (hex,one) --- one isn’t actually used anywhere ...
+ if one then
+ return stringformat("%.1g", tonumber(hex, 16)/255)
+ else
+ return stringformat("%.3g", tonumber(hex, 16)/255)
+ end
+end
+
+--[[doc--
+Color string validator / parser.
+--doc]]--
+
+local lpeg = require"lpeg"
+local lpegmatch = lpeg.match
+local C, Cg, Ct, P, R, S = lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.R, lpeg.S
+
+local digit16 = R("09", "af", "AF")
+local octet = C(digit16 * digit16)
+
+local p_rgb = octet * octet * octet
+local p_rgba = p_rgb * octet
+local valid_digits = C(p_rgba + p_rgb) -- matches eight or six hex digits
+
+local p_Crgb = Cg(octet/hex_to_dec, "red") --- for captures
+ * Cg(octet/hex_to_dec, "green")
+ * Cg(octet/hex_to_dec, "blue")
+local p_Crgba = p_Crgb * Cg(octet/hex_to_dec, "alpha")
+local extract_color = Ct(p_Crgba + p_Crgb)
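+
+--- A quick illustration of the patterns above (the input value is an
+--- arbitrary example):
+---
+--- lpegmatch(valid_digits, "00ff00") --> "00ff00"
+--- lpegmatch(extract_color, "00ff00cc")
+--- --> { red = "0", green = "1", blue = "0", alpha = "0.8" }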
+
+--- string -> (string | nil)
+local sanitize_color_expression = function (digits)
+ digits = tostring(digits)
+ local sanitized = lpegmatch(valid_digits, digits)
+ if not sanitized then
+ luaotfload.warning(
+ "“%s” is not a valid rgb[a] color expression", digits)
+ return nil
+ end
+ return sanitized
+end
+
+--[[doc--
+``setcolor`` modifies tfmdata.properties.color in place
+--doc]]--
+
+--- fontobj -> string -> unit
+---
+--- (where “string” is a rgb value as three octet
+--- hexadecimal, with an optional fourth transparency
+--- value)
+---
+local setcolor = function (tfmdata, value)
+ local sanitized = sanitize_color_expression(value)
+ local properties = tfmdata.properties
+
+ if sanitized then
+ properties.color = sanitized
+ add_color_callback()
+ end
+end
+
+registerotffeature {
+ name = "color",
+ description = "color",
+ initializers = {
+ base = setcolor,
+ node = setcolor,
+ }
+}
+
+
+--- something is carried around in ``res``
+--- for later use by color_handler() --- but what?
+
+local res --- <- state of what?
+
+--- float -> unit
+local function pageresources(alpha)
+ local res2
+ if not res then
+ res = "/TransGs1<</ca 1/CA 1>>"
+ end
+ res2 = stringformat("/TransGs%s<</ca %s/CA %s>>",
+ alpha, alpha, alpha)
+ res = stringformat("%s%s",
+ res,
+ stringfind(res, res2) and "" or res2)
+end
+
+--- we store results of below color handler as tuples of
+--- push/pop strings
+local color_cache = { } --- (string, (string * string)) hash_t
+
+--- string -> (string * string)
+local hex_to_rgba = function (digits)
+ if not digits then
+ return
+ end
+
+ --- this is called like a thousand times, so some
+ --- memoizing is in order.
+ local cached = color_cache[digits]
+ if not cached then
+ local push, pop
+ local rgb = lpegmatch(extract_color, digits)
+ if rgb.alpha then
+ pageresources(rgb.alpha)
+ push = stringformat(
+ "/TransGs%g gs %s %s %s rg",
+ rgb.alpha,
+ rgb.red,
+ rgb.green,
+ rgb.blue)
+ pop = "0 g /TransGs1 gs"
+ else
+ push = stringformat(
+ "%s %s %s rg",
+ rgb.red,
+ rgb.green,
+ rgb.blue)
+ pop = "0 g"
+ end
+ color_cache[digits] = { push, pop }
+ return push, pop
+ end
+
+ return cached[1], cached[2]
+end
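+
+--- Illustration with example values: the returned strings are the PDF
+--- literals injected before and after colorized glyphs.
+---
+--- hex_to_rgba("ff0000")   --> "1 0 0 rg", "0 g"
+--- hex_to_rgba("ff0000cc") --> "/TransGs0.8 gs 1 0 0 rg", "0 g /TransGs1 gs"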
+
+--- Luatex internal types
+
+local glyph_t = nodetype("glyph")
+local hlist_t = nodetype("hlist")
+local vlist_t = nodetype("vlist")
+local whatsit_t = nodetype("whatsit")
+local page_insert_t = nodetype("page_insert")
+local sub_box_t = nodetype("sub_box")
+
+--- node -> nil | -1 | color‽
+local lookup_next_color
+lookup_next_color = function (head) --- paragraph material
+ for n in traverse_nodes(head) do
+ local n_id = n.id
+
+ if n_id == glyph_t then
+            local n_font = n.font
+            if identifiers[n_font]
+            and identifiers[n_font].properties
+            and identifiers[n_font].properties.color
+            then
+                return identifiers[n_font].properties.color
+ else
+ return -1
+ end
+
+ elseif n_id == vlist_t or n_id == hlist_t or n_id == sub_box_t then
+ local r = lookup_next_color(n.list)
+ if r then
+ return r
+ end
+
+ elseif n_id == whatsit_t or n_id == page_insert_t then
+ return -1
+ end
+ end
+ return nil
+end
+
+--[[doc--
+While the second argument and second returned value are apparently
+always nil when the function is called, they temporarily take string
+values during the node list traversal.
+--doc]]--
+
+local cnt = 0
+--- node -> string -> int -> (node * string)
+local node_colorize
+node_colorize = function (head, current_color, next_color)
+ for n in traverse_nodes(head) do
+ local n_id = n.id
+ local nextnode = n.next
+
+ if n_id == hlist_t or n_id == vlist_t or n_id == sub_box_t then
+ local next_color_in = lookup_next_color(nextnode) or next_color
+ n.list, current_color = node_colorize(n.list, current_color, next_color_in)
+
+ elseif n_id == glyph_t then
+ cnt = cnt + 1
+ local tfmdata = identifiers[n.font]
+
+ --- colorization is restricted to those fonts
+ --- that received the “color” property upon
+ --- loading (see ``setcolor()`` above)
+ if tfmdata and tfmdata.properties and tfmdata.properties.color then
+ local font_color = tfmdata.properties.color
+-- luaotfload.info(
+-- "n: %d; %s; %d %s, %s",
+-- cnt, utf.char(n.char), n.font, "<TRUE>", font_color)
+ if font_color ~= current_color then
+ local pushcolor = hex_to_rgba(font_color)
+ local push = newnode(whatsit_t, 8)
+ push.mode = 1
+ push.data = pushcolor
+ head = insert_node_before(head, n, push)
+ current_color = font_color
+ end
+ local next_color_in = lookup_next_color (nextnode) or next_color
+ if next_color_in ~= font_color then
+ local _, popcolor = hex_to_rgba(font_color)
+ local pop = newnode(whatsit_t, 8)
+ pop.mode = 1
+ pop.data = popcolor
+ head = insert_node_after(head, n, pop)
+ current_color = nil
+ end
+
+-- else
+-- luaotfload.info(
+-- "n: %d; %s; %d %s",
+-- cnt, utf.char(n.char), n.font, "<FALSE>")
+ end
+ end
+ end
+ return head, current_color
+end
+
+--- node -> node
+local color_handler = function (head)
+ -- check if our page resources existed in the previous run
+    -- and remove them to avoid duplicating them later
+ if res then
+ local r = "/ExtGState<<" .. res .. ">>"
+ tex.pdfpageresources = stringgsub(tex.pdfpageresources, r, "")
+ end
+ local new_head = node_colorize(head, nil, nil)
+ -- now append our page resources
+ if res and stringfind(res, "%S") then -- test for non-empty string
+ local r = "/ExtGState<<" .. res .. ">>"
+ tex.pdfpageresources = tex.pdfpageresources..r
+ end
+ return new_head
+end
+
+local color_callback_activated = 0
+
+--- unit -> unit
+add_color_callback = function ( )
+ if color_callback_activated == 0 then
+ luatexbase.add_to_callback(color_callback,
+ color_handler,
+ "luaotfload.color_handler")
+ color_callback_activated = 1
+ end
+end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
+
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua
new file mode 100644
index 00000000000..576341f2308
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-database.lua
@@ -0,0 +1,1540 @@
+if not modules then modules = { } end modules ['luaotfload-database'] = {
+ version = 2.2,
+ comment = "companion to luaotfload.lua",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2"
+}
+
+--- TODO: if the specification is an absolute filename with a font not in the
+--- database, add the font to the database and load it. There is a small
+--- difficulty with the filenames of the TEXMF tree that are referenced as
+--- relative paths...
+
+--- Luatex builtins
+local load = load
+local next = next
+local pcall = pcall
+local require = require
+local tonumber = tonumber
+
+local fontloaderinfo = fontloader.info
+local iolines = io.lines
+local ioopen = io.open
+local kpseexpand_path = kpse.expand_path
+local kpseexpand_var = kpse.expand_var
+local kpselookup = kpse.lookup
+local kpsereadable_file = kpse.readable_file
+local lfsisdir = lfs.isdir
+local lfsisfile = lfs.isfile
+local mathabs = math.abs
+local mathmin = math.min
+local stringfind = string.find
+local stringformat = string.format
+local stringgmatch = string.gmatch
+local stringgsub = string.gsub
+local stringlower = string.lower
+local stringsub = string.sub
+local stringupper = string.upper
+local tableconcat = table.concat
+local tablecopy = table.copy
+local tablesort = table.sort
+local tabletofile = table.tofile
+local texiowrite_nl = texio.write_nl
+local utf8gsub = unicode.utf8.gsub
+local utf8lower = unicode.utf8.lower
+
+--- these come from Lualibs/Context
+local dirglob = dir.glob
+local dirmkdirs = dir.mkdirs
+local filebasename = file.basename
+local filenameonly = file.nameonly
+local filedirname = file.dirname
+local filecollapsepath = file.collapsepath or file.collapse_path
+local fileextname = file.extname
+local fileiswritable = file.iswritable
+local filejoin = file.join
+local filereplacesuffix = file.replacesuffix
+local filesplitpath = file.splitpath or file.split_path
+local stringis_empty = string.is_empty
+local stringsplit = string.split
+local stringstrip = string.strip
+local tableappend = table.append
+local tabletohash = table.tohash
+
+--- the font loader namespace is “fonts”, same as in Context
+--- we need to put some fallbacks into place for when running
+--- as a script
+fonts = fonts or { }
+fonts.names = fonts.names or { }
+fonts.definers = fonts.definers or { }
+
+local names = fonts.names
+
+names.version = 2.203
+names.data = nil --- contains the loaded database
+names.lookups = nil --- contains the lookup cache
+names.path = {
+ dir = "", --- db and cache directory
+ basename = "luaotfload-names.lua", --- db file name
+ path = "", --- full path to db file
+ lookup_basename = "luaotfload-lookup-cache.lua", --- cache file name
+ lookup_path = "", --- cache full path
+}
+
+config = config or { }
+config.luaotfload = config.luaotfload or { }
+config.luaotfload.resolver = config.luaotfload.resolver or "normal"
+if config.luaotfload.update_live ~= false then
+ --- this option allows for disabling updates
+ --- during a TeX run
+ config.luaotfload.update_live = true
+end
+
+-- We use the cache.* of ConTeXt (see luat-basics-gen), we can
+-- use it safely (all checks and directory creations are already done). It
+-- uses TEXMFCACHE or TEXMFVAR as starting points.
+local writable_path
+if caches then
+ writable_path = caches.getwritablepath("names","")
+ if not writable_path then
+ luaotfload.error("Impossible to find a suitable writeable cache...")
+ end
+ names.path.dir = writable_path
+ names.path.path = filejoin(writable_path, names.path.basename)
+ names.path.lookup_path = filejoin(writable_path, names.path.lookup_basename)
+else --- running as script, inject some dummies
+ caches = { }
+ logs = { report = function () end }
+end
+
+
+--[[doc--
+Auxiliary functions
+--doc]]--
+
+
+local report = logs.names_report
+
+--- string -> string
+local sanitize_string = function (str)
+ if str ~= nil then
+ return utf8gsub(utf8lower(str), "[^%a%d]", "")
+ end
+ return nil
+end
+
+--[[doc--
+This is a sketch of the luaotfload db:
+
+ type dbobj = {
+ mappings : fontentry list;
+ status : filestatus;
+ version : float;
+ // preliminary additions of v2.2:
+ basenames : (string, int) hash; // where int is the index in mappings
+ barenames : (string, int) hash; // where int is the index in mappings
+ }
+ and fontentry = {
+ familyname : string;
+ filename : (string * int); // int: subfont
+ fontname : string;
+ fullname : string;
+ names : {
+ family : string;
+ fullname : string;
+ psname : string;
+ subfamily : string;
+ }
+ size : int list;
+ slant : int;
+ weight : int;
+ width : int;
+ }
+ and filestatus = (fullname, { index : int list; timestamp : int }) dict
+
+beware that this is a reconstruction and may be incomplete.
+
+mtx-fonts has in names.tma:
+
+ type names = {
+ cache_uuid : uuid;
+ cache_version : float;
+ datastate : uuid list;
+ fallbacks : (filetype, (basename, int) hash) hash;
+ families : (basename, int list) hash;
+ files : (filename, fullname) hash;
+ indices : (fullname, int) hash;
+ mappings : (filetype, (basename, int) hash) hash;
+ names : ? (empty hash) ?;
+ rejected : (basename, int) hash;
+ specifications: fontentry list;
+ }
+ and fontentry = {
+ designsize : int;
+ familyname : string;
+ filename : string;
+ fontname : string;
+ format : string;
+ fullname : string;
+ maxsize : int;
+ minsize : int;
+ modification : int;
+ rawname : string;
+ style : string;
+ subfamily : string;
+ variant : string;
+ weight : string;
+ width : string;
+ }
+
+--doc]]--
+
+local fontnames_init = function (keep_cache) --- returns dbobj
+ return {
+ mappings = { },
+ status = { },
+ barenames = { },
+ basenames = { },
+-- fullnames = { }, // -> status
+ version = names.version,
+ }
+end
+
+local make_name = function (path)
+ return filereplacesuffix(path, "lua"), filereplacesuffix(path, "luc")
+end
+
+--- When loading a lua file we try its binary complement first, which
+--- is assumed to be located at an identical path, carrying the suffix
+--- .luc.
+
+--- string -> (string * table)
+local load_lua_file = function (path)
+    local foundname = filereplacesuffix(path, "luc")
+    local code --- declared locally so the chunk does not leak into _G
+
+    local fh = ioopen(foundname, "rb") -- try bin first
+ if fh then
+ local chunk = fh:read"*all"
+ fh:close()
+ code = load(chunk, "b")
+ end
+
+ if not code then --- fall back to text file
+ foundname = filereplacesuffix(path, "lua")
+ fh = ioopen(foundname, "rb")
+ if fh then
+ local chunk = fh:read"*all"
+ fh:close()
+ code = load(chunk, "t")
+ end
+ end
+
+ if not code then return nil, nil end
+ return foundname, code()
+end
+
+--- define locals in scope
+local crude_file_lookup
+local crude_file_lookup_verbose
+local find_closest
+local flush_cache
+local font_fullinfo
+local load_names
+local load_lookups
+local read_fonts_conf
+local reload_db
+local resolve
+local resolve_cached
+local save_names
+local save_lookups
+local scan_external_dir
+local update_names
+
+--- state of the database
+local fonts_loaded = false
+local fonts_reloaded = false
+
+--- limit output when approximate font matching (luaotfload-tool -F)
+local fuzzy_limit = 1 --- display closest only
+
+--- unit -> dbobj
+load_names = function ( )
+ local starttime = os.gettimeofday()
+ local foundname, data = load_lua_file(names.path.path)
+
+ if data then
+ report("both", 2, "db",
+ "Font names database loaded", "%s", foundname)
+ report("info", 3, "db", "Loading took %0.f ms",
+ 1000*(os.gettimeofday()-starttime))
+ else
+ report("both", 0, "db",
+ [[Font names database not found, generating new one.
+ This can take several minutes; please be patient.]])
+ data = update_names(fontnames_init(false))
+ save_names(data)
+ end
+ fonts_loaded = true
+ return data
+end
+
+--- unit -> dbobj
+load_lookups = function ( )
+ local foundname, data = load_lua_file(names.path.lookup_path)
+ if data then
+ report("both", 3, "cache",
+ "Lookup cache loaded (%s)", foundname)
+ else
+ report("both", 1, "cache",
+ "No lookup cache, creating empty.")
+ data = { }
+ end
+ return data
+end
+
+local style_synonyms = { set = { } }
+do
+ style_synonyms.list = {
+ regular = { "normal", "roman",
+ "plain", "book",
+ "medium", },
+ bold = { "demi", "demibold",
+ "semibold", "boldregular",},
+ italic = { "regularitalic", "normalitalic",
+ "oblique", "slanted", },
+ bolditalic = { "boldoblique", "boldslanted",
+ "demiitalic", "demioblique",
+ "demislanted", "demibolditalic",
+ "semibolditalic", },
+ }
+
+ for category, synonyms in next, style_synonyms.list do
+ style_synonyms.set[category] = tabletohash(synonyms, true)
+ end
+end
+
+local type1_formats = { "tfm", "ofm", }
+
+--- string -> (string * bool | int)
+crude_file_lookup_verbose = function (filename)
+ if not names.data then names.data = load_names() end
+ local data = names.data
+ local mappings = data.mappings
+ local found
+
+ --- look up in db first ...
+ found = data.barenames[filename]
+ if found and mappings[found] then
+ found = mappings[found].filename
+ report("info", 0, "db",
+ "crude file lookup: req=%s; hit=bare; ret=%s",
+ filename, found[1])
+ return found
+ end
+ found = data.basenames[filename]
+ if found and mappings[found] then
+ found = mappings[found].filename
+ report("info", 0, "db",
+ "crude file lookup: req=%s; hit=base; ret=%s",
+ filename, found[1])
+ return found
+ end
+
+ --- ofm and tfm
+ for i=1, #type1_formats do
+ local format = type1_formats[i]
+ if resolvers.findfile(filename, format) then
+ return { file.addsuffix(filename, format), false }, format
+ end
+ end
+ return { filename, false }, nil
+end
+
+--- string -> (string * bool | int)
+crude_file_lookup = function (filename)
+ if not names.data then names.data = load_names() end
+ local data = names.data
+ local mappings = data.mappings
+ local found = data.barenames[filename]
+ or data.basenames[filename]
+ if found then
+ found = data.mappings[found]
+ if found then return found.filename end
+ end
+ for i=1, #type1_formats do
+ local format = type1_formats[i]
+ if resolvers.findfile(filename, format) then
+ return { file.addsuffix(filename, format), false }, format
+ end
+ end
+ return { filename, false }, nil
+end
+
+--[[doc--
+Lookups can be quite costly, more so the less specific they are.
+Even if we find a matching font eventually, the next time the
+user compiles their document they will have to sit through the delay
+again.
+Thus, some caching of results -- even between runs -- is in order.
+We’ll just store successful name: lookups in a separate cache file.
+
+type lookup_cache = (string, (string * num)) dict
+
+Complete, needs testing:
+ × 1) add cache to dbobj
+ × 2) wrap lookups in cached versions
+ × 3) make caching optional (via the config table) for debugging
+ × 4) make names_update() cache aware (nil if “force”)
+ × 5) add logging
+ × 6) add cache control to luaotfload-tool
+ × 7) incr db version (now 2.203)
+ × 8) save cache only at the end of a run
+
+The spec is modified in place (ugh), so we’ll have to catalogue what
+fields actually influence its behavior.
+
+Idk what the “spec” resolver is for.
+
+ lookup inspects modifies
+ file: name forced, name
+ name:* name, style, sub, resolved, sub, name, forced
+ optsize, size
+ spec: name, sub resolved, sub, name, forced
+
+* name: contains both the name resolver from luatex-fonts and resolve()
+ below
+
+From my reading of font-def.lua, what a resolver does is
+basically rewrite the “name” field of the specification record
+with the resolution.
+Also, the fields “resolved”, “sub”, “force” etc. influence the outcome.
+
+--doc]]--
+
+--- 'a -> 'a -> table -> (string * int|boolean * boolean)
+resolve_cached = function (_, _, specification)
+ --if not names.data then names.data = load_names() end
+ if not names.lookups then names.lookups = load_lookups() end
+ local request = specification.specification
+ report("both", 4, "cache", "looking for “%s” in cache ...",
+ request)
+
+ local found = names.lookups[request]
+
+ --- case 1) cache positive ----------------------------------------
+ if found then --- replay fields from cache hit
+ report("info", 4, "cache", "found!")
+ return found[1], found[2], true
+ end
+ report("both", 4, "cache", "not cached; resolving")
+
+ --- case 2) cache negative ----------------------------------------
+ --- first we resolve normally ...
+ local filename, subfont, success = resolve(nil, nil, specification)
+ if not success then return filename, subfont, false end
+ --- ... then we add the fields to the cache ... ...
+ local entry = { filename, subfont }
+ report("both", 4, "cache", "new entry: %s", request)
+ names.lookups[request] = entry
+
+ --- obviously, the updated cache needs to be stored.
+ --- TODO this should trigger a save only once the
+ --- document is compiled (finish_pdffile callback?)
+ report("both", 5, "cache", "saving updated cache")
+ save_lookups()
+ return filename, subfont, true
+end
+
+--- this used to be inlined; with the lookup cache we don’t
+--- have to be parsimonious wrt function calls anymore
+--- “found” is the match accumulator
+local add_to_match = function (
+ found, optsize, dsnsize, size,
+ minsize, maxsize, face)
+ local continue = true
+ if optsize then
+ if dsnsize == size or (size > minsize and size <= maxsize) then
+ found[1] = face
+ continue = false ---> break
+ else
+ found[#found+1] = face
+ end
+ else
+ found[1] = face
+ continue = false ---> break
+ end
+ return found, continue
+end
+
+--[[doc--
+
+Luatex-fonts, the font-loader package luaotfload imports, comes with
+basic file location facilities (see luatex-fonts-syn.lua).
+However, not only does the builtin functionality rely on Context’s font
+name database, it is also too limited to be of more than basic use.
+For this reason, luaotfload supplies its own resolvers that access
+the font database created by the luaotfload-tool script.
+
+--doc]]--
+
+
+---
+--- the request specification has the fields:
+---
+--- · features: table
+--- · normal: set of { ccmp clig itlc kern liga locl mark mkmk rlig }
+--- · ???
+--- · forced: string
+--- · lookup: "name" | "file"
+--- · method: string
+--- · name: string
+--- · resolved: string
+--- · size: int
+--- · specification: string (== <lookup> ":" <name>)
+--- · sub: string
+---
+--- the first return value of “resolve” is the file name of the
+--- requested font (string)
+--- the second is of type bool or string and indicates the subfont of a
+--- ttc
+---
+--- 'a -> 'a -> table -> (string * string | bool * bool)
+---
+--- note by phg: I added a third return value that indicates a
+--- successful lookup as this cannot be inferred from the other
+--- values.
+---
+
+resolve = function (_,_,specification) -- the 1st two parameters are used by ConTeXt
+ if not fonts_loaded then names.data = load_names() end
+ local data = names.data
+
+ local name = sanitize_string(specification.name)
+ local style = sanitize_string(specification.style) or "regular"
+
+ local size
+ if specification.optsize then
+ size = tonumber(specification.optsize)
+ elseif specification.size then
+ size = specification.size / 65536
+ end
+
+ if type(data) ~= "table" then
+ --- this catches a case where load_names() doesn’t
+ --- return a database object, which can happen only
+        --- if the database contains valid Lua code that does not
+        --- evaluate to a table, e.g. a plain integer.
+ if not fonts_reloaded then
+ return reload_db("invalid database; not a table",
+ resolve, nil, nil, specification
+ )
+ end
+        --- unsuccessfully reloaded; bail
+ return specification.name, false, false
+ end
+
+ local db_version, nms_version = data.version, names.version
+ if db_version ~= nms_version then
+ report("log", 0, "db",
+ [[version mismatch; expected %4.3f, got %4.3f]],
+ nms_version, db_version
+ )
+ return reload_db("version mismatch", resolve, nil, nil, specification)
+ end
+
+ if not data.mappings then
+ return reload_db("invalid database; missing font mapping",
+ resolve, nil, nil, specification
+ )
+ end
+
+ local found = { }
+ local synonym_set = style_synonyms.set
+ for _, face in next, data.mappings do
+ local family, subfamily, fullname, psname, fontname, pfullname
+
+ local facenames = face.sanitized
+ if facenames then
+ family = facenames.family
+ subfamily = facenames.subfamily
+ fullname = facenames.fullname
+ psname = facenames.psname
+ end
+ fontname = facenames.fontname or sanitize_string(face.fontname)
+ pfullname = facenames.pfullname or sanitize_string(face.fullname)
+
+ local optsize, dsnsize, maxsize, minsize
+ if #face.size > 0 then
+ optsize = face.size
+ dsnsize = optsize[1] and optsize[1] / 10
+ -- can be nil
+ maxsize = optsize[2] and optsize[2] / 10 or dsnsize
+ minsize = optsize[3] and optsize[3] / 10 or dsnsize
+ end
+
+ if name == family then
+ if subfamily == style then
+ local continue
+ found, continue = add_to_match(
+ found, optsize, dsnsize, size,
+ minsize, maxsize, face)
+ if continue == false then break end
+ elseif synonym_set[style] and
+ synonym_set[style][subfamily]
+ then
+ local continue
+ found, continue = add_to_match(
+ found, optsize, dsnsize, size,
+ minsize, maxsize, face)
+ if continue == false then break end
+ elseif subfamily == "regular" or
+ synonym_set.regular[subfamily] then
+ found.fallback = face
+ elseif name == fullname
+ or name == pfullname
+ or name == fontname
+ or name == psname
+ then
+ local continue
+ found, continue = add_to_match(
+ found, optsize, dsnsize, size,
+ minsize, maxsize, face)
+ if continue == false then break end
+ end
+ else
+ if name == fullname
+ or name == pfullname
+ or name == fontname
+ or name == psname then
+ local continue
+ found, continue = add_to_match(
+ found, optsize, dsnsize, size,
+ minsize, maxsize, face)
+ if continue == false then break end
+ end
+ end
+ end
+
+ if #found == 1 then
+ --- “found” is really synonymous with “registered in the db”.
+ local filename = found[1].filename[1]
+ if lfsisfile(filename) or kpselookup(filename) then
+ report("log", 0, "resolve",
+ "font family='%s', subfamily='%s' found: %s",
+ name, style, filename
+ )
+ return filename, found[1].filename[2], true
+ end
+ elseif #found > 1 then
+ -- we found matching font(s) but not in the requested optical
+ -- sizes, so we loop through the matches to find the one with
+ -- least difference from the requested size.
+ local closest
+ local least = math.huge -- initial value is infinity
+ for i,face in next, found do
+ local dsnsize = face.size[1]/10
+ local difference = mathabs(dsnsize-size)
+ if difference < least then
+ closest = face
+ least = difference
+ end
+ end
+ local filename = closest.filename[1]
+ if lfsisfile(filename) or kpselookup(filename) then
+ report("log", 0, "resolve",
+ "font family='%s', subfamily='%s' found: %s",
+ name, style, filename
+ )
+ return filename, closest.filename[2], true
+ end
+ elseif found.fallback then
+ return found.fallback.filename[1],
+ found.fallback.filename[2],
+ true
+ end
+
+ --- no font found so far
+ if not fonts_reloaded then
+ --- last straw: try reloading the database
+ return reload_db(
+ "unresolved font name: ‘" .. name .. "’",
+ resolve, nil, nil, specification
+ )
+ end
+
+ --- else, fallback to requested name
+ return specification.name, false, false
+end --- resolve()
+
+--- when reload is triggered we update the database
+--- and then re-run the caller with the arg list
+
+--- string -> ('a -> 'a) -> 'a list -> 'a
+reload_db = function (why, caller, ...)
+ report("both", 1, "db", "reload initiated; reason: “%s”", why)
+ names.data = update_names()
+ save_names()
+ fonts_reloaded = true
+ return caller(...)
+end
+
+--- string -> string -> int
+local iterative_levenshtein = function (s1, s2)
+
+ local costs = { }
+ local len1, len2 = #s1, #s2
+
+ for i = 0, len1 do
+ local last = i
+ for j = 0, len2 do
+ if i == 0 then
+ costs[j] = j
+ else
+ if j > 0 then
+ local current = costs[j-1]
+ if stringsub(s1, i, i) ~= stringsub(s2, j, j) then
+ current = mathmin(current, last, costs[j]) + 1
+ end
+ costs[j-1] = last
+ last = current
+ end
+ end
+ end
+ if i > 0 then costs[len2] = last end
+ end
+
+ return costs[len2]--- lower right has the distance
+end
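+
+--- Sanity check with the textbook example pair (illustrative only):
+---
+--- iterative_levenshtein("kitten", "sitting") --> 3
+--- iterative_levenshtein("abc", "abc")        --> 0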
+
+--- string -> int -> bool
+find_closest = function (name, limit)
+ local name = sanitize_string(name)
+ limit = limit or fuzzy_limit
+
+ if not fonts_loaded then names.data = load_names() end
+
+ local data = names.data
+
+ if type(data) ~= "table" then
+ return reload_db("no database", find_closest, name)
+ end
+ local by_distance = { } --- (int, string list) dict
+ local distances = { } --- int list
+ local cached = { } --- (string, int) dict
+ local mappings = data.mappings
+ local n_fonts = #mappings
+
+ for n = 1, n_fonts do
+ local current = mappings[n]
+ local cnames = current.sanitized
+ --[[
+            This is simplistic but surprisingly fast.
+ Matching is performed against the “family” name
+ of a db record. We then store its “fullname” at
+            its edit distance.
+ We should probably do some weighting over all the
+ font name categories as well as whatever agrep
+ does.
+ --]]
+ if cnames then
+ local fullname, family = cnames.fullname, cnames.family
+ family = sanitize_string(family)
+
+ local dist = cached[family]--- maybe already calculated
+ if not dist then
+ dist = iterative_levenshtein(name, family)
+ cached[family] = dist
+ end
+ local namelst = by_distance[dist]
+ if not namelst then --- first entry
+ namelst = { fullname }
+ distances[#distances+1] = dist
+ else --- append
+ namelst[#namelst+1] = fullname
+ end
+ by_distance[dist] = namelst
+ end
+ end
+
+ --- print the matches according to their distance
+ local n_distances = #distances
+ if n_distances > 0 then --- got some data
+ tablesort(distances)
+ limit = mathmin(n_distances, limit)
+ report(false, 1, "query",
+ "displaying %d distance levels", limit)
+
+ for i = 1, limit do
+ local dist = distances[i]
+ local namelst = by_distance[dist]
+ report(false, 0, "query",
+ "distance from “" .. name .. "”: " .. dist
+ .. "\n " .. tableconcat(namelst, "\n ")
+ )
+ end
+
+ return true
+ end
+ return false
+end --- find_closest()
+
+local sanitize_names = function (names)
+ local res = { }
+ for idx, name in next, names do
+ res[idx] = sanitize_string(name)
+ end
+ return res
+end
+
+--[[doc--
+The data inside an Opentype font file can be quite heterogeneous.
+Thus in order to get the relevant information, parts of the original
+table as returned by the font file reader need to be relocated.
+--doc]]--
+font_fullinfo = function (filename, subfont)
+ local tfmdata = { }
+ local rawfont = fontloader.open(filename, subfont)
+ if not rawfont then
+ report("log", 1, "error", "failed to open %s", filename)
+ return
+ end
+ local metadata = fontloader.to_table(rawfont)
+ fontloader.close(rawfont)
+ collectgarbage("collect")
+ -- see http://www.microsoft.com/typography/OTSPEC/features_pt.htm#size
+ if metadata.fontstyle_name then
+ for _, name in next, metadata.fontstyle_name do
+ if name.lang == 1033 then --- I hate magic numbers
+ tfmdata.fontstyle_name = name.name
+ end
+ end
+ end
+ if metadata.names then
+ for _, namedata in next, metadata.names do
+ if namedata.lang == "English (US)" then
+ local names = {
+ --- see
+ --- https://developer.apple.com/fonts/TTRefMan/RM06/Chap6name.html
+ fullname = namedata.names.compatfull
+ or namedata.names.fullname,
+ family = namedata.names.preffamilyname
+ or namedata.names.family,
+ subfamily = tfmdata.fontstyle_name
+ or namedata.names.prefmodifiers
+ or namedata.names.subfamily,
+ psname = namedata.names.postscriptname,
+ pfullname = metadata.fullname,
+ fontname = metadata.fontname,
+ }
+ tfmdata.names = names
+ tfmdata.sanitized = sanitize_names(names)
+ end
+ end
+ else
+        -- no names table, probably a broken font
+        report("log", 1, "db", "broken font rejected", "%s", filename)
+ return
+ end
+ tfmdata.fontname = metadata.fontname
+ tfmdata.fullname = metadata.fullname
+ tfmdata.familyname = metadata.familyname
+ tfmdata.filename = { filename, subfont } -- always store full path
+ tfmdata.weight = metadata.pfminfo.weight
+ tfmdata.width = metadata.pfminfo.width
+ tfmdata.slant = metadata.italicangle
+ -- don't waste the space with zero values
+ tfmdata.size = {
+ metadata.design_size ~= 0 and metadata.design_size or nil,
+ metadata.design_range_top ~= 0 and metadata.design_range_top or nil,
+ metadata.design_range_bottom ~= 0 and metadata.design_range_bottom or nil,
+ }
+ return tfmdata
+end
+
+--- we return true if the font is new or re-indexed
+--- string -> dbobj -> dbobj -> bool
+local load_font = function (fullname, fontnames, newfontnames)
+ if not fullname then return false end
+
+ local newmappings = newfontnames.mappings
+ local newstatus = newfontnames.status
+
+-- local newfullnames = newfontnames.fullnames
+ local newbasenames = newfontnames.basenames
+ local newbarenames = newfontnames.barenames
+
+ local mappings = fontnames.mappings
+ local status = fontnames.status
+-- local fullnames = fontnames.fullnames
+ local basenames = fontnames.basenames
+ local barenames = fontnames.barenames
+
+ local basename = filebasename(fullname)
+ local barename = filenameonly(fullname)
+
+ local entryname = basename
+
+ if names.blacklist[fullname] or names.blacklist[basename]
+ then
+ report("log", 2, "db",
+ "ignoring blacklisted font “%s”", fullname)
+ return false
+ end
+
+ local timestamp, db_timestamp
+ db_timestamp = status[fullname]
+ and status[fullname].timestamp
+ timestamp = lfs.attributes(fullname, "modification")
+
+ local index_status = newstatus[fullname]
+ --- index_status: nil | false | table
+ if index_status and index_status.timestamp == timestamp then
+ -- already indexed this run
+ return false
+ end
+
+ newstatus[fullname] = newstatus[fullname] or { }
+ newstatus[fullname].timestamp = timestamp
+ newstatus[fullname].index = newstatus[fullname].index or { }
+
+ --- this test compares the modification data registered
+ --- in the database with the current one
+ if db_timestamp == timestamp
+ and not newstatus[fullname].index[1] then
+ for _,v in next, status[fullname].index do
+ local index = #newstatus[fullname].index
+ local fullinfo = mappings[v]
+ local location = #newmappings + 1
+ newmappings[location] = fullinfo --- keep
+ newstatus[fullname].index[index+1] = location --- is this actually used anywhere?
+-- newfullnames[fullname] = location
+ newbasenames[basename] = location
+ newbarenames[barename] = location
+ end
+ report("log", 2, "db", "font “%s” already indexed", entryname)
+ return false
+ end
+
+ local info = fontloaderinfo(fullname)
+ if info then
+ if type(info) == "table" and #info > 1 then --- ttc
+ for n_font = 1, #info do
+ local fullinfo = font_fullinfo(fullname, n_font-1)
+ if not fullinfo then
+ return false
+ end
+ local location = #newmappings+1
+ local index = newstatus[fullname].index[n_font]
+ if not index then index = location end
+
+ newmappings[index] = fullinfo
+-- newfullnames[fullname] = location
+ newbasenames[basename] = location
+ newbarenames[barename] = location
+ newstatus[fullname].index[n_font] = index
+ end
+ else
+ local fullinfo = font_fullinfo(fullname, false)
+ if not fullinfo then
+ return false
+ end
+ local location = #newmappings+1
+ local index = newstatus[fullname].index[1]
+ if not index then index = location end
+
+ newmappings[index] = fullinfo
+-- newfullnames[fullname] = location
+ newbasenames[basename] = location
+ newbarenames[barename] = location
+ newstatus[fullname].index[1] = index
+ end
+
+ else --- missing info
+ report("log", 1, "db", "failed to load “%s”", entryname)
+ return false
+ end
+ return true
+end
+
+local path_normalize
+do
+ --- os.type and os.name are constants so we
+ --- choose a normalization function in advance
+ --- instead of testing with every call
+ local os_type, os_name = os.type, os.name
+ local filecollapsepath = filecollapsepath
+ local lfsreadlink = lfs.readlink
+
+ --- windows and dos
+ if os_type == "windows" or os_type == "msdos" then
+        --- ms platform specific stuff
+ path_normalize = function (path)
+ path = stringgsub(path, '\\', '/')
+ path = stringlower(path)
+ path = stringgsub(path, '^/cygdrive/(%a)/', '%1:/')
+ path = filecollapsepath(path)
+ return path
+ end
+
+ elseif os_name == "cygwin" then -- union of ms + unix
+ path_normalize = function (path)
+ path = stringgsub(path, '\\', '/')
+ path = stringlower(path)
+ path = stringgsub(path, '^/cygdrive/(%a)/', '%1:/')
+ local dest = lfsreadlink(path)
+ if dest then
+ if kpsereadable_file(dest) then
+ path = dest
+ elseif kpsereadable_file(filejoin(filedirname(path), dest)) then
+                path = filejoin(filedirname(path), dest)
+ else
+ -- broken symlink?
+ end
+ end
+ path = filecollapsepath(path)
+ return path
+ end
+
+ else -- posix
+ path_normalize = function (path)
+ local dest = lfsreadlink(path)
+ if dest then
+ if kpsereadable_file(dest) then
+ path = dest
+ elseif kpsereadable_file(filejoin(filedirname(path), dest)) then
+                path = filejoin(filedirname(path), dest)
+ else
+ -- broken symlink?
+ end
+ end
+ path = filecollapsepath(path)
+ return path
+ end
+ end
+end
+
+fonts.path_normalize = path_normalize
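+
+--- A rough usage sketch (the paths are hypothetical): the windows/msdos
+--- variant lowercases and slash-normalizes,
+---
+---     path_normalize [[C:\Windows\Fonts\ARIALBD.TTF]]
+---     --> "c:/windows/fonts/arialbd.ttf"
+---
+--- while the posix variant mainly resolves symlinks and collapses the
+--- path via filecollapsepath().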
+
+names.blacklist = { }
+
+local function read_blacklist()
+ local files = {
+ kpselookup("luaotfload-blacklist.cnf", {all=true, format="tex"})
+ }
+ local blacklist = names.blacklist
+ local whitelist = { }
+
+ if files and type(files) == "table" then
+ for _,v in next, files do
+ for line in iolines(v) do
+ line = stringstrip(line) -- to get rid of lines like " % foo"
+ local first_chr = stringsub(line, 1, 1) --- faster than find
+ if first_chr == "%" or stringis_empty(line) then
+ -- comment or empty line
+ else
+ --- this is highly inefficient
+ line = stringsplit(line, "%")[1]
+ line = stringstrip(line)
+ if stringsub(line, 1, 1) == "-" then
+ whitelist[stringsub(line, 2, -1)] = true
+ else
+ report("log", 2, "db", "blacklisted file “%s”", line)
+ blacklist[line] = true
+ end
+ end
+ end
+ end
+ end
+    for fontname in next, whitelist do
+ blacklist[fontname] = nil
+ end
+end
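+
+--- An illustrative (hypothetical) luaotfload-blacklist.cnf as understood
+--- by read_blacklist(): “%” starts a comment, a leading “-” whitelists an
+--- entry again.
+---
+---     % fonts that are known to crash the loader
+---     lingoes.ttf
+---     -texgyrepagella-regular.otf  % undo an inherited blacklist entry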
+
+local font_extensions = { "otf", "ttf", "ttc", "dfont" }
+local font_extensions_set = {}
+for key, value in next, font_extensions do
+ font_extensions_set[value] = true
+end
+
+--- string -> dbobj -> dbobj -> bool -> (int * int)
+local scan_dir = function (dirname, fontnames, newfontnames)
+ --[[
+ This function scans a directory and populates the list of fonts
+ with all the fonts it finds.
+ - dirname is the name of the directory to scan
+    - fontnames and newfontnames are the font database tables to fill
+ - texmf used to be a boolean saying if we are scanning a texmf directory
+ ]]
+ local n_scanned, n_new = 0, 0 --- total of fonts collected
+ report("log", 2, "db", "scanning", "%s", dirname)
+ for _,i in next, font_extensions do
+ for _,ext in next, { i, stringupper(i) } do
+ local found = dirglob(stringformat("%s/**.%s$", dirname, ext))
+ local n_found = #found
+ --- note that glob fails silently on broken symlinks, which
+ --- happens sometimes in TeX Live.
+ report("log", 2, "db", "%s '%s' fonts found", n_found, ext)
+ n_scanned = n_scanned + n_found
+ for j=1, n_found do
+ local fullname = found[j]
+ fullname = path_normalize(fullname)
+ report("log", 2, "db", "loading font “%s”", fullname)
+ local new = load_font(fullname, fontnames, newfontnames)
+ if new then n_new = n_new + 1 end
+ end
+ end
+ end
+ report("log", 2, "db", "%d fonts found in '%s'", n_scanned, dirname)
+ return n_scanned, n_new
+end
+
+local function scan_texmf_fonts(fontnames, newfontnames)
+ local n_scanned, n_new = 0, 0
+ --[[
+ This function scans all fonts in the texmf tree, through kpathsea
+ variables OPENTYPEFONTS and TTFONTS of texmf.cnf
+ ]]
+ if stringis_empty(kpseexpand_path("$OSFONTDIR")) then
+ report("info", 2, "db", "Scanning TEXMF fonts...")
+ else
+ report("info", 2, "db", "Scanning TEXMF and OS fonts...")
+ end
+ local fontdirs = stringgsub(kpseexpand_path("$OPENTYPEFONTS"), "^%.", "")
+ fontdirs = fontdirs .. stringgsub(kpseexpand_path("$TTFONTS"), "^%.", "")
+ if not stringis_empty(fontdirs) then
+ for _,d in next, filesplitpath(fontdirs) do
+ local found, new = scan_dir(d, fontnames, newfontnames)
+ n_scanned = n_scanned + found
+ n_new = n_new + new
+ end
+ end
+ return n_scanned, n_new
+end
+
+--[[
+ For the OS fonts, there are several options:
+ - if OSFONTDIR is set (which is the case under windows by default but
+ not on the other OSs), it scans it at the same time as the texmf tree,
+ in the scan_texmf_fonts.
+ - if not:
+ - under Windows and Mac OSX, we take a look at some hardcoded directories
+ - under Unix, we read /etc/fonts/fonts.conf and read the directories in it
+
+ This means that if you have fonts in fancy directories, you need to set them
+ in OSFONTDIR.
+]]
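+
+--[[
+    For example (hypothetical values), a personal font tree outside the
+    usual locations could be made visible to the scanner like this:
+
+        export OSFONTDIR="/data/fonts//:$HOME/fonts//"
+
+    where the trailing “//” tells kpathsea to search subdirectories
+    recursively.
+]]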
+
+local read_fonts_conf
+do --- closure for read_fonts_conf()
+
+ local lpeg = require "lpeg"
+
+ local C, Cc, Cf, Cg, Ct
+ = lpeg.C, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Ct
+
+ local P, R, S, lpegmatch
+ = lpeg.P, lpeg.R, lpeg.S, lpeg.match
+
+ local alpha = R("az", "AZ")
+ local digit = R"09"
+ local tag_name = C(alpha^1)
+ local whitespace = S" \n\r\t\v"
+ local ws = whitespace^1
+ local comment = P"<!--" * (1 - P"--")^0 * P"-->"
+
+ ---> header specifica
+ local xml_declaration = P"<?xml" * (1 - P"?>")^0 * P"?>"
+ local xml_doctype = P"<!DOCTYPE" * ws
+ * "fontconfig" * (1 - P">")^0 * P">"
+ local header = xml_declaration^-1
+ * (xml_doctype + comment + ws)^0
+
+ ---> enforce root node
+ local root_start = P"<" * ws^-1 * P"fontconfig" * ws^-1 * P">"
+ local root_stop = P"</" * ws^-1 * P"fontconfig" * ws^-1 * P">"
+
+ local dquote, squote = P[["]], P"'"
+ local xml_namestartchar = S":_" + alpha --- ascii only, funk the rest
+ local xml_namechar = S":._" + alpha + digit
+ local xml_name = ws^-1
+ * C(xml_namestartchar * xml_namechar^0)
+ local xml_attvalue = dquote * C((1 - S[[%&"]])^1) * dquote * ws^-1
+ + squote * C((1 - S[[%&']])^1) * squote * ws^-1
+ local xml_attr = Cg(xml_name * P"=" * xml_attvalue)
+ local xml_attr_list = Cf(Ct"" * xml_attr^1, rawset)
+
+ --[[doc--
+ scan_node creates a parser for a given xml tag.
+ --doc]]--
+ --- string -> bool -> lpeg_t
+ local scan_node = function (tag)
+ --- Node attributes go into a table with the index “attributes”
+ --- (relevant for “prefix="xdg"” and the likes).
+ local p_tag = P(tag)
+ local with_attributes = P"<" * p_tag
+ * Cg(xml_attr_list, "attributes")^-1
+ * ws^-1
+ * P">"
+ local plain = P"<" * p_tag * ws^-1 * P">"
+ local node_start = plain + with_attributes
+ local node_stop = P"</" * p_tag * ws^-1 * P">"
+ --- there is no nesting, the earth is flat ...
+ local node = node_start
+ * Cc(tag) * C(comment + (1 - node_stop)^1)
+ * node_stop
+ return Ct(node) -- returns {string, string [, attributes = { key = val }] }
+ end
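+
+    --[[doc--
+        For illustration (hypothetical input), the node scanner built by
+        scan_node"dir" maps
+
+            <dir prefix="xdg">fonts</dir>
+
+        to { "dir", "fonts", attributes = { prefix = "xdg" } }.
+    --doc]]--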
+
+ --[[doc--
+ At the moment, the interesting tags are “dir” for
+ directory declarations, and “include” for including
+ further configuration files.
+
+ spec: http://freedesktop.org/software/fontconfig/fontconfig-user.html
+ --doc]]--
+ local include_node = scan_node"include"
+ local dir_node = scan_node"dir"
+
+ local element = dir_node
+ + include_node
+ + comment --> ignore
+ + P(1-root_stop) --> skip byte
+
+ local root = root_start * Ct(element^0) * root_stop
+ local p_cheapxml = header * root
+
+ --lpeg.print(p_cheapxml) ---> 757 rules with v0.10
+
+ --[[doc--
+ fonts_conf_scanner() handles configuration files.
+        It is called on an absolute path to a config file (e.g.
+ /home/luser/.config/fontconfig/fonts.conf) and returns a list
+ of the nodes it managed to extract from the file.
+ --doc]]--
+ --- string -> path list
+ local fonts_conf_scanner = function (path)
+ local fh = ioopen(path, "r")
+ if not fh then
+ report("both", 3, "db", "cannot open fontconfig file %s", path)
+ return
+ end
+ local raw = fh:read"*all"
+ fh:close()
+
+ local confdata = lpegmatch(p_cheapxml, raw)
+ if not confdata then
+ report("both", 3, "db", "cannot scan fontconfig file %s", path)
+ return
+ end
+ return confdata
+ end
+
+ --[[doc--
+ read_fonts_conf_indeed() is called with six arguments; the
+ latter three are tables that represent the state and are
+ always returned.
+ The first three are
+ · the path to the file
+ · the expanded $HOME
+ · the expanded $XDG_CONFIG_DIR
+ --doc]]--
+ --- string -> string -> string -> tab -> tab -> (tab * tab * tab)
+ local read_fonts_conf_indeed
+ read_fonts_conf_indeed = function (start, home, xdg_home,
+ acc, done, dirs_done)
+
+ local paths = fonts_conf_scanner(start)
+ if not paths then --- nothing to do
+ return acc, done, dirs_done
+ end
+
+ for i=1, #paths do
+ local pathobj = paths[i]
+ local kind, path = pathobj[1], pathobj[2]
+ local attributes = pathobj.attributes
+ if attributes and attributes.prefix == "xdg" then
+ --- this prepends the xdg root (usually ~/.config)
+ path = filejoin(xdg_home, path)
+ end
+
+ if kind == "dir" then
+ if stringsub(path, 1, 1) == "~" then
+ path = filejoin(home, stringsub(path, 2))
+ end
+ --- We exclude paths with texmf in them, as they should be
+ --- found anyway; also duplicates are ignored by checking
+ --- if they are elements of dirs_done.
+ if not (stringfind(path, "texmf") or dirs_done[path]) then
+ acc[#acc+1] = path
+ dirs_done[path] = true
+ end
+
+ elseif kind == "include" then
+                --- here the path can be one of four things: a directory
+                --- or a file, given as an absolute or a relative path.
+ if stringsub(path, 1, 1) == "~" then
+ path = filejoin(home, stringsub(path, 2))
+ elseif --- if the path is relative, we make it absolute
+ not ( lfsisfile(path) or lfsisdir(path) )
+ then
+ path = filejoin(filedirname(start), path)
+ end
+ if lfsisfile(path)
+ and kpsereadable_file(path)
+ and not done[path]
+ then
+                    --- we exclude paths with texmf in them, as they should
+ --- be found otherwise
+ acc = read_fonts_conf_indeed(
+ path, home, xdg_home,
+ acc, done, dirs_done)
+ elseif lfsisdir(path) then --- arrow code ahead
+ local config_files = dirglob(filejoin(path, "*.conf"))
+ for _, filename in next, config_files do
+ if not done[filename] then
+ acc = read_fonts_conf_indeed(
+ filename, home, xdg_home,
+ acc, done, dirs_done)
+ end
+ end
+ end --- match “kind”
+ end --- iterate paths
+ end
+
+ --inspect(acc)
+ --inspect(done)
+ return acc, done, dirs_done
+ end --- read_fonts_conf_indeed()
+
+ --[[doc--
+ read_fonts_conf() sets up an accumulator and two sets
+ for tracking what’s been done.
+
+ Also, the environment variables HOME and XDG_CONFIG_HOME --
+        which are constants anyways -- are expanded so we don't have to
+        repeat them over and over again as with the old parser.
+ Now they’re just passed on to every call of
+ read_fonts_conf_indeed().
+
+ read_fonts_conf() is also the only reference visible outside
+ the closure.
+ --doc]]--
+ --- list -> list
+ read_fonts_conf = function (path_list)
+ local home = kpseexpand_path"~" --- could be os.getenv"HOME"
+ local xdg_home = kpseexpand_path"$XDG_CONFIG_HOME"
+ if xdg_home == "" then xdg_home = filejoin(home, ".config") end
+ local acc = { } ---> list: paths collected
+ local done = { } ---> set: files inspected
+ local dirs_done = { } ---> set: dirs in list
+ for i=1, #path_list do --- we keep the state between files
+ acc, done, dirs_done = read_fonts_conf_indeed(
+ path_list[i], home, xdg_home,
+ acc, done, dirs_done)
+ end
+ return acc
+ end
+end --- read_fonts_conf closure
+
+--- TODO stuff those paths into some writable table
+local function get_os_dirs()
+ if os.name == 'macosx' then
+ return {
+ filejoin(kpseexpand_path('~'), "Library/Fonts"),
+ "/Library/Fonts",
+ "/System/Library/Fonts",
+ "/Network/Library/Fonts",
+ }
+ elseif os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
+ local windir = os.getenv("WINDIR")
+ return { filejoin(windir, 'Fonts') }
+ else
+ local fonts_conves = { --- plural, much?
+ "/usr/local/etc/fonts/fonts.conf",
+ "/etc/fonts/fonts.conf",
+ }
+ local os_dirs = read_fonts_conf(fonts_conves)
+ return os_dirs
+ end
+ return {}
+end
+
+local function scan_os_fonts(fontnames, newfontnames)
+ local n_scanned, n_new = 0, 0
+ --[[
+ This function scans the OS fonts through
+ - fontcache for Unix (reads the fonts.conf file and scans the
+ directories)
+ - a static set of directories for Windows and MacOSX
+ ]]
+ report("info", 2, "db", "Scanning OS fonts...")
+ report("info", 3, "db", "Searching in static system directories...")
+ for _,d in next, get_os_dirs() do
+ local found, new = scan_dir(d, fontnames, newfontnames)
+ n_scanned = n_scanned + found
+ n_new = n_new + new
+ end
+ return n_scanned, n_new
+end
+
+flush_cache = function ()
+ if not names.lookups then names.lookups = load_lookups() end
+ names.lookups = { }
+ collectgarbage"collect"
+ return true, names.lookups
+end
+
+--- dbobj -> bool -> dbobj
+update_names = function (fontnames, force)
+ if config.luaotfload.update_live == false then
+ report("info", 2, "db",
+ "skipping database update")
+ --- skip all db updates
+ return fontnames
+ end
+ local starttime = os.gettimeofday()
+ local n_scanned, n_new = 0, 0
+ --[[
+ The main function, scans everything
+ - “newfontnames” is the final table to return
+ - force is whether we rebuild it from scratch or not
+ ]]
+ report("both", 2, "db", "Updating the font names database"
+ .. (force and " forcefully" or ""))
+
+ if force then
+ fontnames = fontnames_init(false)
+ else
+ if not fontnames then
+ fontnames = load_names()
+ end
+ if fontnames.version ~= names.version then
+ report("both", 1, "db", "No font names database or old "
+ .. "one found; generating new one")
+ fontnames = fontnames_init(true)
+ end
+ end
+ local newfontnames = fontnames_init(true)
+ read_blacklist()
+
+ local scanned, new
+ scanned, new = scan_texmf_fonts(fontnames, newfontnames)
+ n_scanned = n_scanned + scanned
+ n_new = n_new + new
+
+ scanned, new = scan_os_fonts(fontnames, newfontnames)
+ n_scanned = n_scanned + scanned
+ n_new = n_new + new
+
+ --- stats:
+ --- before rewrite | after rewrite
+ --- partial: 804 ms | 701 ms
+ --- forced: 45384 ms | 44714 ms
+ report("info", 3, "db",
+ "Scanned %d font files; %d new entries.", n_scanned, n_new)
+ report("info", 3, "db",
+ "Rebuilt in %0.f ms", 1000*(os.gettimeofday()-starttime))
+ return newfontnames
+end
+
+--- unit -> string
+local ensure_names_path = function ( )
+ local path = names.path.dir
+ if not lfsisdir(path) then
+ dirmkdirs(path)
+ end
+ return path
+end
+
+--- The lookup cache is an experimental feature of version 2.2;
+--- instead of incorporating it into the database it gets its own
+--- file. As we update it after every single addition this saves us
+--- quite some time.
+
+--- unit -> string
+save_lookups = function ( )
+ ---- this is boilerplate and should be refactored into something
+ ---- usable by both the db and the cache writers
+ local lookups = names.lookups
+ local path = ensure_names_path()
+ if fileiswritable(path) then
+ local luaname, lucname = make_name(names.path.lookup_path)
+ if luaname then
+ tabletofile(luaname, lookups, true)
+ if lucname and type(caches.compile) == "function" then
+ os.remove(lucname)
+ caches.compile(lookups, luaname, lucname)
+ report("both", 3, "cache", "Lookup cache saved")
+ return names.path.lookup_path
+ end
+ end
+ end
+ report("info", 0, "cache", "Could not write lookup cache")
+ return nil
+end
+
+--- save_names() is usually called without the argument
+--- dbobj -> unit
+save_names = function (fontnames)
+ if not fontnames then fontnames = names.data end
+ local path = ensure_names_path()
+ if fileiswritable(path) then
+ local luaname, lucname = make_name(names.path.path)
+ if luaname then
+ --tabletofile(luaname, fontnames, true, { reduce=true })
+ tabletofile(luaname, fontnames, true)
+ if lucname and type(caches.compile) == "function" then
+ os.remove(lucname)
+ caches.compile(fontnames, luaname, lucname)
+ report("info", 0, "db", "Font names database saved")
+ return names.path.path
+ end
+ end
+ end
+ report("both", 0, "db", "Failed to save names database")
+ return nil
+end
+
+scan_external_dir = function (dir)
+ local old_names, new_names
+ if fonts_loaded then
+ old_names = names.data
+ else
+ old_names = load_names()
+ end
+ new_names = tablecopy(old_names)
+ local n_scanned, n_new = scan_dir(dir, old_names, new_names)
+ names.data = new_names
+ return n_scanned, n_new
+end
+
+--- export functionality to the namespace “fonts.names”
+names.flush_cache = flush_cache
+names.save_lookups = save_lookups
+names.load = load_names
+names.save = save_names
+names.scan = scan_external_dir
+names.update = update_names
+names.crude_file_lookup = crude_file_lookup
+names.crude_file_lookup_verbose = crude_file_lookup_verbose
+
+--- replace the resolver from luatex-fonts
+if config.luaotfload.resolver == "cached" then
+ report("both", 2, "cache", "caching of name: lookups active")
+ names.resolve = resolve_cached
+ names.resolvespec = resolve_cached
+else
+ names.resolve = resolve
+ names.resolvespec = resolve
+end
+names.find_closest = find_closest
+
+-- for testing purpose
+names.read_fonts_conf = read_fonts_conf
+
+--- dummy required by luatex-fonts (cf. luatex-fonts-syn.lua)
+
+fonts.names.getfilename = function (askedname,suffix) return "" end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua
new file mode 100644
index 00000000000..f91aee7b9fe
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-features.lua
@@ -0,0 +1,747 @@
+if not modules then modules = { } end modules ["features"] = {
+ version = 2.200,
+ comment = "companion to luaotfload.lua",
+ author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, insert = string.format, table.insert
+local type, next = type, next
+local lpegmatch = lpeg.match
+
+---[[ begin included font-ltx.lua ]]
+--- this appears to be based in part on luatex-fonts-def.lua
+
+local fonts = fonts
+
+--HH A bit of tuning for definitions.
+
+fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
+
+--[[HH--
+ tricky: we sort of bypass the parser and directly feed all into
+ the sub parser
+--HH]]--
+
+function fonts.definers.getspecification(str)
+ return "", str, "", ":", str
+end
+
+local old_feature_list = { }
+
+local report = logs.names_report
+
+local stringlower = string.lower
+local stringgsub = string.gsub
+local stringis_empty = string.is_empty
+
+--- TODO an option to dump the default features for a script would make
+--- a nice addition to luaotfload-tool
+
+local defaults = {
+ dflt = {
+ "ccmp", "locl", "rlig", "liga", "clig",
+ "kern", "mark", "mkmk", 'itlc',
+ },
+ arab = {
+ "ccmp", "locl", "isol", "fina", "fin2",
+ "fin3", "medi", "med2", "init", "rlig",
+ "calt", "liga", "cswh", "mset", "curs",
+ "kern", "mark", "mkmk",
+ },
+ deva = {
+ "ccmp", "locl", "init", "nukt", "akhn",
+ "rphf", "blwf", "half", "pstf", "vatu",
+ "pres", "blws", "abvs", "psts", "haln",
+ "calt", "blwm", "abvm", "dist", "kern",
+ "mark", "mkmk",
+ },
+ khmr = {
+ "ccmp", "locl", "pref", "blwf", "abvf",
+ "pstf", "pres", "blws", "abvs", "psts",
+ "clig", "calt", "blwm", "abvm", "dist",
+ "kern", "mark", "mkmk",
+ },
+ thai = {
+ "ccmp", "locl", "liga", "kern", "mark",
+ "mkmk",
+ },
+ hang = {
+ "ccmp", "ljmo", "vjmo", "tjmo",
+ },
+}
+
+local global_defaults = { mode = "node" }
+
+defaults.beng = defaults.deva
+defaults.guru = defaults.deva
+defaults.gujr = defaults.deva
+defaults.orya = defaults.deva
+defaults.taml = defaults.deva
+defaults.telu = defaults.deva
+defaults.knda = defaults.deva
+defaults.mlym = defaults.deva
+defaults.sinh = defaults.deva
+
+defaults.syrc = defaults.arab
+defaults.mong = defaults.arab
+defaults.nko = defaults.arab
+
+defaults.tibt = defaults.khmr
+
+defaults.lao = defaults.thai
+
+--[[doc--
+
+ As discussed, we will issue a warning because of incomplete support
+ when one of the scripts below is requested.
+
+ Reference: https://github.com/lualatex/luaotfload/issues/31
+
+--doc]]--
+
+local support_incomplete = table.tohash({
+ "deva", "beng", "guru", "gujr",
+ "orya", "taml", "telu", "knda",
+ "mlym", "sinh",
+}, true)
+
+--[[doc--
+
+ Which features are active by default depends on the script
+ requested.
+
+--doc]]--
+
+--- (string, string) dict -> (string, string) dict
+local set_default_features = function (speclist)
+ speclist = speclist or { }
+ local script = speclist.script or "dflt"
+ if support_incomplete[script] then
+ report("log", 0, "load",
+ "support for the requested script: “%s” may be incomplete",
+ script)
+ end
+
+ report("log", 0, "load",
+ "auto-selecting default features for script: %s",
+ script)
+
+ local requested = defaults[script]
+ if not requested then
+ report("log", 0, "load",
+ "no defaults for script “%s”, falling back to “dflt”",
+ script)
+ requested = defaults.dflt
+ end
+
+ for i=1, #requested do
+ local feat = requested[i]
+ if speclist[feat] ~= false then speclist[feat] = true end
+ end
+
+ for feat, state in next, global_defaults do
+ --- This is primarily intended for setting node
+ --- mode unless “base” is requested, as stated
+ --- in the manual.
+ if not speclist[feat] then speclist[feat] = state end
+ end
+ return speclist
+end
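+
+--- For illustration (a hypothetical request):
+---
+---     set_default_features { script = "arab", liga = false }
+---
+--- enables the “arab” defaults listed above (ccmp, locl, isol, ...)
+--- except liga, which the user explicitly disabled, and adds the global
+--- default mode = "node".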
+
+-----------------------------------------------------------------------
+--- request syntax parser 2.2
+-----------------------------------------------------------------------
+--- the luaotfload font request syntax (see manual)
+--- has a canonical form:
+---
+--- \font<csname>=<prefix>:<identifier>:<features>
+---
+--- where
+--- <csname> is the control sequence that activates the font
+--- <prefix> is either “file” or “name”, determining the lookup
+--- <identifier> is either a file name (no path) or a font
+--- name, depending on the lookup
+--- <features> is a list of switches or options, separated by
+--- semicolons or commas; a switch is of the form “+” foo
+--- or “-” foo, options are of the form lhs “=” rhs
+---
+--- however, to ensure backward compatibility we also have
+--- support for Xetex-style requests.
+---
+--- for the Xetex emulation see:
+--- · The XeTeX Reference Guide by Will Robertson, 2011
+--- · The XeTeX Companion by Michel Goossens, 2010
+--- · About XeTeX by Jonathan Kew, 2005
+---
+---
+--- caveat emptor.
+--- the request is parsed into one of **four** different
+--- lookup categories: the regular ones, file and name,
+--- as well as the Xetex compatibility ones, path and anon.
+--- (maybe a better choice of identifier would be “ambig”.)
+---
+--- according to my reconstruction, the correct chaining
+--- of the lookups for each category is as follows:
+---
+--- | File -> ( db/filename lookup )
+---
+--- | Name -> ( db/name lookup,
+--- db/filename lookup )
+---
+--- | Path -> ( db/filename lookup,
+--- fullpath lookup )
+---
+--- | Anon -> ( kpse.find_file(), // <- for tfm, ofm
+--- db/name lookup,
+--- db/filename lookup,
+--- fullpath lookup )
+---
+--- caching of successful lookups is essential. we now
+--- as of v2.2 have an experimental lookup cache that is
+--- stored in a separate file. it pertains only to name:
+--- lookups, and is described in more detail in
+--- luaotfload-database.lua.
+---
+-----------------------------------------------------------------------
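+
+--- two illustrative requests in the canonical syntax described above
+--- (the font names are hypothetical):
+---
+--- \font\body=file:Iwona-Regular.otf:mode=node;+liga;-kern
+--- \font\title=name:TeX Gyre Pagella/I:script=latn;+smcp
+---
+--- the Xetex-style bracketed form “[path/to/font.otf]” is handled by
+--- the path lookup below.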
+
+
+local toboolean = function (s)
+ if s == "true" then return true end
+ if s == "false" then return false end
+--if s == "yes" then return true end --- Context style
+--if s == "no" then return false end
+ return s
+end
+
+local lpegmatch = lpeg.match
+local P, S, R = lpeg.P, lpeg.S, lpeg.R
+local C, Cc, Cf, Cg, Cs, Ct
+ = lpeg.C, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Cs, lpeg.Ct
+
+--- terminals and low-level classes -----------------------------------
+--- note we could use the predefined ones from lpeg.patterns
+local dot = P"."
+local colon = P":"
+local featuresep = S",;"
+local slash = P"/"
+local equals = P"="
+local lbrk, rbrk = P"[", P"]"
+
+local spacing = S" \t\v"
+local ws = spacing^0
+
+local digit = R"09"
+local alpha = R("az", "AZ")
+local anum = alpha + digit
+local decimal = digit^1 * (dot * digit^0)^-1
+
+--- modifiers ---------------------------------------------------------
+--[[doc--
+ The slash notation: called “modifiers” (Kew) or “font options”
+    (Robertson, Goossens);
+    we only support the shorthands for italic / bold / bold italic
+    shapes, as well as setting the optical size; the rest is ignored.
+--doc]]--
+local style_modifier = (P"BI" + P"IB" + P"bi" + P"ib" + S"biBI")
+ / stringlower
+local size_modifier = S"Ss" * P"=" --- optical size
+ * Cc"optsize" * C(decimal)
+local other_modifier = P"AAT" + P"aat" --- apple stuff; unsupported
+ + P"ICU" + P"icu" --- not applicable
+ + P"GR" + P"gr" --- sil stuff; unsupported
+local garbage_modifier = ((1 - colon - slash)^0 * Cc(false))
+local modifier = slash * (other_modifier --> ignore
+ + Cs(style_modifier) --> collect
+ + Ct(size_modifier) --> collect
+ + garbage_modifier) --> warn
+local modifier_list = Cg(Ct(modifier^0), "modifiers")
+
+--- lookups -----------------------------------------------------------
+local fontname = C((1-S"/:(")^1) --- like luatex-fonts
+local prefixed = P"name:" * ws * Cg(fontname, "name")
+ + P"file:" * ws * Cg(fontname, "file")
+local unprefixed = Cg(fontname, "anon")
+local path_lookup = lbrk * Cg(C((1-rbrk)^1), "path") * rbrk
+
+--- features ----------------------------------------------------------
+local field = (anum + S"+-.")^1 --- sic!
+--- assignments are “lhs=rhs”
+--- switches are “+key” | “-key”
+local assignment = C(field) * ws * equals * ws * (field / toboolean)
+local switch = P"+" * ws * C(field) * Cc(true)
+ + P"-" * ws * C(field) * Cc(false)
+ + C(field) * Cc(true) -- catch crap
+local feature_expr = ws * Cg(assignment + switch) * ws
+local feature_list = Cf(Ct""
+ * feature_expr
+ * (featuresep * feature_expr)^0
+ , rawset)
+ * featuresep^-1
+
+--- other -------------------------------------------------------------
+--- This rule is present in the original parser. It sets the “sub”
+--- field of the specification which allows addressing a specific
+--- font inside a TTC container. Neither in Luatex-Fonts nor in
+--- Luaotfload is this documented, so we might as well silently drop
+--- it. However, as backward compatibility is one of our prime goals we
+--- just insert it here and leave it undocumented until someone cares
+--- to ask. (Note: afair subfonts are numbered, but this rule matches a
+--- string; I won’t mess with it though until someone reports a
+--- problem.)
+--- local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
+--- Who’s Kim?
+--- Note to self: subfonts apparently start at index 0. Tested with
+--- Cambria.ttc that includes “Cambria Math” at 0 and “Cambria” at 1.
+--- Other values cause luatex to segfault.
+local subfont = P"(" * Cg((1 - S"()")^1, "sub") * P")"
+--- top-level rules ---------------------------------------------------
+--- \font\foo=<specification>:<features>
+local features = Cg(feature_list, "features")
+local specification = (prefixed + unprefixed)
+ * subfont^-1
+ * modifier_list^-1
+local font_request = Ct(path_lookup * (colon^-1 * features)^-1
+ + specification * (colon * features)^-1)
+
+-- lpeg.print(font_request)
+--- new parser: 657 rules
+--- old parser: 230 rules
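+
+--- a rough sketch of the result shape (hypothetical input): the grammar
+--- above maps
+---
+---     lpegmatch(font_request, "file:Iwona-Regular.otf:+liga;mode=node")
+---
+--- to something like
+---
+---     { file      = "Iwona-Regular.otf",
+---       modifiers = { },
+---       features  = { liga = true, mode = "node" } }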
+
+local import_values = {
+ --- That’s what the 1.x parser did, not quite as graciously,
+ --- with an array of branch expressions.
+ -- "style", "optsize",--> from slashed notation; handled otherwise
+ "lookup", "sub", "mode",
+}
+
+local lookup_types = { "anon", "file", "name", "path" }
+
+local select_lookup = function (request)
+ for i=1, #lookup_types do
+ local lookup = lookup_types[i]
+ local value = request[lookup]
+ if value then
+ return lookup, value
+ end
+ end
+end
+
+local supported = {
+ b = "bold",
+ i = "italic",
+ bi = "bolditalic",
+ aat = false,
+ icu = false,
+ gr = false,
+}
+
+--- (string | (string * string) | bool) list -> (string * number)
+local handle_slashed = function (modifiers)
+ local style, optsize
+ for i=1, #modifiers do
+ local mod = modifiers[i]
+ if type(mod) == "table" and mod[1] == "optsize" then --> optical size
+ optsize = tonumber(mod[2])
+ elseif mod == false then
+ --- ignore
+ report("log", 0,
+                "load", "unsupported font option: %s", mod)
+ elseif supported[mod] then
+ style = supported[mod]
+ elseif not stringis_empty(mod) then
+ style = stringgsub(mod, "[^%a%d]", "")
+ end
+ end
+ return style, optsize
+end
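+
+--- e.g. (hypothetical values) the modifiers “/BI/S=11” arrive here as
+--- { "bi", { "optsize", "11" } } and yield the pair ("bolditalic", 11).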
+
+--- spec -> spec
+local handle_request = function (specification)
+ local request = lpegmatch(font_request,
+ specification.specification)
+ if not request then
+ --- happens when called with an absolute path
+ --- in an anonymous lookup;
+ --- we try to behave as friendly as possible
+ --- just go with it ...
+ report("log", 0, "load", "invalid request “%s” of type anon",
+ specification.specification)
+ report("log", 0, "load", "use square bracket syntax or consult the documentation.")
+ specification.name = specification.specification
+ specification.lookup = "file"
+ return specification
+ end
+ local lookup, name = select_lookup(request)
+ request.features = set_default_features(request.features)
+
+ if name then
+ specification.name = name
+ specification.lookup = lookup or specification.lookup
+ end
+
+ if request.modifiers then
+ local style, optsize = handle_slashed(request.modifiers)
+ specification.style, specification.optsize = style, optsize
+ end
+
+ for n=1, #import_values do
+ local feat = import_values[n]
+ local newvalue = request.features[feat]
+ if newvalue then
+ specification[feat] = request.features[feat]
+ request.features[feat] = nil
+ end
+ end
+
+ --- The next line sets the “rand” feature to “random”; I haven’t
+ --- investigated it any further (luatex-fonts-ext), so it will
+ --- just stay here.
+ specification.features.normal
+ = fonts.handlers.otf.features.normalize(request.features)
+ return specification
+end
+
+local compare_requests = function (spec)
+ local old = old_behavior(spec)
+ local new = handle_request(spec)
+ return new
+end
+
+fonts.definers.registersplit(":", handle_request, "cryptic")
+fonts.definers.registersplit("", handle_request, "more cryptic") -- catches \font\text=[names]
+
+---[[ end included font-ltx.lua ]]
+
+--[[doc--
+This uses the code from luatex-fonts-merged (<- font-otc.lua) instead
+of the removed luaotfload-font-otc.lua.
+
+TODO find out how far we get setting features without these lines,
+relying on luatex-fonts only (it *does* handle features somehow, after
+all).
+--doc]]--
+
+-- we assume that the other otf stuff is loaded already
+
+---[[ begin snippet from font-otc.lua ]]
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.reporter("fonts","otf loading")
+
+local otf = fonts.handlers.otf
+local registerotffeature = otf.features.register
+local setmetatableindex = table.setmetatableindex
+
+--[[HH--
+
+    In the userdata interface we can no longer tweak the loaded font as
+    conveniently as before. For instance, instead of pushing extra data
+    into the table using the original structure, we now have to operate on
+ the mkiv representation. And as the fontloader interface is modelled
+ after fontforge we cannot change that one too much either.
+
+--HH]]--
+
+local types = {
+ substitution = "gsub_single",
+ ligature = "gsub_ligature",
+ alternate = "gsub_alternate",
+}
+
+setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
+
+local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
+local noflags = { }
+
+local function addfeature(data,feature,specifications)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local lookups = resources.lookups
+ local gsubfeatures = resources.features.gsub
+ if gsubfeatures and gsubfeatures[feature] then
+ -- already present
+ else
+ local sequences = resources.sequences
+ local fontfeatures = resources.features
+ local unicodes = resources.unicodes
+ local lookuptypes = resources.lookuptypes
+ local splitter = lpeg.splitter(" ",unicodes)
+ local done = 0
+ local skip = 0
+ if not specifications[1] then
+ -- so we accept a one entry specification
+ specifications = { specifications }
+ end
+ -- subtables are tables themselves but we also accept flattened singular subtables
+ for s=1,#specifications do
+ local specification = specifications[s]
+ local valid = specification.valid
+ if not valid or valid(data,specification,feature) then
+ local initialize = specification.initialize
+ if initialize then
+ -- when false is returned we initialize only once
+ specification.initialize = initialize(specification) and initialize or nil
+ end
+ local askedfeatures = specification.features or everywhere
+ local subtables = specification.subtables or { specification.data } or { }
+ local featuretype = types[specification.type or "substitution"]
+ local featureflags = specification.flags or noflags
+ local added = false
+ local featurename = format("ctx_%s_%s",feature,s)
+ local st = { }
+ for t=1,#subtables do
+ local list = subtables[t]
+ local full = format("%s_%s",featurename,t)
+ st[t] = full
+ if featuretype == "gsub_ligature" then
+ lookuptypes[full] = "ligature"
+ for code, ligature in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ if type(ligature) == "string" then
+ ligature = { lpegmatch(splitter,ligature) }
+ end
+ local present = true
+ for i=1,#ligature do
+ if not descriptions[ligature[i]] then
+ present = false
+ break
+ end
+ end
+ if present then
+ if slookups then
+ slookups[full] = ligature
+ else
+ description.slookups = { [full] = ligature }
+ end
+ done, added = done + 1, true
+ else
+ skip = skip + 1
+ end
+ end
+ end
+ elseif featuretype == "gsub_single" then
+ lookuptypes[full] = "substitution"
+ for code, replacement in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ replacement = tonumber(replacement) or unicodes[replacement]
+ if descriptions[replacement] then
+ if slookups then
+ slookups[full] = replacement
+ else
+ description.slookups = { [full] = replacement }
+ end
+ done, added = done + 1, true
+ end
+ end
+ end
+ end
+ end
+ if added then
+ -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
+ for k, v in next, askedfeatures do
+ if v[1] then
+ askedfeatures[k] = table.tohash(v)
+ end
+ end
+ sequences[#sequences+1] = {
+ chain = 0,
+ features = { [feature] = askedfeatures },
+ flags = featureflags,
+ name = featurename,
+ subtables = st,
+ type = featuretype,
+ }
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ fontfeatures.gsub = gsubfeatures
+ end
+ local k = gsubfeatures[feature]
+ if not k then
+ k = { }
+ gsubfeatures[feature] = k
+ end
+ for script, languages in next, askedfeatures do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
+ end
+ for language, value in next, languages do
+ kk[language] = value
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
+ end
+ end
+end
+
+otf.enhancers.addfeature = addfeature
+
+local extrafeatures = { }
+
+function otf.addfeature(name,specification)
+ extrafeatures[name] = specification
+end
+
+local function enhance(data,filename,raw)
+ for feature, specification in next, extrafeatures do
+ addfeature(data,feature,specification)
+ end
+end
+
+otf.enhancers.register("check extra features",enhance)
+
+---[[ end snippet from font-otc.lua ]]
+
+local tlig = {
+ {
+ type = "substitution",
+ features = everywhere,
+ data = {
+ [0x0022] = 0x201D, -- quotedblright
+ [0x0027] = 0x2019, -- quoteleft
+ [0x0060] = 0x2018, -- quoteright
+ },
+ flags = { },
+ },
+ {
+ type = "ligature",
+ features = everywhere,
+ data = {
+ [0x2013] = {0x002D, 0x002D}, -- endash
+ [0x2014] = {0x002D, 0x002D, 0x002D}, -- emdash
+ [0x201C] = {0x2018, 0x2018}, -- quotedblleft
+ [0x201D] = {0x2019, 0x2019}, -- quotedblright
+ [0x201E] = {0x002C, 0x002C}, -- quotedblbase
+ [0x00A1] = {0x0021, 0x2018}, -- exclamdown
+ [0x00BF] = {0x003F, 0x2018}, -- questiondown
+ },
+ flags = { },
+ },
+ {
+ type = "ligature",
+ features = everywhere,
+ data = {
+ [0x201C] = {0x0060, 0x0060}, -- quotedblleft
+ [0x201D] = {0x0027, 0x0027}, -- quotedblright
+ [0x00A1] = {0x0021, 0x0060}, -- exclamdown
+ [0x00BF] = {0x003F, 0x0060}, -- questiondown
+ },
+ flags = { },
+ },
+}
+
+otf.addfeature("tlig", tlig)
+otf.addfeature("trep", { }) -- empty, all in tlig now
+
+local anum_arabic = { --- these are the same as in font-otc
+ [0x0030] = 0x0660,
+ [0x0031] = 0x0661,
+ [0x0032] = 0x0662,
+ [0x0033] = 0x0663,
+ [0x0034] = 0x0664,
+ [0x0035] = 0x0665,
+ [0x0036] = 0x0666,
+ [0x0037] = 0x0667,
+ [0x0038] = 0x0668,
+ [0x0039] = 0x0669,
+}
+
+local anum_persian = {--- these are the same as in font-otc
+ [0x0030] = 0x06F0,
+ [0x0031] = 0x06F1,
+ [0x0032] = 0x06F2,
+ [0x0033] = 0x06F3,
+ [0x0034] = 0x06F4,
+ [0x0035] = 0x06F5,
+ [0x0036] = 0x06F6,
+ [0x0037] = 0x06F7,
+ [0x0038] = 0x06F8,
+ [0x0039] = 0x06F9,
+}
+
+local function valid(data)
+ local features = data.resources.features
+ if features then
+ for k, v in next, features do
+ for k, v in next, v do
+ if v.arab then
+ return true
+ end
+ end
+ end
+ end
+end
+
+local anum_specification = {
+ {
+ type = "substitution",
+ features = { arab = { far = true, urd = true, snd = true } },
+ data = anum_persian,
+ flags = { },
+ valid = valid,
+ },
+ {
+ type = "substitution",
+ features = { arab = { ["*"] = true } },
+ data = anum_arabic,
+ flags = { },
+ valid = valid,
+ },
+}
+
+--- below the specifications as given in the removed font-otc.lua
+--- the rest was identical to what this file had from the beginning
+--- both make the “anum.tex” test pass anyways
+
+otf.addfeature("anum",anum_specification)
+
+registerotffeature {
+ name = 'anum',
+ description = 'arabic digits',
+}
+
+if characters.combined then
+
+ local tcom = { }
+
+ local function initialize()
+ characters.initialize()
+ for first, seconds in next, characters.combined do
+ for second, combination in next, seconds do
+ tcom[combination] = { first, second }
+ end
+ end
+ -- return false
+ end
+
+ local tcom_specification = {
+ type = "ligature",
+ features = everywhere,
+ data = tcom,
+ flags = noflags,
+ initialize = initialize,
+ }
+
+ otf.addfeature("tcom",tcom_specification)
+
+ registerotffeature {
+ name = 'tcom',
+ description = 'tex combinations',
+ }
+
+end
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua
new file mode 100644
index 00000000000..9db94f65e48
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-cbk.lua
@@ -0,0 +1,68 @@
+if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local nodes = nodes
+
+-- Fonts: (might move to node-gef.lua)
+
+local traverse_id = node.traverse_id
+local glyph_code = nodes.nodecodes.glyph
+
+function nodes.handlers.characters(head)
+ local fontdata = fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts, done, prevfont = { }, false, nil
+ for n in traverse_id(glyph_code,head) do
+ local font = n.font
+ if font ~= prevfont then
+ prevfont = font
+ local used = usedfonts[font]
+ if not used then
+ local tfmdata = fontdata[font] --
+ if tfmdata then
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ if shared then
+ local processors = shared.processes
+ if processors and #processors > 0 then
+ usedfonts[font] = processors
+ done = true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font, processors in next, usedfonts do
+ for i=1,#processors do
+ local h, d = processors[i](head,font,0)
+ head, done = h or head, done or d
+ end
+ end
+ end
+ return head, true
+ else
+ return head, false
+ end
+end
+
+function nodes.simple_font_handler(head)
+-- lang.hyphenate(head)
+ head = nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head = node.ligaturing(head)
+ head = node.kerning(head)
+ return head
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua
new file mode 100644
index 00000000000..0c2f0dbd583
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-def.lua
@@ -0,0 +1,97 @@
+if not modules then modules = { } end modules ['luatex-font-def'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+
+-- A bit of tuning for definitions.
+
+fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
+
+-- tricky: we sort of bypass the parser and directly feed all into
+-- the sub parser
+
+function fonts.definers.getspecification(str)
+ return "", str, "", ":", str
+end
+
+-- the generic name parser (different from context!)
+
+local list = { }
+
+local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!)
+local function isfile () list.lookup = 'file' end
+local function isname () list.lookup = 'name' end
+local function thename(s) list.name = s end
+local function issub (v) list.sub = v end
+local function iscrap (s) list.crap = string.lower(s) end
+local function iskey (k,v) list[k] = v end
+local function istrue (s) list[s] = true end
+local function isfalse(s) list[s] = false end
+
+local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
+
+local spaces = P(" ")^0
+local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
+local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces
+local filename_1 = P("file:")/isfile * (namespec/thename)
+local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]")
+local fontname_1 = P("name:")/isname * (namespec/thename)
+local fontname_2 = P(true)/issome * (namespec/thename)
+local sometext = (R("az","AZ","09") + S("+-."))^1
+local truevalue = P("+") * spaces * (sometext/istrue)
+local falsevalue = P("-") * spaces * (sometext/isfalse)
+local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
+local somevalue = sometext/istrue
+local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
+local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
+local options = P(":") * spaces * (P(";")^0 * option)^0
+
+local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0
+
+local function colonized(specification) -- xetex mode
+ list = { }
+ lpeg.match(pattern,specification.specification)
+ list.crap = nil -- style not supported, maybe some day
+ if list.name then
+ specification.name = list.name
+ list.name = nil
+ end
+ if list.lookup then
+ specification.lookup = list.lookup
+ list.lookup = nil
+ end
+ if list.sub then
+ specification.sub = list.sub
+ list.sub = nil
+ end
+ specification.features.normal = fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
+
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors = tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash = postprocessors[i](tfmdata) -- after scaling etc
+ if type(extrahash) == "string" and extrahash ~= "" then
+ -- e.g. a reencoding needs this
+                extrahash = string.gsub(string.lower(extrahash),"[^a-z]","-")
+                tfmdata.properties.fullname = string.format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua
new file mode 100644
index 00000000000..e20c3a03b54
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-enc.lua
@@ -0,0 +1,28 @@
+if not modules then modules = { } end modules ['luatex-font-enc'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+fonts.encodings = { }
+fonts.encodings.agl = { }
+
+setmetatable(fonts.encodings.agl, { __index = function(t,k)
+ if k == "unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes = dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl = { unicodes = unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
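+
+-- Sketch of the intended use (the glyph name is just an example): the
+-- first access to fonts.encodings.agl.unicodes triggers the dofile of
+-- font-age.lua above; afterwards lookups are plain indexing, e.g.
+--
+--     fonts.encodings.agl.unicodes["adieresis"] --> 0x00E4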
+
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua
new file mode 100644
index 00000000000..b60d0451228
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-ext.lua
@@ -0,0 +1,272 @@
+if not modules then modules = { } end modules ['luatex-fonts-ext'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local otffeatures = fonts.constructors.newfeatures("otf")
+
+-- A few generic extensions.
+
+local function initializeitlc(tfmdata,value)
+ if value then
+        -- the magic 40 and its formula come from Dohyun Kim but we might need another guess
+ local parameters = tfmdata.parameters
+ local italicangle = parameters.italicangle
+ if italicangle and italicangle ~= 0 then
+ local properties = tfmdata.properties
+ local factor = tonumber(value) or 1
+ properties.hasitalics = true
+ properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
+ end
+ end
+end
+
+otffeatures.register {
+ name = "itlc",
+ description = "italic correction",
+ initializers = {
+ base = initializeitlc,
+ node = initializeitlc,
+ }
+}
+
+-- slant and extend
+
+local function initializeslant(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 1 then
+ value = 1
+ elseif value < -1 then
+ value = -1
+ end
+ tfmdata.parameters.slantfactor = value
+end
+
+otffeatures.register {
+ name = "slant",
+ description = "slant glyphs",
+ initializers = {
+ base = initializeslant,
+ node = initializeslant,
+ }
+}
+
+local function initializeextend(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 10 then
+ value = 10
+ elseif value < -10 then
+ value = -10
+ end
+ tfmdata.parameters.extendfactor = value
+end
+
+otffeatures.register {
+ name = "extend",
+ description = "scale glyphs horizontally",
+ initializers = {
+ base = initializeextend,
+ node = initializeextend,
+ }
+}
+
+-- expansion and protrusion
+
+fonts.protrusions = fonts.protrusions or { }
+fonts.protrusions.setups = fonts.protrusions.setups or { }
+
+local setups = fonts.protrusions.setups
+
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup = setups[value]
+ if setup then
+ local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
+ local emwidth = tfmdata.parameters.quad
+ tfmdata.parameters.protrusion = {
+ auto = true,
+ }
+ for i, chr in next, tfmdata.characters do
+ local v, pl, pr = setup[i], nil, nil
+ if v then
+ pl, pr = v[1], v[2]
+ end
+ if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
+ if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
+ end
+ end
+ end
+end
+
+otffeatures.register {
+ name = "protrusion",
+    description = "shift characters into the left and/or right margin",
+ initializers = {
+ base = initializeprotrusion,
+ node = initializeprotrusion,
+ }
+}
+
+fonts.expansions = fonts.expansions or { }
+fonts.expansions.setups = fonts.expansions.setups or { }
+
+local setups = fonts.expansions.setups
+
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup = setups[value]
+ if setup then
+ local factor = setup.factor or 1
+ tfmdata.parameters.expansion = {
+ stretch = 10 * (setup.stretch or 0),
+ shrink = 10 * (setup.shrink or 0),
+ step = 10 * (setup.step or 0),
+ auto = true,
+ }
+ for i, chr in next, tfmdata.characters do
+ local v = setup[i]
+ if v and v ~= 0 then
+ chr.expansion_factor = v*factor
+ else -- can be option
+ chr.expansion_factor = factor
+ end
+ end
+ end
+ end
+end
+
+otffeatures.register {
+ name = "expansion",
+ description = "apply hz optimization",
+ initializers = {
+ base = initializeexpansion,
+ node = initializeexpansion,
+ }
+}
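+
+-- Both setups registered above are selected per font through the feature
+-- value; a hypothetical request could look roughly like
+--
+--     \font\body=file:somefont.otf:protrusion=default;expansion=default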
+
+-- left over
+
+function fonts.loggers.onetimemessage() end
+
+-- example vectors
+
+local byte = string.byte
+
+fonts.expansions.setups['default'] = {
+
+ stretch = 2, shrink = 2, step = .5, factor = 1,
+
+ [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
+ [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
+ [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
+ [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
+ [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
+ [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
+ [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
+ [byte('w')] = 0.7, [byte('z')] = 0.7,
+ [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
+}
+
+fonts.protrusions.setups['default'] = {
+
+ factor = 1, left = 1, right = 1,
+
+ [0x002C] = { 0, 1 }, -- comma
+ [0x002E] = { 0, 1 }, -- period
+ [0x003A] = { 0, 1 }, -- colon
+ [0x003B] = { 0, 1 }, -- semicolon
+ [0x002D] = { 0, 1 }, -- hyphen
+ [0x2013] = { 0, 0.50 }, -- endash
+ [0x2014] = { 0, 0.33 }, -- emdash
+ [0x3001] = { 0, 1 }, -- ideographic comma 、
+ [0x3002] = { 0, 1 }, -- ideographic full stop 。
+ [0x060C] = { 0, 1 }, -- arabic comma ،
+ [0x061B] = { 0, 1 }, -- arabic semicolon ؛
+ [0x06D4] = { 0, 1 }, -- arabic full stop ۔
+
+}
+
+-- normalizer
+
+fonts.handlers.otf.features.normalize = function(t)
+ if t.rand then
+ t.rand = "random"
+ end
+ return t
+end
+
+-- bonus
+
+function fonts.helpers.nametoslot(name)
+ local t = type(name)
+ if t == "string" then
+ local tfmdata = fonts.hashes.identifiers[currentfont()]
+ local shared = tfmdata and tfmdata.shared
+ local fntdata = shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t == "number" then
+        return name
+ end
+end
+
+-- \font\test=file:somefont:reencode=mymessup
+--
+-- fonts.encodings.reencodings.mymessup = {
+-- [109] = 110, -- m
+-- [110] = 109, -- n
+-- }
+
+fonts.encodings = fonts.encodings or { }
+local reencodings = { }
+fonts.encodings.reencodings = reencodings
+
+local function specialreencode(tfmdata,value)
+ -- we forget about kerns as we assume symbols and we
+    -- could issue a message if there are kerns but it's
+    -- a hack anyway so we don't care too much here
+ local encoding = value and reencodings[value]
+ if encoding then
+ local temp = { }
+ local char = tfmdata.characters
+ for k, v in next, encoding do
+ temp[k] = char[v]
+ end
+ for k, v in next, temp do
+ char[k] = temp[k]
+ end
+        -- if we reuse the font otherwise luatex gets confused, so we
+        -- return an additional hash component for the fullname
+ return string.format("reencoded:%s",value)
+ end
+end
+
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors = tfmdata.postprocessors or { }
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+
+otffeatures.register {
+ name = "reencode",
+ description = "reencode characters",
+ manipulators = {
+ base = reencode,
+ node = reencode,
+ }
+}
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua
new file mode 100644
index 00000000000..ec3fe38be3e
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-lua.lua
@@ -0,0 +1,33 @@
+if not modules then modules = { } end modules ['luatex-fonts-lua'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+fonts.formats.lua = "lua"
+
+function fonts.readers.lua(specification)
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ local fullname = resolvers.findfile(fullname) or ""
+ if fullname ~= "" then
+ local loader = loadfile(fullname)
+ loader = loader and loader()
+ return loader and loader(specification)
+ end
+end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua
new file mode 100644
index 00000000000..b9bb1bd0f28
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-fonts-tfm.lua
@@ -0,0 +1,38 @@
+if not modules then modules = { } end modules ['luatex-fonts-tfm'] = {
+ version = 1.001,
+ comment = "companion to luatex-*.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+
+local fonts = fonts
+local tfm = { }
+fonts.handlers.tfm = tfm
+fonts.formats.tfm = "type1" -- we need to have at least a value here
+
+function fonts.readers.tfm(specification)
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ local foundname = resolvers.findbinfile(fullname, 'tfm') or ""
+ if foundname == "" then
+ foundname = resolvers.findbinfile(fullname, 'ofm') or ""
+ end
+ if foundname ~= "" then
+ specification.filename = foundname
+ specification.format = "ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
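+
+-- Illustrative direct call (assuming cmr10.tfm can be found by kpathsea);
+-- normally the reader is invoked by the loader machinery, not by hand:
+--
+--~ local f = fonts.readers.tfm { name = "cmr10", size = 10 * 65536 } -- 10pt in sp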
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-lib-dir.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-lib-dir.lua
new file mode 100644
index 00000000000..00cda389913
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-lib-dir.lua
@@ -0,0 +1,449 @@
+if not modules then modules = { } end modules ['l-dir'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- dir.expandname will be merged with cleanpath and collapsepath
+
+local type, select = type, select
+local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local concat, insert, remove = table.concat, table.insert, table.remove
+local lpegmatch = lpeg.match
+
+local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
+
+dir = dir or { }
+local dir = dir
+local lfs = lfs
+
+local attributes = lfs.attributes
+local walkdir = lfs.dir
+local isdir = lfs.isdir
+local isfile = lfs.isfile
+local currentdir = lfs.currentdir
+local chdir = lfs.chdir
+
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
+-- handy
+
+function dir.current()
+ return (gsub(currentdir(),"\\","/"))
+end
+
+-- optimizing for no find (*) does not save time
+
+--~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs
+--~ local ok, scanner
+--~ if path == "/" then
+--~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+--~ else
+--~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+--~ end
+--~ if ok and type(scanner) == "function" then
+--~ if not find(path,"/$") then path = path .. '/' end
+--~ for name in scanner do
+--~ local full = path .. name
+--~ local mode = attributes(full,'mode')
+--~ if mode == 'file' then
+--~ if find(full,patt) then
+--~ action(full)
+--~ end
+--~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+--~ globpattern(full,patt,recurse,action)
+--~ end
+--~ end
+--~ end
+--~ end
+
+local lfsisdir = isdir
+
+local function isdir(path)
+ path = gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
+end
+
+lfs.isdir = isdir
+
+local function globpattern(path,patt,recurse,action)
+ if path == "/" then
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
+ end
+ if isdir(path) then -- lfs.isdir does not like trailing /
+ for name in walkdir(path) do -- lfs.dir accepts trailing /
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+
+dir.globpattern = globpattern
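+
+-- illustrative call (made-up path): print every .log file below /tmp
+--
+--~ dir.globpattern("/tmp/","%.log$",true,print)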
+
+local function collectpattern(path,patt,recurse,result)
+  local ok, scanner, first
+ result = result or { }
+ if path == "/" then
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ else
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ end
+ if ok and type(scanner) == "function" then
+ if not find(path,"/$") then path = path .. '/' end
+ for name in scanner, first do
+ local full = path .. name
+ local attr = attributes(full)
+ local mode = attr.mode
+ if mode == 'file' then
+ if find(full,patt) then
+ result[name] = attr
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ attr.list = collectpattern(full,patt,recurse)
+ result[name] = attr
+ end
+ end
+ end
+ return result
+end
+
+dir.collectpattern = collectpattern
+
+local pattern = Ct {
+ [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
+ [2] = C(((1-S("*?/"))^0 * P("/"))^0),
+ [3] = C(P(1)^0)
+}
+
+local filter = Cs ( (
+ P("**") / ".*" +
+ P("*") / "[^/]*" +
+ P("?") / "[^/]" +
+ P(".") / "%%." +
+ P("+") / "%%+" +
+ P("-") / "%%-" +
+ P(1)
+)^0 )
+
+local function glob(str,t)
+ if type(t) == "function" then
+ if type(str) == "table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
+ else
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
+ if split then
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str) == "table" then
+ local t = t or { }
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1] = str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
+ if split then
+ local t = t or { }
+ local action = action or function(name) t[#t+1] = name end
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return { }
+ end
+ end
+ end
+end
+
+dir.glob = glob
+
+--~ list = dir.glob("**/*.tif")
+--~ list = dir.glob("/**/*.tif")
+--~ list = dir.glob("./**/*.tif")
+--~ list = dir.glob("oeps/**/*.tif")
+--~ list = dir.glob("/oeps/**/*.tif")
+
+local function globfiles(path,recurse,func,files) -- func == pattern or function
+ if type(func) == "string" then
+ local s = func
+ func = function(name) return find(name,s) end
+ end
+ files = files or { }
+ local noffiles = #files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ --- skip
+ else
+      local mode = attributes(path .. "/" .. name,'mode')
+ if mode == "directory" then
+ if recurse then
+ globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif mode == "file" then
+ if not func or func(name) then
+ noffiles = noffiles + 1
+ files[noffiles] = path .. "/" .. name
+ end
+ end
+ end
+ end
+ return files
+end
+
+dir.globfiles = globfiles
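+
+-- illustrative call (made-up path); note that the string pattern is matched
+-- against the bare file name, not the full path, and dot files are skipped
+--
+--~ local logs = dir.globfiles("/tmp",true,"%.log$")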
+
+-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
+-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
+-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
+-- t = dir.glob("f:/minimal/tex/**/*")
+-- print(dir.ls("f:/minimal/tex/**/*"))
+-- print(dir.ls("*.tex"))
+
+function dir.ls(pattern)
+ return concat(glob(pattern),"\n")
+end
+
+--~ mkdirs("temp")
+--~ mkdirs("a/b/c")
+--~ mkdirs(".","/a/b/c")
+--~ mkdirs("a","b","c")
+
+local make_indeed = true -- false
+
+local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
+
+if onwindows then
+
+ function dir.mkdirs(...)
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
+ end
+ end
+ local first, middle, last
+ local drive = false
+ first, middle, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ -- empty network path == local path
+ else
+ first, last = match(str,"^(//)/*(.-)$")
+ if first then
+ middle, last = match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth = "//" .. middle
+ else
+ pth = "//" .. last
+ last = ""
+ end
+ else
+ first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
+ if first then
+ pth, drive = first .. middle, true
+ else
+ middle, last = match(str,"^(/*)(.-)$")
+ if not middle then
+ last = str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth == "" then
+ pth = s
+ elseif drive then
+ pth, drive = pth .. s, false
+ else
+ pth = pth .. "/" .. s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+ --~ print(dir.mkdirs("","","a","c"))
+ --~ print(dir.mkdirs("a"))
+ --~ print(dir.mkdirs("a:"))
+ --~ print(dir.mkdirs("a:/b/c"))
+ --~ print(dir.mkdirs("a:b/c"))
+ --~ print(dir.mkdirs("a:/bbb/c"))
+ --~ print(dir.mkdirs("/a/b/c"))
+ --~ print(dir.mkdirs("/aaa/b/c"))
+ --~ print(dir.mkdirs("//a/b/c"))
+ --~ print(dir.mkdirs("///a/b/c"))
+ --~ print(dir.mkdirs("a/bbb//ccc/"))
+
+else
+
+ function dir.mkdirs(...)
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
+ end
+ end
+ end
+ str = gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth = "/"
+ for s in gmatch(str,"[^/]+") do
+ local first = (pth == "/")
+ if first then
+ pth = pth .. s
+ else
+ pth = pth .. "/" .. s
+ end
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ else
+ pth = "."
+ for s in gmatch(str,"[^/]+") do
+ pth = pth .. "/" .. s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+ --~ print(dir.mkdirs("","","a","c"))
+ --~ print(dir.mkdirs("a"))
+ --~ print(dir.mkdirs("/a/b/c"))
+ --~ print(dir.mkdirs("/aaa/b/c"))
+ --~ print(dir.mkdirs("//a/b/c"))
+ --~ print(dir.mkdirs("///a/b/c"))
+ --~ print(dir.mkdirs("a/bbb//ccc/"))
+
+end
+
+dir.makedirs = dir.mkdirs
+
+-- we can only define it here as it uses dir.current
+
+if onwindows then
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ first = dir.current() .. "/" -- dir.current sanitizes
+ end
+ if not first then
+ first, last = match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first, last = match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d = currentdir()
+ if chdir(first) then
+ first = dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first, last = dir.current(), str
+ end
+ last = gsub(last,"//","/")
+ last = gsub(last,"/%./","/")
+ last = gsub(last,"^/*","")
+ first = gsub(first,"/*$","")
+ if last == "" or last == "." then
+ return first
+ else
+ return first .. "/" .. last
+ end
+ end
+
+else
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ if not find(str,"^/") then
+ str = currentdir() .. "/" .. str
+ end
+ str = gsub(str,"//","/")
+ str = gsub(str,"/%./","/")
+ str = gsub(str,"(.)/%.$","%1")
+ return str
+ end
+
+end
+
+file.expandname = dir.expandname -- for convenience
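+
+-- illustrative (unix-like system): relative paths are anchored at the current
+-- directory and "//" as well as "/./" segments are collapsed
+--
+--~ dir.expandname("./foo//bar/./baz") -- e.g. /current/dir/foo/bar/baz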
+
+local stack = { }
+
+function dir.push(newdir)
+ insert(stack,currentdir())
+ if newdir and newdir ~= "" then
+ chdir(newdir)
+ end
+end
+
+function dir.pop()
+ local d = remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
+end
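+
+-- typical pairing (illustrative, "build" is a made-up directory name)
+--
+--~ dir.push("build") -- remember the current directory, then chdir into build
+--~ -- ... work relative to build ...
+--~ dir.pop()         -- return to the remembered directory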
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua
new file mode 100644
index 00000000000..c9c30de9082
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-loaders.lua
@@ -0,0 +1,32 @@
+if not modules then modules = { } end modules ["loaders"] = {
+ version = 2.200,
+ comment = "companion to luaotfload.lua",
+ author = "Hans Hagen, Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local fonts   = fonts
+local readers = fonts.readers --- the pfb/pfa readers below delegate to this
+
+---
+--- opentype reader (from font-otf.lua):
+--- (spec : table) -> (suffix : string) -> (format : string) -> (font : table)
+---
+
+local pfb_reader = function (specification)
+ return readers.opentype(specification,"pfb","type1")
+end
+
+local pfa_reader = function (specification)
+ return readers.opentype(specification,"pfa","type1")
+end
+
+fonts.formats.pfb = "type1"
+fonts.readers.pfb = pfb_reader
+fonts.handlers.pfb = { } --- empty, as with tfm
+
+fonts.formats.pfa = "type1"
+fonts.readers.pfa = pfa_reader
+fonts.handlers.pfa = { }
+
+-- vim:tw=71:sw=2:ts=2:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-merged.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-merged.lua
new file mode 100644
index 00000000000..bf22bac29e3
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-merged.lua
@@ -0,0 +1,11903 @@
+-- merged file : luatex-fonts-merged.lua
+-- parent file : luatex-fonts.lua
+-- merge date : 04/29/13 20:30:03
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lua']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local major,minor=string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+_MAJORVERSION=tonumber(major) or 5
+_MINORVERSION=tonumber(minor) or 1
+_LUAVERSION=_MAJORVERSION+_MINORVERSION/10
+if not lpeg then
+ lpeg=require("lpeg")
+end
+if loadstring then
+ local loadnormal=load
+ function load(first,...)
+ if type(first)=="string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+else
+ loadstring=load
+end
+if not ipairs then
+ local function iterate(a,i)
+ i=i+1
+ local v=a[i]
+ if v~=nil then
+ return i,v
+ end
+ end
+ function ipairs(a)
+ return iterate,a,0
+ end
+end
+if not pairs then
+ function pairs(t)
+ return next,t
+ end
+end
+if not table.unpack then
+ table.unpack=_G.unpack
+elseif not unpack then
+ _G.unpack=table.unpack
+end
+if not package.loaders then
+ package.loaders=package.searchers
+end
+local print,select,tostring=print,select,tostring
+local inspectors={}
+function setinspector(inspector)
+ inspectors[#inspectors+1]=inspector
+end
+function inspect(...)
+ for s=1,select("#",...) do
+ local value=select(s,...)
+ local done=false
+ for i=1,#inspectors do
+ done=inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
+local dummy=function() end
+function optionalrequire(...)
+ local ok,result=xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+lpeg=require("lpeg")
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\254\255')
+local utfbom_16_le=P('\255\254')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+local utf8character=P(1)*R("\128\191")^0
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.utf8character=utf8character
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
+function lpeg.instringchecker(p)
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
+local splitters_s,splitters_m,splitters_t={},{},{}
+local function splitat(separator,single)
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
+ end
+ end
+ return splitter
+end
+local function tsplitat(separator)
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
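+-- illustrative: splitat yields the pieces as captures, tsplitat as a table
+--~ print(lpeg.match(lpeg.splitat(","),"a,b,c"))     -- a   b   c
+--~ inspect(lpeg.match(lpeg.tsplitat(","),"a,b,c"))  -- { "a", "b", "c" }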
+function string.splitup(str,separator)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+local cache={}
+function lpeg.split(separator,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.split(str,separator)
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+local cache={}
+function lpeg.checkedsplit(separator,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+function string.checkedsplit(str,separator)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
+local cache={}
+function lpeg.stripper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
+end
+local cache={}
+function lpeg.keeper(str)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
+ else
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
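+-- illustrative uses:
+--~ lpeg.match(lpeg.replacer("foo","bar"),"foofoo")         -- "barbar"
+--~ lpeg.replacer({ ["a"]="A", ["b"]="B" },nil,true)("ab")  -- "AB"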
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
+ else
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.balancer(left,right)
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
+end
+local nany=utf8char/""
+function lpeg.counter(pattern)
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
+utf=utf or (unicode and unicode.utf8) or {}
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=utf and utf.gmatch
+local utfchar=utf and utf.char
+lpeg.UP=lpeg.P
+if utfcharacters then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
+ end
+ return p
+ end
+elseif utfgmatch then
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
+ end
+ return p
+ end
+else
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+end
+local range=utf8byte*utf8byte+Cc(false)
+function lpeg.UR(str,more)
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
+end
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
+local sort=table.sort
+local function copyindexed(old)
+ local new={}
+ for i=1,#old do
+    new[i]=old[i]
+ end
+ return new
+end
+local function sortedkeys(tab)
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
+end
+function lpeg.append(list,pp,delayed,checked)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ if p then
+ p=p/list
+ end
+ end
+ elseif checked then
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
+ else
+ p=P(k)/v+p
+ end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
+ end
+ end
+ return p
+end
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
+ else
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
+end
+local digit=R("09")
+local period=P(".")
+local zero=P("0")
+local trailingzeros=zero^0*-digit
+local case_1=period*trailingzeros/""
+local case_2=period*(digit-trailingzeros)^1*(trailingzeros/"")
+local number=digit^1*(case_1+case_2)
+local stripper=Cs((number+1)^0)
+lpeg.patterns.stripzeros=stripper
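+-- illustrative: trailing zeros in decimal fractions are stripped
+--~ lpeg.match(lpeg.patterns.stripzeros,"1.200 0.50 100") -- "1.2 0.5 100"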
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+functions=functions or {}
+function functions.dummy() end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+function string.quoted(str)
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
+end
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
+local pattern=P(" ")^0*P(-1)
+function string.is_empty(str)
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str)~="string" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+function string.valid(str,default)
+ return (type(str)=="string" and str~="" and str) or default or nil
+end
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
+local replacer=lpeg.replacer("@","%%")
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
+string.quote=string.quoted
+string.unquote=string.unquoted
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
+function table.strip(tab)
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
+ end
+ end
+ return lst
+end
+function table.keys(t)
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
+ end
+ return keys
+ else
+ return {}
+ end
+end
+local function compare(a,b)
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
+end
+local function sortedkeys(tab)
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
+ else
+ category=3
+ end
+ end
+ end
+ if category==0 or category==3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab,cmp)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return {}
+ end
+end
+function table.allkeys(t)
+ local keys={}
+ for k,v in next,t do
+ for k,v in next,v do
+ keys[k]=true
+ end
+ end
+ return sortedkeys(keys)
+end
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
+local function nothing() end
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ s=sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s=sortedkeys(t)
+ end
+ local n=0
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
+function table.append(t,list)
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.merged(...)
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
+end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
+ end
+ end
+ return t
+end
+function table.imerged(...)
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
+ else
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
+end
+function table.tohash(t,value)
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
+ end
+ end
+ return h
+end
+function table.fromhash(t)
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
+}
+local function simple_table(t)
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+local propername=patterns.propername
+local function dummy() end
+local function do_serialize(root,name,depth,level,indexed)
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%04X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ end
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
+ else
+ handle(name.."={")
+ end
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
+ end
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
+ end
+ if next(root) then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
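+-- illustrative: table.serialize({ 1, 2, x = "y" },"t") returns roughly
+--
+--~ t={
+--~  1,
+--~  2,
+--~  ["x"]="y",
+--~ }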
+table.tohandle=serialize
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ end
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
+ else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
+ end
+ end
+end
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
+ end
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
+end
+function table.reversed(t)
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
+end
+function table.reverse(t)
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
+ end
+ return t
+ end
+end
+function table.sequenced(t,sep,simple)
+ if not t then
+ return ""
+ end
+ local n=#t
+ local s={}
+ if n>0 then
+ for i=1,n do
+ s[i]=tostring(t[i])
+ end
+ else
+ n=0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
+function table.print(t,...)
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+function table.is_empty(t)
+ return not t or not next(t)
+end
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
+end
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
+end
+function table.unique(old)
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
+end
+function table.sorted(t,...)
+ sort(t,...)
+ return t
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
+end
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+    local done=f:seek("set",0)
+    local step
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
+ else
+ step=floor(size/(1024*1024))*1024*1024/8
+ end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
+end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
+end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
+end
+function io.noflines(f)
+ if type(f)=="string" then
+    local f=io.open(f)
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
+end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
+end
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+file=file or {}
+local file=file
+if not lfs then
+ lfs=optionalrequire("lfs")
+end
+if not lfs then
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match,find=string.match,string.find
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
+function file.splitname(str,splitdrive)
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
+end
+function file.splitbase(str)
+ return str and lpegmatch(pattern_d,str)
+end
+function file.nametotable(str,splitdrive)
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ end
+ end
+end
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
+ return filename
+ end
+ end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename).."."..suffix
+ end
+end
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
+end
+local reslasher=lpeg.replacer(P("\\"),"/")
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+function file.is_writable(name)
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
+function file.join(...)
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
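+-- Illustrative usage sketch of the joiner above: slashes are collapsed and a
+-- leading network or drive prefix is kept as-is.
+--
+--   file.join("a","b","c")     --> "a/b/c"
+--   file.join("a:","b\\c")     --> "a:/b/c"
+--   file.join("","x","y")      --> "x/y"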
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
+function file.collapsepath(str,anchor)
+ if not str then
+ return
+ end
+ if anchor==true and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
+ else
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ newelements=concat(newelements,'/')
+ if anchor=="." and find(str,"^%./") then
+ return "./"..newelements
+ else
+ return newelements
+ end
+ end
+end
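+-- Illustrative usage sketch: collapsepath removes "." segments and resolves ".."
+-- against preceding elements.
+--
+--   file.collapsepath("a/b/../c")      --> "a/c"
+--   file.collapsepath("./a//b/.")      --> "a/b"
+--   file.collapsepath("./a/../b",".")  --> "./b"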
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
+ end
+ end
+end
+file.readdata=io.loaddata
+file.savedata=io.savedata
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data=io.loaddata(oldname)
+ if data and data~="" then
+ file.savedata(newname,data)
+ end
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename)~=nil
+end
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename)~=nil
+end
+function file.strip(name,dir)
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
+end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str=="0" then
+ return false
+ elseif str=="1" then
+ return true
+ elseif str=="" then
+ return false
+ elseif str=="false" then
+ return false
+ elseif str=="true" then
+ return true
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
+end
+function string.is_boolean(str,default)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
+ end
+ end
+ return default
+end
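+-- Illustrative usage sketch of the boolean helpers above:
+--
+--   toboolean("true")                 --> true
+--   toboolean("1")                    --> false  (strict)
+--   toboolean("1",true)               --> true   (tolerant)
+--   string.is_boolean("maybe",false)  --> false  (falls back to the default)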
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
+end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+if not math.mod then
+ function math.mod(n,m) return n%m end
+end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['util-str']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+utilities=utilities or {}
+utilities.strings=utilities.strings or {}
+local strings=utilities.strings
+local format,gsub,rep,sub=string.format,string.gsub,string.rep,string.sub
+local load,dump=load,string.dump
+local tonumber,type,tostring=tonumber,type,tostring
+local unpack,concat=table.unpack,table.concat
+local P,V,C,S,R,Ct,Cs,Cp,Carg,Cc=lpeg.P,lpeg.V,lpeg.C,lpeg.S,lpeg.R,lpeg.Ct,lpeg.Cs,lpeg.Cp,lpeg.Carg,lpeg.Cc
+local patterns,lpegmatch=lpeg.patterns,lpeg.match
+local utfchar,utfbyte=utf.char,utf.byte
+local loadstripped=_LUAVERSION<5.2 and load or function(str)
+ return load(dump(load(str),true))
+end
+if not number then number={} end
+local stripper=patterns.stripzeros
+local function points(n)
+ return (not n or n==0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+local function basepoints(n)
+ return (not n or n==0) and "0bp" or lpegmatch(stripper,format("%.5fbp",n*(7200/7227)/65536))
+end
+number.points=points
+number.basepoints=basepoints
+local rubish=patterns.spaceortab^0*patterns.newline
+local anyrubish=patterns.spaceortab+patterns.newline
+local anything=patterns.anything
+local stripped=(patterns.spaceortab^1/"")*patterns.newline
+local leading=rubish^0/""
+local trailing=(anyrubish^1*patterns.endofstring)/""
+local redundant=rubish^3/"\n"
+local pattern=Cs(leading*(trailing+redundant+stripped+anything)^0)
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+local repeaters={}
+function strings.newrepeater(str,offset)
+ offset=offset or 0
+ local s=repeaters[str]
+ if not s then
+ s={}
+ repeaters[str]=s
+ end
+ local t=s[offset]
+ if t then
+ return t
+ end
+ t={}
+ setmetatable(t,{ __index=function(t,k)
+ if not k then
+ return ""
+ end
+ local n=k+offset
+ local s=n>0 and rep(str,n) or ""
+ t[k]=s
+ return s
+ end })
+ s[offset]=t
+ return t
+end
+local extra,tab,start=0,0,4
+local nspaces=strings.newrepeater(" ")
+string.nspaces=nspaces
+local pattern=Carg(1)/function(t)
+ extra,tab,start=0,t or 7,1
+ end*Cs((
+ Cp()*patterns.tab/function(position)
+ local current=(position-start+1)+extra
+ local spaces=tab-(current-1)%tab
+ if spaces>0 then
+ extra=extra+spaces-1
+ return nspaces[spaces]
+ else
+ return ""
+ end
+ end+patterns.newline*Cp()/function(position)
+ extra,start=0,position
+ end+patterns.anything
+ )^1)
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+function strings.striplong(str)
+ str=gsub(str,"^%s*","")
+ str=gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+function strings.nice(str)
+ str=gsub(str,"[:%-+_]+"," ")
+ return str
+end
+local n=0
+local sequenced=table.sequenced
+function string.autodouble(s,sep)
+ if s==nil then
+ return '""'
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ('"'..sequenced(s,sep or ",")..'"')
+ end
+ return ('"'..tostring(s)..'"')
+end
+function string.autosingle(s,sep)
+ if s==nil then
+ return "''"
+ end
+ local t=type(s)
+ if t=="number" then
+ return tostring(s)
+ end
+ if t=="table" then
+ return ("'"..sequenced(s,sep or ",").."'")
+ end
+ return ("'"..tostring(s).."'")
+end
+local tracedchars={}
+string.tracedchars=tracedchars
+strings.tracers=tracedchars
+function string.tracedchar(b)
+ if type(b)=="number" then
+ return tracedchars[b] or (utfchar(b).." (U+"..format('%05X',b)..")")
+ else
+ local c=utfbyte(b)
+ return tracedchars[c] or (b.." (U+"..format('%05X',c)..")")
+ end
+end
+function number.signed(i)
+ if i>0 then
+ return "+",i
+ else
+ return "-",-i
+ end
+end
+local preamble=[[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+local template=[[
+%s
+%s
+return function(%s) return %s end
+]]
+local arguments={ "a1" }
+setmetatable(arguments,{ __index=function(t,k)
+ local v=t[k-1]..",a"..k
+ t[k]=v
+ return v
+ end
+})
+local prefix_any=C((S("+- .")+R("09"))^0)
+local prefix_tab=C((1-R("az","AZ","09","%%"))^0)
+local format_s=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',a%s)",f,n)
+ else
+ return format("(a%s or '')",n)
+ end
+end
+local format_S=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+local format_q=function()
+ n=n+1
+ return format("(a%s and format('%%q',a%s) or '')",n,n)
+end
+local format_Q=function()
+ n=n+1
+ return format("format('%%q',tostring(a%s))",n)
+end
+local format_i=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+local format_d=format_i
+local format_I=function(f)
+ n=n+1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+local format_f=function(f)
+ n=n+1
+ return format("format('%%%sf',a%s)",f,n)
+end
+local format_g=function(f)
+ n=n+1
+ return format("format('%%%sg',a%s)",f,n)
+end
+local format_G=function(f)
+ n=n+1
+ return format("format('%%%sG',a%s)",f,n)
+end
+local format_e=function(f)
+ n=n+1
+ return format("format('%%%se',a%s)",f,n)
+end
+local format_E=function(f)
+ n=n+1
+ return format("format('%%%sE',a%s)",f,n)
+end
+local format_x=function(f)
+ n=n+1
+ return format("format('%%%sx',a%s)",f,n)
+end
+local format_X=function(f)
+ n=n+1
+ return format("format('%%%sX',a%s)",f,n)
+end
+local format_o=function(f)
+ n=n+1
+ return format("format('%%%so',a%s)",f,n)
+end
+local format_c=function()
+ n=n+1
+ return format("utfchar(a%s)",n)
+end
+local format_C=function()
+ n=n+1
+ return format("tracedchar(a%s)",n)
+end
+local format_r=function(f)
+ n=n+1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+local format_h=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_H=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_u=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_U=function(f)
+ n=n+1
+ if f=="-" then
+ f=sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f=="" and "05" or f,n,n,n)
+ end
+end
+local format_p=function()
+ n=n+1
+ return format("points(a%s)",n)
+end
+local format_b=function()
+ n=n+1
+ return format("basepoints(a%s)",n)
+end
+local format_t=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+local format_T=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+local format_l=function()
+ n=n+1
+ return format("(a%s and 'true' or 'false')",n)
+end
+local format_L=function()
+ n=n+1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+local format_N=function()
+ n=n+1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+local format_a=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+local format_A=function(f)
+ n=n+1
+ if f and f~="" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+local format_w=function(f)
+ n=n+1
+ f=tonumber(f)
+ if f then
+ return format("nspaces[%s+a%s]",f,n)
+ else
+ return format("nspaces[a%s]",n)
+ end
+end
+local format_W=function(f)
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+local format_rest=function(s)
+ return format("%q",s)
+end
+local format_extension=function(extensions,f,name)
+ local extension=extensions[name] or "tostring(%s)"
+ local f=tonumber(f) or 1
+ if f==0 then
+ return extension
+ elseif f==1 then
+ n=n+1
+ local a="a"..n
+ return format(extension,a,a)
+ elseif f<0 then
+ local a="a"..(n+f+1)
+ return format(extension,a,a)
+ else
+ local t={}
+ for i=1,f do
+ n=n+1
+ t[#t+1]="a"..n
+ end
+ return format(extension,unpack(t))
+ end
+end
+local builder=Cs { "start",
+ start=(
+ (
+ P("%")/""*(
+ V("!")
++V("s")+V("q")+V("i")+V("d")+V("f")+V("g")+V("G")+V("e")+V("E")+V("x")+V("X")+V("o")
++V("c")+V("C")+V("S")
++V("Q")
++V("N")
++V("r")+V("h")+V("H")+V("u")+V("U")+V("p")+V("b")+V("t")+V("T")+V("l")+V("L")+V("I")+V("h")
++V("w")
++V("W")
++V("a")
++V("A")
++V("*")
+ )+V("*")
+ )*(P(-1)+Carg(1))
+ )^0,
+ ["s"]=(prefix_any*P("s"))/format_s,
+ ["q"]=(prefix_any*P("q"))/format_q,
+ ["i"]=(prefix_any*P("i"))/format_i,
+ ["d"]=(prefix_any*P("d"))/format_d,
+ ["f"]=(prefix_any*P("f"))/format_f,
+ ["g"]=(prefix_any*P("g"))/format_g,
+ ["G"]=(prefix_any*P("G"))/format_G,
+ ["e"]=(prefix_any*P("e"))/format_e,
+ ["E"]=(prefix_any*P("E"))/format_E,
+ ["x"]=(prefix_any*P("x"))/format_x,
+ ["X"]=(prefix_any*P("X"))/format_X,
+ ["o"]=(prefix_any*P("o"))/format_o,
+ ["S"]=(prefix_any*P("S"))/format_S,
+ ["Q"]=(prefix_any*P("Q"))/format_S,
+ ["N"]=(prefix_any*P("N"))/format_N,
+ ["c"]=(prefix_any*P("c"))/format_c,
+ ["C"]=(prefix_any*P("C"))/format_C,
+ ["r"]=(prefix_any*P("r"))/format_r,
+ ["h"]=(prefix_any*P("h"))/format_h,
+ ["H"]=(prefix_any*P("H"))/format_H,
+ ["u"]=(prefix_any*P("u"))/format_u,
+ ["U"]=(prefix_any*P("U"))/format_U,
+ ["p"]=(prefix_any*P("p"))/format_p,
+ ["b"]=(prefix_any*P("b"))/format_b,
+ ["t"]=(prefix_tab*P("t"))/format_t,
+ ["T"]=(prefix_tab*P("T"))/format_T,
+ ["l"]=(prefix_tab*P("l"))/format_l,
+ ["L"]=(prefix_tab*P("L"))/format_L,
+ ["I"]=(prefix_any*P("I"))/format_I,
+ ["w"]=(prefix_any*P("w"))/format_w,
+ ["W"]=(prefix_any*P("W"))/format_W,
+ ["a"]=(prefix_any*P("a"))/format_a,
+ ["A"]=(prefix_any*P("A"))/format_A,
+ ["*"]=Cs(((1-P("%"))^1+P("%%")/"%%%%")^1)/format_rest,
+ ["!"]=Carg(2)*prefix_any*P("!")*C((1-P("!"))^1)*P("!")/format_extension,
+}
+local direct=Cs (
+ P("%")/""*Cc([[local format = string.format return function(str) return format("%]])*(S("+- .")+R("09"))^0*S("sqidfgGeExXo")*Cc([[",str) end]])*P(-1)
+ )
+local function make(t,str)
+ local f
+ local p
+ local p=lpegmatch(direct,str)
+ if p then
+ f=loadstripped(p)()
+ else
+ n=0
+ p=lpegmatch(builder,str,1,"..",t._extensions_)
+ if n>0 then
+ p=format(template,preamble,t._preamble_,arguments[n],p)
+ f=loadstripped(p)()
+ else
+ f=function() return str end
+ end
+ end
+ t[str]=f
+ return f
+end
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+strings.formatters={}
+function strings.formatters.new()
+ local t={ _extensions_={},_preamble_="",_type_="formatter" }
+ setmetatable(t,{ __index=make,__call=use })
+ return t
+end
+local formatters=strings.formatters.new()
+string.formatters=formatters
+string.formatter=function(str,...) return formatters[str](...) end
+local function add(t,name,template,preamble)
+ if type(t)=="table" and t._type_=="formatter" then
+ t._extensions_[name]=template or "%s"
+ if preamble then
+ t._preamble_=preamble.."\n"..t._preamble_
+ end
+ end
+end
+strings.formatters.add=add
+lpeg.patterns.xmlescape=Cs((P("<")/"&lt;"+P(">")/"&gt;"+P("&")/"&amp;"+P('"')/"&quot;"+P(1))^0)
+lpeg.patterns.texescape=Cs((C(S("#$%\\{}"))/"\\%1"+P(1))^0)
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function() end
+local dummyreporter=function(c) return function(...) texio.write_nl(c.." : "..string.formatters(...)) end end
+statistics={
+ register=dummyfunction,
+ starttiming=dummyfunction,
+ stoptiming=dummyfunction,
+ elapsedtime=nil,
+}
+directives={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+trackers={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+experiments={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
+}
+storage={
+ register=dummyfunction,
+ shared={},
+}
+logs={
+ new=dummyreporter,
+ reporter=dummyreporter,
+ messenger=dummyreporter,
+ report=dummyfunction,
+}
+callbacks={
+ register=function(n,f) return callback.register(n,f) end,
+}
+utilities={
+ storage={
+ allocate=function(t) return t or {} end,
+ mark=function(t) return t or {} end,
+ },
+}
+characters=characters or {
+ data={}
+}
+texconfig.kpse_init=true
+resolvers=resolvers or {}
+local remapper={
+ otf="opentype fonts",
+ ttf="truetype fonts",
+ ttc="truetype fonts",
+ dfont="truetype fonts",
+ cid="cid maps",
+ cidmap="cid maps",
+ fea="font feature files",
+ pfa="type1 fonts",
+ pfb="type1 fonts",
+}
+function resolvers.findfile(name,fileformat)
+ name=string.gsub(name,"\\","/")
+ if not fileformat or fileformat=="" then
+ fileformat=file.suffix(name)
+ if fileformat=="" then
+ fileformat="tex"
+ end
+ end
+ fileformat=string.lower(fileformat)
+ fileformat=remapper[fileformat] or fileformat
+ local found=kpse.find_file(name,fileformat)
+ if not found or found=="" then
+ found=kpse.find_file(name,"other text files")
+ end
+ return found
+end
+resolvers.findbinfile=resolvers.findfile
+function resolvers.resolve(s)
+ return s
+end
+function resolvers.unresolve(s)
+ return s
+end
+caches={}
+local writable=nil
+local readables={}
+local usingjit=jit
+if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then
+ caches.namespace='generic'
+end
+do
+ local cachepaths=kpse.expand_path('$TEXMFCACHE') or ""
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$TEXMFVAR')
+ end
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$VARTEXMF')
+ end
+ if cachepaths=="" then
+ cachepaths="."
+ end
+ cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
+ for i=1,#cachepaths do
+ if file.is_writable(cachepaths[i]) then
+ writable=file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable=file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+ if not writable then
+  texio.write_nl("quitting: fix your writable cache path")
+ os.exit()
+ elseif #readables==0 then
+  texio.write_nl("quitting: fix your readable cache path")
+ os.exit()
+ elseif #readables==1 and readables[1]==writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables," ")))
+ end
+end
+function caches.getwritablepath(category,subcategory)
+ local path=file.join(writable,category)
+ lfs.mkdir(path)
+ path=file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
+end
+function caches.getreadablepaths(category,subcategory)
+ local t={}
+ for i=1,#readables do
+ t[i]=file.join(readables[i],category,subcategory)
+ end
+ return t
+end
+local function makefullname(path,name)
+ if path and path~="" then
+ return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),usingjit and "lub" or "luc")
+ end
+end
+function caches.is_writable(path,name)
+ local fullname=makefullname(path,name)
+ return fullname and file.is_writable(fullname)
+end
+function caches.loaddata(paths,name)
+ for i=1,#paths do
+ local data=false
+ local luaname,lucname=makefullname(paths[i],name)
+ if lucname and lfs.isfile(lucname) then
+ texio.write(string.format("(load luc: %s)",lucname))
+ data=loadfile(lucname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ else
+ texio.write(string.format("(loading failed: %s)",lucname))
+ end
+ end
+ if luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load lua: %s)",luaname))
+ data=loadfile(luaname)
+ if data then
+ data=data()
+ end
+ if data then
+ return data
+ end
+ end
+ end
+end
+function caches.savedata(path,name,data)
+ local luaname,lucname=makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true,{ reduce=true })
+ if lucname and type(caches.compile)=="function" then
+ os.remove(lucname)
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+function caches.compile(data,luaname,lucname)
+ local d=io.loaddata(luaname)
+ if not d or d=="" then
+ d=table.serialize(data,true)
+ end
+ if d and d~="" then
+ local f=io.open(lucname,'wb')
+ if f then
+ local s=loadstring(d)
+ if s then
+ f:write(string.dump(s,true))
+ end
+ f:close()
+ end
+ end
+end
+function table.setmetatableindex(t,f)
+ setmetatable(t,{ __index=f })
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
+}
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
+ end
+ return s
+ end
+end
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
+end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
+end
+function containers.read(container,name)
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","load",container.subcategory,name)
+ end
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","reuse",container.subcategory,name)
+ end
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","save",container.subcategory,name)
+ end
+ data.unique,data.shared=unique,shared
+ end
+ if trace_cache or trace_containers then
+ report_containers("action %a, category %a, name %a","store",container.subcategory,name)
+ end
+ container.storage[name]=data
+ end
+ return data
+end
+function containers.content(container,name)
+ return container.storage[name]
+end
+function containers.cleanname(name)
+ return (gsub(lower(name),"[^%w\128-\255]+","-"))
+end
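+-- Illustrative usage sketch: a container couples a cache (sub)category with a
+-- version number so that stale entries are ignored. The names are hypothetical.
+--
+--   local cache = containers.define("fonts","demo",1.001,true)
+--   local data  = containers.read(cache,"somekey")
+--   if not data then
+--     data = containers.write(cache,"somekey",{ value=123 })
+--   end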
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes=attributes or {}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
+attributes.private=attributes.private or function(name)
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+local math_code=nodecodes.math
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
+ end
+ end
+ return head,current,t
+end
+function nodes.delete(head,current)
+ return nodes.remove(head,current,true)
+end
+nodes.before=node.insert_before
+nodes.after=node.insert_after
+function nodes.pool.kern(k)
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+function nodes.endofmath(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
+}
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
+}
+function constructors.setfactor(f)
+ constructors.factor=factors[f or 'pt'] or factors.pt
+end
+constructors.setfactor()
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+  local factor=constructors.factor
+  if designsize then
+   if designsize>factor then
+    return (- scaledpoints/1000)*designsize
+   else
+    return (- scaledpoints/1000)*designsize*factor
+   end
+  else
+   return (- scaledpoints/1000)*10*factor
+  end
+ else
+  return scaledpoints
+ end
+end
+function constructors.cleanuptable(tfmdata)
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
+ end
+ end
+end
+function constructors.calculatescale(tfmdata,scaledpoints)
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
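+-- Illustrative usage sketch: a negative size means "n/1000 of the design size",
+-- so at a 10pt design size (655360sp) asking for -1200 yields 12pt.
+--
+--   constructors.calculatescale({ parameters={ designsize=655360, units=1000 } },-1200)
+--   --> 786432, 786.432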
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
+}
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
+end
+function constructors.beforecopyingcharacters(target,original)
+end
+function constructors.aftercopyingcharacters(target,original)
+end
+function constructors.enhanceparameters(parameters)
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
+end
+function constructors.scale(tfmdata,specification)
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=resources.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local nodemode=properties.mode=="node"
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ local italickey="italic"
+ local useitalics=true
+ if hasmath then
+ autoitalicamount=false
+ elseif properties.textitalics then
+ italickey="italic_correction"
+ useitalics=false
+ if properties.delaytextitalics then
+ autoitalicamount=false
+ end
+ end
+ if trace_defining then
+ report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a",
+ name,fullname,filename,hdelta,vdelta,
+ hasmath and "enabled" or "disabled",useitalics and "enabled" or "disabled")
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index,touni
+ if changed then
+ local c=changed[unicode]
+ if c then
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ if tounicode then
+ touni=tounicode[index]
+ if not touni then
+ local d=descriptions[unicode] or characters[unicode]
+ local i=d.index or unicode
+ touni=tounicode[i]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
+ else
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ if touni then
+ chr.tounicode=touni
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=description.italic
+ if vi and vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ end
+ if hasmath then
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
+ else
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local va=character.top_accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ end
+ if not nodemode then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
+ else
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
+ end
+ chr.index=nil
+ end
+ end
+ targetcharacters[unicode]=chr
+ end
+ constructors.aftercopyingcharacters(target,tfmdata)
+ return target
+end
+function constructors.finalize(tfmdata)
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+   auto=tfmdata.auto_protrude or false
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ if not parameters.designsize then
+ parameters.designsize=tfmdata.designsize or 655360
+ end
+ if not parameters.units then
+ parameters.units=tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units_per_em=nil
+ properties.finalized=true
+ return tfmdata
+end
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if not k then
+ elseif k=="number" or k=="features" then
+ else
+ n=n+1
+ s[n]=k
+ end
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
+function constructors.hashinstance(specification,force)
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
+ end
+end
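+-- Illustrative usage sketch: hashfeatures flattens the feature sets into a sorted
+-- string and hashinstance appends the size (and fallbacks, if any).
+--
+--   local spec = { size=655360, features={ normal={ liga=true, kern=true } } }
+--   constructors.hashinstance(spec)   --> "normal:kern=true+liga=true @ 655360"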
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+    report_defining("overloaded fontname %a",specname)
+ end
+ end
+ end
+end
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file %a is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file %a due to cache mismatch",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
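+-- Lazy format lookup: a key that is not registered itself falls back to the entry
+-- registered for its (lowercased) file suffix, so a full filename resolves like
+-- its extension.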
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+ if rawget(t,k) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
+end)
+local locations={}
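+-- Feature registration plumbing: set/setindeed install a feature's initializer,
+-- processor or manipulator action in the base and/or node list of a handler,
+-- replacing an entry with the same name or inserting at an explicit position.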
+local function setindeed(mode,target,group,name,action,position)
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature %a, group %a, mode %a",name,group,mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
+ end
+ insert(t,{ name=name,action=action })
+ end
+end
+local function set(group,name,target,source)
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature %a, group %a",name,group)
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
+ end
+ if description and description~="" then
+ where.descriptions[name]=description
+ end
+ if initializers then
+ set('initializers',name,where,specification)
+ end
+ if processors then
+ set('processors',name,where,specification)
+ end
+ if manipulators then
+ set('manipulators',name,where,specification)
+ end
+ if modechecker then
+ where.modechecker=modechecker
+ end
+ end
+end
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
+end
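+-- checkedfeatures merges the handler defaults into a (copied) requested feature
+-- set; with no request it simply returns a copy of the defaults.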
+function constructors.checkedfeatures(what,features)
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
+ end
+ return features
+ else
+ return fastcopy(defaults)
+ end
+end
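+-- initializefeatures resolves the processing mode (base or node) and runs the
+-- registered initializers for it; when an initializer switches the mode, the
+-- loop restarts so the initializers of the new mode get their turn as well.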
+function constructors.initializefeatures(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %a to %a for mode %a for font %a",feature,
+ value,mode,tfmdata.properties.fullname)
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
+ end
+ if not redo then
+ break
+ end
+ else
+ break
+ end
+ end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
+end
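+-- collectprocessors returns the processor actions that apply for the current mode
+-- and requested features; applymanipulators runs the matching manipulators directly.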
+function constructors.collectprocessors(what,tfmdata,features,trace,report)
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+ local processors=whatprocessors[properties.mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %a for mode %a for font %a",feature,properties.mode,tfmdata.properties.fullname)
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %a for font %a",properties.mode,tfmdata.properties.fullname)
+ end
+ end
+ return processes
+end
+function constructors.applymanipulators(what,tfmdata,features,trace,report)
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+ local manipulators=whatmanipulators[properties.mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %a for mode %a for font %a",feature,properties.mode,tfmdata.properties.fullname)
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
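+-- The Adobe Glyph List is loaded lazily: the first access of agl.unicodes runs
+-- font-age.lua and replaces this stub table with the real mapping.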
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
+end })
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
+local function do_one(a,b)
+ unicodes[tonumber(a)]=tonumber(b,16)
+end
+local function do_range(a,b,c)
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
+end
+local function do_name(a,b)
+ names[tonumber(a)]=b
+end
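+-- A cidmap file starts with two numbers followed by entries that are either a
+-- "cid unicode" pair, a "cid..cid unicode" range, or a "cid /glyphname" line; the
+-- grammar below fills the shared unicodes and names tables accordingly.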
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
+}
+local function loadcidfile(filename)
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names
+ }
+ end
+end
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
+local function locate(registry,ordering,supplement)
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry %a, ordering %a, supplement %a, filename %a",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
+ if trace_loading then
+ report_otf("using cidmap file %a",filename)
+ end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
+ end
+ end
+ return found
+end
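+-- getmap first tries the exact registry-ordering-supplement combination (files
+-- named like "adobe-japan1-6.cidmap"); failing that it searches higher supplements
+-- up to cidmax and then lower ones, and registers the hit for the lower supplement
+-- names as well.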
+function cid.getmap(specification)
+ if not specification then
+ report_otf("invalid cidinfo specification, table expected")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(template,registry,ordering,supplement)
+ local found=cidmap[lower(filename)]
+ if found then
+ return found
+ end
+ if trace_loading then
+ report_otf("cidmap needed, registry %a, ordering %a, supplement %a",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local tonumber=tonumber
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_mapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local lumfile=resolvers.findfile(lumname,"map") or ""
+ if lumfile~="" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("loading map table %a",lumfile)
+ end
+ lumunic=dofile(lumfile)
+ return lumunic,lumfile
+ end
+end
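+-- Glyph name parsers: "uniXXXX" (one or more groups of four hex digits), "u" plus
+-- hex digits, "indexNNN", or "<prefix>.NNN" via makenameparser(prefix); they return
+-- the decoded value(s) plus a flag telling whether a sequence was matched.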
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local hexsix=(hex^1)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
+local function makenameparser(str)
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
+ end
+ return p
+ end
+end
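+-- tounicode16 and tounicode16sequence render code points as the uppercase UTF-16
+-- hex strings used in ToUnicode mappings; values beyond the BMP become two 16-bit
+-- words.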
+local function tounicode16(unicode)
+ if unicode<0x10000 then
+ return format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a into tounicode",unicode)
+ end
+end
+local function tounicode16sequence(unicodes)
+ local t={}
+ for l=1,#unicodes do
+ local unicode=unicodes[l]
+ if unicode<0x10000 then
+ t[l]=format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a into tounicode",unicode)
+ end
+ end
+ return concat(t)
+end
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16)- 0xD800)*0x400+tonumber(r,16)-0xDC00
+ end
+end
+mappings.loadlumtable=loadlumtable
+mappings.makenameparser=makenameparser
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local separator=S("_.")
+local other=C((1-separator)^1)
+local ligsplitter=Ct(other*(separator*other)^0)
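+-- addtounicode derives tounicode entries for glyphs that sit in the private area
+-- or lack a proper unicode: it tries the Adobe Glyph List, then the cidmap (by
+-- index or by name), then splitting ligature-like names on "_" and ".", and
+-- finally the uniXXXX/uXXXXXX/index name parsers.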
+function mappings.addtounicode(data,filename)
+ local resources=data.resources
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ if not unicodes then
+ return
+ end
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors.privateoffset
+ local unknown=format("%04X",utfbyte("?"))
+ local unicodevector=fonts.encodings.agl.unicodes
+ local tounicode={}
+ local originals={}
+ resources.tounicode=tounicode
+ resources.originals=originals
+ local lumunic,uparser,oparser
+ local cidinfo,cidnames,cidcodes,usedmap
+ if false then
+ lumunic=loadlumtable(filename)
+ lumunic=lumunic and lumunic.tounicode
+ end
+ cidinfo=properties.cidinfo
+ usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ uparser=makenameparser()
+ local ns,nl=0,0
+ for unic,glyph in next,descriptions do
+ local index=glyph.index
+ local name=glyph.name
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
+ if foundcodes then
+ originals[index]=foundcodes
+ if multiple then
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode then
+ local split=lpegmatch(ligsplitter,name)
+ local nplit=split and #split or 0
+ if nplit>=2 then
+ local t,n={},0
+ for l=1,nplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ n=n+1
+ t[n]=u[1]
+ else
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1])
+ else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
+ end
+ nl=nl+1
+ unicode=true
+ else
+ end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local toun=tounicode[index]
+ if toun then
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+fonts.names.cache=containers.define("fonts","data",fonts.names.version,true)
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
+function fonts.names.reportmissingbase()
+ texio.write("<missing font database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingbase=nil
+end
+function fonts.names.reportmissingname()
+ texio.write("<unknown font in database, run: mtxrun --script fonts --reload --simple>")
+ fonts.names.reportmissingname=nil
+end
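+-- resolve loads the plain luatex-fonts-names database once (via the container
+-- cache or a plain Lua file) and looks the request up under a lowercased key with
+-- everything but letters and digits stripped; it returns the filename plus either
+-- the fontname (for subfonts) or false.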
+function fonts.names.resolve(name,sub)
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ data=containers.read(fonts.names.cache,basename)
+ if not data then
+ basename=file.addsuffix(basename,"lua")
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ elseif fonts.names.reportmissingname then
+ fonts.names.reportmissingname()
+ return name,false
+ end
+ elseif fonts.names.reportmissingbase then
+ fonts.names.reportmissingbase()
+ end
+end
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
+function fonts.readers.tfm(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
+registerotffeature {
+ name="features",
+ description="initialization of feature handler",
+ default=true,
+}
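+-- The mode, language and script features only record their (lowercased) value in
+-- tfmdata.properties; a script or language unknown to the otf tables falls back
+-- to "dflt".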
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
+end
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
+ end
+ end
+end
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
+ end
+ end
+end
+registerotffeature {
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
+}
+registerotffeature {
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
+}
+registerotffeature {
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local utfbyte=utf.byte
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local getn=table.getn
+local lpegmatch=lpeg.match
+local reversed,concat,remove=table.reversed,table.concat,table.remove
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local formatters=string.formatters
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.742
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local fontdata=fonts.hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local forceload=false
+local cleanup=0
+local usemetatables=false
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local wildcard="*"
+local default="dflt"
+local fontloaderfields=fontloader.fields
+local mainfields=nil
+local glyphfields=nil
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("using featurefile %a",featurefile)
+ end
+ fontloader.apply_featurefile(raw,featurefile)
+ end
+end
+local function showfeatureorder(rawdata,filename)
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
+}
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "prepare tounicode",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "check extra features",
+ "add duplicates",
+ "check encoding",
+ "cleanup tables",
+}
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
+local function enhance(name,data,filename,raw)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ end
+end
+function enhancers.apply(data,filename,raw)
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush()
+end
+function patches.register(what,where,pattern,action)
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
+ end
+ end
+end
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+function enhancers.register(what,action)
+ actions[what]=action
+end
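+-- otf.load runs the fontloader on a file and caches the massaged result; the hash
+-- combines name, subfont and feature files, and the cache is rebuilt when the file
+-- size or modification time (or that of a feature file) changes, or when a forced
+-- reload is requested.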
+function otf.load(filename,format,sub,featurefile)
+ local base=file.basename(file.removesuffix(filename))
+ local name=file.removesuffix(base)
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata,messages
+ if sub then
+ fontdata,messages=fontloader.open(filename,sub)
+ else
+ fontdata,messages=fontloader.open(filename)
+ end
+ if fontdata then
+ mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ end
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ format=format,
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=lpeg.Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ fontloader.close(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local basename=trace_markwidth and file.basename(filename)
+ if usemetatables then
+ for _,d in next,descriptions do
+ local wd=d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ setmetatable(d,mt)
+ end
+ else
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename)
+ end
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+ end
+end
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
+ else
+ new[k]=v
+ end
+ end
+ end
+ return new
+ else
+ return {}
+ end
+end
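+-- "prepare glyphs" builds descriptions[] keyed by unicode: glyphs without a usable
+-- unicode are moved to the private area, CID subfonts are remapped through the
+-- cidmap, and altuni entries end up as variants or duplicates.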
+actions["prepare glyphs"]=function(data,filename,raw)
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts={}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames,nofunicodes=0,0
+ local cidunicodes,cidnames=cidmap.unicodes,cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ for index=0,subfont.glyphcnt-1 do
+ local glyph=cidglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=cidunicodes[index]
+ end
+ if not unicode or unicode==-1 or unicode>=criterium then
+ if not name then
+ name=format("u%06X",private)
+ end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=glyph.name or name or "unknown",
+ cidindex=cidindex,
+ index=index,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
+ else
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+ else
+ for index=0,raw.glyphcnt-1 do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private=private+1
+ else
+ unicodes[name]=unicode
+ end
+ indices[index]=unicode
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ local d
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ elseif d then
+ d[#d+1]=u
+ else
+ d={ u }
+ end
+ end
+ if d then
+ duplicates[unicode]=d
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ end
+ resources.private=private
+end
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ for unicode,index in next,unicodetoindex do
+ if unicode<=criterium and not descriptions[unicode] then
+ local parent=indices[index]
+ if parent then
+ report_otf("weird, unicode %U points to %U with index %H",unicode,parent,index)
+ else
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ end
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ end
+end
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ for i=1,#d do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ local n=0
+ for _,description in next,descriptions do
+ local kerns=description.kerns
+ if kerns then
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+end
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
+ else
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
+ end
+ a[lookup]=true
+ end
+ end
+ end
+end
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
+}
+local function supported(features)
+ for i=1,#features do
+ if features[i].ismac then
+ return false
+ end
+ end
+ return true
+end
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for _,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ if not features or supported(features) then
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
+ end
+ end
+ end
+ end
+end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
+end
+local function t_uncover(splitter,cache,covers)
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
+end
+local function s_uncover(splitter,cache,cover)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ return { uncovered }
+ end
+end
+local function t_hashed(t,cache)
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ tih={}
+ for i=1,#ti do
+ tih[ti[i]]=true
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function s_hashed(t,cache)
+ if t then
+ local ht={}
+ local tf=t[1]
+ for i=1,#tf do
+ ht[i]={ [tf[i]]=true }
+ end
+ return ht
+ else
+ return nil
+ end
+end
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
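+-- "reorganize lookups" rewrites contextual rules: class, coverage, reversecoverage
+-- and glyphs based representations are all turned into hashed before/current/after
+-- sets of unicodes (the class based ones are relabeled as coverage).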
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local splitter=data.helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ local lookups=rule.lookups
+ if lookups then
+ for i=1,#current do
+ if not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.before=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.after=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ end
+ end
+ end
+ end
+ end
+ end
+end
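+-- check_variants collects math variants and parts per glyph (skipping cyclic
+-- references) and resolves part components to unicodes; "analyze math" stores the
+-- result, together with kerns and italic corrections, in description.math.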
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic_correction=the_variants.italic_correction
+ if italic_correction and italic_correction==0 then
+ italic_correction=nil
+ end
+ return variants,parts,italic_correction
+end
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local horiz_variants=glyph.horiz_variants
+ local vert_variants=glyph.vert_variants
+ local top_accent=glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math={}
+ if top_accent then
+ math.top_accent=top_accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
+ else
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
+ end
+ end
+ end
+ end
+ math.kerns=mathkerns
+ end
+ if horiz_variants then
+ math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes)
+ end
+ if vert_variants then
+ math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes)
+ end
+ local italic_correction=description.italic
+ if italic_correction and italic_correction~=0 then
+ math.italic_correction=italic_correction
+ end
+ description.math=math
+ end
+ end
+ end
+end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
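+-- "merge kern classes" expands gpos kern class subtables into plain per-glyph
+-- kerns: for every first glyph the offsets against the second classes are stored
+-- as kerns[lookup][second_unicode]=offset on its description.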
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ if kernclass then
+ local split={}
+ for k=1,#kernclass do
+ local kcl=kernclass[k]
+ local firsts=kcl.firsts
+ local seconds=kcl.seconds
+ local offsets=kcl.offsets
+ local lookups=kcl.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
+ end
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
+ end
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ end
+end
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ data.resources.indices=nil
+ data.helpers=nil
+end
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ for i=1,#specification do
+ local si=specification[i]
+ specification[i]={ si.x or 0,si.y or 0 }
+ end
+ end
+ else
+ for tag,specification in next,data do
+ data[tag]={ specification.x or 0,specification.y or 0 }
+ end
+ end
+ end
+ description.anchors=anchors
+ end
+ end
+end
+function otf.setfeatures(tfmdata,features)
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
+end
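+-- Convert the cached otf data into the characters/parameters/properties
+-- layout that the font constructor expects, including math variants/parts
+-- and the space and x-height defaults.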
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local variants=m.horiz_variants
+ local parts=m.horiz_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ end
+ local variants=m.vert_variants
+ local parts=m.vert_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ local italic_correction=m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction=italic_correction
+ end
+ local top_accent=m.top_accent
+ if top_accent then
+ character.top_accent=top_accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local italicangle=metadata.italicangle
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.round(math.tan(italicangle*math.pi/180))
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x78
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.ascender=abs(metadata.ascent or 0)
+ parameters.descender=abs(metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or fonts.formats[filename] or "opentype"
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fontname or fullname
+ properties.name=filename or fullname
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
+end
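+-- Load (or reuse) the raw otf data for a specification, convert it with
+-- copytotfm and store the result in the constructors cache.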
+local function otftotfm(specification)
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local format=specification.format
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,format,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+local function read_from_otf(specification)
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
+end
+registerotffeature {
+ name="mathsize",
+ description="apply mathsize as specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
+}
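+-- Collect the subtable names that implement a feature for the given script
+-- and language, honouring the "dflt" and "*" fallbacks.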
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+      if not featuremap[ss] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
+end
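+-- Reader entry points: resolve the font file by suffix (or via the names
+-- database) and pass it on to read_from_otf.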
+local function check_otf(forced,specification,suffix,what)
+ local name=specification.name
+ if forced then
+ name=file.addsuffix(name,suffix,true)
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" then
+ specification.filename=fullname
+ specification.format=what
+ return read_from_otf(specification)
+ end
+end
+local function opentypereader(specification,suffix,what)
+ local forced=specification.forced or ""
+ if forced=="otf" then
+ return check_otf(true,specification,forced,"opentype")
+ elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then
+ return check_otf(true,specification,forced,"truetype")
+ else
+ return check_otf(false,specification,suffix,what)
+ end
+end
+readers.opentype=opentypereader
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
+function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
+function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
+function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
+local formatters=string.formatters
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(descriptions,n)
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=2,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in base mode tracing>"
+ end
+end
+local function cref(feature,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookupname)
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+local basemethods={}
+local basemethod="<unset>"
+local function applybasemethod(what,...)
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+local basehash,basehashes,applied={},1,{}
+local function registerbasehash(tfmdata)
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
+end
+local function registerbasefeature(feature,value)
+ applied[#applied+1]=feature.."="..tostring(value)
+end
+local trace=false
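+-- Base mode ligature building: turn the collected ligature lists into
+-- chains of characters, creating private intermediate glyphs when needed.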
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace then
+ trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace then
+ trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not unicodes[secondname] then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=unicodes[secondname]
+ if not target then
+ break
+ end
+ end
+ if trace then
+ trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace then
+ for k,v in next,characters do
+ if v.ligatures then table.print(v,k) end
+ end
+ end
+ tfmdata.resources.private=private
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local unicodes=resources.unicodes
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+    replacement=lookupdata[#lookupdata]
+    changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ finalize_ligatures(tfmdata,ligatures)
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ else
+ sharedkerns[rawkerns]=false
+ end
+ end
+ end
+ end
+end
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
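+-- Helpers for the "shared" base method, which reuses the node mode
+-- lookuphash instead of the per-glyph slookups/mlookups tables.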
+local function makefake(tfmdata,name,present)
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
+end
+local function make_1(present,tree,name)
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
+ end
+ end
+end
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
+ else
+ d[#d+1]=v
+ end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ end
+ end
+end
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+     replacement=data[#data]
+     changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ end
+ end
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local lookuphash=resources.lookuphash
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+      report_kern(feature,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
+end
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
+}
+basemethod="independent"
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local t=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions then
+ for feature,data in next,basesubstitutions do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+   if basepositionings then
+    for feature,data in next,basepositionings do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
+}
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['node-inj']={
+ version=1.001,
+ comment="companion to node-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local next=next
+local utfchar=utf.char
+local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("nodes","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local kern_code=nodecodes.kern
+local nodepool=nodes.pool
+local newkern=nodepool.kern
+local traverse_id=node.traverse_id
+local insert_node_before=node.insert_before
+local insert_node_after=node.insert_after
+local a_kernpair=attributes.private('kernpair')
+local a_ligacomp=attributes.private('ligacomp')
+local a_markbase=attributes.private('markbase')
+local a_markmark=attributes.private('markmark')
+local a_markdone=attributes.private('markdone')
+local a_cursbase=attributes.private('cursbase')
+local a_curscurs=attributes.private('curscurs')
+local a_cursdone=attributes.private('cursdone')
+function injections.installnewkern(nk)
+ newkern=nk or newkern
+end
+local cursives={}
+local marks={}
+local kerns={}
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ local bound=#cursives+1
+ start[a_cursbase]=bound
+ nxt[a_curscurs]=bound
+ cursives[bound]={ rlmode,dx,dy,ws,wn }
+ return dx,dy,bound
+end
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local bound=current[a_kernpair]
+ if bound then
+ local kb=kerns[bound]
+ kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+ else
+ bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
+ end
+ return x,y,w,h,bound
+ end
+ return x,y,w,h
+end
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx=factor*x
+ if dx~=0 then
+ local bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,dx }
+ return dx,bound
+ else
+ return 0,0
+ end
+end
+function injections.setmark(start,base,factor,rlmode,ba,ma,index)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
+ local index=1
+ if bound then
+ local mb=marks[bound]
+ if mb then
+ index=#mb+1
+ mb[index]={ dx,dy,rlmode }
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ return dx,dy,bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+ index=index or 1
+ bound=#marks+1
+ base[a_markbase]=bound
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ marks[bound]={ [index]={ dx,dy,rlmode } }
+ return dx,dy,bound
+end
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local kp=n[a_kernpair]
+ local mb=n[a_markbase]
+ local mm=n[a_markmark]
+ local md=n[a_markdone]
+ local cb=n[a_cursbase]
+ local cc=n[a_curscurs]
+ local char=n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k=kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m=marks[mm]
+ if mb then
+ local m=m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m=m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c=cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+local function show_result(head)
+ local current=head
+ local skipping=false
+ while current do
+ local id=current.id
+ if id==glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ skipping=false
+ elseif id==kern_code then
+ report_injections("kern: %p",current.kern)
+ skipping=false
+ elseif not skipping then
+ report_injections()
+ skipping=true
+ end
+ current=current.next
+ end
+end
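+-- Main injection pass: apply the recorded pair kerns, cursive anchors and
+-- mark positions to the node list, inserting kern nodes where needed.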
+function injections.handler(head,where,keep)
+ local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
+ if has_kerns then
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
+ local dy=y-h
+ if dy~=0 then
+ ky[n]=dy
+ end
+ if w~=0 or x~=0 then
+ wx[n]=kk
+ end
+ rl[n]=kk[1]
+ end
+ end
+ end
+ end
+ else
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid>0 then
+ local cx={}
+ if has_kerns and next(ky) then
+ for n,k in next,ky do
+ n.yoffset=k
+ end
+ end
+ if has_cursives then
+ local p_cursbase,p=nil,nil
+ local t,d,maxt={},{},0
+ for i=1,nofvalid do
+ local n=valid[i]
+ if not mk[n] then
+ local n_cursbase=n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs=n[a_curscurs]
+ if p_cursbase==n_curscurs then
+ local c=cursives[n_curscurs]
+ if c then
+ local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
+ if rlmode>=0 then
+ dx=dx-ws
+ else
+ dx=dx+wn
+ end
+ if dx~=0 then
+ cx[n]=dx
+ rl[n]=rlmode
+ end
+ dy=-dy
+ maxt=maxt+1
+ t[maxt]=p
+ d[maxt]=dy
+ else
+ maxt=0
+ end
+ end
+ elseif maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ti.yoffset+ny
+ end
+ maxt=0
+ end
+ if not n_cursbase and maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ p_cursbase,p=n_cursbase,n
+ end
+ end
+ if maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ if not keep then
+ cursives={}
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p=valid[i]
+ local p_markbase=p[a_markbase]
+ if p_markbase then
+ local mrks=marks[p_markbase]
+ local nofmarks=#mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark=n[a_markmark]
+ if p_markbase==n_markmark then
+ local index=n[a_markdone] or 1
+ local d=mrks[index]
+ if d then
+ local rlmode=d[3]
+ local k=wx[p]
+ if k then
+ local x=k[2]
+ local w=k[4]
+ if w then
+ if rlmode and rlmode>=0 then
+ n.xoffset=p.xoffset-p.width+d[1]-(w-x)
+ else
+ n.xoffset=p.xoffset-d[1]-x
+ end
+ else
+ if rlmode and rlmode>=0 then
+ n.xoffset=p.xoffset-p.width+d[1]
+ else
+ n.xoffset=p.xoffset-d[1]-x
+ end
+ end
+ else
+ if rlmode and rlmode>=0 then
+ n.xoffset=p.xoffset-p.width+d[1]
+ else
+ n.xoffset=p.xoffset-d[1]
+ end
+ end
+ if mk[p] then
+ n.yoffset=p.yoffset+d[2]
+ else
+ n.yoffset=n.yoffset+p.yoffset+d[2]
+ end
+ if nofmarks==1 then
+ break
+ else
+ nofmarks=nofmarks-1
+ end
+ end
+ else
+ end
+ end
+ end
+ end
+ if not keep then
+ marks={}
+ end
+ end
+ if next(wx) then
+ for n,k in next,wx do
+ local x=k[2]
+ local w=k[4]
+ if w then
+ local rl=k[1]
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after (head,n,newkern(wx))
+ end
+ end
+ elseif x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ if next(cx) then
+ for n,k in next,cx do
+ if k~=0 then
+ local rln=rl[n]
+ if rln and rln<0 then
+ insert_node_before(head,n,newkern(-k))
+ else
+ insert_node_before(head,n,newkern(k))
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ elseif not keep then
+ kerns,cursives,marks={},{},{}
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
+ if y and y~=0 then
+ n.yoffset=y
+ end
+ if w then
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ else
+ end
+ return head,false
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-ota']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local type=type
+if not trackers then trackers={ register=function() end } end
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local disc_code=nodecodes.disc
+local math_code=nodecodes.math
+local traverse_id=node.traverse_id
+local traverse_node_list=node.traverse
+local end_of_math=node.end_of_math
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
+}
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+}
+analyzers.states=states
+analyzers.features=features
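+-- Default analyzer: tag runs of glyphs with init/medi/fina/isol/mark states
+-- (used for instance for latn).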
+function analyzers.setstate(head,font)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font then
+ done=true
+ local char=current.char
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ current[a_state]=s_mark
+ elseif n==0 then
+ first,last,n=current,current,1
+ current[a_state]=s_init
+ else
+ last,n=current,n+1
+ current[a_state]=s_medi
+ end
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+   current[a_state]=s_medi
+ last=current
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ return head,done
+end
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
+end
+local function analyzeprocessor(head,font,attr)
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head,false
+end
+registerotffeature {
+ name="analyze",
+ description="analysis of (for instance) character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
+}
+methods.latn=analyzers.setstate
+local tatweel=0x0640
+local zwnj=0x200C
+local zwj=0x200D
+local isolated={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0604]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,
+ [0x0856]=true,[0x0858]=true,[0x0857]=true,
+ [0x07FA]=true,
+ [zwnj]=true,
+}
+local final={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,
+ [0x08AA]=true,[0x08AB]=true,[0x08AC]=true,
+ [0xFEF5]=true,[0xFEF7]=true,[0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
+ [0x072F]=true,[0x074D]=true,
+ [0x0840]=true,[0x0849]=true,[0x0854]=true,[0x0846]=true,
+ [0x084F]=true
+}
+local medial={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x08A0]=true,[0x08A2]=true,[0x08A4]=true,[0x08A5]=true,
+ [0x08A6]=true,[0x0620]=true,[0x08A8]=true,[0x08A9]=true,
+ [0x08A7]=true,[0x08A3]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,[0x072D]=true,[0x072E]=true,
+ [0x074E]=true,[0x074F]=true,
+ [0x0841]=true,[0x0842]=true,[0x0843]=true,[0x0844]=true,
+ [0x0845]=true,[0x0847]=true,[0x0848]=true,[0x0855]=true,
+ [0x0851]=true,[0x084E]=true,[0x084D]=true,[0x084A]=true,
+ [0x084B]=true,[0x084C]=true,[0x0850]=true,[0x0852]=true,
+ [0x0853]=true,
+ [0x07D7]=true,[0x07E8]=true,[0x07D9]=true,[0x07EA]=true,
+ [0x07CA]=true,[0x07DB]=true,[0x07CC]=true,[0x07DD]=true,
+ [0x07CE]=true,[0x07DF]=true,[0x07D4]=true,[0x07E5]=true,
+ [0x07E9]=true,[0x07E7]=true,[0x07E3]=true,[0x07E2]=true,
+ [0x07E0]=true,[0x07E1]=true,[0x07DE]=true,[0x07DC]=true,
+ [0x07D1]=true,[0x07DA]=true,[0x07D8]=true,[0x07D6]=true,
+ [0x07D2]=true,[0x07D0]=true,[0x07CF]=true,[0x07CD]=true,
+ [0x07CB]=true,[0x07D3]=true,[0x07E4]=true,[0x07D5]=true,
+ [0x07E6]=true,
+ [tatweel]=true,
+ [zwj]=true,
+}
+local arab_warned={}
+local function warning(current,what)
+ local char=current.char
+ if not arab_warned[char] then
+  logs.report("analyze","arab: character %C has no %a class",char,what)
+ arab_warned[char]=true
+ end
+end
+local function finish(first,last)
+ if last then
+ if first==last then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ else
+ local lc=last.char
+ if medial[lc] or final[lc] then
+ last[a_state]=s_fina
+ else
+ warning(last,"fina")
+ last[a_state]=s_error
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=first.char
+ if medial[fc] or final[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ first=nil
+ end
+ return first,last
+end
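+-- Arabic analyzer (also used for Syriac, Mandaic and N'Ko): classify the
+-- characters with the isolated/final/medial tables and assign joining states.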
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ done=true
+ local char=current.char
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ current[a_state]=s_mark
+ elseif isolated[char] then
+ first,last=finish(first,last)
+ current[a_state]=s_isol
+ first,last=nil,nil
+ elseif not first then
+ if medial[char] then
+ current[a_state]=s_init
+ first,last=first or current,current
+ elseif final[char] then
+ current[a_state]=s_isol
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif medial[char] then
+ first,last=first or current,current
+ current[a_state]=s_medi
+ elseif final[char] then
+    if last[a_state]~=s_init then
+ last[a_state]=s_medi
+ end
+ current[a_state]=s_fina
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ current[a_state]=s_rest
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ if first or last then
+ first,last=finish(first,last)
+ end
+ if id==math_code then
+ current=end_of_math(current)
+ end
+ end
+ current=current.next
+ end
+ if first or last then
+ finish(first,last)
+ end
+ return head,done
+end
+methods.syrc=methods.arab
+methods.mand=methods.arab
+methods.nko=methods.arab
+directives.register("otf.analyze.useunicodemarks",function(v)
+ analyzers.useunicodemarks=v
+end)
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
+}
+local concat,insert,remove=table.concat,table.insert,table.remove
+local gmatch,gsub,find,match,lower,strip=string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local random=math.random
+local formatters=string.formatters
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
+registertracker("otf.actions","otf.replacements,otf.positions")
+registertracker("otf.injections","nodes.injections")
+registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
+local insert_node_after=node.insert_after
+local delete_node=nodes.delete
+local copy_node=node.copy
+local find_node_tail=node.tail or node.slide
+local flush_node_list=node.flush_list
+local end_of_math=node.end_of_math
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local whatsit_code=nodecodes.whatsit
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_markbase=privateattribute('markbase')
+local a_markmark=privateattribute('markmark')
+local a_markdone=privateattribute('markdone')
+local a_cursbase=privateattribute('cursbase')
+local a_curscurs=privateattribute('curscurs')
+local a_cursdone=privateattribute('cursdone')
+local a_kernpair=privateattribute('kernpair')
+local a_ligacomp=privateattribute('ligacomp')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local markonce=true
+local cursonce=true
+local kernonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
+end
+local function logwarning(...)
+ report_direct(...)
+end
+local f_unicode=formatters["%U"]
+local f_uniname=formatters["%U (%s)"]
+local f_unilist=formatters["% t (% t)"]
+local function gref(n)
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=f_unicode(ni)
+ nam[i]=di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return "<error in node mode tracing>"
+ end
+end
+local function cref(kind,chainname,chainlookupname,lookupname,index)
+ if index then
+ return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname)
+ elseif chainlookupname then
+ return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname)
+ elseif chainname then
+ return formatters["feature %a, chain %a"](kind,chainname)
+ else
+ return formatters["feature %a"](kind)
+ end
+end
+local function pref(kind,lookupname)
+ return formatters["feature %a, lookup %a"](kind,lookupname)
+end
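+-- Copy a glyph node without also copying its components list: the components
+-- are detached first and reattached to the original afterwards.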
+local function copy_glyph(g)
+ local components=g.components
+ if components then
+ g.components=nil
+ local n=copy_node(g)
+ g.components=components
+ return n
+ else
+ return copy_node(g)
+ end
+end
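+-- Collapse the glyphs from start..stop into one ligature glyph that keeps the
+-- originals as its components; used for mark sequences that form a ligature.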
+local function markstoligature(kind,lookupname,head,start,stop,char)
+ if start==stop and start.char==char then
+ return head,start
+ else
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if head==start then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ return head,base
+ end
+end
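+-- Count the non-mark glyphs in a (possibly nested) components list; marks do
+-- not contribute to the component index.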
+local function getcomponentindex(start)
+ if start.id~=glyph_code then
+ return 0
+ elseif start.subtype==ligature_code then
+ local i=0
+ local components=start.components
+ while components do
+ i=i+getcomponentindex(components)
+ components=components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
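+-- General ligature builder: replace start..stop by one ligature glyph, keep the
+-- originals as components and, unless marks are deleted, reinsert the marks
+-- tagged with the component index they attach to.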
+local function toligature(kind,lookupname,head,start,stop,char,markflag,discfound)
+ if start==stop and start.char==char then
+ start.char=char
+ return head,start
+ end
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ if start==head then
+ head=base
+ end
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=start.char
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head,current=insert_node_after(head,current,copy_node(start))
+ end
+ start=start.next
+ end
+ local start=components
+ while start and start.id==glyph_code do
+ local char=start.char
+ if marks[char] then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start=start.next
+ end
+ end
+ return head,base
+end
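+-- GSUB single substitution: replace one glyph by its substitute.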
+function handlers.gsub_single(head,start,kind,lookupname,replacement)
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+end
+local function get_alternative_glyph(start,alternatives,value,trace_alternatives)
+ local n=#alternatives
+ if value=="random" then
+ local r=random(1,n)
+ return alternatives[r],trace_alternatives and formatters["value %a, taking %a"](value,r)
+ elseif value=="first" then
+ return alternatives[1],trace_alternatives and formatters["value %a, taking %a"](value,1)
+ elseif value=="last" then
+ return alternatives[n],trace_alternatives and formatters["value %a, taking %a"](value,n)
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+    return alternatives[1],trace_alternatives and formatters["invalid value %s, taking %a"](value,1)
+   elseif defaultalt=="last" then
+    return alternatives[n],trace_alternatives and formatters["invalid value %s, taking %a"](value,n)
+ else
+ return false,trace_alternatives and formatters["invalid value %a, %s"](value,"out of range")
+ end
+ elseif value==0 then
+ return start.char,trace_alternatives and formatters["invalid value %a, %s"](value,"no change")
+ elseif value<1 then
+ return alternatives[1],trace_alternatives and formatters["invalid value %a, taking %a"](value,1)
+ else
+ return alternatives[value],trace_alternatives and formatters["value %a, taking %a"](value,value)
+ end
+ end
+end
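+-- Helper for one-to-many substitutions: the first replacement reuses the
+-- current node, the remaining ones are inserted as copies after it.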
+local function multiple_glyphs(head,start,multiple)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ start.char=multiple[1]
+ if nofmultiples>1 then
+ local sn=start.next
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ n.char=multiple[k]
+ n.next=sn
+ n.prev=start
+ if sn then
+ sn.prev=n
+ end
+ start.next=n
+ start=n
+ end
+ end
+ return head,start,true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
+ end
+ return head,start,false
+ end
+end
+function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice,comment=get_alternative_glyph(start,alternative,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",pref(kind,lookupname),gref(start.char),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",pref(kind,lookupname),value,gref(start.char),comment)
+ end
+ end
+ return head,start,true
+end
+function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple)
+end
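+-- GSUB ligature substitution: walk the glyph stream collecting a ligature
+-- match (optionally skipping marks and noting discretionaries) and replace the
+-- matched run via markstoligature or toligature.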
+function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
+ local s,stop,discfound=start.next,nil,false
+ local startchar=start.char
+ if marks[startchar] then
+ while s do
+ local id=s.id
+ if id==glyph_code and s.font==currentfont and s.subtype<256 then
+ local lg=ligature[s.char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s case 1",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=markstoligature(kind,lookupname,head,start,stop,lig)
+ end
+ return head,start,true
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ while s do
+ local id=s.id
+ if id==glyph_code and s.subtype<256 then
+ if s.font==currentfont then
+ local char=s.char
+ if skipmark and marks[char] then
+ s=s.next
+ else
+ local lg=ligature[char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ discfound=true
+ s=s.next
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s case 2",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ head,start=toligature(kind,lookupname,head,start,stop,lig,skipmark,discfound)
+ end
+ return head,start,true
+ else
+ end
+ end
+ end
+ return head,start,false
+end
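+-- GPOS mark handlers: anchor a mark to the preceding base glyph, to a ligature
+-- component (selected via the ligature-component attribute) or to another mark.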
+function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
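+-- GPOS cursive attachment: connect the exit anchor of this glyph to the entry
+-- anchor of the next non-mark glyph.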
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done=false
+ local startchar=start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+end
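+-- GPOS single and pair positioning: single adjusts one glyph's placement and
+-- advance; pair either applies a value record to each glyph of the pair or
+-- inserts a plain kern between them.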
+function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
+ local startchar=start.char
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
+end
+function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
+ local snext=start.next
+ if not snext then
+ return head,start,false
+ else
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ local krn=kerns[nextchar]
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
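+-- Contextual chains: chainprocs handle a lookup applied at the matched start
+-- position, chainmores handle the subsequent positions of a multi-lookup chain.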
+local chainmores={}
+local chainprocs={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
+end
+local logwarning=report_subchain
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
+end
+local logwarning=report_chain
+function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
+end
+function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
+ local char=start.char
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
+local function delete_till_stop(start,stop,ignoremarks)
+ local n=1
+ if start==stop then
+ elseif ignoremarks then
+ repeat
+ local next=start.next
+ if not marks[next.char] then
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ end
+ n=n+1
+ until next==stop
+ else
+ repeat
+ local next=start.next
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ n=n+1
+ until next==stop
+ end
+ return n
+end
+function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
+ else
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char=replacement
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_single=chainprocs.gsub_single
+function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ delete_till_stop(start,stop)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+  if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
+ else
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements)
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_multiple=chainprocs.gsub_multiple
+function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local alternatives=lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives=alternatives[currentchar]
+ if alternatives then
+ local choice,comment=get_alternative_glyph(current,alternatives,value,trace_alternatives)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %a to %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(char),choice,gref(choice),comment)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %a for %s, %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(char),comment)
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s, %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),comment)
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
+ end
+ end
+ return head,start,false
+end
+chainmores.gsub_alternate=chainprocs.gsub_alternate
+function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
+ else
+ local s=start.next
+ local discfound=false
+ local last=stop
+ local nofreplacements=0
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=s.id
+ if id==disc_code then
+ s=s.next
+ discfound=true
+ else
+ local schar=s.char
+ if skipmark and marks[schar] then
+ s=s.next
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s case 3",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s case 4",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ head,start=toligature(kind,lookupname,head,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
+ end
+ end
+ end
+ end
+ return head,start,false,0
+end
+chainmores.gsub_ligature=chainprocs.gsub_ligature
+function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%p,%p)",
+            cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%p,%p)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%p,%p) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
+ end
+ end
+ return head,start,false
+end
+function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
+ local snext=start.next
+ if snext then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%p,%p) and correction (%p,%p)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a,b=krn[2],krn[6]
+ if a and a~=0 then
+ local k=setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b~=0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+ end
+ end
+ return head,start,false
+end
+local function show_skip(kind,chainname,char,ck,class)
+ if ck[9] then
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a, %a => %a",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s, class %a, rule %a, lookuptype %a",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
+end
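+-- The generic contextchain matcher. Each context ck is packed (see the
+-- preparation code further down) as { rule, lookuptype, sequence, start, stop,
+-- lookups, replacements }; the code below matches the current, before and after
+-- parts of that sequence and then applies the chained lookups, or the reverse
+-- replacements for reverse substitutions.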
+local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local someskip=skipmark or skipligature or skipbase
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
+ else
+ local n=f+1
+ last=last.next
+ while n<=l do
+ if last then
+ local id=last.id
+ if id==glyph_code then
+ if last.font==currentfont and last.subtype<256 then
+ local char=last.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=last.next
+ elseif seq[n][char] then
+ if n<l then
+ last=last.next
+ end
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ last=last.next
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=start.prev
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=prev.id
+ if id==glyph_code then
+ if prev.font==currentfont and prev.subtype<256 then
+ local char=prev.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=prev.prev
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+    for n=f-1,1,-1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and last.next
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=current.id
+ if id==glyph_code then
+ if current.font==currentfont and current.subtype<256 then
+ local char=current.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=current.next
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a, %a => %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %a",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
+ else
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ repeat
+ if skipped then
+ while true do
+ local char=start.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=start.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ local cp=chainlookup and chainmores[chainlookup.type]
+ if cp then
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
+ else
+ i=i+1
+ end
+ if start then
+ start=start.next
+ else
+ end
+ until i>nofchainlookups
+ end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=true
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
+ end
+ end
+ return head,start,done
+end
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
+end
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
+}
+function otf.setcontextchain(method)
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler %a",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
+end
+otf.setcontextchain()
+local missing={}
+local function logprocess(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
+end
+local logwarning=report_process
+local function report_missing_cache(typ,lookup)
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %a, type %a, font %a, name %a",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
+end)
+local autofeatures=fonts.analyzers.features
+local function initialize(sequence,script,language,enabled)
+ local features=sequence.features
+ if features then
+ for kind,scripts in next,features do
+ local valid=enabled[kind]
+ if valid then
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
+ end
+ end
+ end
+ return false
+end
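+-- Resolve, once per font, script and language, which sequences apply for the
+-- enabled features; the result is cached in the resolved table.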
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+for s=1,#sequences do
+ local v=enabled and initialize(sequences[s],script,language,enabled)
+ if v then
+ rl[#rl+1]=v
+ end
+end
+ end
+ return rl
+end
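+-- The main node list processor: for every resolved dataset it walks the list
+-- (from the tail for reverse chains), tracks direction whatsits to keep rlmode
+-- up to date, and dispatches to the handler that belongs to the sequence type.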
+local function featuresprocessor(head,font,attr)
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ currentfont=font
+ rlmode=0
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+for s=1,#datasets do
+ local dataset=datasets[s]
+ featurevalue=dataset[1]
+ local sequence=dataset[5]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
+ else
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
+ end
+ end
+ else
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ elseif not start then
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %a: parmode %a, txtmode %a, # stack %a, new dir %a",dir,rlparmode,rlmode,topstack,newdir)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %a: parmode %a, txtmode %a",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+ elseif id==math_code then
+ start=end_of_math(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ return head,done
+end
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+ end,
+ pair=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+ end,
+}
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup,_ in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
+}
+local function prepare_contextchains(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %a",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname)
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic,_ in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
+ else
+ end
+ else
+ report_prepare("missing lookuptype for lookupname %a",lookupname)
+ end
+ end
+ end
+end
+local function featuresinitializer(tfmdata,value)
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-starttime,tfmdata.properties.fullname)
+ end
+ end
+ end
+end
+registerotffeature {
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
+}
+otf.handlers=handlers
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-otp']={
+ version=1.001,
+ comment="companion to font-otf.lua (packing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local next,type=next,type
+local sort,concat=table.sort,table.concat
+local sortedhash=table.sortedhash
+local trace_packing=false trackers.register("otf.packing",function(v) trace_packing=v end)
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+fonts=fonts or {}
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local otf=handlers.otf or {}
+handlers.otf=otf
+local enhancers=otf.enhancers or {}
+otf.enhancers=enhancers
+local glists=otf.glists or { "gsub","gpos" }
+otf.glists=glists
+local criterium=1
+local threshold=0
+local function tabstr_normal(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if type(v)=="table" then
+ s[n]=k..">"..tabstr_normal(v)
+ elseif v==true then
+ s[n]=k.."+"
+ elseif v then
+ s[n]=k.."="..v
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_flat(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ s[n]=k.."="..v
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
+local function tabstr_mixed(t)
+ local s={}
+ local n=#t
+ if n==0 then
+ return ""
+ elseif n==1 then
+ local k=t[1]
+ if k==true then
+ return "++"
+ elseif k==false then
+ return "--"
+ else
+ return tostring(k)
+ end
+ else
+ for i=1,n do
+ local k=t[i]
+ if k==true then
+ s[i]="++"
+ elseif k==false then
+ s[i]="--"
+ else
+ s[i]=k
+ end
+ end
+ return concat(s,",")
+ end
+end
+local function tabstr_boolean(t)
+ local s={}
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ if v then
+ s[n]=k.."+"
+ else
+ s[n]=k.."-"
+ end
+ end
+ if n==0 then
+ return ""
+ elseif n==1 then
+ return s[1]
+ else
+ sort(s)
+ return concat(s,",")
+ end
+end
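+-- Editorial note (not part of the upstream sources): the tabstr_* helpers
+-- serialize a table into a canonical string key so that identical subtables
+-- can be shared during packing, e.g.
+--   tabstr_normal  { kern=true, mark=false }  --> "kern+,mark-"
+--   tabstr_flat    { x=10, y=20 }             --> "x=10,y=20"
+--   tabstr_boolean { liga=true, calt=false }  --> "calt-,liga+"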
+local function packdata(data)
+ if data then
+ local h,t,c={},{},{}
+ local hh,tt,cc={},{},{}
+ local nt,ntt=0,0
+ local function pack_normal(v)
+ local tag=tabstr_normal(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag=tabstr_flat(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag=tabstr_boolean(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag=concat(v," ")
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag=tabstr_mixed(v)
+ local ht=h[tag]
+ if ht then
+ c[ht]=c[ht]+1
+ return ht
+ else
+ nt=nt+1
+ t[nt]=v
+ h[tag]=nt
+ c[nt]=1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ if c[v]<=criterium then
+ return t[v]
+ else
+ local hv=hh[v]
+ if hv then
+ return hv
+ else
+ ntt=ntt+1
+ tt[ntt]=t[v]
+ hh[v]=ntt
+ cc[ntt]=c[v]
+ return ntt
+ end
+ end
+ end
+ local function success(stage,pass)
+ if nt==0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt>=threshold then
+ local one,two,rest=0,0,0
+ if pass==1 then
+ for k,v in next,c do
+ if v==1 then
+ one=one+1
+ elseif v==2 then
+ two=two+1
+ else
+ rest=rest+1
+ end
+ end
+ else
+ for k,v in next,cc do
+ if v>20 then
+ rest=rest+1
+ elseif v>10 then
+ two=two+1
+ else
+ one=one+1
+ end
+ end
+ data.tables=tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)",stage,pass,one+two+rest,one,two,rest,criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)",stage,pass,nt,threshold)
+ end
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass==1 then
+ return pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed
+ else
+ return pack_final,pack_final,pack_final,pack_final,pack_final
+ end
+ end
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local boundingbox=description.boundingbox
+ if boundingbox then
+ description.boundingbox=pack_indexed(boundingbox)
+ end
+ local slookups=description.slookups
+ if slookups then
+ for tag,slookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local t=slookup[2] if t then slookup[2]=pack_indexed(t) end
+ local t=slookup[3] if t then slookup[3]=pack_indexed(t) end
+ elseif what~="substitution" then
+ slookups[tag]=pack_indexed(slookup)
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#mlookup do
+ local lookup=mlookup[i]
+ local t=lookup[2] if t then lookup[2]=pack_indexed(t) end
+ local t=lookup[3] if t then lookup[3]=pack_indexed(t) end
+ end
+ elseif what~="substitution" then
+ for i=1,#mlookup do
+ mlookup[i]=pack_indexed(mlookup[i])
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_flat(kern)
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ for tag,kern in next,kerns do
+ kerns[tag]=pack_normal(kern)
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ for what,anchor in next,anchors do
+ if what=="baselig" then
+ for _,a in next,anchor do
+ for k=1,#a do
+ a[k]=pack_indexed(a[k])
+ end
+ end
+ else
+ for k,v in next,anchor do
+ anchor[k]=pack_indexed(v)
+ end
+ end
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.after if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.current if r then for i=1,#r do r[i]=pack_boolean(r[i]) end end
+ local r=rule.replacements if r then rule.replacements=pack_flat (r) end
+ local r=rule.lookups if r then rule.lookups=pack_indexed(r) end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ anchor_to_lookup[anchor]=pack_normal(lookup)
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ lookup_to_anchor[lookup]=pack_normal(anchor)
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ local flags=sequence.flags
+ if flags then
+ sequence.flags=pack_normal(flags)
+ end
+ local subtables=sequence.subtables
+ if subtables then
+ sequence.subtables=pack_normal(subtables)
+ end
+ local features=sequence.features
+ if features then
+ for script,feature in next,features do
+ features[script]=pack_normal(feature)
+ end
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for name,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ lookup.flags=pack_normal(flags)
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ lookup.subtables=pack_normal(subtables)
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local list=features[what]
+ if list then
+ for feature,spec in next,list do
+ list[feature]=pack_normal(spec)
+ end
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
+ end
+ end
+ if nt>0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local kerns=description.kerns
+ if kerns then
+ description.kerns=pack_normal(kerns)
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ math.kerns=pack_normal(kerns)
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ description.anchors=pack_normal(anchors)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ for tag,mlookup in next,mlookups do
+ mlookups[tag]=pack_normal(mlookup)
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local r=rule.before if r then rule.before=pack_normal(r) end
+ local r=rule.after if r then rule.after=pack_normal(r) end
+ local r=rule.current if r then rule.current=pack_normal(r) end
+ end
+ end
+ end
+ end
+ local sequences=resources.sequences
+ if sequences then
+ for feature,sequence in next,sequences do
+ sequence.features=pack_normal(sequence.features)
+ end
+ end
+ if not success(2,pass) then
+ end
+ end
+ for pass=1,2 do
+ local pack_normal,pack_indexed,pack_flat,pack_boolean,pack_mixed=packers(pass)
+ for unicode,description in next,data.descriptions do
+ local slookups=description.slookups
+ if slookups then
+ description.slookups=pack_normal(slookups)
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ description.mlookups=pack_normal(mlookups)
+ end
+ end
+ end
+ end
+ end
+end
+local unpacked_mt={
+ __index=function(t,k)
+ t[k]=false
+ return k
+ end
+}
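+-- Editorial note (not part of the upstream sources): the metatable makes the
+-- first lookup of a shared table return the table itself while caching false
+-- for it, so each packed subtable is expanded at most once during unpacking.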
+local function unpackdata(data)
+ if data then
+ local tables=data.tables
+ if tables then
+ local resources=data.resources
+ local lookuptypes=resources.lookuptypes
+ local unpacked={}
+ setmetatable(unpacked,unpacked_mt)
+ for unicode,description in next,data.descriptions do
+ local tv=tables[description.boundingbox]
+ if tv then
+ description.boundingbox=tv
+ end
+ local slookups=description.slookups
+ if slookups then
+ local tv=tables[slookups]
+ if tv then
+ description.slookups=tv
+ slookups=unpacked[tv]
+ end
+ if slookups then
+ for tag,lookup in next,slookups do
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ elseif what~="substitution" then
+ local tv=tables[lookup]
+ if tv then
+ slookups[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ local mlookups=description.mlookups
+ if mlookups then
+ local tv=tables[mlookups]
+ if tv then
+ description.mlookups=tv
+ mlookups=unpacked[tv]
+ end
+ if mlookups then
+ for tag,list in next,mlookups do
+ local tv=tables[list]
+ if tv then
+ mlookups[tag]=tv
+ list=unpacked[tv]
+ end
+ if list then
+ local what=lookuptypes[tag]
+ if what=="pair" then
+ for i=1,#list do
+ local lookup=list[i]
+ local tv=tables[lookup[2]]
+ if tv then
+ lookup[2]=tv
+ end
+ local tv=tables[lookup[3]]
+ if tv then
+ lookup[3]=tv
+ end
+ end
+ elseif what~="substitution" then
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local kerns=description.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ description.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ local math=description.math
+ if math then
+ local kerns=math.kerns
+ if kerns then
+ local tm=tables[kerns]
+ if tm then
+ math.kerns=tm
+ kerns=unpacked[tm]
+ end
+ if kerns then
+ for k,kern in next,kerns do
+ local tv=tables[kern]
+ if tv then
+ kerns[k]=tv
+ end
+ end
+ end
+ end
+ end
+ local anchors=description.anchors
+ if anchors then
+ local ta=tables[anchors]
+ if ta then
+ description.anchors=ta
+ anchors=unpacked[ta]
+ end
+ if anchors then
+ for tag,anchor in next,anchors do
+ if tag=="baselig" then
+ for _,list in next,anchor do
+ for i=1,#list do
+ local tv=tables[list[i]]
+ if tv then
+ list[i]=tv
+ end
+ end
+ end
+ else
+ for a,data in next,anchor do
+ local tv=tables[data]
+ if tv then
+ anchor[a]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local lookups=data.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local rules=lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule=rules[i]
+ local before=rule.before
+ if before then
+ local tv=tables[before]
+ if tv then
+ rule.before=tv
+ before=unpacked[tv]
+ end
+ if before then
+ for i=1,#before do
+ local tv=tables[before[i]]
+ if tv then
+ before[i]=tv
+ end
+ end
+ end
+ end
+ local after=rule.after
+ if after then
+ local tv=tables[after]
+ if tv then
+ rule.after=tv
+ after=unpacked[tv]
+ end
+ if after then
+ for i=1,#after do
+ local tv=tables[after[i]]
+ if tv then
+ after[i]=tv
+ end
+ end
+ end
+ end
+ local current=rule.current
+ if current then
+ local tv=tables[current]
+ if tv then
+ rule.current=tv
+ current=unpacked[tv]
+ end
+ if current then
+ for i=1,#current do
+ local tv=tables[current[i]]
+ if tv then
+ current[i]=tv
+ end
+ end
+ end
+ end
+ local replacements=rule.replacements
+ if replacements then
+ local tv=tables[replacements]
+ if tv then
+ rule.replacements=tv
+ end
+ end
+ local fore=rule.fore
+ if fore then
+ local tv=tables[fore]
+ if tv then
+ rule.fore=tv
+ end
+ end
+ local back=rule.back
+ if back then
+ local tv=tables[back]
+ if tv then
+ rule.back=tv
+ end
+ end
+ local names=rule.names
+ if names then
+ local tv=tables[names]
+ if tv then
+ rule.names=tv
+ end
+ end
+ local lookups=rule.lookups
+ if lookups then
+ local tv=tables[lookups]
+ if tv then
+ rule.lookups=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup=resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor,lookup in next,anchor_to_lookup do
+ local tv=tables[lookup]
+ if tv then
+ anchor_to_lookup[anchor]=tv
+ end
+ end
+ end
+ local lookup_to_anchor=resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup,anchor in next,lookup_to_anchor do
+ local tv=tables[anchor]
+ if tv then
+ lookup_to_anchor[lookup]=tv
+ end
+ end
+ end
+ local ls=resources.sequences
+ if ls then
+ for _,feature in next,ls do
+ local flags=feature.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ feature.flags=tv
+ end
+ end
+ local subtables=feature.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ feature.subtables=tv
+ end
+ end
+ local features=feature.features
+ if features then
+ local tv=tables[features]
+ if tv then
+ feature.features=tv
+ features=unpacked[tv]
+ end
+ if features then
+ for script,data in next,features do
+ local tv=tables[data]
+ if tv then
+ features[script]=tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local lookups=resources.lookups
+ if lookups then
+ for _,lookup in next,lookups do
+ local flags=lookup.flags
+ if flags then
+ local tv=tables[flags]
+ if tv then
+ lookup.flags=tv
+ end
+ end
+ local subtables=lookup.subtables
+ if subtables then
+ local tv=tables[subtables]
+ if tv then
+ lookup.subtables=tv
+ end
+ end
+ end
+ end
+ local features=resources.features
+ if features then
+ for _,what in next,glists do
+ local feature=features[what]
+ if feature then
+ for tag,spec in next,feature do
+ local tv=tables[spec]
+ if tv then
+ feature[tag]=tv
+ end
+ end
+ end
+ end
+ end
+ data.tables=nil
+ end
+ end
+end
+if otf.enhancers.register then
+ otf.enhancers.register("pack",packdata)
+ otf.enhancers.register("unpack",unpackdata)
+end
+otf.enhancers.unpack=unpackdata
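+-- Editorial sketch (not part of the upstream sources): the two enhancers are
+-- meant to bracket (de)serialization of the font data, roughly
+--   packdata(data)    -- share duplicate subtables before "data" is cached
+--   unpackdata(data)  -- restore the shared subtables after loading it back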
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.formats.lua="lua"
+function fonts.readers.lua(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
+ end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
+end
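+-- Editorial sketch (not part of the upstream sources): a font requested with
+-- the "lua" format is a file that, when executed, returns a loader function;
+-- the loader is then called with the specification and returns the font
+-- table. A minimal, hypothetical "demo.lua" could look roughly like:
+--   return function(specification)
+--     return { name = "demo", characters = { }, parameters = { } }
+--   end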
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local lastdefined=nil
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
+local function addspecifier(symbol)
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
+end
+local function addlookup(str,default)
+ prefixpattern=prefixpattern+P(str)
+end
+definers.addlookup=addlookup
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+local function getspecification(str)
+ return lpegmatch(splitter,str)
+end
+definers.getspecification=getspecification
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
+end
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size=size or 655360
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification,lookup,name,sub,method,detail)
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
+function resolvers.file(specification)
+ local name=resolvefile(specification.name)
+ local suffix=file.suffix(name)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(name)
+ else
+ specification.name=name
+ end
+end
+function resolvers.name(specification)
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ local suffix=file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+function resolvers.spec(specification)
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.forced=file.suffix(resolved)
+ specification.name=file.removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ else
+ specification.forced=specification.forced
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
+end
+function definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+local function checkembedding(tfmdata)
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
+end
+function definers.loadfont(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
+function constructors.checkvirtualids()
+end
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash=hash
+ constructors.checkvirtualids(tfmdata)
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
+ end
+ end
+ return fontdata[id],id
+end
+function definers.current()
+ return lastdefined
+end
+function definers.registered(hash)
+ local id=internalized[hash]
+ return id,id and fontdata[id]
+end
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format,id,properties.name,parameters.size,properties.encodingbytes,
+ properties.encodingname,properties.fullname,file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+function font.getfont(id)
+ return fontdata[id]
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.constructors.namemode="specification"
+function fonts.definers.getspecification(str)
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
+end
+fonts.definers.registersplit(":",colonized,"cryptic")
+fonts.definers.registersplit("",colonized,"more cryptic")
+function fonts.definers.applypostprocessors(tfmdata)
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(string.lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=string.format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
+local function initializeitlc(tfmdata,value)
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
+ end
+ end
+end
+otffeatures.register {
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
+}
+local function initializeslant(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
+end
+otffeatures.register {
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
+}
+local function initializeextend(tfmdata,value)
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
+end
+otffeatures.register {
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
+}
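+-- Editorial note (not part of the upstream sources): for example "slant=0.2"
+-- shears glyphs by a factor of 0.2 and "extend=1.5" widens them by 50%; the
+-- values are clamped to [-1,1] and [-10,10] respectively.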
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
+ end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
+}
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
+end
+otffeatures.register {
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
+}
+function fonts.loggers.onetimemessage() end
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
+}
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
+}
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
+end
+function fonts.helpers.nametoslot(name)
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[font.current()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+ return name
+ end
+end
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
+local function specialreencode(tfmdata,value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
+ end
+ return string.format("reencoded:%s",value)
+ end
+end
+local function reencode(tfmdata,value)
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
+end
+otffeatures.register {
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
+}
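+-- Editorial sketch (not part of the upstream sources): a hypothetical
+-- reencoding table maps target slots to source slots and is selected by name
+-- through the "reencode" feature, e.g.
+--   fonts.encodings.reencodings["swap-hyphen"] = { [0x002D] = 0x2010 }
+-- would make slot U+002D reuse the glyph data of U+2010.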
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+if context then
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local glyph_code=nodes.nodecodes.glyph
+function nodes.handlers.characters(head)
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts,done,prevfont={},false,nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ done=true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ local h,d=processors[i](head,font,0)
+ head,done=h or head,done or d
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
+end
+function nodes.simple_font_handler(head)
+ head=nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head=node.ligaturing(head)
+ head=node.kerning(head)
+ return head
+end
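+-- Editorial sketch (not part of the upstream sources): in a bare plain-TeX
+-- setup this handler would typically be hooked into the node processing
+-- callbacks, e.g.
+--   callback.register("pre_linebreak_filter", nodes.simple_font_handler)
+--   callback.register("hpack_filter", nodes.simple_font_handler)
+-- luaotfload itself installs its callbacks elsewhere (see luaotfload.lua).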
+
+end -- closure
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua
new file mode 100644
index 00000000000..863187fbd7d
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-override.lua
@@ -0,0 +1,84 @@
+if not modules then modules = { } end modules ['luat-ovr'] = {
+ version = 2.2,
+ comment = "companion to luatex-*.tex",
+ author = "Khaled Hosny, Elie Roux, Philipp Gesang",
+ copyright = "Luaotfload Development Team",
+ license = "GNU GPL v2"
+}
+
+local module_name = "luaotfload"
+
+local texiowrite_nl = texio.write_nl
+local stringformat = string.format
+local tableconcat = table.concat
+local type = type
+
+--[[doc--
+We recreate the verbosity levels previously implemented in font-nms:
+
+ ==========================================================
+ lvl arg trace_loading trace_search suppress_output
+ ----------------------------------------------------------
+ (0) -> -q ⊥ ⊥ ⊤
+ (1) -> ∅ ⊥ ⊥ ⊥
+ (2) -> -v ⊤ ⊥ ⊥
+ (>2) -> -vv ⊤ ⊤ ⊥
+ ==========================================================
+
+--doc]]--
+
+local loglevel = 1 --- default
+local logout = "log"
+
+local set_loglevel = function (n)
+ if type(n) == "number" then
+ loglevel = n
+ end
+end
+logs.set_loglevel = set_loglevel
+logs.set_log_level = set_loglevel --- accommodating lazy typists
+
+local set_logout = function (s)
+ if s == "stdout" then
+ logout = "term"
+ --else --- remains “log”
+ end
+end
+
+logs.set_logout = set_logout
+
+local log = function (category, fmt, ...)
+ local res = { module_name, " |" }
+ if category then res[#res+1] = " " .. category end
+ if fmt then res[#res+1] = ": " .. stringformat(fmt, ...) end
+ texiowrite_nl(logout, tableconcat(res))
+end
+
+local stdout = function (category, fmt, ...)
+ local res = { module_name, " |" }
+ if category then res[#res+1] = " " .. category end
+ if fmt then res[#res+1] = ": " .. stringformat(fmt, ...) end
+ texiowrite_nl(tableconcat(res))
+end
+
+local level_ids = { common = 0, loading = 1, search = 2 }
+
+logs.names_report = function (mode, lvl, ...)
+ if type(lvl) == "string" then
+ lvl = level_ids[lvl]
+ end
+ if not lvl then lvl = 0 end
+
+ if loglevel >= lvl then
+ if mode == "log" then
+ log (...)
+ elseif mode == "both" then
+ log (...)
+ stdout (...)
+ else
+ stdout (...)
+ end
+ end
+end
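+-- Editorial note (not part of the upstream sources): a typical call is
+--   logs.names_report("info", 2, "db", "Fonts in the database: %i", 2410)
+-- (the count is an illustrative value); it goes to the terminal since the
+-- mode is neither "log" nor "both", and is printed only when the current
+-- loglevel is at least 2.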
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-tool.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-tool.lua
new file mode 100755
index 00000000000..0cd19b1d3a4
--- /dev/null
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload-tool.lua
@@ -0,0 +1,454 @@
+#!/usr/bin/env texlua
+-----------------------------------------------------------------------
+-- FILE: luaotfload-tool.lua
+-- DESCRIPTION: database functionality
+-- REQUIREMENTS: luaotfload 2.2
+-- AUTHOR: Khaled Hosny, Élie Roux, Philipp Gesang
+-- VERSION: 2.2
+-- LICENSE: GPL v2
+-- CREATED: 2013-05-06 13:37:12+0200
+-----------------------------------------------------------------------
+
+--[[doc--
+
+This file was originally written (as \fileent{mkluatexfontdb.lua}) by
+Elie Roux and Khaled Hosny and, as a derived work of ConTeXt, is
+provided under the terms of the GPL v2.0 license as printed in full
+text in the manual (luaotfload.pdf).
+
+ \url{http://www.gnu.org/licenses/old-licenses/gpl-2.0.html}.
+
+This file is a wrapper for the luaotfload font names module
+(luaotfload-database.lua). It is part of the luaotfload bundle; please
+see the luaotfload documentation for more information. Report bugs to
+
+ \url{https://github.com/lualatex/luaotfload/issues}.
+
+--doc]]--
+
+kpse.set_program_name"luatex"
+
+local stringformat = string.format
+local texiowrite_nl = texio.write_nl
+local stringlower = string.lower
+
+
+local loader_file = "luatexbase.loader.lua"
+local loader_path = assert(kpse.find_file(loader_file, "lua"),
+ "File '"..loader_file.."' not found")
+
+
+string.quoted = string.quoted or function (str)
+ return string.format("%q",str)
+end
+
+require(loader_path)
+
+--[[doc--
+Depending on how the script is called, we change its behavior.
+For backwards compatibility, moving or symlinking the script to a
+file name starting with \fileent{mkluatexfontdb} will cause it to
+trigger a database update on every run.
+Running as \fileent{luaotfload-tool} -- the new name -- will do this upon
+request only.
+
+There are two naming conventions followed here: firstly that of
+utilities such as \fileent{mktexpk}, \fileent{mktexlsr}, and the like,
+and secondly that of \fileent{fmtutil}.
+After support for querying the database was added, the latter appeared
+to be the more appropriate.
+--doc]]--
+
+config = config or { }
+local config = config
+config.luaotfload = config.luaotfload or { }
+
+do -- we don’t have file.basename and the likes yet, so inline parser ftw
+ local C, P = lpeg.C, lpeg.P
+ local lpegmatch = lpeg.match
+ local slash = P"/"
+ local dot = P"."
+ local noslash = 1 - slash
+ local slashes = slash^1
+ local path = slashes^-1 * (noslash^1 * slashes)^1
+ local thename = (1 - slash - dot)^1
+ local extension = dot * (1 - slash - dot)^1
+ local p_basename = path^-1 * C(thename) * extension^-1 * P(-1)
+
+ local self = lpegmatch(p_basename, stringlower(arg[0]))
+ if self == "luaotfload-tool" then
+ config.luaotfload.self = "luaotfload-tool"
+ else
+ config.luaotfload.self = "mkluatexfontdb"
+ end
+end
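+-- Editorial note (not part of the upstream sources): p_basename strips the
+-- directory and the extension, e.g. matching "/usr/bin/luaotfload-tool.lua"
+-- (after lowercasing) yields "luaotfload-tool".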
+
+config.lualibs = config.lualibs or { }
+config.lualibs.verbose = false
+config.lualibs.prefer_merged = true
+config.lualibs.load_extended = false
+
+require"lualibs"
+
+--[[doc--
+\fileent{luatex-basics-gen.lua} calls functions from the
+\luafunction{texio.*} library; too much for our taste.
+We intercept them with dummies.
+--doc]]--
+
+local dummy_function = function ( ) end
+local backup_write, backup_write_nl = texio.write, texio.write_nl
+
+texio.write, texio.write_nl = dummy_function, dummy_function
+require"luaotfload-basics-gen.lua"
+texio.write, texio.write_nl = backup_write, backup_write_nl
+
+require"luaotfload-override.lua" --- this populates the logs.* namespace
+require"luaotfload-database"
+require"alt_getopt"
+
+local version = "2.2" -- same version number as luaotfload
+local names = fonts.names
+
+local db_src_out = names.path.dir.."/"..names.path.basename
+local db_bin_out = file.replacesuffix(db_src_out, "luc")
+
+local help_messages = {
+ ["luaotfload-tool"] = [[
+
+Usage: %s [OPTION]...
+
+Operations on the LuaTeX font database.
+
+This tool is part of the luaotfload package. Valid options are:
+
+-------------------------------------------------------------------------------
+ VERBOSITY AND LOGGING
+
+ -q --quiet don't output anything
+ -v --verbose=LEVEL be more verbose (print the searched directories)
+ -vv print the loaded fonts
+ -vvv print all steps of directory searching
+ -V --version print version and exit
+ -h --help print this message
+
+ --alias=<name> force behavior of “luaotfload-tool” or legacy
+ “mkluatexfontdb”
+-------------------------------------------------------------------------------
+ DATABASE
+
+ -u --update update the database
+ -f --force force re-indexing all fonts
+ -c --flush-cache empty cache of font requests
+
+ --find="font name" query the database for a font name
+ -F --fuzzy look for approximate matches if --find fails
+ --limit=n limit display of fuzzy matches to <n>
+ (default: n = 1)
+ -i --info display font metadata
+
+ --log=stdout redirect log output to stdout
+
+The font database will be saved to
+ %s
+ %s
+
+]],
+ mkluatexfontdb = [[
+
+Usage: %s [OPTION]...
+
+Rebuild the LuaTeX font database.
+
+Valid options:
+ -f --force force re-indexing all fonts
+ -q --quiet don't output anything
+ -v --verbose=LEVEL be more verbose (print the searched directories)
+ -vv print the loaded fonts
+ -vvv print all steps of directory searching
+ -V --version print version and exit
+ -h --help print this message
+ --alias=<name> force behavior of “luaotfload-tool” or legacy
+ “mkluatexfontdb”
+
+The font database will be saved to
+ %s
+ %s
+
+]],
+}
+
+local help_msg = function ( )
+ local template = help_messages[config.luaotfload.self]
+ or help_messages["luaotfload-tool"]
+ texiowrite_nl(stringformat(template, config.luaotfload.self, db_src_out, db_bin_out))
+end
+
+local version_msg = function ( )
+ texiowrite_nl(stringformat(
+ "%s version %s, database version %s.\n",
+ config.luaotfload.self, version, names.version))
+end
+
+local show_info_items = function (fontinfo)
+ local items = table.sortedkeys(fontinfo)
+ for n = 1, #items do
+ local item = items[n]
+ texiowrite_nl(stringformat(
+ [[ %11s: %s]], item, fontinfo[item]))
+ end
+end
+
+local show_font_info = function (filename)
+ local fullname = resolvers.findfile(filename)
+ if fullname then
+ local fontinfo = fontloader.info(fullname)
+ local nfonts = #fontinfo
+ if nfonts > 0 then -- true type collection
+ logs.names_report(true, 1, "resolve",
+ [[%s is a font collection]], filename)
+ for n = 1, nfonts do
+ logs.names_report(true, 1, "resolve",
+ [[showing info for font no. %d]], n)
+ show_info_items(fontinfo[n])
+ end
+ else
+ show_info_items(fontinfo)
+ end
+ else
+ logs.names_report(true, 1, "resolve",
+ "font %s not found", filename)
+ end
+end
+
+--[[--
+Running the scripts triggers one or more actions that have to be
+executed in the correct order. To avoid duplication, we track them in a
+set.
+--]]--
+
+local action_sequence = {
+ "loglevel", "help", "version", "flush", "generate", "query"
+}
+local action_pending = table.tohash(action_sequence, false)
+
+action_pending.loglevel = true --- always set the loglevel
+action_pending.generate = false --- this is the default action
+
+local actions = { } --- (jobspec -> (bool * bool)) list
+
+actions.loglevel = function (job)
+ logs.set_loglevel(job.log_level)
+ logs.names_report("info", 3, "util",
+ "setting log level", "%d", job.log_level)
+ return true, true
+end
+
+actions.version = function (job)
+ version_msg()
+ return true, false
+end
+
+actions.help = function (job)
+ help_msg()
+ return true, false
+end
+
+actions.generate = function (job)
+ local fontnames, savedname
+ fontnames = names.update(fontnames, job.force_reload)
+ logs.names_report("info", 2, "db",
+ "Fonts in the database: %i", #fontnames.mappings)
+ savedname = names.save(fontnames)
+ if savedname then --- FIXME have names.save return bool
+ return true, true
+ end
+ return false, false
+end
+
+actions.flush = function (job)
+ local success, lookups = names.flush_cache()
+ if success then
+ local savedname = names.save_lookups()
+ logs.names_report("info", 2, "cache", "Cache emptied")
+ if savedname then
+ return true, true
+ end
+ end
+ return false, false
+end
+
+actions.query = function (job)
+
+ local query = job.query
+ local tmpspec = {
+ name = query,
+ lookup = "name",
+ specification = "name:" .. query,
+ optsize = 0,
+ }
+
+ local foundname, subfont, success =
+ fonts.names.resolve(nil, nil, tmpspec)
+
+ if success then
+ logs.names_report(false, 1,
+ "resolve", "Font “%s” found!", query)
+ if subfont then
+ logs.names_report(false, 1, "resolve",
+ "Resolved file name “%s”, subfont nr. “%s”",
+ foundname, subfont)
+ else
+ logs.names_report(false, 1,
+ "resolve", "Resolved file name “%s”", foundname)
+ end
+ if job.show_info then
+ show_font_info(foundname)
+ end
+ else
+ logs.names_report(false, 1,
+ "resolve", "Cannot find “%s”.", query)
+ if job.fuzzy == true then
+ logs.names_report(false, 1,
+ "resolve", "Looking for close matches, this may take a while ...")
+ local success = fonts.names.find_closest(query, job.fuzzy_limit)
+ end
+ end
+ return true, true
+end
+
+--[[--
+Command-line processing.
+mkluatexfontdb.lua relies on the script alt_getopt to process argv and
+analyzes its output.
+
+TODO: with the extended lualibs we have the functionality from the
+environment.* namespace that could eliminate the dependency on
+alt_getopt.
+--]]--
+
+local process_cmdline = function ( ) -- unit -> jobspec
+ local result = { -- jobspec
+ force_reload = nil,
+ query = "",
+ log_level = 1, --- 2 is approx. the old behavior
+ }
+
+ local long_options = {
+ alias = 1,
+ ["flush-cache"] = "c",
+ find = 1,
+ force = "f",
+ fuzzy = "F",
+ help = "h",
+ info = "i",
+ limit = 1,
+ log = 1,
+ quiet = "q",
+ update = "u",
+ verbose = 1,
+ version = "V",
+ }
+
+ local short_options = "cfFiquvVh"
+
+ local options, _, optarg =
+ alt_getopt.get_ordered_opts (arg, short_options, long_options)
+
+ local nopts = #options
+ for n=1, nopts do
+ local v = options[n]
+ if v == "q" then
+ result.log_level = 0
+ elseif v == "u" then
+ action_pending["generate"] = true
+ elseif v == "v" then
+ if result.log_level > 0 then
+ result.log_level = result.log_level + 1
+ else
+ result.log_level = 2
+ end
+ elseif v == "V" then
+ action_pending["version"] = true
+ elseif v == "h" then
+ action_pending["help"] = true
+ elseif v == "f" then
+ result.update = true
+ result.force_reload = 1
+ elseif v == "verbose" then
+ local lvl = optarg[n]
+ if lvl then
+ result.log_level = tonumber(lvl)
+ end
+ elseif v == "log" then
+ local str = optarg[n]
+ if str then
+ logs.set_logout(str)
+ end
+ elseif v == "find" then
+ action_pending["query"] = true
+ result.query = optarg[n]
+ elseif v == "F" then
+ result.fuzzy = true
+ elseif v == "limit" then
+ local lim = optarg[n]
+ if lim then
+ result.fuzzy_limit = tonumber(lim)
+ end
+ elseif v == "i" then
+ result.show_info = true
+ elseif v == "alias" then
+ config.luaotfload.self = optarg[n]
+ elseif v == "c" then
+ action_pending["flush"] = true
+ end
+ end
+
+ if config.luaotfload.self == "mkluatexfontdb" then
+ action_pending["generate"] = true
+ result.log_level = math.max(2, result.log_level)
+ end
+ return result
+end
+
+local main = function ( ) -- unit -> int
+ local retval = 0
+ local job = process_cmdline()
+
+-- inspect(action_pending)
+-- inspect(job)
+
+ for i=1, #action_sequence do
+ local actionname = action_sequence[i]
+ local exit = false
+ if action_pending[actionname] then
+ logs.names_report("log", 3, "util", "preparing for task",
+ "%s", actionname)
+
+ local action = actions[actionname]
+ local success, continue = action(job)
+
+ if not success then
+ logs.names_report(false, 0, "util",
+ "could not finish task", "%s", actionname)
+ retval = -1
+ exit = true
+ elseif not continue then
+ logs.names_report(false, 3, "util",
+ "task completed, exiting", "%s", actionname)
+ exit = true
+ else
+ logs.names_report(false, 3, "util",
+ "task completed successfully", "%s", actionname)
+ end
+ end
+ if exit then break end
+ end
+
+ texiowrite_nl""
+ return retval
+end
+
+return main()
+
+-- vim:tw=71:sw=4:ts=4:expandtab
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.lua b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.lua
index 9a5b769fff7..343abc5f830 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.lua
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.lua
@@ -7,151 +7,335 @@
-- luaotfload.dtx (with options: `lua')
-- This is a generated file.
--
--- Copyright (C) 2009-2013 by by Elie Roux <elie.roux@telecom-bretagne.eu>
--- and Khaled Hosny <khaledhosny@eglug.org>
--- (Support: <lualatex-dev@tug.org>.)
+-- Copyright (C) 2009-2013
+-- by Elie Roux <elie.roux@telecom-bretagne.eu>
+-- and Khaled Hosny <khaledhosny@eglug.org>
+-- and Philipp Gesang <philipp.gesang@alumni.uni-heidelberg.de>
--
--- This work is under the CC0 license.
+-- Home: https://github.com/lualatex/luaotfload
+-- Support: <lualatex-dev@tug.org>.
+--
+-- This work is under the GPL v2.0 license.
--
-- This work consists of the main source file luaotfload.dtx
-- and the derived files
-- luaotfload.sty, luaotfload.lua
--
-module("luaotfload", package.seeall)
+luaotfload = luaotfload or {}
+local luaotfload = luaotfload
+
+config = config or { }
+config.luaotfload = config.luaotfload or { }
+------.luaotfload.resolver = config.luaotfload.resolver or "normal"
+config.luaotfload.resolver = config.luaotfload.resolver or "cached"
+config.luaotfload.definer = config.luaotfload.definer or "patch"
+config.luaotfload.loglevel = config.luaotfload.loglevel or 1
+config.luaotfload.color_callback = config.luaotfload.color_callback or "pre_linebreak_filter"
+--luaotfload.prefer_merge = config.luaotfload.prefer_merge or true
luaotfload.module = {
name = "luaotfload",
- version = 1.29,
- date = "2013/04/25",
+ version = 2.2,
+ date = "2013/04/29",
description = "OpenType layout system.",
author = "Elie Roux & Hans Hagen",
copyright = "Elie Roux",
- license = "CC0"
+ license = "GPL v2.0"
}
-local error, warning, info, log = luatexbase.provides_module(luaotfload.module)
-kpse.init_prog("", 600, "/")
-local luatex_version = 60
+local luatexbase = luatexbase
+
+local type, next = type, next
+local setmetatable = setmetatable
+local find_file = kpse.find_file
+local lfsisfile = lfs.isfile
+local stringfind = string.find
+local stringformat = string.format
+local stringmatch = string.match
+local stringsub = string.sub
+
+local add_to_callback, create_callback =
+ luatexbase.add_to_callback, luatexbase.create_callback
+local reset_callback, call_callback =
+ luatexbase.reset_callback, luatexbase.call_callback
+
+local dummy_function = function () end
+
+
+luaotfload.font_definer = "patch" --- | ^^e2^^80^^9cgeneric^^e2^^80^^9d | ^^e2^^80^^9cold^^e2^^80^^9d
+
+local error, warning, info, log =
+ luatexbase.provides_module(luaotfload.module)
+
+luaotfload.error = error
+luaotfload.warning = warning
+luaotfload.info = info
+luaotfload.log = log
+
+
+local luatex_version = 76
if tex.luatexversion < luatex_version then
warning("LuaTeX v%.2f is old, v%.2f is recommended.",
tex.luatexversion/100,
luatex_version /100)
end
-function luaotfload.loadmodule(name)
- local tofind = "otfl-"..name
- local found = kpse.find_file(tofind,"tex")
- if found then
- log("loading file %s.", found)
- dofile(found)
- else
- error("file %s not found.", tofind)
+
+
+local fl_prefix = "luaotfload" -- ^^e2^^80^^9cluatex^^e2^^80^^9d for luatex-plain
+local loadmodule = function (name)
+ require(fl_prefix .."-"..name)
+end
+
+local Cs, P, lpegmatch = lpeg.Cs, lpeg.P, lpeg.match
+
+local p_dot, p_slash = P".", P"/"
+local p_suffix = (p_dot * (1 - p_dot - p_slash)^1 * P(-1)) / ""
+local p_removesuffix = Cs((p_suffix + 1)^1)
+
+local find_vf_file = function (name)
+ local fullname = find_file(name, "ovf")
+ if not fullname then
+ --fullname = find_file(file.removesuffix(name), "ovf")
+ fullname = find_file(lpegmatch(p_removesuffix, name), "ovf")
+ end
+ if fullname then
+ log("loading virtual font file %s.", fullname)
end
+ return fullname
end
-luaotfload.loadmodule("luat-dum.lua") -- not used in context at all
-luaotfload.loadmodule("luat-ovr.lua") -- override some luat-dum functions
-luaotfload.loadmodule("data-con.lua") -- maybe some day we don't need this one
-tex.attribute[0] = 0
-luaotfload.loadmodule("font-ini.lua")
-luaotfload.loadmodule("node-dum.lua")
-luaotfload.loadmodule("node-inj.lua")
-function attributes.private(name)
- local attr = "otfl@" .. name
- local number = luatexbase.attributes[attr]
- if not number then
- number = luatexbase.new_attribute(attr)
+
+
+local starttime = os.gettimeofday()
+
+local trapped_register = callback.register
+callback.register = dummy_function
+
+
+do
+ local new_attribute = luatexbase.new_attribute
+ local the_attributes = luatexbase.attributes
+
+ attributes = attributes or { }
+
+ attributes.private = function (name)
+ local attr = "luaotfload@" .. name --- used to be: ^^e2^^80^^9cotfl@^^e2^^80^^9d
+ local number = the_attributes[attr]
+ if not number then
+ number = new_attribute(attr)
+ end
+ return number
end
- return number
end
-luaotfload.loadmodule("font-tfm.lua")
-luaotfload.loadmodule("font-cid.lua")
-luaotfload.loadmodule("font-ott.lua")
-luaotfload.loadmodule("font-map.lua")
-luaotfload.loadmodule("font-otf.lua")
-luaotfload.loadmodule("font-otd.lua")
-luaotfload.loadmodule("font-oti.lua")
-luaotfload.loadmodule("font-otb.lua")
-luaotfload.loadmodule("font-otn.lua")
-luaotfload.loadmodule("font-ota.lua")
-luaotfload.loadmodule("font-otc.lua")
-luaotfload.loadmodule("font-def.lua")
-luaotfload.loadmodule("font-xtx.lua")
-luaotfload.loadmodule("font-dum.lua")
-if fonts and fonts.tfm and fonts.tfm.readers then
- fonts.tfm.readers.ofm = fonts.tfm.readers.tfm
+
+
+local context_environment = { }
+
+local push_namespaces = function ()
+ log("push namespace for font loader")
+ local normalglobal = { }
+ for k, v in next, _G do
+ normalglobal[k] = v
+ end
+ return normalglobal
end
-luaotfload.loadmodule("font-nms.lua")
-luaotfload.loadmodule("font-clr.lua")
-luatexbase.create_callback("luaotfload.patch_font", "simple", function() end)
-local function def_font(...)
- local fontdata = fonts.define.read(...)
- if type(fontdata) == "table" and fontdata.shared then
- local otfdata = fontdata.shared.otfdata
- if otfdata.metadata.math then
- local mc = { }
- for k,v in next, otfdata.metadata.math do
- if k:find("Percent") then
- -- keep percent values as is
- mc[k] = v
- else
- mc[k] = v / fontdata.units * fontdata.size
+
+local pop_namespaces = function (normalglobal, isolate)
+ if normalglobal then
+ local _G = _G
+ local mode = "non-destructive"
+ if isolate then mode = "destructive" end
+ log("pop namespace from font loader -- " .. mode)
+ for k, v in next, _G do
+ if not normalglobal[k] then
+ context_environment[k] = v
+ if isolate then
+ _G[k] = nil
end
end
- -- for \overwithdelims
- mc.FractionDelimiterSize = 1.01 * fontdata.size
- mc.FractionDelimiterDisplayStyleSize = 2.39 * fontdata.size
-
- fontdata.MathConstants = mc
end
- luatexbase.call_callback("luaotfload.patch_font", fontdata)
+ for k, v in next, normalglobal do
+ _G[k] = v
+ end
+ -- just to be sure:
+ setmetatable(context_environment,_G)
+ else
+ log("irrecoverable error during pop_namespace: no globals to restore")
+ os.exit()
end
- return fontdata
-end
---fonts.define.resolvers.file = fonts.define.resolvers.name
-containers.cleanname = function (name)
- return (string.gsub(string.lower(name),"[^%w%d\128\255]+","-"))
end
-fonts.mode = "node"
-local register_base_sub = fonts.otf.features.register_base_substitution
-local gsubs = {
- "ss01", "ss02", "ss03", "ss04", "ss05",
- "ss06", "ss07", "ss08", "ss09", "ss10",
- "ss11", "ss12", "ss13", "ss14", "ss15",
- "ss16", "ss17", "ss18", "ss19", "ss20",
-}
-for _,v in next, gsubs do
- register_base_sub(v)
+luaotfload.context_environment = context_environment
+luaotfload.push_namespaces = push_namespaces
+luaotfload.pop_namespaces = pop_namespaces
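+--- Editor's sketch of the intended use: the pair brackets the fontloader
+--- so that globals it creates are collected in context_environment instead
+--- of leaking into _G:
+---     local saved = push_namespaces()
+---     --- ... load code that defines globals ...
+---     pop_namespaces(saved, false) --- true would also strip them from _G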
+
+local our_environment = push_namespaces()
+
+
+tex.attribute[0] = 0
+
+
+loadmodule"merged.lua"
+---loadmodule"font-odv.lua" --- <= Devanagari support from Context
+
+if fonts then
+
+ if not fonts._merge_loaded_message_done_ then
+ --- a program talking first person -- HH sure believes in strong AI ...
+        log[[“I am using the merged version of 'luaotfload.lua' here. If]]
+ log[[ you run into problems or experience unexpected behaviour,]]
+ log[[ and if you have ConTeXt installed you can try to delete the]]
+ log[[ file 'luaotfload-font-merged.lua' as I might then use the]]
+ log[[ possibly updated libraries. The merged version is not]]
+ log[[ supported as it is a frozen instance. Problems can be]]
+        log[[ reported to the ConTeXt mailing list.”]]
+ end
+ fonts._merge_loaded_message_done_ = true
+
+else--- the loading sequence is known to change, so this might have to
+ --- be updated with future updates!
+ --- do not modify it though unless there is a change to the merged
+ --- package!
+ loadmodule("l-lua.lua")
+ loadmodule("l-lpeg.lua")
+ loadmodule("l-function.lua")
+ loadmodule("l-string.lua")
+ loadmodule("l-table.lua")
+ loadmodule("l-io.lua")
+ loadmodule("l-file.lua")
+ loadmodule("l-boolean.lua")
+ loadmodule("l-math.lua")
+ loadmodule("util-str.lua")
+ loadmodule('luatex-basics-gen.lua')
+ loadmodule('data-con.lua')
+ loadmodule('luatex-basics-nod.lua')
+ loadmodule('font-ini.lua')
+ loadmodule('font-con.lua')
+ loadmodule('luatex-fonts-enc.lua')
+ loadmodule('font-cid.lua')
+ loadmodule('font-map.lua')
+ loadmodule('luatex-fonts-syn.lua')
+ loadmodule('luatex-fonts-tfm.lua')
+ loadmodule('font-oti.lua')
+ loadmodule('font-otf.lua')
+ loadmodule('font-otb.lua')
+ loadmodule('node-inj.lua')
+ loadmodule('font-ota.lua')
+ loadmodule('font-otn.lua')
+ loadmodule('font-otp.lua')--- since 2013-04-23
+ loadmodule('luatex-fonts-lua.lua')
+ loadmodule('font-def.lua')
+ loadmodule('luatex-fonts-def.lua')
+ loadmodule('luatex-fonts-ext.lua')
+ loadmodule('luatex-fonts-cbk.lua')
+end --- non-merge fallback scope
+
+
+pop_namespaces(our_environment, false)-- true)
+
+log("fontloader loaded in %0.3f seconds", os.gettimeofday()-starttime)
+
+
+callback.register = trapped_register
+
+
+add_to_callback("pre_linebreak_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+add_to_callback("hpack_filter",
+ nodes.simple_font_handler,
+ "luaotfload.node_processor",
+ 1)
+add_to_callback("find_vf_file",
+ find_vf_file, "luaotfload.find_vf_file")
+
+loadmodule"lib-dir.lua" --- required by luaofload-database.lua
+loadmodule"override.lua" --- ^^e2^^80^^9cluat-ovr^^e2^^80^^9d
+
+logs.set_loglevel(config.luaotfload.loglevel)
+
+loadmodule"loaders.lua" --- ^^e2^^80^^9cfont-pfb^^e2^^80^^9d new in 2.0, added 2011
+loadmodule"database.lua" --- ^^e2^^80^^9cfont-nms^^e2^^80^^9d
+loadmodule"colors.lua" --- ^^e2^^80^^9cfont-clr^^e2^^80^^9d
+
+
+local request_resolvers = fonts.definers.resolvers
+local formats = fonts.formats
+formats.ofm = "type1"
+
+request_resolvers.file = function (specification)
+ local found = fonts.names.crude_file_lookup(specification.name)
+ --local found = fonts.names.crude_file_lookup_verbose(specification.name)
+ specification.name = found[1]
+ --if format then specification.forced = format end
end
-luatexbase.add_to_callback("pre_linebreak_filter",
- nodes.simple_font_handler,
- "luaotfload.pre_linebreak_filter")
-luatexbase.add_to_callback("hpack_filter",
- nodes.simple_font_handler,
- "luaotfload.hpack_filter")
-luatexbase.reset_callback("define_font")
-luatexbase.add_to_callback("define_font",
- def_font,
- "luaotfload.define_font", 1)
-luatexbase.add_to_callback("find_vf_file",
- fonts.vf.find,
- "luaotfload.find_vf_file")
-local function set_sscale_diments(fontdata)
- local mc = fontdata.MathConstants
- if mc then
- if mc["ScriptPercentScaleDown"] then
- fontdata.parameters[10] = mc.ScriptPercentScaleDown
- else -- resort to plain TeX default
- fontdata.parameters[10] = 70
- end
- if mc["ScriptScriptPercentScaleDown"] then
- fontdata.parameters[11] = mc.ScriptScriptPercentScaleDown
- else -- resort to plain TeX default
- fontdata.parameters[11] = 50
+
+
+--request_resolvers.anon = request_resolvers.name
+
+local type1_formats = { "tfm", "ofm", }
+
+request_resolvers.anon = function (specification)
+ local name = specification.name
+ for i=1, #type1_formats do
+ local format = type1_formats[i]
+ if resolvers.findfile(name, format) then
+ specification.name = file.addsuffix(name, format)
+ return
end
end
+ request_resolvers.name(specification)
+end
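+--- Editor's note (illustrative): a bare request like "cmr10" (no file: or
+--- name: prefix) reaches the anon resolver above; cmr10.tfm is findable,
+--- so the specification is forced to that file, whereas a bare "Latin
+--- Modern Roman" matches no tfm/ofm and falls through to the name
+--- resolver, i.e. the font database.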
+
+request_resolvers.path = function (specification)
+ local exists, _ = lfsisfile(specification.name)
+ if not exists then -- resort to file: lookup
+ request_resolvers.file(specification)
+ end
end
-luatexbase.add_to_callback("luaotfload.patch_font", set_sscale_diments, "unicodemath.set_sscale_diments")
+
+create_callback("luaotfload.patch_font", "simple", dummy_function)
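+--- Editor's sketch (hypothetical subscriber, not part of this file):
+--- other packages can post-process every freshly defined font through
+--- this callback, along the lines of
+---     luatexbase.add_to_callback("luaotfload.patch_font",
+---         function (tfmdata) --[[ adjust tfmdata fields ]] end,
+---         "mypkg.patch_font")
+--- where "mypkg.patch_font" is a made-up identifier.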
+
+
+local read_font_file = fonts.definers.read
+
+--- spec -> size -> id -> tfmdata
+local patch_defined_font = function (specification, size, id)
+ local tfmdata = read_font_file(specification, size, id)
+ if type(tfmdata) == "table" and tfmdata.shared then
+        --- We need to test for the “shared” field here
+ --- or else the fontspec capheight callback will
+ --- operate on tfm fonts.
+ call_callback("luaotfload.patch_font", tfmdata)
+ end
+ return tfmdata
+end
+
+caches.compilemethod = "both"
+
+reset_callback("define_font")
+
+
+local font_definer = config.luaotfload.definer
+
+if font_definer == "generic" then
+ add_to_callback("define_font",
+ fonts.definers.read,
+ "luaotfload.define_font",
+ 1)
+elseif font_definer == "patch" then
+ add_to_callback("define_font",
+ patch_defined_font,
+ "luaotfload.define_font",
+ 1)
+end
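+--- Editor's note: with the "patch" setting, fonts.definers.read is wrapped
+--- so that the luaotfload.patch_font callback fires for every freshly
+--- loaded (OpenType) font; "generic" installs the stock reader unchanged.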
+
+loadmodule"features.lua" --- contains what was ^^e2^^80^^9cfont-ltx^^e2^^80^^9d and ^^e2^^80^^9cfont-otc^^e2^^80^^9d
+loadmodule"auxiliary.lua" --- additionaly high-level functionality (new)
+
+-- vim:tw=71:sw=4:ts=4:expandtab
+
--
-- End of File `luaotfload.lua'.
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
index 02c913e76ae..77930c8d9b2 100644
--- a/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
+++ b/Master/texmf-dist/tex/luatex/luaotfload/luaotfload.sty
@@ -7,11 +7,15 @@
%% luaotfload.dtx (with options: `package')
%% This is a generated file.
%%
-%% Copyright (C) 2009-2013 by by Elie Roux <elie.roux@telecom-bretagne.eu>
-%% and Khaled Hosny <khaledhosny@eglug.org>
-%% (Support: <lualatex-dev@tug.org>.)
+%% Copyright (C) 2009-2013
+%% by Elie Roux <elie.roux@telecom-bretagne.eu>
+%% and Khaled Hosny <khaledhosny@eglug.org>
+%% and Philipp Gesang <philipp.gesang@alumni.uni-heidelberg.de>
%%
-%% This work is under the CC0 license.
+%% Home: https://github.com/lualatex/luaotfload
+%% Support: <lualatex-dev@tug.org>.
+%%
+%% This work is under the GPL v2.0 license.
%%
%% This work consists of the main source file luaotfload.dtx
%% and the derived files
@@ -19,18 +23,15 @@
%%
\csname ifluaotfloadloaded\endcsname
\let\ifluaotfloadloaded\endinput
-
\bgroup\expandafter\expandafter\expandafter\egroup
\expandafter\ifx\csname ProvidesPackage\endcsname\relax
\input luatexbase.sty
\else
\NeedsTeXFormat{LaTeX2e}
\ProvidesPackage{luaotfload}%
- [2012/05/28 v1.27 OpenType layout system]
+ [2013/04/16 v2.2 OpenType layout system]
\RequirePackage{luatexbase}
\fi
-
-\RequireLuaModule{lualibs}
\RequireLuaModule{luaotfload}
\endinput
%%
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-data-con.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-data-con.lua
deleted file mode 100644
index e7bb8af7d1d..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-data-con.lua
+++ /dev/null
@@ -1,132 +0,0 @@
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
-
-containers = containers or { }
-
-containers.usecache = true
-
-local function report(container,tag,name)
- if trace_cache or trace_containers then
- logs.report(format("%s cache",container.subcategory),"%s: %s",tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end
-}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
- end
-end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.iswritable(container.writable, name)
-end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
-end
-
-function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
- end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
- end
- return data
-end
-
-function containers.content(container,name)
- return container.storage[name]
-end
-
-function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-cid.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-cid.lua
deleted file mode 100644
index d1c727af2d9..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-cid.lua
+++ /dev/null
@@ -1,147 +0,0 @@
-if not modules then modules = { } end modules ['font-cid'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (cidmaps)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, match, lower = string.format, string.match, string.lower
-local tonumber = tonumber
-local lpegmatch = lpeg.match
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-fonts = fonts or { }
-fonts.cid = fonts.cid or { }
-fonts.cid.map = fonts.cid.map or { }
-fonts.cid.max = fonts.cid.max or 10
-
-
--- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
---
--- 18964 18964 (leader)
--- 0 /.notdef
--- 1..95 0020
--- 99 3000
-
-local number = lpeg.C(lpeg.R("09","af","AF")^1)
-local space = lpeg.S(" \n\r\t")
-local spaces = space^0
-local period = lpeg.P(".")
-local periods = period * period
-local name = lpeg.P("/") * lpeg.C((1-space)^1)
-
-local unicodes, names = { }, { }
-
-local function do_one(a,b)
- unicodes[tonumber(a)] = tonumber(b,16)
-end
-
-local function do_range(a,b,c)
- c = tonumber(c,16)
- for i=tonumber(a),tonumber(b) do
- unicodes[i] = c
- c = c + 1
- end
-end
-
-local function do_name(a,b)
- names[tonumber(a)] = b
-end
-
-local grammar = lpeg.P { "start",
- start = number * spaces * number * lpeg.V("series"),
- series = (spaces * (lpeg.V("one") + lpeg.V("range") + lpeg.V("named")) )^1,
- one = (number * spaces * number) / do_one,
- range = (number * periods * number * spaces * number) / do_range,
- named = (number * spaces * name) / do_name
-}
-
-function fonts.cid.load(filename)
- local data = io.loaddata(filename)
- if data then
- unicodes, names = { }, { }
- lpegmatch(grammar,data)
- local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$")
- return {
- supplement = supplement,
- registry = registry,
- ordering = ordering,
- filename = filename,
- unicodes = unicodes,
- names = names
- }
- else
- return nil
- end
-end
-
-local template = "%s-%s-%s.cidmap"
-
-
-local function locate(registry,ordering,supplement)
- local filename = format(template,registry,ordering,supplement)
- local hashname = lower(filename)
- local cidmap = fonts.cid.map[hashname]
- if not cidmap then
- if trace_loading then
- logs.report("load otf","checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
- end
- local fullname = resolvers.find_file(filename,'cid') or ""
- if fullname ~= "" then
- cidmap = fonts.cid.load(fullname)
- if cidmap then
- if trace_loading then
- logs.report("load otf","using cidmap file %s",filename)
- end
- fonts.cid.map[hashname] = cidmap
- cidmap.usedname = file.basename(filename)
- return cidmap
- end
- end
- end
- return cidmap
-end
-
-function fonts.cid.getmap(registry,ordering,supplement)
- -- cf Arthur R. we can safely scan upwards since cids are downward compatible
- local supplement = tonumber(supplement)
- if trace_loading then
- logs.report("load otf","needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
- end
- local cidmap = locate(registry,ordering,supplement)
- if not cidmap then
- local cidnum = nil
- -- next highest (alternatively we could start high)
- if supplement < fonts.cid.max then
- for supplement=supplement+1,fonts.cid.max do
- local c = locate(registry,ordering,supplement)
- if c then
- cidmap, cidnum = c, supplement
- break
- end
- end
- end
- -- next lowest (least worse fit)
- if not cidmap and supplement > 0 then
- for supplement=supplement-1,0,-1 do
- local c = locate(registry,ordering,supplement)
- if c then
- cidmap, cidnum = c, supplement
- break
- end
- end
- end
- -- prevent further lookups
- if cidmap and cidnum > 0 then
- for s=0,cidnum-1 do
- filename = format(template,registry,ordering,s)
- if not fonts.cid.map[filename] then
- fonts.cid.map[filename] = cidmap -- copy of ref
- end
- end
- end
- end
- return cidmap
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-clr.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-clr.lua
deleted file mode 100644
index e02d22a6db9..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-clr.lua
+++ /dev/null
@@ -1,170 +0,0 @@
-if not modules then modules = { } end modules ['font-clr'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (font color)",
- author = "Khaled Hosny and Elie Roux",
- copyright = "Luaotfload Development Team",
- license = "GPL"
-}
-
-fonts.triggers = fonts.triggers or { }
-fonts.initializers = fonts.initializers or { }
-fonts.initializers.common = fonts.initializers.common or { }
-
-local initializers, format = fonts.initializers, string.format
-
-table.insert(fonts.triggers,"color")
-
-function initializers.common.color(tfmdata,value)
- local sanitized
-
- if value then
- value = tostring(value)
- if #value == 6 or #value == 8 then
- sanitized = value
- elseif #value == 7 then
- _, _, sanitized = value:find("(......)")
- elseif #value > 8 then
- _, _, sanitized = value:find("(........)")
- else
- -- broken color code ignored, issue a warning?
- end
- end
-
- if sanitized then
- tfmdata.color = sanitized
- add_color_callback()
- end
-end
-
-initializers.base.otf.color = initializers.common.color
-initializers.node.otf.color = initializers.common.color
-
-local function hex2dec(hex,one)
- if one then
- return format("%.1g", tonumber(hex, 16)/255)
- else
- return format("%.3g", tonumber(hex, 16)/255)
- end
-end
-
-local res
-
-local function pageresources(a)
- local res2
- if not res then
- res = "/TransGs1<</ca 1/CA 1>>"
- end
- res2 = format("/TransGs%s<</ca %s/CA %s>>", a, a, a)
- res = format("%s%s", res, res:find(res2) and "" or res2)
-end
-
-local function hex_to_rgba(hex)
- local r, g, b, a, push, pop, res3
- if hex then
- if #hex == 6 then
- _, _, r, g, b = hex:find('(..)(..)(..)')
- elseif #hex == 8 then
- _, _, r, g, b, a = hex:find('(..)(..)(..)(..)')
- a = hex2dec(a,true)
- pageresources(a)
- end
- else
- return nil
- end
- r = hex2dec(r)
- g = hex2dec(g)
- b = hex2dec(b)
- if a then
- push = format('/TransGs%g gs %s %s %s rg', a, r, g, b)
- pop = '0 g /TransGs1 gs'
- else
- push = format('%s %s %s rg', r, g, b)
- pop = '0 g'
- end
- return push, pop
-end
-
-local glyph = node.id('glyph')
-local hlist = node.id('hlist')
-local vlist = node.id('vlist')
-local whatsit = node.id('whatsit')
-local pgi = node.id('page_insert')
-local sbox = node.id('sub_box')
-
-local function lookup_next_color(head)
- for n in node.traverse(head) do
- if n.id == glyph then
- if fonts.ids[n.font] and fonts.ids[n.font].color then
- return fonts.ids[n.font].color
- else
- return -1
- end
- elseif n.id == vlist or n.id == hlist or n.id == sbox then
- local r = lookup_next_color(n.list)
- if r == -1 then
- return -1
- elseif r then
- return r
- end
- elseif n.id == whatsit or n.id == pgi then
- return -1
- end
- end
- return nil
-end
-
-local function node_colorize(head, current_color, next_color)
- for n in node.traverse(head) do
- if n.id == hlist or n.id == vlist or n.id == sbox then
- local next_color_in = lookup_next_color(n.next) or next_color
- n.list, current_color = node_colorize(n.list, current_color, next_color_in)
- elseif n.id == glyph then
- local tfmdata = fonts.ids[n.font]
- if tfmdata and tfmdata.color then
- if tfmdata.color ~= current_color then
- local pushcolor = hex_to_rgba(tfmdata.color)
- local push = node.new(whatsit, 8)
- push.mode = 1
- push.data = pushcolor
- head = node.insert_before(head, n, push)
- current_color = tfmdata.color
- end
- local next_color_in = lookup_next_color (n.next) or next_color
- if next_color_in ~= tfmdata.color then
- local _, popcolor = hex_to_rgba(tfmdata.color)
- local pop = node.new(whatsit, 8)
- pop.mode = 1
- pop.data = popcolor
- head = node.insert_after(head, n, pop)
- current_color = nil
- end
- end
- end
- end
- return head, current_color
-end
-
-local function font_colorize(head)
- -- check if our page resources existed in the previous run
- -- and remove it to avoid duplicating it later
- if res then
- local r = "/ExtGState<<"..res..">>"
- tex.pdfpageresources = tex.pdfpageresources:gsub(r, "")
- end
- local h = node_colorize(head, nil, nil)
- -- now append our page resources
- if res and res:find("%S") then -- test for non-empty string
- local r = "/ExtGState<<"..res..">>"
- tex.pdfpageresources = tex.pdfpageresources..r
- end
- return h
-end
-
-local color_callback_activated = 0
-
-function add_color_callback()
- if color_callback_activated == 0 then
- luatexbase.add_to_callback("pre_output_filter", font_colorize, "loaotfload.colorize")
- color_callback_activated = 1
- end
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-def.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-def.lua
deleted file mode 100644
index 8e648725615..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-def.lua
+++ /dev/null
@@ -1,662 +0,0 @@
-if not modules then modules = { } end modules ['font-def'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, concat, gmatch, match, find, lower = string.format, table.concat, string.gmatch, string.match, string.find, string.lower
-local tostring, next = tostring, next
-local lpegmatch = lpeg.match
-
-local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
-local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
-
-trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
-trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
-
---[[ldx--
-<p>Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles <l n='tfm'/>.</p>
---ldx]]--
-
-fonts = fonts or { }
-fonts.define = fonts.define or { }
-fonts.tfm = fonts.tfm or { }
-fonts.ids = fonts.ids or { }
-fonts.vf = fonts.vf or { }
-fonts.used = fonts.used or { }
-
-local tfm = fonts.tfm
-local vf = fonts.vf
-local define = fonts.define
-
-tfm.version = 1.01
-tfm.cache = containers.define("fonts", "tfm", tfm.version, false) -- better in font-tfm
-
-define.method = "afm or tfm" -- afm, tfm, afm or tfm, tfm or afm
-define.specify = fonts.define.specify or { }
-define.methods = fonts.define.methods or { }
-
-tfm.fonts = tfm.fonts or { }
-tfm.readers = tfm.readers or { }
-tfm.internalized = tfm.internalized or { } -- internal tex numbers
-
-tfm.readers.sequence = { 'otf', 'ttf', 'afm', 'tfm' }
-
-tfm.auto_afm = true
-
-local readers = tfm.readers
-local sequence = readers.sequence
-
---[[ldx--
-<p>We hardly gain anything when we cache the final (pre scaled)
-<l n='tfm'/> table. But it can be handy for debugging.</p>
---ldx]]--
-
-fonts.version = 1.05
-fonts.cache = containers.define("fonts", "def", fonts.version, false)
-
---[[ldx--
-<p>We can prefix a font specification by <type>name:</type> or
-<type>file:</type>. The first case will result in a lookup in the
-synonym table.</p>
-
-<typing>
-[ name: | file: ] identifier [ separator [ specification ] ]
-</typing>
-
-<p>The following function split the font specification into components
-and prepares a table that will move along as we proceed.</p>
---ldx]]--
-
--- beware, we discard additional specs
---
--- method:name method:name(sub) method:name(sub)*spec method:name*spec
--- name name(sub) name(sub)*spec name*spec
--- name@spec*oeps
-
-local splitter, specifiers = nil, ""
-
-local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
-
-local left = P("(")
-local right = P(")")
-local colon = P(":")
-local space = P(" ")
-
-define.defaultlookup = "file"
-
-local prefixpattern = P(false)
-
-function define.add_specifier(symbol)
- specifiers = specifiers .. symbol
- local method = S(specifiers)
- local lookup = C(prefixpattern) * colon
- local sub = left * C(P(1-left-right-method)^1) * right
- local specification = C(method) * C(P(1)^1)
- local name = C((1-sub-specification)^1)
- splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
-end
-
-function define.add_lookup(str,default)
- prefixpattern = prefixpattern + P(str)
-end
-
-define.add_lookup("file")
-define.add_lookup("name")
-define.add_lookup("spec")
-
-function define.get_specification(str)
- return lpegmatch(splitter,str)
-end
-
-function define.register_split(symbol,action)
- define.add_specifier(symbol)
- define.specify[symbol] = action
-end
-
-function define.makespecification(specification, lookup, name, sub, method, detail, size)
- size = size or 655360
- if trace_defining then
- logs.report("define font","%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
- specification, (lookup ~= "" and lookup) or "[file]", (name ~= "" and name) or "-",
- (sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-")
- end
---~ if specification.lookup then
---~ lookup = specification.lookup -- can come from xetex [] syntax
---~ specification.lookup = nil
---~ end
- if not lookup or lookup == "" then
- lookup = define.defaultlookup
- end
- local t = {
- lookup = lookup, -- forced type
- specification = specification, -- full specification
- size = size, -- size in scaled points or -1000*n
- name = name, -- font or filename
- sub = sub, -- subfont (eg in ttc)
- method = method, -- specification method
- detail = detail, -- specification
- resolved = "", -- resolved font name
- forced = "", -- forced loader
- features = { }, -- preprocessed features
- }
- return t
-end
-
-function define.analyze(specification, size)
- -- can be optimized with locals
- local lookup, name, sub, method, detail = define.get_specification(specification or "")
- return define.makespecification(specification, lookup, name, sub, method, detail, size)
-end
-
---[[ldx--
-<p>A unique hash value is generated by:</p>
---ldx]]--
-
-local sortedhashkeys = table.sortedhashkeys
-
-function tfm.hash_features(specification)
- local features = specification.features
- if features then
- local t = { }
- local normal = features.normal
- if normal and next(normal) then
- local f = sortedhashkeys(normal)
- for i=1,#f do
- local v = f[i]
- if v ~= "number" and v ~= "features" then -- i need to figure this out, features
- t[#t+1] = v .. '=' .. tostring(normal[v])
- end
- end
- end
- local vtf = features.vtf
- if vtf and next(vtf) then
- local f = sortedhashkeys(vtf)
- for i=1,#f do
- local v = f[i]
- t[#t+1] = v .. '=' .. tostring(vtf[v])
- end
- end
---~ if specification.mathsize then
---~ t[#t+1] = "mathsize=" .. specification.mathsize
---~ end
- if #t > 0 then
- return concat(t,"+")
- end
- end
- return "unknown"
-end
-
-fonts.designsizes = { }
-
---[[ldx--
-<p>In principle we can share tfm tables when we are in node for a font, but then
-we need to define a font switch as an id/attr switch which is no fun, so in that
-case users can best use dynamic features ... so, we will not use that speedup. Okay,
-when we get rid of base mode we can optimize even further by sharing, but then we
-loose our testcases for <l n='luatex'/>.</p>
---ldx]]--
-
-function tfm.hash_instance(specification,force)
- local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
- if force or not hash then
- hash = tfm.hash_features(specification)
- specification.hash = hash
- end
- if size < 1000 and fonts.designsizes[hash] then
- size = math.round(tfm.scaled(size, fonts.designsizes[hash]))
- specification.size = size
- end
---~ local mathsize = specification.mathsize or 0
---~ if mathsize > 0 then
---~ local textsize = specification.textsize
---~ if fallbacks then
---~ return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks
---~ else
---~ return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]'
---~ end
---~ else
- if fallbacks then
- return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks
- else
- return hash .. ' @ ' .. tostring(size)
- end
---~ end
-end
-
---[[ldx--
-<p>We can resolve the filename using the next function:</p>
---ldx]]--
-
-define.resolvers = resolvers
-
--- todo: reporter
-
-function define.resolvers.file(specification)
- local suffix = file.suffix(specification.name)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(specification.name)
- end
-end
-
-function define.resolvers.name(specification)
- local resolve = fonts.names.resolve
- if resolve then
- local resolved, sub = fonts.names.resolve(specification)
- specification.resolved, specification.sub = resolved, sub
- if resolved then
- local suffix = file.suffix(resolved)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(resolved)
- else
- specification.name = resolved
- end
- end
- else
- define.resolvers.file(specification)
- end
-end
-
-function define.resolvers.spec(specification)
- local resolvespec = fonts.names.resolvespec
- if resolvespec then
- specification.resolved, specification.sub = fonts.names.resolvespec(specification)
- if specification.resolved then
- specification.forced = file.extname(specification.resolved)
- specification.name = file.removesuffix(specification.resolved)
- end
- else
- define.resolvers.name(specification)
- end
-end
-
-function define.resolve(specification)
- if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = define.resolvers[specification.lookup]
- if r then
- r(specification)
- end
- end
- if specification.forced == "" then
- specification.forced = nil
- else
- specification.forced = specification.forced
- end
- specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification))
- if specification.sub and specification.sub ~= "" then
- specification.hash = specification.sub .. ' @ ' .. specification.hash
- end
- return specification
-end
-
---[[ldx--
-<p>The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.</p>
-
-<p>We need to cache when possible. We do cache raw tfm data (from <l
-n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
-on specificstion (name) and size, that is, <l n='tex'/> only needs a number
-for an already loaded fonts. However, it may make sense to cache fonts
-before they're scaled as well (store <l n='tfm'/>'s with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.</p>
-
-<p>Watch out, here we do load a font, but we don't prepare the
-specification yet.</p>
---ldx]]--
-
-function tfm.read(specification)
- local hash = tfm.hash_instance(specification)
- local tfmtable = tfm.fonts[hash] -- hashes by size !
- if not tfmtable then
- local forced = specification.forced or ""
- if forced ~= "" then
- tfmtable = readers[lower(forced)](specification)
- if not tfmtable then
- logs.report("define font","forced type %s of %s not found",forced,specification.name)
- end
- else
- for s=1,#sequence do -- reader sequence
- local reader = sequence[s]
- if readers[reader] then -- not really needed
- if trace_defining then
- logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
- end
- tfmtable = readers[reader](specification)
- if tfmtable then
- break
- else
- specification.filename = nil
- end
- end
- end
- end
- if tfmtable then
- if directive_embedall then
- tfmtable.embedding = "full"
- elseif tfmtable.filename and fonts.dontembed[tfmtable.filename] then
- tfmtable.embedding = "no"
- else
- tfmtable.embedding = "subset"
- end
- tfm.fonts[hash] = tfmtable
- fonts.designsizes[specification.hash] = tfmtable.designsize -- we only know this for sure after loading once
- --~ tfmtable.mode = specification.features.normal.mode or "base"
- end
- end
- if not tfmtable then
- logs.report("define font","font with name %s is not found",specification.name)
- end
- return tfmtable
-end
-
---[[ldx--
-<p>For virtual fonts we need a slightly different approach:</p>
---ldx]]--
-
-function tfm.read_and_define(name,size) -- no id
- local specification = define.analyze(name,size)
- local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
- end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
- local id = define.registered(hash)
- if not id then
- local fontdata = tfm.read(specification)
- if fontdata then
- fontdata.hash = hash
- id = font.define(fontdata)
- define.register(fontdata,id)
- tfm.cleanup_table(fontdata)
- else
- id = 0 -- signal
- end
- end
- return fonts.ids[id], id
-end
-
---[[ldx--
-<p>Next follow the readers. This code was written while <l n='luatex'/>
-evolved. Each one has its own way of dealing with its format.</p>
---ldx]]--
-
-local function check_tfm(specification,fullname)
- -- ofm directive blocks local path search unless set; btw, in context we
- -- don't support ofm files anyway as this format is obsolete
- local foundname = resolvers.findbinfile(fullname, 'tfm') or "" -- just to be sure
- if foundname == "" then
- foundname = resolvers.findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context
- end
- if foundname ~= "" then
- specification.filename, specification.format = foundname, "ofm"
- return tfm.read_from_tfm(specification)
- end
-end
-
-local function check_afm(specification,fullname)
- local foundname = resolvers.findbinfile(fullname, 'afm') or "" -- just to be sure
- if foundname == "" and tfm.auto_afm then
- local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.*
- if encoding and shortname and fonts.enc.known[encoding] then
- shortname = resolvers.findbinfile(shortname,'afm') or "" -- just to be sure
- if shortname ~= "" then
- foundname = shortname
- -- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away
- if trace_loading then
- logs.report("load afm","stripping encoding prefix from filename %s",afmname)
- end
- end
- end
- end
- if foundname ~= "" then
- specification.filename, specification.format = foundname, "afm"
- return tfm.read_from_afm(specification)
- end
-end
-
-function readers.tfm(specification)
- local fullname, tfmtable = specification.filename or "", nil
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- tfmtable = check_tfm(specification,specification.name .. "." .. forced)
- end
- if not tfmtable then
- tfmtable = check_tfm(specification,specification.name)
- end
- else
- tfmtable = check_tfm(specification,fullname)
- end
- return tfmtable
-end
-
-function readers.afm(specification,method)
- local fullname, tfmtable = specification.filename or "", nil
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- tfmtable = check_afm(specification,specification.name .. "." .. forced)
- end
- if not tfmtable then
- method = method or define.method or "afm or tfm"
- if method == "tfm" then
- tfmtable = check_tfm(specification,specification.name)
- elseif method == "afm" then
- tfmtable = check_afm(specification,specification.name)
- elseif method == "tfm or afm" then
- tfmtable = check_tfm(specification,specification.name) or check_afm(specification,specification.name)
- else -- method == "afm or tfm" or method == "" then
- tfmtable = check_afm(specification,specification.name) or check_tfm(specification,specification.name)
- end
- end
- else
- tfmtable = check_afm(specification,fullname)
- end
- return tfmtable
-end
-
--- maybe some day a set of names
-
-local function check_otf(forced,specification,suffix,what)
- local name = specification.name
- if forced then
- name = file.addsuffix(name,suffix,true)
- end
- local fullname, tfmtable = resolvers.findbinfile(name,suffix) or "", nil -- one shot
- if fullname == "" then
- local fb = fonts.names.old_to_new[name]
- if fb then
- fullname = resolvers.findbinfile(fb,suffix) or ""
- end
- end
- if fullname == "" then
- local fb = fonts.names.new_to_old[name]
- if fb then
- fullname = resolvers.findbinfile(fb,suffix) or ""
- end
- end
- if fullname ~= "" then
- specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then
- tfmtable = tfm.read_from_open_type(specification) -- we need to do it for all matches / todo
- end
- return tfmtable
-end
-
-function readers.opentype(specification,suffix,what)
- local forced = specification.forced or ""
- if forced == "otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
- return check_otf(true,specification,forced,"truetype")
- else
- return check_otf(false,specification,suffix,what)
- end
-end
-
-function readers.otf (specification) return readers.opentype(specification,"otf","opentype") end
-function readers.ttf (specification) return readers.opentype(specification,"ttf","truetype") end
-function readers.ttc (specification) return readers.opentype(specification,"ttf","truetype") end -- !!
-function readers.dfont(specification) return readers.opentype(specification,"ttf","truetype") end -- !!
-
---[[ldx--
-<p>We need to check for default features. For this we provide
-a helper function.</p>
---ldx]]--
-
-function define.check(features,defaults) -- nb adapts features !
- local done = false
- if features and next(features) then
- for k,v in next, defaults do
- if features[k] == nil then
- features[k], done = v, true
- end
- end
- else
- features, done = table.fastcopy(defaults), true
- end
- return features, done -- done signals a change
-end
-
---[[ldx--
-<p>So far the specifyers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).</p>
-
-In the previously defined reader (the one resulting in a <l n='tfm'/>
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back to in the callback was
-introduced later in the development.</p>
---ldx]]--
-
-define.last = nil
-
-function define.register(fontdata,id)
- if fontdata and id then
- local hash = fontdata.hash
- if not tfm.internalized[hash] then
- if trace_defining then
- logs.report("define font","loading at 2 id %s, hash: %s",id or "?",hash or "?")
- end
- fonts.identifiers[id] = fontdata
- fonts.characters [id] = fontdata.characters
- fonts.quads [id] = fontdata.parameters.quad
- -- todo: extra functions, e.g. setdigitwidth etc in list
- tfm.internalized[hash] = id
- end
- end
-end
-
-function define.registered(hash)
- local id = tfm.internalized[hash]
- return id, id and fonts.ids[id]
-end
-
-local cache_them = false
-
-function tfm.make(specification)
- -- currently fonts are scaled while constructing the font, so we
- -- have to do scaling of commands in the vf at that point using
- -- e.g. "local scale = g.factor or 1" after all, we need to work
- -- with copies anyway and scaling needs to be done at some point;
- -- however, when virtual tricks are used as feature (makes more
- -- sense) we scale the commands in fonts.tfm.scale (and set the
- -- factor there)
- local fvm = define.methods[specification.features.vtf.preset]
- if fvm then
- return fvm(specification)
- else
- return nil
- end
-end
-
-function define.read(specification,size,id) -- id can be optional, name can already be table
- statistics.starttiming(fonts)
- if type(specification) == "string" then
- specification = define.analyze(specification,size)
- end
- local method = specification.method
- if method and define.specify[method] then
- specification = define.specify[method](specification)
- end
- specification = define.resolve(specification)
- local hash = tfm.hash_instance(specification)
- if cache_them then
- local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes
- end
- local fontdata = define.registered(hash) -- id
- if not fontdata then
- if specification.features.vtf and specification.features.vtf.preset then
- fontdata = tfm.make(specification)
- else
- fontdata = tfm.read(specification)
- if fontdata then
- tfm.check_virtual_id(fontdata)
- end
- end
- if cache_them then
- fontdata = containers.write(fonts.cache,hash,fontdata) -- for tracing purposes
- end
- if fontdata then
- fontdata.hash = hash
- fontdata.cache = "no"
- if id then
- define.register(fontdata,id)
- end
- end
- end
- define.last = fontdata or id -- todo ! ! ! ! !
- if not fontdata then
- logs.report("define font", "unknown font %s, loading aborted",specification.name)
- elseif trace_defining and type(fontdata) == "table" then
- logs.report("define font","using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
- fontdata.type or "unknown",
- id or "?",
- fontdata.name or "?",
- fontdata.size or "default",
- fontdata.encodingbytes or "?",
- fontdata.encodingname or "unicode",
- fontdata.fullname or "?",
- file.basename(fontdata.filename or "?"))
-
- end
- statistics.stoptiming(fonts)
- return fontdata
-end
-
-function vf.find(name)
- name = file.removesuffix(file.basename(name))
- if tfm.resolve_vf then
- local format = fonts.logger.format(name)
- if format == 'tfm' or format == 'ofm' then
- if trace_defining then
- logs.report("define font","locating vf for %s",name)
- end
- return resolvers.findbinfile(name,"ovf")
- else
- if trace_defining then
- logs.report("define font","vf for %s is already taken care of",name)
- end
- return nil -- ""
- end
- else
- if trace_defining then
- logs.report("define font","locating vf for %s",name)
- end
- return resolvers.findbinfile(name,"ovf")
- end
-end
-
---[[ldx--
-<p>We overload both the <l n='tfm'/> and <l n='vf'/> readers.</p>
---ldx]]--
-
-callbacks.register('define_font' , define.read, "definition of fonts (tfmtable preparation)")
-callbacks.register('find_vf_file', vf.find , "locating virtual fonts, insofar needed") -- not that relevant any more
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-dum.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-dum.lua
deleted file mode 100644
index c9ffb635c16..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-dum.lua
+++ /dev/null
@@ -1,400 +0,0 @@
-if not modules then modules = { } end modules ['font-dum'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-fonts = fonts or { }
-
--- general
-
-fonts.otf.pack = false
-fonts.tfm.resolve_vf = false -- no sure about this
-fonts.tfm.fontname_mode = "specification" -- somehow latex needs this
-
--- readers
-
-fonts.tfm.readers = fonts.tfm.readers or { }
-fonts.tfm.readers.sequence = { 'otf', 'ttf', 'tfm' }
-fonts.tfm.readers.afm = nil
-
--- define
-
-fonts.define = fonts.define or { }
-
---~ fonts.define.method = "tfm"
-
-fonts.define.specify.colonized_default_lookup = "name"
-
-function fonts.define.get_specification(str)
- return "", str, "", ":", str
-end
-
--- logger
-
-fonts.logger = fonts.logger or { }
-
-function fonts.logger.save()
-end
-
--- names
---
--- Watch out, the version number is the same as the one used in
--- the mtx-fonts.lua function scripts.fonts.names as we use a
--- simplified font database in the plain solution and by using
--- a different number we're less dependent on context.
-
-fonts.names = fonts.names or { }
-
-fonts.names.version = 1.001 -- not the same as in context
-fonts.names.basename = "luatex-fonts-names.lua"
-fonts.names.new_to_old = { }
-fonts.names.old_to_new = { }
-
-local data, loaded = nil, false
-
-local fileformats = { "lua", "tex", "other text files" }
-
-function fonts.names.resolve(name,sub)
- if not loaded then
- local basename = fonts.names.basename
- if basename and basename ~= "" then
- for i=1,#fileformats do
- local format = fileformats[i]
- local foundname = resolvers.find_file(basename,format) or ""
- if foundname ~= "" then
- data = dofile(foundname)
- break
- end
- end
- end
- loaded = true
- end
- if type(data) == "table" and data.version == fonts.names.version then
- local condensed = string.gsub(string.lower(name),"[^%a%d]","")
- local found = data.mappings and data.mappings[condensed]
- if found then
- local fontname, filename, subfont = found[1], found[2], found[3]
- if subfont then
- return filename, fontname
- else
- return filename, false
- end
- else
- return name, false -- fallback to filename
- end
- end
-end
-
-fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
-
--- For the moment we put this (adapted) pseudo feature here.
-
-table.insert(fonts.triggers,"itlc")
-
-local function itlc(tfmdata,value)
- if value then
- -- the magic 40 and it formula come from Dohyun Kim
- local metadata = tfmdata.shared.otfdata.metadata
- if metadata then
- local italicangle = metadata.italicangle
- if italicangle and italicangle ~= 0 then
- local uwidth = (metadata.uwidth or 40)/2
- for unicode, d in next, tfmdata.descriptions do
- local it = d.boundingbox[3] - d.width + uwidth
- if it ~= 0 then
- d.italic = it
- end
- end
- tfmdata.has_italic = true
- end
- end
- end
-end
-
-fonts.initializers.base.otf.itlc = itlc
-fonts.initializers.node.otf.itlc = itlc
-
--- slant and extend
-
-function fonts.initializers.common.slant(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 1 then
- value = 1
- elseif value < -1 then
- value = -1
- end
- tfmdata.slant_factor = value
-end
-
-function fonts.initializers.common.extend(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 10 then
- value = 10
- elseif value < -10 then
- value = -10
- end
- tfmdata.extend_factor = value
-end
-
-table.insert(fonts.triggers,"slant")
-table.insert(fonts.triggers,"extend")
-
-fonts.initializers.base.otf.slant = fonts.initializers.common.slant
-fonts.initializers.node.otf.slant = fonts.initializers.common.slant
-fonts.initializers.base.otf.extend = fonts.initializers.common.extend
-fonts.initializers.node.otf.extend = fonts.initializers.common.extend
-
--- expansion and protrusion
-
-fonts.protrusions = fonts.protrusions or { }
-fonts.protrusions.setups = fonts.protrusions.setups or { }
-
-local setups = fonts.protrusions.setups
-
--- As this is experimental code, users should not depend on it. The
--- implications are still discussed on the ConTeXt Dev List and we're
--- not sure yet what exactly the spec is (the next code is tested with
--- a gyre font patched by / fea file made by Khaled Hosny). The double
--- trick should not be needed it proper hanging punctuation is used in
--- which case values < 1 can be used.
---
--- preferred (in context, usine vectors):
---
--- \definefontfeature[whatever][default][mode=node,protrusion=quality]
---
--- using lfbd and rtbd, with possibibility to enable only one side :
---
--- \definefontfeature[whocares][default][mode=node,protrusion=yes, opbd=yes,script=latn]
--- \definefontfeature[whocares][default][mode=node,protrusion=right,opbd=yes,script=latn]
---
--- idem, using multiplier
---
--- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn]
--- \definefontfeature[whocares][default][mode=node,protrusion=double,opbd=yes,script=latn]
---
--- idem, using named feature file (less frozen):
---
--- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn,featurefile=texgyrepagella-regularxx.fea]
-
-local function map_opbd_onto_protrusion(tfmdata,value,opbd)
- local characters, descriptions = tfmdata.characters, tfmdata.descriptions
- local otfdata = tfmdata.shared.otfdata
- local singles = otfdata.shared.featuredata.gpos_single
- local script, language = tfmdata.script, tfmdata.language
- local done, factor, left, right = false, 1, 1, 1
- local setup = setups[value]
- if setup then
- factor = setup.factor or 1
- left = setup.left or 1
- right = setup.right or 1
- else
- factor = tonumber(value) or 1
- end
- if opbd ~= "right" then
- local validlookups, lookuplist = fonts.otf.collect_lookups(otfdata,"lfbd",script,language)
- if validlookups then
- for i=1,#lookuplist do
- local lookup = lookuplist[i]
- local data = singles[lookup]
- if data then
- if trace_protrusion then
- logs.report("fonts","set left protrusion using lfbd lookup '%s'",lookup)
- end
- for k, v in next, data do
- -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
- local p = - (v[1] / 1000) * factor * left
- characters[k].left_protruding = p
- if trace_protrusion then
- logs.report("opbd","lfbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
- end
- end
- done = true
- end
- end
- end
- end
- if opbd ~= "left" then
- local validlookups, lookuplist = fonts.otf.collect_lookups(otfdata,"rtbd",script,language)
- if validlookups then
- for i=1,#lookuplist do
- local lookup = lookuplist[i]
- local data = singles[lookup]
- if data then
- if trace_protrusion then
- logs.report("fonts","set right protrusion using rtbd lookup '%s'",lookup)
- end
- for k, v in next, data do
- -- local p = v[3] / descriptions[k].width -- or 3
- local p = (v[1] / 1000) * factor * right
- characters[k].right_protruding = p
- if trace_protrusion then
- logs.report("opbd","rtbd -> %s -> 0x%05X (%s) -> %0.03f (%s)",lookup,k,utfchar(k),p,concat(v," "))
- end
- end
- end
- done = true
- end
- end
- end
- tfmdata.auto_protrude = done
-end
-
--- The opbd test is just there because it was discussed on the
--- context development list. However, the mentioned fxlbi.otf font
--- only has some kerns for digits. So, consider this feature not
--- supported till we have a proper test font.
-
-function fonts.initializers.common.protrusion(tfmdata,value)
- if value then
- local opbd = tfmdata.shared.features.opbd
- if opbd then
- -- possible values: left right both yes no (experimental)
- map_opbd_onto_protrusion(tfmdata,value,opbd)
- elseif value then
- local setup = setups[value]
- if setup then
- local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
- local emwidth = tfmdata.parameters.quad
- tfmdata.auto_protrude = true
- for i, chr in next, tfmdata.characters do
- local v, pl, pr = setup[i], nil, nil
- if v then
- pl, pr = v[1], v[2]
- end
- if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
- if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
- end
- end
- end
- end
-end
-
-fonts.expansions = fonts.expansions or { }
-fonts.expansions.setups = fonts.expansions.setups or { }
-
-local setups = fonts.expansions.setups
-
-function fonts.initializers.common.expansion(tfmdata,value)
- if value then
- local setup = setups[value]
- if setup then
- local stretch, shrink, step, factor = setup.stretch or 0, setup.shrink or 0, setup.step or 0, setup.factor or 1
- tfmdata.stretch, tfmdata.shrink, tfmdata.step, tfmdata.auto_expand = stretch * 10, shrink * 10, step * 10, true
- for i, chr in next, tfmdata.characters do
- local v = setup[i]
- if v and v ~= 0 then
- chr.expansion_factor = v*factor
- else -- can be option
- chr.expansion_factor = factor
- end
- end
- end
- end
-end
-
-table.insert(fonts.manipulators,"protrusion")
-table.insert(fonts.manipulators,"expansion")
-
-fonts.initializers.base.otf.protrusion = fonts.initializers.common.protrusion
-fonts.initializers.node.otf.protrusion = fonts.initializers.common.protrusion
-fonts.initializers.base.otf.expansion = fonts.initializers.common.expansion
-fonts.initializers.node.otf.expansion = fonts.initializers.common.expansion
-
--- left over
-
-function fonts.register_message()
-end
-
--- example vectors
-
-local byte = string.byte
-
-fonts.expansions.setups['default'] = {
-
- stretch = 2, shrink = 2, step = .5, factor = 1,
-
- [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
- [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
- [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
- [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
- [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
- [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
- [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
- [byte('w')] = 0.7, [byte('z')] = 0.7,
- [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
-}
-
-fonts.protrusions.setups['default'] = {
-
- factor = 1, left = 1, right = 1,
-
- [0x002C] = { 0, 1 }, -- comma
- [0x002E] = { 0, 1 }, -- period
- [0x003A] = { 0, 1 }, -- colon
- [0x003B] = { 0, 1 }, -- semicolon
- [0x002D] = { 0, 1 }, -- hyphen
- [0x2013] = { 0, 0.50 }, -- endash
- [0x2014] = { 0, 0.33 }, -- emdash
- [0x3001] = { 0, 1 }, -- ideographic comma 、
- [0x3002] = { 0, 1 }, -- ideographic full stop 。
- [0x060C] = { 0, 1 }, -- arabic comma ،
- [0x061B] = { 0, 1 }, -- arabic semicolon ؛
- [0x06D4] = { 0, 1 }, -- arabic full stop ۔
-
-}
-
--- normalizer
-
-fonts.otf.meanings = fonts.otf.meanings or { }
-
-fonts.otf.meanings.normalize = fonts.otf.meanings.normalize or function(t)
- if t.rand then
- t.rand = "random"
- end
-end
-
--- bonus
-
-function fonts.otf.name_to_slot(name)
- local tfmdata = fonts.ids[font.current()]
- if tfmdata and tfmdata.shared then
- local otfdata = tfmdata.shared.otfdata
- local unicode = otfdata.luatex.unicodes[name]
- return unicode and (type(unicode) == "number" and unicode or unicode[1])
- end
-end
-
-function fonts.otf.char(n)
- if type(n) == "string" then
- n = fonts.otf.name_to_slot(n)
- end
- if type(n) == "number" then
- tex.sprint("\\char" .. n)
- end
-end
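-
--- a small usage sketch (hypothetical glyph name): from the TeX end,
--- \directlua{fonts.otf.char("alpha")} looks up the slot of the glyph named
--- "alpha" in the current font and typesets it with \char, provided the name
--- resolves to a number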
-
--- another one:
-
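--- invisible and format-only characters: the soft hyphen, the Khmer inherent
--- vowels, zero width and bidi controls, the word joiner and the invisible
--- math operators, deprecated format characters, the BOM, the musical symbol
--- format controls and the tag characters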
-fonts.strippables = table.tohash {
- 0x000AD, 0x017B4, 0x017B5, 0x0200B, 0x0200C, 0x0200D, 0x0200E, 0x0200F, 0x0202A, 0x0202B,
- 0x0202C, 0x0202D, 0x0202E, 0x02060, 0x02061, 0x02062, 0x02063, 0x0206A, 0x0206B, 0x0206C,
- 0x0206D, 0x0206E, 0x0206F, 0x0FEFF, 0x1D173, 0x1D174, 0x1D175, 0x1D176, 0x1D177, 0x1D178,
- 0x1D179, 0x1D17A, 0xE0001, 0xE0020, 0xE0021, 0xE0022, 0xE0023, 0xE0024, 0xE0025, 0xE0026,
- 0xE0027, 0xE0028, 0xE0029, 0xE002A, 0xE002B, 0xE002C, 0xE002D, 0xE002E, 0xE002F, 0xE0030,
- 0xE0031, 0xE0032, 0xE0033, 0xE0034, 0xE0035, 0xE0036, 0xE0037, 0xE0038, 0xE0039, 0xE003A,
- 0xE003B, 0xE003C, 0xE003D, 0xE003E, 0xE003F, 0xE0040, 0xE0041, 0xE0042, 0xE0043, 0xE0044,
- 0xE0045, 0xE0046, 0xE0047, 0xE0048, 0xE0049, 0xE004A, 0xE004B, 0xE004C, 0xE004D, 0xE004E,
- 0xE004F, 0xE0050, 0xE0051, 0xE0052, 0xE0053, 0xE0054, 0xE0055, 0xE0056, 0xE0057, 0xE0058,
- 0xE0059, 0xE005A, 0xE005B, 0xE005C, 0xE005D, 0xE005E, 0xE005F, 0xE0060, 0xE0061, 0xE0062,
- 0xE0063, 0xE0064, 0xE0065, 0xE0066, 0xE0067, 0xE0068, 0xE0069, 0xE006A, 0xE006B, 0xE006C,
- 0xE006D, 0xE006E, 0xE006F, 0xE0070, 0xE0071, 0xE0072, 0xE0073, 0xE0074, 0xE0075, 0xE0076,
- 0xE0077, 0xE0078, 0xE0079, 0xE007A, 0xE007B, 0xE007C, 0xE007D, 0xE007E, 0xE007F,
-}
-
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ini.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ini.lua
deleted file mode 100644
index c695ec4ae75..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ini.lua
+++ /dev/null
@@ -1,96 +0,0 @@
-if not modules then modules = { } end modules ['font-ini'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-<p>Not much is happening here.</p>
---ldx]]--
-
-local utf = unicode.utf8
-local format, serialize = string.format, table.serialize
-local write_nl = texio.write_nl
-local lower = string.lower
-
-if not fontloader then fontloader = fontforge end
-
-fontloader.totable = fontloader.to_table
-
--- vtf comes first
--- fix comes last
-
-fonts = fonts or { }
-
-fonts.ids = fonts.ids or { } fonts.identifiers = fonts.ids -- aka fontdata
-fonts.chr = fonts.chr or { } fonts.characters = fonts.chr -- aka chardata
-fonts.qua = fonts.qua or { } fonts.quads = fonts.qua -- aka quaddata
-
-fonts.tfm = fonts.tfm or { }
-
-fonts.mode = 'base'
-fonts.private = 0xF0000 -- 0x10FFFF
-fonts.verbose = false -- more verbose cache tables
-
-fonts.ids[0] = { -- nullfont
- characters = { },
- descriptions = { },
- name = "nullfont",
-}
-
-fonts.chr[0] = { }
-
-fonts.methods = fonts.methods or {
- base = { tfm = { }, afm = { }, otf = { }, vtf = { }, fix = { } },
- node = { tfm = { }, afm = { }, otf = { }, vtf = { }, fix = { } },
-}
-
-fonts.initializers = fonts.initializers or {
- base = { tfm = { }, afm = { }, otf = { }, vtf = { }, fix = { } },
- node = { tfm = { }, afm = { }, otf = { }, vtf = { }, fix = { } }
-}
-
-fonts.triggers = fonts.triggers or {
- 'mode',
- 'language',
- 'script',
- 'strategy',
-}
-
-fonts.processors = fonts.processors or {
-}
-
-fonts.manipulators = fonts.manipulators or {
-}
-
-fonts.define = fonts.define or { }
-fonts.define.specify = fonts.define.specify or { }
-fonts.define.specify.synonyms = fonts.define.specify.synonyms or { }
-
--- tracing
-
-if not fonts.color then
-
- fonts.color = {
- set = function() end,
- reset = function() end,
- }
-
-end
-
--- format identification
-
-fonts.formats = { }
-
-function fonts.fontformat(filename,default)
- local extname = lower(file.extname(filename))
- local format = fonts.formats[extname]
- if format then
- return format
- else
- logs.report("fonts define","unable to determine font format for '%s'",filename)
- return default
- end
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-map.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-map.lua
deleted file mode 100644
index 299508764a1..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-map.lua
+++ /dev/null
@@ -1,370 +0,0 @@
-if not modules then modules = { } end modules ['font-map'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utf = unicode.utf8
-local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
-local lpegmatch = lpeg.match
-local utfbyte = utf.byte
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end)
-
-local ctxcatcodes = tex and tex.ctxcatcodes
-
---[[ldx--
-<p>Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.</p>
-<p>The name-to-unicode related code will stay, of course.</p>
---ldx]]--
-
-fonts = fonts or { }
-fonts.map = fonts.map or { }
-
-local function load_lum_table(filename) -- will move to font goodies
- local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.find_file(lumname,"map") or ""
- if lumfile ~= "" and lfs.isfile(lumfile) then
- if trace_loading or trace_unimapping then
- logs.report("load otf","enhance: loading %s ",lumfile)
- end
- lumunic = dofile(lumfile)
- return lumunic, lumfile
- end
-end
-
-local hex = lpeg.R("AF","09")
-local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local hexsix = (hex^1) / function(s) return tonumber(s,16) end
-local dec = (lpeg.R("09")^1) / tonumber
-local period = lpeg.P(".")
-
-local unicode = lpeg.P("uni") * (hexfour * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexfour^1) * lpeg.Cc(true))
-local ucode = lpeg.P("u") * (hexsix * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexsix ^1) * lpeg.Cc(true))
-local index = lpeg.P("index") * dec * lpeg.Cc(false)
-
-local parser = unicode + ucode + index
-
-local parsers = { }
-
-local function make_name_parser(str)
- if not str or str == "" then
- return parser
- else
- local p = parsers[str]
- if not p then
- p = lpeg.P(str) * period * dec * lpeg.Cc(false)
- parsers[str] = p
- end
- return p
- end
-end
-
---~ local parser = fonts.map.make_name_parser("Japan1")
---~ local parser = fonts.map.make_name_parser()
---~ local function test(str)
---~ local b, a = lpegmatch(parser,str)
---~ print((a and table.serialize(b)) or b)
---~ end
---~ test("a.sc")
---~ test("a")
---~ test("uni1234")
---~ test("uni1234.xx")
---~ test("uni12349876")
---~ test("index1234")
---~ test("Japan1.123")
-
-local function tounicode16(unicode)
- if unicode < 0x10000 then
- return format("%04X",unicode)
- else
- return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
-end
-
-local function tounicode16sequence(unicodes)
- local t = { }
- for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
- else
- t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
- end
- end
- return concat(t)
-end
-
---~ This is quite a bit faster, but at the cost of some memory. If we
---~ do this we will also use it elsewhere, so let's not follow this route
---~ now. I might use this method in the plain variant (no caching there),
---~ but then I need a flag that distinguishes between the code branches.
---~
---~ local cache = { }
---~
---~ function fonts.map.tounicode16(unicode)
---~ local s = cache[unicode]
---~ if not s then
---~ if unicode < 0x10000 then
---~ s = format("%04X",unicode)
---~ else
---~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
---~ end
---~ cache[unicode] = s
---~ end
---~ return s
---~ end
-
-fonts.map.load_lum_table = load_lum_table
-fonts.map.make_name_parser = make_name_parser
-fonts.map.tounicode16 = tounicode16
-fonts.map.tounicode16sequence = tounicode16sequence
-
-local separator = lpeg.S("_.")
-local other = lpeg.C((1 - separator)^1)
-local ligsplitter = lpeg.Ct(other * (separator * other)^0)
-
---~ print(table.serialize(lpegmatch(ligsplitter,"this")))
---~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-
-fonts.map.add_to_unicode = function(data,filename)
- local unicodes = data.luatex and data.luatex.unicodes
- if not unicodes then
- return
- end
- -- we need to move this code
- unicodes['space'] = unicodes['space'] or 32
- unicodes['hyphen'] = unicodes['hyphen'] or 45
- unicodes['zwj'] = unicodes['zwj'] or 0x200D
- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
- -- the tounicode mapping is sparse and only needed for alternatives
- local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?"))
- data.luatex.tounicode, data.luatex.originals = tounicode, originals
- local lumunic, uparser, oparser
- if false then -- will become an option
- lumunic = load_lum_table(filename)
- lumunic = lumunic and lumunic.tounicode
- end
- local cidinfo, cidnames, cidcodes = data.cidinfo
- local usedmap = cidinfo and cidinfo.usedname
- usedmap = usedmap and lower(usedmap)
- usedmap = usedmap and fonts.cid.map[usedmap]
- if usedmap then
- oparser = usedmap and make_name_parser(cidinfo.ordering)
- cidnames = usedmap.names
- cidcodes = usedmap.unicodes
- end
- uparser = make_name_parser()
- local aglmap = fonts.map and fonts.map.agl_to_unicode
- for index, glyph in next, data.glyphs do
- local name, unic = glyph.name, glyph.unicode or -1 -- play safe
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
- local unicode = (lumunic and lumunic[name]) or (aglmap and aglmap[name])
- if unicode then
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- end
- -- cidmap heuristics, beware, there is no guarantee for a match unless
- -- the chain resolves
- if (not unicode) and usedmap then
- local foundindex = lpegmatch(oparser,name)
- if foundindex then
- unicode = cidcodes[foundindex] -- name to number
- if unicode then
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- else
- local reference = cidnames[foundindex] -- number to name
- if reference then
- local foundindex = lpegmatch(oparser,reference)
- if foundindex then
- unicode = cidcodes[foundindex]
- if unicode then
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- end
- end
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,reference)
- if foundcodes then
- if multiple then
- originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
- else
- originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
- end
- end
- end
- end
- end
- end
- end
- -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
- if not unicode then
- local split = lpegmatch(ligsplitter,name)
- local nplit = (split and #split) or 0
- if nplit == 0 then
- -- skip
- elseif nplit == 1 then
- local base = split[1]
- unicode = unicodes[base] or (aglmap and aglmap[base])
- if unicode then
- if type(unicode) == "table" then
- unicode = unicode[1]
- end
- originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1
- end
- else
- local t = { }
- for l=1,nplit do
- local base = split[l]
- local u = unicodes[base] or (aglmap and aglmap[base])
- if not u then
- break
- elseif type(u) == "table" then
- t[#t+1] = u[1]
- else
- t[#t+1] = u
- end
- end
- if #t > 0 then -- done
- originals[index], tounicode[index], nl, unicode = t, tounicode16sequence(t), nl + 1, true
- end
- end
- end
- -- last resort
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,name)
- if foundcodes then
- if multiple then
- originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true
- else
- originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes
- end
- end
- end
- if not unicode then
- originals[index], tounicode[index] = 0xFFFD, "FFFD"
- end
- end
- end
- if trace_unimapping then
- for index, glyph in table.sortedhash(data.glyphs) do
- local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe
- if toun then
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun)
- else
- logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic)
- end
- end
- end
- if trace_loading and (ns > 0 or nl > 0) then
- logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
- end
-end
-
--- the following is sort of obsolete
---
--- fonts.map.data = fonts.map.data or { }
--- fonts.map.encodings = fonts.map.encodings or { }
--- fonts.map.loaded = fonts.map.loaded or { }
--- fonts.map.line = fonts.map.line or { }
---
--- function fonts.map.line.pdftex(e)
--- if e.name and e.fontfile then
--- local fullname = e.fullname or ""
--- if e.slant and e.slant ~= 0 then
--- if e.encoding then
--- pdf.mapline(format('= %s %s "%g SlantFont" <%s <%s',e.name,fullname,e.slant,e.encoding,e.fontfile))
--- else
--- pdf.mapline(format('= %s %s "%g SlantFont" <%s',e.name,fullname,e.slant,e.fontfile))
--- end
--- elseif e.extend and e.extend ~= 1 and e.extend ~= 0 then
--- if e.encoding then
--- pdf.mapline(format('= %s %s "%g ExtendFont" <%s <%s',e.name,fullname,e.extend,e.encoding,e.fontfile))
--- else
--- pdf.mapline(format('= %s %s "%g ExtendFont" <%s',e.name,fullname,e.extend,e.fontfile))
--- end
--- else
--- if e.encoding then
--- pdf.mapline(format('= %s %s <%s <%s',e.name,fullname,e.encoding,e.fontfile))
--- else
--- pdf.mapline(format('= %s %s <%s',e.name,fullname,e.fontfile))
--- end
--- end
--- else
--- return nil
--- end
--- end
---
--- function fonts.map.flush(backend) -- will also erase the accumulated data
--- local flushline = fonts.map.line[backend or "pdftex"] or fonts.map.line.pdftex
--- for _, e in next, fonts.map.data do
--- flushline(e)
--- end
--- fonts.map.data = { }
--- end
---
--- fonts.map.line.dvips = fonts.map.line.pdftex
--- fonts.map.line.dvipdfmx = function() end
---
--- function fonts.map.convert_entries(filename)
--- if not fonts.map.loaded[filename] then
--- fonts.map.data, fonts.map.encodings = fonts.map.load_file(filename,fonts.map.data, fonts.map.encodings)
--- fonts.map.loaded[filename] = true
--- end
--- end
---
--- function fonts.map.load_file(filename, entries, encodings)
--- entries = entries or { }
--- encodings = encodings or { }
--- local f = io.open(filename)
--- if f then
--- local data = f:read("*a")
--- if data then
--- for line in gmatch(data,"(.-)[\n\t]") do
--- if find(line,"^[%#%%%s]") then
--- -- print(line)
--- else
--- local extend, slant, name, fullname, fontfile, encoding
--- line = gsub(line,'"(.+)"', function(s)
--- extend = find(s,'"([^"]+) ExtendFont"')
--- slant = find(s,'"([^"]+) SlantFont"')
--- return ""
--- end)
--- if not name then
--- -- name fullname encoding fontfile
--- name, fullname, encoding, fontfile = match(line,"^(%S+)%s+(%S*)[%s<]+(%S*)[%s<]+(%S*)%s*$")
--- end
--- if not name then
--- -- name fullname (flag) fontfile encoding
--- name, fullname, fontfile, encoding = match(line,"^(%S+)%s+(%S*)[%d%s<]+(%S*)[%s<]+(%S*)%s*$")
--- end
--- if not name then
--- -- name fontfile
--- name, fontfile = match(line,"^(%S+)%s+[%d%s<]+(%S*)%s*$")
--- end
--- if name then
--- if encoding == "" then encoding = nil end
--- entries[name] = {
--- name = name, -- handy
--- fullname = fullname,
--- encoding = encoding,
--- fontfile = fontfile,
--- slant = tonumber(slant),
--- extend = tonumber(extend)
--- }
--- encodings[name] = encoding
--- elseif line ~= "" then
--- -- print(line)
--- end
--- end
--- end
--- end
--- f:close()
--- end
--- return entries, encodings
--- end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-nms.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-nms.lua
deleted file mode 100644
index fa9edcaef66..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-nms.lua
+++ /dev/null
@@ -1,688 +0,0 @@
-if not modules then modules = { } end modules ['font-nms'] = {
- version = 1.002,
- comment = "companion to luaotfload.lua",
- author = "Khaled Hosny and Elie Roux",
- copyright = "Luaotfload Development Team",
- license = "GNU GPL v2"
-}
-
-fonts = fonts or { }
-fonts.names = fonts.names or { }
-
-local names = fonts.names
-local names_dir = "luatex-cache/generic/names"
-names.version = 2.009 -- not the same as in context
-names.data = nil
-names.path = {
- basename = "otfl-names.lua",
- localdir = file.join(kpse.expand_var("$TEXMFVAR"), names_dir),
- systemdir = file.join(kpse.expand_var("$TEXMFSYSVAR"), names_dir),
-}
-
-
-local splitpath, expandpath = file.split_path, kpse.expand_path
-local glob, basename = dir.glob, file.basename
-local upper, lower, format = string.upper, string.lower, string.format
-local gsub, match, rpadd = string.gsub, string.match, string.rpadd
-local gmatch, sub, find = string.gmatch, string.sub, string.find
-local utfgsub = unicode.utf8.gsub
-
-local trace_short = false --tracing adapted to rebuilding of the database inside a document
-local trace_search = false --trackers.register("names.search", function(v) trace_search = v end)
-local trace_loading = false --trackers.register("names.loading", function(v) trace_loading = v end)
-
-local function sanitize(str)
- if str then
- return utfgsub(lower(str), "[^%a%d]", "")
- else
- return str -- nil
- end
-end
-
-local function fontnames_init()
- return {
- mappings = { },
- status = { },
- version = names.version,
- }
-end
-
-local function load_names()
- local localpath = file.join(names.path.localdir, names.path.basename)
- local systempath = file.join(names.path.systemdir, names.path.basename)
- local kpsefound = kpse.find_file(names.path.basename)
- local foundname
- local data
- if kpsefound and file.isreadable(kpsefound) then
- data = dofile(kpsefound)
- foundname = kpsefound
- elseif file.isreadable(localpath) then
- data = dofile(localpath)
- foundname = localpath
- elseif file.isreadable(systempath) then
- data = dofile(systempath)
- foundname = systempath
- end
- if data then
- logs.info("Font names database loaded: " .. foundname)
- else
- logs.info([[Font names database not found, generating new one.
- This can take several minutes; please be patient.]])
- data = names.update(fontnames_init())
- names.save(data)
- end
- return data
-end
-
-local synonyms = {
- regular = { "normal", "roman", "plain", "book", "medium" },
- -- boldregular was for old versions of Linux Libertine, is it still useful?
- -- semibold is in new versions of Linux Libertine, but there is also a bold,
- -- not sure it's useful here...
- bold = { "demi", "demibold", "semibold", "boldregular" },
- italic = { "regularitalic", "normalitalic", "oblique", "slanted" },
- bolditalic = { "boldoblique", "boldslanted", "demiitalic", "demioblique", "demislanted", "demibolditalic", "semibolditalic" },
-}
-
-local loaded = false
-local reloaded = false
-
-function names.resolve(specification)
- local name = sanitize(specification.name)
- local style = sanitize(specification.style) or "regular"
-
- local size
- if specification.optsize then
- size = tonumber(specification.optsize)
- elseif specification.size then
- size = specification.size / 65536
- end
-
-
- if not loaded then
- names.data = names.load()
- loaded = true
- end
-
- local data = names.data
- if type(data) == "table" and data.version == names.version then
- if data.mappings then
- local found = { }
- for _,face in next, data.mappings do
- local family = sanitize(face.names.family)
- local subfamily = sanitize(face.names.subfamily)
- local fullname = sanitize(face.names.fullname)
- local psname = sanitize(face.names.psname)
- local fontname = sanitize(face.fontname)
- local pfullname = sanitize(face.fullname)
- local optsize, dsnsize, maxsize, minsize
- if #face.size > 0 then
- optsize = face.size
- dsnsize = optsize[1] and optsize[1] / 10
- -- can be nil
- maxsize = optsize[2] and optsize[2] / 10 or dsnsize
- minsize = optsize[3] and optsize[3] / 10 or dsnsize
- end
- if name == family then
- if subfamily == style then
- if optsize then
- if dsnsize == size
- or (size > minsize and size <= maxsize) then
- found[1] = face
- break
- else
- found[#found+1] = face
- end
- else
- found[1] = face
- break
- end
- elseif synonyms[style] and
- table.contains(synonyms[style], subfamily) then
- if optsize then
- if dsnsize == size
- or (size > minsize and size <= maxsize) then
- found[1] = face
- break
- else
- found[#found+1] = face
- end
- else
- found[1] = face
- break
- end
- elseif subfamily == "regular" or
- table.contains(synonyms.regular, subfamily) then
- found.fallback = face
- end
- end
- if name == fullname
- or name == pfullname
- or name == fontname
- or name == psname then
- if optsize then
- if dsnsize == size
- or (size > minsize and size <= maxsize) then
- found[1] = face
- break
- else
- found[#found+1] = face
- end
- else
- found[1] = face
- break
- end
- end
- end
- if #found == 1 then
- if kpse.lookup(found[1].filename[1]) then
- logs.report("load font",
- "font family='%s', subfamily='%s' found: %s",
- name, style, found[1].filename[1])
- return found[1].filename[1], found[1].filename[2]
- end
- elseif #found > 1 then
- -- we found matching font(s) but not in the requested optical
- -- sizes, so we loop through the matches to find the one with
- -- least difference from the requested size.
- local closest
- local least = math.huge -- initial value is infinity
- for i,face in next, found do
- local dsnsize = face.size[1]/10
- local difference = math.abs(dsnsize-size)
- if difference < least then
- closest = face
- least = difference
- end
- end
- if kpse.lookup(closest.filename[1]) then
- logs.report("load font",
- "font family='%s', subfamily='%s' found: %s",
- name, style, closest.filename[1])
- return closest.filename[1], closest.filename[2]
- end
- elseif found.fallback then
- return found.fallback.filename[1], found.fallback.filename[2]
- end
- -- no font found so far
- if not reloaded then
- -- try reloading the database
- names.data = names.update(names.data)
- names.save(names.data)
- reloaded = true
- return names.resolve(specification)
- else
- -- else, fallback to filename
- return specification.name, false
- end
- end
- else
- if not reloaded then
- names.data = names.update()
- names.save(names.data)
- reloaded = true
- return names.resolve(specification)
- else
- return specification.name, false
- end
- end
-end
-
-names.resolvespec = names.resolve
-
-function names.set_log_level(level)
- if level == 2 then
- trace_loading = true
- elseif level >= 3 then
- trace_loading = true
- trace_search = true
- end
-end
-
-local lastislog = 0
-
-local function log(fmt, ...)
- lastislog = 1
- texio.write_nl(format("luaotfload | %s", format(fmt,...)))
- io.flush()
-end
-
-logs = logs or { }
-logs.report = logs.report or log
-logs.info = logs.info or log
-
-local function font_fullinfo(filename, subfont, texmf)
- local t = { }
- local f = fontloader.open(filename, subfont)
- if not f then
- if trace_loading then
- logs.report("error: failed to open %s", filename)
- end
- return
- end
- local m = fontloader.to_table(f)
- fontloader.close(f)
- collectgarbage('collect')
- -- see http://www.microsoft.com/typography/OTSPEC/features_pt.htm#size
- if m.fontstyle_name then
- for _,v in next, m.fontstyle_name do
- if v.lang == 1033 then
- t.fontstyle_name = v.name
- end
- end
- end
- if m.names then
- for _,v in next, m.names do
- if v.lang == "English (US)" then
- t.names = {
- -- see
- -- http://developer.apple.com/textfonts/
- -- TTRefMan/RM06/Chap6name.html
- fullname = v.names.compatfull or v.names.fullname,
- family = v.names.preffamilyname or v.names.family,
- subfamily= t.fontstyle_name or v.names.prefmodifiers or v.names.subfamily,
- psname = v.names.postscriptname
- }
- end
- end
- else
- -- no names table, probably a broken font
- if trace_loading then
- logs.report("broken font rejected: %s", filename)
- end
- return
- end
- t.fontname = m.fontname
- t.fullname = m.fullname
- t.familyname = m.familyname
- t.filename = { texmf and basename(filename) or filename, subfont }
- t.weight = m.pfminfo.weight
- t.width = m.pfminfo.width
- t.slant = m.italicangle
- -- don't waste the space with zero values
- t.size = {
- m.design_size ~= 0 and m.design_size or nil,
- m.design_range_top ~= 0 and m.design_range_top or nil,
- m.design_range_bottom ~= 0 and m.design_range_bottom or nil,
- }
- return t
-end
-
-local function load_font(filename, fontnames, newfontnames, texmf)
- local newmappings = newfontnames.mappings
- local newstatus = newfontnames.status
- local mappings = fontnames.mappings
- local status = fontnames.status
- local basefile = texmf and basename(filename) or filename
- if filename then
- if table.contains(names.blacklist, filename) or
- table.contains(names.blacklist, basename(filename)) then
- if trace_search then
- logs.report("ignoring font '%s'", filename)
- end
- return
- end
- local timestamp, db_timestamp
- db_timestamp = status[basefile] and status[basefile].timestamp
- timestamp = lfs.attributes(filename, "modification")
-
- local index_status = newstatus[basefile] or (not texmf and newstatus[basename(filename)])
- if index_status and index_status.timestamp == timestamp then
- -- already indexed this run
- return
- end
-
- newstatus[basefile] = newstatus[basefile] or { }
- newstatus[basefile].timestamp = timestamp
- newstatus[basefile].index = newstatus[basefile].index or { }
-
- if db_timestamp == timestamp and not newstatus[basefile].index[1] then
- for _,v in next, status[basefile].index do
- local index = #newstatus[basefile].index
- newmappings[#newmappings+1] = mappings[v]
- newstatus[basefile].index[index+1] = #newmappings
- end
- if trace_loading then
- logs.report("font already indexed: %s", basefile)
- end
- return
- end
- local info = fontloader.info(filename)
- if info then
- if type(info) == "table" and #info > 1 then
- for i in next, info do
- local fullinfo = font_fullinfo(filename, i-1, texmf)
- if not fullinfo then
- return
- end
- local index = newstatus[basefile].index[i]
- if not index then
- index = #newmappings+1
- end
- newmappings[index] = fullinfo
- newstatus[basefile].index[i] = index
- end
- else
- local fullinfo = font_fullinfo(filename, false, texmf)
- if not fullinfo then
- return
- end
- local index = newstatus[basefile].index[1]
- if not index then
- index = #newmappings+1
- end
- newmappings[index] = fullinfo
- newstatus[basefile].index[1] = index
- end
- else
- if trace_loading then
- logs.report("failed to load %s", basefile)
- end
- end
- end
-end
-
-local function path_normalize(path)
- --[[
- path normalization:
- - a\b\c -> a/b/c
- - a/../b -> b
- - /cygdrive/a/b -> a:/b
- - reading symlinks under non-Win32
- - using kpse.readable_file on Win32
- ]]
- if os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
- path = path:gsub('\\', '/')
- path = path:lower()
- path = path:gsub('^/cygdrive/(%a)/', '%1:/')
- end
- if os.type ~= "windows" and os.type ~= "msdos" then
- local dest = lfs.readlink(path)
- if dest then
- if kpse.readable_file(dest) then
- path = dest
- elseif kpse.readable_file(file.join(file.dirname(path), dest)) then
- path = file.join(file.dirname(path), dest)
- else
- -- broken symlink?
- end
- end
- end
- path = file.collapse_path(path)
- return path
-end
-
-fonts.path_normalize = path_normalize
-
-names.blacklist = { }
-
-local function read_blacklist()
- local files = {
- kpse.lookup("otfl-blacklist.cnf", {all=true, format="tex"})
- }
- local blacklist = names.blacklist
-
- if files and type(files) == "table" then
- for _,v in next, files do
- for line in io.lines(v) do
- line = line:strip() -- to get rid of lines like " % foo"
- if line:find("^%%") or line:is_empty() then
- -- comment or empty line
- else
- line = line:split("%")[1]
- line = line:strip()
- if trace_search then
- logs.report("blacklisted file: %s", line)
- end
- blacklist[#blacklist+1] = line
- end
- end
- end
- end
-end
-
-local font_extensions = { "otf", "ttf", "ttc", "dfont" }
-
-local function scan_dir(dirname, fontnames, newfontnames, texmf)
- --[[
- This function scans a directory and populates the list of fonts
- with all the fonts it finds.
- - dirname is the name of the directory to scan
- - fontnames is the existing font database, newfontnames is the one being built
- - texmf is a boolean saying if we are scanning a texmf directory
- ]]
- local list, found = { }, { }
- local nbfound = 0
- if trace_search then
- logs.report("scanning '%s'", dirname)
- end
- for _,i in next, font_extensions do
- for _,ext in next, { i, upper(i) } do
- found = glob(format("%s/**.%s$", dirname, ext))
- -- note that glob fails silently on broken symlinks, which happens
- -- sometimes in TeX Live.
- if trace_search then
- logs.report("%s '%s' fonts found", #found, ext)
- end
- nbfound = nbfound + #found
- table.append(list, found)
- end
- end
- if trace_search then
- logs.report("%d fonts found in '%s'", nbfound, dirname)
- end
-
- for _,file in next, list do
- file = path_normalize(file)
- if trace_loading then
- logs.report("loading font: %s", file)
- end
- load_font(file, fontnames, newfontnames, texmf)
- end
-end
-
-local function scan_texmf_fonts(fontnames, newfontnames)
- --[[
- This function scans all fonts in the texmf tree, through kpathsea
- variables OPENTYPEFONTS and TTFONTS of texmf.cnf
- ]]
- if expandpath("$OSFONTDIR"):is_empty() then
- logs.info("Scanning TEXMF fonts...")
- else
- logs.info("Scanning TEXMF and OS fonts...")
- end
- local fontdirs = expandpath("$OPENTYPEFONTS"):gsub("^%.", "")
- fontdirs = fontdirs .. expandpath("$TTFONTS"):gsub("^%.", "")
- if not fontdirs:is_empty() then
- for _,d in next, splitpath(fontdirs) do
- scan_dir(d, fontnames, newfontnames, true)
- end
- end
-end
-
---[[
- For the OS fonts, there are several options:
- - if OSFONTDIR is set (which is the case under Windows by default but
- not on the other OSes), it is scanned at the same time as the texmf tree,
- in scan_texmf_fonts.
- - in addition:
- - under Windows and Mac OSX, we look at a few hardcoded directories
- - under Unix, we parse /etc/fonts/fonts.conf and scan the directories listed in it
-
- This means that if you have fonts in non-standard directories, you need to
- list them in OSFONTDIR if fontconfig cannot find them.
-]]
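-
--- a hypothetical illustration (the path below is made up, not part of this
--- module): a font directory outside both the texmf tree and the directories
--- known to fontconfig can be made visible to the scanner with
---
--- OSFONTDIR=$HOME/extra-fonts
---
--- set in the environment (or via the equivalent entry in texmf.cnf) before
--- the names database is (re)built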
-
-local function read_fonts_conf(path, results, passed_paths)
- --[[
- This function parses /etc/fonts/fonts.conf and returns all the directories it finds.
- The code is minimal; please report any errors it may generate.
- ]]
- local f = io.open(path)
- table.insert(passed_paths, path)
- if not f then
- logs.info("Warning: unable to read "..path.. ", skipping...")
- return results
- end
- local incomments = false
- for line in f:lines() do
- while line and line ~= "" do
- -- spaghetti code... hmmm...
- if incomments then
- local tmp = find(line, '-->')
- if tmp then
- incomments = false
- line = sub(line, tmp+3)
- else
- line = nil
- end
- else
- local tmp = find(line, '<!--')
- local newline = line
- if tmp then
- -- for the analysis, we take everything that is before the
- -- comment sign
- newline = sub(line, 1, tmp-1)
- -- and we loop again with the comment
- incomments = true
- line = sub(line, tmp+4)
- else
- -- if there is no comment start, this line needs no further
- -- analysis and we exit the while loop
- line = nil
- end
- for dir in gmatch(newline, '<dir>([^<]+)</dir>') do
- -- now we need to replace ~ by kpse.expand_path('~')
- if sub(dir, 1, 1) == '~' then
- dir = file.join(kpse.expand_path('~'), sub(dir, 2))
- end
- -- we exclude paths with texmf in them, as they should be
- -- found anyway
- if not find(dir, 'texmf') then
- results[#results+1] = dir
- end
- end
- for include in gmatch(newline, '<include[^<]*>([^<]+)</include>') do
- -- include here can be one of four things: a directory or a file,
- -- given as an absolute or a relative path.
- if sub(include, 1, 1) == '~' then
- include = file.join(kpse.expand_path('~'),sub(include, 2))
- -- First if the path is relative, we make it absolute:
- elseif not lfs.isfile(include) and not lfs.isdir(include) then
- include = file.join(file.dirname(path), include)
- end
- if lfs.isfile(include) and kpse.readable_file(include) and not table.contains(passed_paths, include) then
- -- we exclude paths with texmf in them, as they should
- -- be found otherwise
- read_fonts_conf(include, results, passed_paths)
- elseif lfs.isdir(include) then
- for _,f in next, glob(file.join(include, "*.conf")) do
- if not table.contains(passed_paths, f) then
- read_fonts_conf(f, results, passed_paths)
- end
- end
- end
- end
- end
- end
- end
- f:close()
- return results
-end
-
--- for testing purposes
-names.read_fonts_conf = read_fonts_conf
-
-local function get_os_dirs()
- if os.name == 'macosx' then
- return {
- file.join(kpse.expand_path('~'), "Library/Fonts"),
- "/Library/Fonts",
- "/System/Library/Fonts",
- "/Network/Library/Fonts",
- }
- elseif os.type == "windows" or os.type == "msdos" or os.name == "cygwin" then
- local windir = os.getenv("WINDIR")
- return { file.join(windir, 'Fonts') }
- else
- return read_fonts_conf("/etc/fonts/fonts.conf", {}, {})
- end
-end
-
-local function scan_os_fonts(fontnames, newfontnames)
- --[[
- This function scans the OS fonts through
- - fontcache for Unix (reads the fonts.conf file and scans the directories)
- - a static set of directories for Windows and MacOSX
- ]]
- logs.info("Scanning OS fonts...")
- if trace_search then
- logs.info("Searching in static system directories...")
- end
- for _,d in next, get_os_dirs() do
- scan_dir(d, fontnames, newfontnames, false)
- end
-end
-
-local function update_names(fontnames, force)
- --[[
- The main function, scans everything
- - fontnames is the final table to return
- - force is whether we rebuild it from scratch or not
- ]]
- logs.info("Updating the font names database:")
-
- if force then
- fontnames = fontnames_init()
- else
- if not fontnames then
- fontnames = names.load()
- end
- if fontnames.version ~= names.version then
- fontnames = fontnames_init()
- if trace_search then
- logs.report("No font names database or old one found; "
- .."generating new one")
- end
- end
- end
- local newfontnames = fontnames_init()
- read_blacklist()
- scan_texmf_fonts(fontnames, newfontnames)
- scan_os_fonts(fontnames, newfontnames)
- return newfontnames
-end
-
-local function save_names(fontnames)
- local savepath = names.path.localdir
- if not lfs.isdir(savepath) then
- dir.mkdirs(savepath)
- end
- savepath = file.join(savepath, names.path.basename)
- if file.iswritable(savepath) then
- table.tofile(savepath, fontnames, true)
- logs.info("Font names database saved: %s \n", savepath)
- return savepath
- else
- logs.info("Failed to save names database\n")
- return nil
- end
-end
-
-local function scan_external_dir(dir)
- local old_names, new_names
- if loaded then
- old_names = names.data
- else
- old_names = names.load()
- loaded = true
- end
- new_names = table.copy(old_names)
- scan_dir(dir, old_names, new_names)
- names.data = new_names
-end
-
-names.scan = scan_external_dir
-names.load = load_names
-names.update = update_names
-names.save = save_names
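-
--- a minimal sketch of a forced rebuild using these entry points (hypothetical
--- call site; roughly what the external database update tool does):
---
--- local db = fonts.names.update(fonts.names.load(), true) -- force a rescan
--- fonts.names.save(db)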
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ota.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ota.lua
deleted file mode 100644
index 0e5b5554265..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ota.lua
+++ /dev/null
@@ -1,287 +0,0 @@
-if not modules then modules = { } end modules ['font-ota'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this might become scrp-*.lua
-
-local type, tostring, match, format, concat = type, tostring, string.match, string.format, table.concat
-
-if not trackers then trackers = { register = function() end } end
-
-local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-local trace_cjk = false trackers.register("cjk.injections", function(v) trace_cjk = v end)
-
-trackers.register("cjk.analyzing","otf.analyzing")
-
-fonts = fonts or { }
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.initializers = fonts.analyzers.initializers or { node = { otf = { } } }
-fonts.analyzers.methods = fonts.analyzers.methods or { node = { otf = { } } }
-
-local otf = fonts.otf
-local tfm = fonts.tfm
-
-local initializers = fonts.analyzers.initializers
-local methods = fonts.analyzers.methods
-
-local glyph = node.id('glyph')
-local glue = node.id('glue')
-local penalty = node.id('penalty')
-
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-
-local fontdata = fonts.ids
-local state = attributes.private('state')
-
-local fcs = (fonts.color and fonts.color.set) or function() end
-local fcr = (fonts.color and fonts.color.reset) or function() end
-
-local a_to_script = otf.a_to_script
-local a_to_language = otf.a_to_language
-
--- in the future we will use language/script attributes instead of the
--- font related value, but then we also need dynamic features, which is
--- somewhat slower; and we also need a chain of them
-
-function fonts.initializers.node.otf.analyze(tfmdata,value,attr)
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
- local action = initializers[script]
- if action then
- if type(action) == "function" then
- return action(tfmdata,value)
- else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
- end
- end
- return nil
-end
-
-function fonts.methods.node.otf.analyze(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language
- if attr and attr > 0 then
- script, language = a_to_script[attr], a_to_language[attr]
- else
- script, language = tfmdata.script, tfmdata.language
- end
- local action = methods[script]
- if action then
- if type(action) == "function" then
- return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
- end
- end
- return head, false
-end
-
-otf.features.register("analyze",true) -- we always analyze
-table.insert(fonts.triggers,"analyze") -- we need a proper function for doing this
-
--- latin
-
-fonts.analyzers.methods.latn = fonts.analyzers.aux.setstate
-
--- this info eventually will go into char-def
-
-local zwnj = 0x200C
-local zwj = 0x200D
-
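--- Arabic joining classes: isol lists the non-joining characters, isol_fina the
--- right-joining ones (isolated or final forms only) and isol_fina_medi_init the
--- dual-joining ones that take all four contextual forms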
-local isol = {
- [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
- [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true, [zwnj] = true,
-}
-
-local isol_fina = {
- [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
- [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
- [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
- [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
- [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
- [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
- [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
- [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
- [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
- [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
- [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
- [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
- [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
- [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
- [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true, [0xFEF5] = true, [0xFEF7] = true,
- [0xFEF9] = true, [0xFEFB] = true,
-}
-
-local isol_fina_medi_init = {
- [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
- [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
- [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
- [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
- [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0640] = true, [0x0641] = true, [0x0642] = true, [0x0643] = true,
- [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
- [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
- [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
- [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
- [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
- [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
- [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
- [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
- [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
- [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
- [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
- [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
- [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
- [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
- [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
- [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
- [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
- [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
- [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
- [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
- [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
- [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
- [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
- [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
- [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
- [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
- [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
- [0x077E] = true, [0x077F] = true, [zwj] = true,
-}
-
-local arab_warned = { }
-
--- todo: gref
-
-local function warning(current,what)
- local char = current.char
- if not arab_warned[char] then
- logs.report("analyze","arab: character %s (U+%04X) has no %s class", char, char, what)
- arab_warned[char] = true
- end
-end
-
-function fonts.analyzers.methods.nocolor(head,font,attr)
- for n in traverse_node_list(head,glyph) do
- if not font or n.font == font then
- fcr(n)
- end
- end
- return head, true
-end
-
-local function finish(first,last)
- if last then
- if first == last then
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then fcs(first,"font:isol") end
- else
- warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then fcr(first) end
- end
- else
- local lc = last.char
- if isol_fina_medi_init[lc] or isol_fina[lc] then -- why isol here ?
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- set_attribute(last,state,3) -- fina
- if trace_analyzing then fcs(last,"font:fina") end
- else
- warning(last,"fina")
- set_attribute(last,state,0) -- error
- if trace_analyzing then fcr(last) end
- end
- end
- first, last = nil, nil
- elseif first then
- -- first and last are normally set together, so we should never come here
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- set_attribute(first,state,4) -- isol
- if trace_analyzing then fcs(first,"font:isol") end
- else
- warning(first,"isol")
- set_attribute(first,state,0) -- error
- if trace_analyzing then fcr(first) end
- end
- first = nil
- end
- return first, last
-end
-
-function fonts.analyzers.methods.arab(head,font,attr) -- maybe make a special version with no trace
- local tfmdata = fontdata[font]
- local marks = tfmdata.marks
- local first, last, current, done = nil, nil, head, false
- while current do
- if current.id == glyph and current.subtype<256 and current.font == font and not has_attribute(current,state) then
- done = true
- local char = current.char
- if marks[char] then
- set_attribute(current,state,5) -- mark
- if trace_analyzing then fcs(current,"font:mark") end
- elseif isol[char] then -- can be zwj or zwnj too
- first, last = finish(first,last)
- set_attribute(current,state,4) -- isol
- if trace_analyzing then fcs(current,"font:isol") end
- first, last = nil, nil
- elseif not first then
- if isol_fina_medi_init[char] then
- set_attribute(current,state,1) -- init
- if trace_analyzing then fcs(current,"font:init") end
- first, last = first or current, current
- elseif isol_fina[char] then
- set_attribute(current,state,4) -- isol
- if trace_analyzing then fcs(current,"font:isol") end
- first, last = nil, nil
- else -- no arab
- first, last = finish(first,last)
- end
- elseif isol_fina_medi_init[char] then
- first, last = first or current, current
- set_attribute(current,state,2) -- medi
- if trace_analyzing then fcs(current,"font:medi") end
- elseif isol_fina[char] then
- if not has_attribute(last,state,1) then
- -- tricky, we need to check what last may be !
- set_attribute(last,state,2) -- medi
- if trace_analyzing then fcs(last,"font:medi") end
- end
- set_attribute(current,state,3) -- fina
- if trace_analyzing then fcs(current,"font:fina") end
- first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then
- if trace_analyzing then fcs(current,"font:rest") end
- first, last = finish(first,last)
- else --no
- first, last = finish(first,last)
- end
- else
- first, last = finish(first,last)
- end
- current = current.next
- end
- first, last = finish(first,last)
- return head, done
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otb.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otb.lua
deleted file mode 100644
index e0528a4e466..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otb.lua
+++ /dev/null
@@ -1,373 +0,0 @@
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-
-local otf = fonts.otf
-local tfm = fonts.tfm
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local wildcard = "*"
-local default = "dflt"
-
-local split_at_space = lpeg.Ct(lpeg.splitat(" ")) -- no trailing or multiple spaces anyway
-
-local pcache, fcache = { }, { } -- could be weak
-
-local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return format("U+%04X (%s)",n,name)
- else
- return format("U+%04X",n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- num[i] = format("U+%04X",ni)
- nam[i] = descriptions[ni].name or "?"
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
- else
- return "?"
- end
-end
-
-local function cref(kind,lookupname)
- if lookupname then
- return format("feature %s, lookup %s",kind,lookupname)
- else
- return format("feature %s",kind)
- end
-end
-
-local function resolve_ligatures(tfmdata,ligatures,kind)
- kind = kind or "unknown"
- local unicodes = tfmdata.unicodes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local changed = tfmdata.changed
- local done = { }
- while true do
- local ok = false
- for k,v in next, ligatures do
- local lig = v[1]
- if not done[lig] then
- local ligs = lpegmatch(split_at_space,lig)
- if #ligs == 2 then
- local uc = v[2]
- local c, f, s = characters[uc], ligs[1], ligs[2]
- local uft, ust = unicodes[f] or 0, unicodes[s] or 0
- if not uft or not ust then
- logs.report("define otf","%s: unicode problem with base ligature %s = %s + %s",cref(kind),gref(descriptions,uc),gref(descriptions,uft),gref(descriptions,ust))
- -- some kind of error
- else
- if type(uft) == "number" then uft = { uft } end
- if type(ust) == "number" then ust = { ust } end
- for ufi=1,#uft do
- local uf = uft[ufi]
- for usi=1,#ust do
- local us = ust[usi]
- if changed[uf] or changed[us] then
- if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s ignored",cref(kind),gref(descriptions,uf),gref(descriptions,us))
- end
- else
- local first, second = characters[uf], us
- if first and second then
- local t = first.ligatures
- if not t then
- t = { }
- first.ligatures = t
- end
- if type(uc) == "number" then
- t[second] = { type = 0, char = uc }
- else
- t[second] = { type = 0, char = uc[1] } -- can this still happen?
- end
- if trace_baseinit and trace_ligatures then
- logs.report("define otf","%s: base ligature %s + %s => %s",cref(kind),gref(descriptions,uf),gref(descriptions,us),gref(descriptions,uc))
- end
- end
- end
- end
- end
- end
- ok, done[lig] = true, descriptions[uc].name
- end
- end
- end
- if ok then
- -- done maps "a b c" to "a_b_c" while the already resolved ligatures look like "a b" = 123;
- -- here we add the extras (f i i = fi + i and the like)
- --
- -- we could use a hash for fnc and pattern
- --
- -- this might be interfering !
- for d,n in next, done do
- local pattern = pcache[d] if not pattern then pattern = "^(" .. d .. ") " pcache[d] = pattern end
- local fnc = fcache[n] if not fnc then fnc = function() return n .. " " end fcache[n] = fnc end
- for k,v in next, ligatures do
- v[1] = gsub(v[1],pattern,fnc)
- end
- end
- else
- break
- end
- end
-end
-
-local splitter = lpeg.splitat(" ")
-
-local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features
- if value then
- local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
- if validlookups then
- local ligatures = { }
- local unicodes = tfmdata.unicodes -- names to unicodes
- local indices = tfmdata.indices
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local changed = tfmdata.changed
- --
- local actions = {
- substitution = function(p,lookup,k,glyph,unicode)
- local pv = p[2] -- p.variant
- if pv then
- local upv = unicodes[pv]
- if upv then
- if type(upv) == "table" then
- upv = upv[1]
- end
- if characters[upv] then
- if trace_baseinit and trace_singles then
- logs.report("define otf","%s: base substitution %s => %s",cref(kind,lookup),gref(descriptions,k),gref(descriptions,upv))
- end
- changed[k] = upv
- end
- end
- end
- end,
- alternate = function(p,lookup,k,glyph,unicode)
- local pc = p[2] -- p.components
- if pc then
- -- a bit optimized ugliness
- if value == 1 then
- pc = lpegmatch(splitter,pc)
- elseif value == 2 then
- local a, b = lpegmatch(splitter,pc)
- pc = b or a
- else
- pc = { lpegmatch(splitter,pc) }
- pc = pc[value] or pc[#pc]
- end
- if pc then
- local upc = unicodes[pc]
- if upc then
- if type(upc) == "table" then
- upc = upc[1]
- end
- if characters[upc] then
- if trace_baseinit and trace_alternatives then
- logs.report("define otf","%s: base alternate %s %s => %s",cref(kind,lookup),tostring(value),gref(descriptions,k),gref(descriptions,upc))
- end
- changed[k] = upc
- end
- end
- end
- end
- end,
- ligature = function(p,lookup,k,glyph,unicode)
- local pc = p[2]
- if pc then
- if trace_baseinit and trace_ligatures then
- local upc = { lpegmatch(splitter,pc) }
- for i=1,#upc do upc[i] = unicodes[upc[i]] end
- -- we assume that it's not a table
- logs.report("define otf","%s: base ligature %s => %s",cref(kind,lookup),gref(descriptions,upc),gref(descriptions,k))
- end
- ligatures[#ligatures+1] = { pc, k }
- end
- end,
- }
- --
- for k,c in next, characters do
- local glyph = descriptions[k]
- local lookups = glyph.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local p = lookups[lookup]
- if p then
- local a = actions[p[1]]
- if a then
- a(p,lookup,k,glyph,unicode)
- end
- end
- end
- end
- local lookups = glyph.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local ps = lookups[lookup]
- if ps then
- for i=1,#ps do
- local p = ps[i]
- local a = actions[p[1]]
- if a then
- a(p,lookup,k,glyph,unicode)
- end
- end
- end
- end
- end
- end
- resolve_ligatures(tfmdata,ligatures,kind)
- end
- else
- tfmdata.ligatures = tfmdata.ligatures or { } -- left over from what ?
- end
-end
-
-local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
- if value then
- local otfdata = tfmdata.shared.otfdata
- local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
- if validlookups then
- local unicodes = tfmdata.unicodes -- names to unicodes
- local indices = tfmdata.indices
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local sharedkerns = { }
- for u, chr in next, characters do
- local d = descriptions[u]
- if d then
- local dk = d.mykerns -- shared
- if dk then
- local s = sharedkerns[dk]
- if s == false then
- -- skip
- elseif s then
- chr.kerns = s
- else
- local t, done = chr.kerns or { }, false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = dk[lookup]
- if kerns then
- for k, v in next, kerns do
- if v ~= 0 and not t[k] then -- maybe no 0 test here
- t[k], done = v, true
- if trace_baseinit and trace_kerns then
- logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[dk] = t
- chr.kerns = t -- no empty assignments
- else
- sharedkerns[dk] = false
- end
- end
- end
- end
- end
- end
- end
-end
-
--- In principle we could register each feature individually, which is
--- what we did in earlier versions. However, after the rewrite it
--- made more sense to collect them in an overall features initializer,
--- just as with the node variant. There it is needed because we have
--- to do complete mixed runs and not run featurewise (as we did before).
-
-local supported_gsub = {
- 'liga', 'dlig', 'rlig', 'hlig',
- 'pnum', 'onum', 'tnum', 'lnum',
- 'zero',
- 'smcp', 'cpsp', 'c2sc', 'ornm', 'aalt',
- 'hwid', 'fwid',
- 'ssty', 'rtlm', -- math
--- 'tlig', 'trep',
-}
-
-local supported_gpos = {
- 'kern'
-}
-
-function otf.features.register_base_substitution(tag)
- supported_gsub[#supported_gsub+1] = tag
-end
-function otf.features.register_base_kern(tag)
- supported_gpos[#supported_gpos+1] = tag
-end
-
-local basehash, basehashes = { }, 1
-
-function fonts.initializers.base.otf.features(tfmdata,value)
- if true then -- value then
- -- not shared
- local t = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- if features then
- local h = { }
- for f=1,#supported_gsub do
- local feature = supported_gsub[f]
- local value = features[feature]
- prepare_base_substitutions(tfmdata,feature,value)
- if value then
- h[#h+1] = feature .. "=" .. tostring(value)
- end
- end
- for f=1,#supported_gpos do
- local feature = supported_gpos[f]
- local value = features[feature]
- prepare_base_kerns(tfmdata,feature,features[feature])
- if value then
- h[#h+1] = feature .. "=" .. tostring(value)
- end
- end
- local hash = concat(h," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- -- We need to make sure that luatex sees the difference between
- -- base fonts that have different glyphs in the same slots but share
- -- the same fullname (or filename). LuaTeX will merge fonts
- -- eventually (and subset later on). If needed we can use a more
- -- verbose name as long as we don't use <()<>[]{}/%> and the length
- -- is < 128.
- tfmdata.fullname = tfmdata.fullname .. "-" .. base -- tfmdata.psname is the original
- --~ logs.report("otf define","fullname base hash: '%s', featureset '%s'",tfmdata.fullname,hash)
- end
- if trace_preparing then
- logs.report("otf define","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
- end
- end
-end
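The initializer above folds the enabled gsub/gpos features into a string, maps that string to a small integer, and appends the integer to the fullname so that LuaTeX keeps differently prepared instances of the same font apart. The following standalone sketch of that hashing step is illustrative only (it is not part of the original file; feature names and values are examples):

local basehash, basehashes = { }, 1

local function instance_suffix(features, supported)
    -- collect "feature=value" pairs for the enabled features, in list order
    local h = { }
    for f = 1, #supported do
        local feature = supported[f]
        local value   = features[feature]
        if value then
            h[#h+1] = feature .. "=" .. tostring(value)
        end
    end
    local hash = table.concat(h, " ")
    -- identical feature sets share one number, new sets get the next one
    local base = basehash[hash]
    if not base then
        basehashes     = basehashes + 1
        base           = basehashes
        basehash[hash] = base
    end
    return base
end

print(instance_suffix({ liga = true, kern = true }, { "liga", "kern" })) -- 2
print(instance_suffix({ liga = true             }, { "liga", "kern" })) -- 3
print(instance_suffix({ liga = true, kern = true }, { "liga", "kern" })) -- 2 again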
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otc.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otc.lua
deleted file mode 100644
index 35555ed05f6..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otc.lua
+++ /dev/null
@@ -1,217 +0,0 @@
-if not modules then modules = { } end modules ['font-otc'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (context)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, insert = string.format, table.insert
-local type, next = type, next
-
--- we assume that the other otf stuff is loaded already
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local otf = fonts.otf
-local tfm = fonts.tfm
-
--- instead of "script = "DFLT", langs = { 'dflt' }" we now use wildcards (we used to
--- have always); some day we can write a "force always when true" trick for other
--- features as well
---
--- we could have a tnum variant as well
-
-local extra_lists = {
- tlig = {
- {
- endash = "hyphen hyphen",
- emdash = "hyphen hyphen hyphen",
- quotedblleft = "quoteleft quoteleft",
- quotedblright = "quoteright quoteright",
- quotedblleft = "grave grave",
- quotedblright = "quotesingle quotesingle",
- quotedblbase = "comma comma",
- exclamdown = "exclam grave",
- questiondown = "question grave",
- guillemotleft = "less less",
- guillemotright= "greater greater",
- },
- },
- trep = {
- {
- [0x0022] = 0x201D,
- [0x0027] = 0x2019,
- [0x0060] = 0x2018,
- },
- },
- anum = {
- { -- arabic
- [0x0030] = 0x0660,
- [0x0031] = 0x0661,
- [0x0032] = 0x0662,
- [0x0033] = 0x0663,
- [0x0034] = 0x0664,
- [0x0035] = 0x0665,
- [0x0036] = 0x0666,
- [0x0037] = 0x0667,
- [0x0038] = 0x0668,
- [0x0039] = 0x0669,
- },
- { -- persian
- [0x0030] = 0x06F0,
- [0x0031] = 0x06F1,
- [0x0032] = 0x06F2,
- [0x0033] = 0x06F3,
- [0x0034] = 0x06F4,
- [0x0035] = 0x06F5,
- [0x0036] = 0x06F6,
- [0x0037] = 0x06F7,
- [0x0038] = 0x06F8,
- [0x0039] = 0x06F9,
- },
- },
-}
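Each entry in extra_lists maps the name of a ligature (or replacement) glyph to the space separated component names that should produce it; the enhancer further down turns such entries into gsub lookups. A tiny plain-Lua illustration of how an entry is read (print formatting is for display only, and pairs() order is unspecified):

local tlig = {
    endash = "hyphen hyphen",
    emdash = "hyphen hyphen hyphen",
}

for ligature, components in pairs(tlig) do
    print(string.format("%-6s <- %s", ligature, components))
end
-- endash <- hyphen hyphen
-- emdash <- hyphen hyphen hyphen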
-
-local extra_features = { -- maybe just 1..n so that we prescribe order
- tlig = {
- {
- features = { { scripts = { { script = "*", langs = { "*" }, } }, tag = "tlig", comment = "added by mkiv" }, },
- name = "ctx_tlig_1",
- subtables = { { name = "ctx_tlig_1_s" } },
- type = "gsub_ligature",
- flags = { },
- },
- },
- trep = {
- {
- features = { { scripts = { { script = "*", langs = { "*" }, } }, tag = "trep", comment = "added by mkiv" }, },
- name = "ctx_trep_1",
- subtables = { { name = "ctx_trep_1_s" } },
- type = "gsub_single",
- flags = { },
- },
- },
- anum = {
- {
- features = { { scripts = { { script = "arab", langs = { "dflt", "ARA" }, } }, tag = "anum", comment = "added by mkiv" }, },
- name = "ctx_anum_1",
- subtables = { { name = "ctx_anum_1_s" } },
- type = "gsub_single",
- flags = { },
- },
- {
- features = { { scripts = { { script = "arab", langs = { "FAR" }, } }, tag = "anum", comment = "added by mkiv" }, },
- name = "ctx_anum_2",
- subtables = { { name = "ctx_anum_2_s" } },
- type = "gsub_single",
- flags = { },
- },
- },
-}
-
-fonts.otf.enhancers["add some missing characters"] = function(data,filename)
- -- todo
-end
-
-fonts.otf.enhancers["enrich with features"] = function(data,filename)
- -- could be done elsewhere (true can be #)
- local used = { }
- for i=1,#otf.glists do
- local g = data[otf.glists[i]]
- if g then
- for i=1,#g do
- local f = g[i].features
- if f then
- for i=1,#f do
- local t = f[i].tag
- if t then used[t] = true end
- end
- end
- end
- end
- end
- --
- local glyphs = data.glyphs
- local indices = data.map.map
- data.gsub = data.gsub or { }
- for kind, specifications in next, extra_features do
- if not used[kind] then
- local done = 0
- for s=1,#specifications do
- local added = false
- local specification = specifications[s]
- local list = extra_lists[kind][s]
- local name = specification.name .. "_s"
- if specification.type == "gsub_ligature" then
- for unicode, index in next, indices do
- local glyph = glyphs[index]
- local ligature = list[glyph.name]
- if ligature then
- local o = glyph.lookups or { }
- -- o[name] = { "ligature", ligature, glyph.name }
- o[name] = {
- {
- ["type"] = "ligature",
- ["specification"] = {
- char = glyph.name,
- components = ligature,
- }
- }
- }
- glyph.lookups, done, added = o, done+1, true
- end
- end
- elseif specification.type == "gsub_single" then
- for unicode, index in next, indices do
- local glyph = glyphs[index]
- local r = list[unicode]
- if r then
- local replacement = indices[r]
- if replacement and glyphs[replacement] then
- local o = glyph.lookups or { }
- -- o[name] = { { "substitution", glyphs[replacement].name } }
- o[name] = {
- {
- ["type"] = "substitution",
- ["specification"] = {
- variant = glyphs[replacement].name,
- }
- }
- }
- glyph.lookups, done, added = o, done+1, true
- end
- end
- end
- end
- if added then
- insert(data.gsub,s,table.fastcopy(specification)) -- right order
- end
- end
- if done > 0 then
- if trace_loading then
- logs.report("load otf","enhance: registering %s feature (%s glyphs affected)",kind,done)
- end
- end
- end
- end
-end
-
-otf.tables.features['tlig'] = 'TeX Ligatures'
-otf.tables.features['trep'] = 'TeX Replacements'
-otf.tables.features['anum'] = 'Arabic Digits'
-
-otf.features.register_base_substitution('tlig')
-otf.features.register_base_substitution('trep')
-otf.features.register_base_substitution('anum')
-
--- the functionality is defined elsewhere
-
-fonts.initializers.base.otf.equaldigits = fonts.initializers.common.equaldigits
-fonts.initializers.node.otf.equaldigits = fonts.initializers.common.equaldigits
-
-fonts.initializers.base.otf.lineheight = fonts.initializers.common.lineheight
-fonts.initializers.node.otf.lineheight = fonts.initializers.common.lineheight
-
-fonts.initializers.base.otf.compose = fonts.initializers.common.compose
-fonts.initializers.node.otf.compose = fonts.initializers.common.compose
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otd.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otd.lua
deleted file mode 100644
index 46899fd7e7f..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otd.lua
+++ /dev/null
@@ -1,79 +0,0 @@
-if not modules then modules = { } end modules ['font-otd'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
-
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
-
-local otf = fonts.otf
-local fontdata = fonts.ids
-
-otf.features = otf.features or { }
-otf.features.default = otf.features.default or { }
-
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
-
-local a_to_script = { } otf.a_to_script = a_to_script
-local a_to_language = { } otf.a_to_language = a_to_language
-
-function otf.set_dynamics(font,dynamics,attribute)
- local features = context_setups[context_numbers[attribute]] -- can be moved to caller
- if features then
- local script = features.script or 'dflt'
- local language = features.language or 'dflt'
- local ds = dynamics[script]
- if not ds then
- ds = { }
- dynamics[script] = ds
- end
- local dsl = ds[language]
- if not dsl then
- dsl = { }
- ds[language] = dsl
- end
- local dsla = dsl[attribute]
- if dsla then
- -- if trace_dynamics then
- -- logs.report("otf define","using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language)
- -- end
- return dsla
- else
- local tfmdata = fontdata[font]
- a_to_script [attribute] = script
- a_to_language[attribute] = language
- -- we need to save some values
- local saved = {
- script = tfmdata.script,
- language = tfmdata.language,
- mode = tfmdata.mode,
- features = tfmdata.shared.features
- }
- tfmdata.mode = "node"
- tfmdata.language = language
- tfmdata.script = script
- tfmdata.shared.features = { }
- -- end of save
- local set = fonts.define.check(features,otf.features.default)
- dsla = otf.set_features(tfmdata,set)
- if trace_dynamics then
- logs.report("otf define","setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set))
- end
- -- we need to restore some values
- tfmdata.script = saved.script
- tfmdata.language = saved.language
- tfmdata.mode = saved.mode
- tfmdata.shared.features = saved.features
- -- end of restore
- dynamics[script][language][attribute] = dsla -- cache
- return dsla
- end
- end
- return nil -- { }
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua
deleted file mode 100644
index ce540865574..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otf.lua
+++ /dev/null
@@ -1,1787 +0,0 @@
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utf = unicode.utf8
-
-local concat, utfbyte = table.concat, utf.byte
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local getn = table.getn
-local lpegmatch = lpeg.match
-
-local trace_private = false trackers.register("otf.private", function(v) trace_private = v end)
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local trace_features = false trackers.register("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false trackers.register("otf.sequences", function(v) trace_sequences = v end)
-local trace_math = false trackers.register("otf.math", function(v) trace_math = v end)
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-
---~ trackers.enable("otf.loading")
-
---[[ldx--
-<p>The fontforge table has organized lookups in a certain way. A first implementation
-of this code was organized featurewise: information related to features was
-collected and processing boiled down to a run over the features. The current
-implementation honors the order in the main feature table. Since we can reorder this
-table as we want, we can eventually support several models of processing. We kept
-the static as well as dynamic feature processing, because it had proved to be
-rather useful. The three loop variants we had before have been discarded but will
-reappear at some time.</p>
-
-<itemize>
-<item>we loop over all lookups</item>
-<item>for each lookup we do a run over the list of glyphs</item>
-<item>but we only process them for features that are enabled</item>
-<item>if we're dealing with a contextual lookup, we loop over all contexts</item>
-<item>in that loop we quit at a match and then process the list of sublookups</item>
-<item>we always continue after the match</item>
-</itemize>
-
-<p>In <l n='context'/> we do this for each font that is used in a list, so in
-practice we have quite some nested loops.</p>
-
-<p>We process the whole list and then consult the glyph nodes. An alternative approach
-is to collect strings of characters using the same font including spaces (because some
-lookups involve spaces). However, we then need to reconstruct the list which is no fun.
-Also, we need to carry quite some information, like attributes, so eventually we don't
-gain much (if we gain something at all).</p>
-
-<p>Another consideration has been to operate on sublists (subhead, subtail) but again
-this would complicate matters as we then need to keep track of a changing subhead
-and subtail. On the other hand, this might save some runtime. The number of changes
-involved is not that large. This only makes sense when we have many fonts in a list
-and don't change fonts too frequently.</p>
---ldx]]--
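The loop order itemized above can be sketched as follows; this is a structural illustration only, not code from the module, and all names are invented:

local function process(lookups, glyphrun, enabled)
    local handled = 0
    for l = 1, #lookups do                  -- we loop over all lookups
        local lookup = lookups[l]
        if enabled[lookup.feature] then     -- only enabled features are processed
            for g = 1, #glyphrun do         -- per lookup we run over the glyph list
                -- a contextual lookup would loop over its contexts here,
                -- process the sublookups of the first match and continue
                handled = handled + 1
            end
        end
    end
    return handled
end

print(process(
    { { feature = "liga" }, { feature = "smcp" } },
    { "f", "i" },
    { liga = true }
)) -- 2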
-
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
-fonts.tfm = fonts.tfm or { }
-
-local otf = fonts.otf
-local tfm = fonts.tfm
-
-local fontdata = fonts.ids
-
-otf.tables = otf.tables or { } -- defined in font-ott.lua
-otf.meanings = otf.meanings or { } -- defined in font-ott.lua
-otf.tables.features = otf.tables.features or { } -- defined in font-ott.lua
-otf.tables.languages = otf.tables.languages or { } -- defined in font-ott.lua
-otf.tables.scripts = otf.tables.scripts or { } -- defined in font-ott.lua
-
-otf.features = otf.features or { }
-otf.features.list = otf.features.list or { }
-otf.features.default = otf.features.default or { }
-
-otf.enhancers = otf.enhancers or { }
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.653 -- beware: also sync font-mis.lua
-otf.pack = true -- beware: also sync font-mis.lua
-otf.syncspace = true
-otf.notdef = false
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-otf.cleanup_aat = false -- only context
-
-local wildcard = "*"
-local default = "dflt"
-
---[[ldx--
-<p>We start with a lot of tables and related functions.</p>
---ldx]]--
-
-otf.tables.global_fields = table.tohash {
- "lookups",
- "glyphs",
- "subfonts",
- "luatex",
- "pfminfo",
- "cidinfo",
- "tables",
- "names",
- "unicodes",
- "names",
---~ "math",
- "anchor_classes",
- "kern_classes",
- "gpos",
- "gsub"
-}
-
-otf.tables.valid_fields = {
- "anchor_classes",
- "ascent",
- "cache_version",
- "cidinfo",
- "copyright",
- "creationtime",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- "glyphs",
- "hasvmetrics",
- "head_optimized_for_cleartype",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- "kerns",
- "lookups",
- -- "luatex",
- "macstyle",
- "modificationtime",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- "private",
- "serifcheck",
- "sfd_version",
- -- "size",
- "strokedfont",
- "strokewidth",
- "subfonts",
- "table_version",
- -- "tables",
- -- "ttf_tab_saved",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- "validation_state",
- "verbose",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
- "xuid",
-}
-
---[[ldx--
-<p>Here we go.</p>
---ldx]]--
-
-local function load_featurefile(ff,featurefile)
- if featurefile then
- featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea')
- if featurefile and featurefile ~= "" then
- if trace_loading then
- logs.report("load otf", "featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(ff, featurefile)
- end
- end
-end
-
-function otf.enhance(name,data,filename,verbose)
- local enhancer = otf.enhancers[name]
- if enhancer then
- if (verbose ~= nil and verbose) or trace_loading then
- logs.report("load otf","enhance: %s (%s)",name,filename)
- end
- enhancer(data,filename)
- end
-end
-
-local enhancers = {
- -- pack and unpack are handled separately; they might even be moved
- -- away from the enhancers namespace
- "patch bugs",
- "merge cid fonts", "prepare unicode", "cleanup ttf tables", "compact glyphs", "reverse coverage",
- "cleanup aat", "enrich with features", "add some missing characters",
- "reorganize mark classes",
- "reorganize kerns", -- moved here
- "flatten glyph lookups", "flatten anchor tables", "flatten feature tables",
- "simplify glyph lookups", -- some saving
- "prepare luatex tables",
- "analyse features", "rehash features",
- "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables",
- "check italic correction","check math",
- "share widths",
- "strip not needed data",
- "migrate metadata",
- "check math parameters",
-}
-
-function otf.load(filename,format,sub,featurefile)
- local name = file.basename(file.removesuffix(filename))
- local attr = lfs.attributes(filename)
- local size, time = attr.size or 0, attr.modification or 0
- if featurefile then
- local fattr = lfs.attributes(featurefile)
- local fsize, ftime = fattr and fattr.size or 0, fattr and fattr.modification or 0
- name = name .. "@" .. file.removesuffix(file.basename(featurefile)) .. ftime .. fsize
- end
- if sub == "" then sub = false end
- local hash = name
- if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local data = containers.read(otf.cache,hash)
- if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then
- logs.report("load otf","loading: %s (hash: %s)",filename,hash)
- local ff, messages
- if sub then
- ff, messages = fontloader.open(filename,sub)
- else
- ff, messages = fontloader.open(filename)
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- logs.report("load otf","warning: %s",messages)
- else
- for m=1,#messages do
- logs.report("load otf","warning: %s",tostring(messages[m]))
- end
- end
- else
- logs.report("load otf","font loaded okay")
- end
- if ff then
- load_featurefile(ff,featurefile)
- data = fontloader.to_table(ff)
- fontloader.close(ff)
- if data then
- logs.report("load otf","file size: %s", size)
- logs.report("load otf","enhancing ...")
- for e=1,#enhancers do
- otf.enhance(enhancers[e],data,filename)
- io.flush() -- we want instant messages
- end
- if otf.pack and not fonts.verbose then
- otf.enhance("pack",data,filename)
- end
- data.size = size
- data.time = time
- data.verbose = fonts.verbose
- logs.report("load otf","saving in cache: %s",filename)
- data = containers.write(otf.cache, hash, data)
- collectgarbage("collect")
- data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
- collectgarbage("collect")
- else
- logs.report("load otf","loading failed (table conversion error)")
- end
- else
- logs.report("load otf","loading failed (file read error)")
- end
- end
- if data then
- if trace_defining then
- logs.report("define font","loading from cache: %s",hash)
- end
- otf.enhance("unpack",data,filename,false) -- no message here
- otf.add_dimensions(data)
- if trace_sequences then
- otf.show_feature_order(data,filename)
- end
- end
- return data
-end
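The cache key used above is derived from the font file name, an optional feature file (including its size and timestamp, so that edits invalidate the cache) and an optional subfont. A condensed sketch of that derivation, assuming the same helper libraries (file, lfs, containers) that the module itself relies on; the example call and its result are illustrative:

local function cache_key(fontfile, featurefile, sub)
    local name = file.basename(file.removesuffix(fontfile))
    if featurefile then
        local fattr = lfs.attributes(featurefile)
        local fsize = fattr and fattr.size or 0
        local ftime = fattr and fattr.modification or 0
        name = name .. "@" .. file.removesuffix(file.basename(featurefile)) .. ftime .. fsize
    end
    if sub and sub ~= "" then
        name = name .. "-" .. sub
    end
    return containers.cleanname(name)
end

-- e.g. cache_key("texgyrepagella-regular.otf", nil, false)
--      would yield something like "texgyrepagella-regular" after cleanup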
-
-function otf.add_dimensions(data)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local force = otf.notdef
- local luatex = data.luatex
- local defaultwidth = luatex.defaultwidth or 0
- local defaultheight = luatex.defaultheight or 0
- local defaultdepth = luatex.defaultdepth or 0
- for _, d in next, data.glyphs do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif wd ~= 0 and d.class == "mark" then
- d.width = -wd
- end
- if force and not d.name then
- d.name = ".notdef"
- end
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- no need to set it and no negative heights, nil == 0
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- no need to set it and no negative depths, nil == 0
- else
- d.depth = dp
- end
- end
- end
- end
-end
-
-function otf.show_feature_order(otfdata,filename)
- local sequences = otfdata.luatex.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- logs.report("otf check","font %s has %s sequences",filename,#sequences)
- logs.report("otf check"," ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
- if trace_loading then
- logs.report("otf check","%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
- end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
- end
- if trace_loading then
- logs.report("otf check"," %s: %s",feature,concat(tt," "))
- end
- end
- end
- end
- if trace_loading then
- logs.report("otf check","\n")
- end
- elseif trace_loading then
- logs.report("otf check","font %s has no sequences",filename)
- end
-end
-
--- todo: normalize, design_size => designsize
-
-otf.enhancers["reorganize mark classes"] = function(data,filename)
- if data.mark_classes then
- local unicodes = data.luatex.unicodes
- local reverse = { }
- for name, class in next, data.mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- local us = unicodes[s]
- if type(us) == "table" then
- for u=1,#us do
- t[us[u]] = true
- end
- else
- t[us] = true
- end
- end
- reverse[name] = t
- end
- data.luatex.markclasses = reverse
- data.mark_classes = nil
- end
-end
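A self-contained illustration of the transformation performed by this enhancer: a space separated mark class string becomes a set keyed by unicode. The glyph names and code points below are examples, not taken from a real font:

local unicodes     = { acutecomb = 0x0301, gravecomb = 0x0300 }
local mark_classes = { top = "acutecomb gravecomb" }

local markclasses = { }
for name, class in pairs(mark_classes) do
    local t = { }
    for s in string.gmatch(class, "[^ ]+") do
        t[unicodes[s]] = true
    end
    markclasses[name] = t
end

assert(markclasses.top[0x0301] and markclasses.top[0x0300])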
-
-otf.enhancers["prepare luatex tables"] = function(data,filename)
- data.luatex = data.luatex or { }
- local luatex = data.luatex
- luatex.filename = filename
- luatex.version = otf.version
- luatex.creator = "context mkiv"
-end
-
-otf.enhancers["cleanup aat"] = function(data,filename)
- if otf.cleanup_aat then
- end
-end
-
-local function analyze_features(g, features)
- if g then
- local t, done = { }, { }
- for k=1,#g do
- local f = features or g[k].features
- if f then
- for k=1,#f do
- -- scripts and tag
- local tag = f[k].tag
- if not done[tag] then
- t[#t+1] = tag
- done[tag] = true
- end
- end
- end
- end
- if #t > 0 then
- return t
- end
- end
- return nil
-end
-
-otf.enhancers["analyse features"] = function(data,filename)
- -- local luatex = data.luatex
- -- luatex.gposfeatures = analyze_features(data.gpos)
- -- luatex.gsubfeatures = analyze_features(data.gsub)
-end
-
-otf.enhancers["rehash features"] = function(data,filename)
- local features = { }
- data.luatex.features = features
- for k, what in next, otf.glists do
- local dw = data[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
- local d = dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
- local dscripts = df.scripts
- for script, languages in next, dscripts do
- script = strip(lower(script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
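The resulting data.luatex.features table is keyed per gsub/gpos list, then per lowercased feature tag, script and language. A hand-written example of the shape, with invented scripts and languages:

local features = {
    gsub = {
        liga = { latn = { dflt = true, nld = true } },
        smcp = { latn = { dflt = true } },
    },
    gpos = {
        kern = { latn = { dflt = true }, cyrl = { dflt = true } },
    },
}

assert(features.gsub.liga.latn.dflt)
assert(features.gpos.kern.cyrl.dflt)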
-
-otf.enhancers["analyse anchors"] = function(data,filename)
- local classes = data.anchor_classes
- local luatex = data.luatex
- local anchor_to_lookup, lookup_to_anchor = { }, { }
- luatex.anchor_to_lookup, luatex.lookup_to_anchor = anchor_to_lookup, lookup_to_anchor
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then a = { } anchor_to_lookup[anchor] = a end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if not l then l = { } lookup_to_anchor[lookup] = l end
- l[anchor] = true
- a[lookup] = true
- end
- end
- end
-end
-
-otf.enhancers["analyse marks"] = function(data,filename)
- local glyphs = data.glyphs
- local marks = { }
- data.luatex.marks = marks
- for unicode, index in next, data.luatex.indices do
- local glyph = glyphs[index]
- if glyph.class == "mark" then
- marks[unicode] = true
- end
- end
-end
-
-otf.enhancers["analyse unicodes"] = fonts.map.add_to_unicode
-
-otf.enhancers["analyse subtables"] = function(data,filename)
- data.luatex = data.luatex or { }
- local luatex = data.luatex
- local sequences = { }
- local lookups = { }
- luatex.sequences = sequences
- luatex.lookups = lookups
- for _, g in next, { data.gsub, data.gpos } do
- for k=1,#g do
- local gk = g[k]
- local typ = gk.type
- if typ == "gsub_contextchain" or typ == "gpos_contextchain" then
- gk.chain = 1
- elseif typ == "gsub_reversecontextchain" or typ == "gpos_reversecontextchain" then
- gk.chain = -1
- else
- gk.chain = 0
- end
- local features = gk.features
- if features then
- sequences[#sequences+1] = gk
- -- scripts, tag, ismac
- local t = { }
- for f=1,#features do
- local feature = features[f]
- local hash = { }
- -- only script and langs matter
- for s, languages in next, feature.scripts do
- s = lower(s)
- local h = hash[s]
- if not h then h = { } hash[s] = h end
- for l=1,#languages do
- h[strip(lower(languages[l]))] = true
- end
- end
- t[feature.tag] = hash
- end
- gk.features = t
- else
- lookups[gk.name] = gk
- gk.name = nil
- end
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- local subtable = subtables[s]
- local name = subtable.name
- t[#t+1] = name
- end
- gk.subtables = t
- end
- local flags = gk.flags
- if flags then
- gk.flags = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- if flags.mark_class then
- gk.markclass = luatex.markclasses[flags.mark_class]
- end
- end
- end
- end
-end
-
-otf.enhancers["merge cid fonts"] = function(data,filename)
- -- we can also move the names to data.luatex.names which might
- -- save us some more memory (at the cost of harder tracing)
- if data.subfonts then
- if data.glyphs and next(data.glyphs) then
- logs.report("load otf","replacing existing glyph table due to subfonts")
- end
- local cidinfo = data.cidinfo
- local verbose = fonts.verbose
- if cidinfo.registry then
- local cidmap, cidname = fonts.cid.getmap(cidinfo.registry,cidinfo.ordering,cidinfo.supplement)
- if cidmap then
- cidinfo.usedname = cidmap.usedname
- local glyphs, uni_to_int, int_to_uni, nofnames, nofunicodes = { }, { }, { }, 0, 0
- local unicodes, names = cidmap.unicodes, cidmap.names
- for n, subfont in next, data.subfonts do
- for index, g in next, subfont.glyphs do
- if not next(g) then
- -- dummy entry
- else
- local unicode, name = unicodes[index], names[index]
- g.cidindex = n
- g.boundingbox = g.boundingbox -- or zerobox
- g.name = g.name or name or "unknown"
- if unicode then
- uni_to_int[unicode] = index
- int_to_uni[index] = unicode
- nofunicodes = nofunicodes + 1
- g.unicode = unicode
- elseif name then
- nofnames = nofnames + 1
- g.unicode = -1
- end
- glyphs[index] = g
- end
- end
- subfont.glyphs = nil
- end
- if trace_loading then
- logs.report("load otf","cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- data.glyphs = glyphs
- data.map = data.map or { }
- data.map.map = uni_to_int
- data.map.backmap = int_to_uni
- elseif trace_loading then
- logs.report("load otf","unable to remap cid font, missing cid file for %s",filename)
- end
- elseif trace_loading then
- logs.report("load otf","font %s has no glyphs",filename)
- end
- end
-end
-
-otf.enhancers["prepare unicode"] = function(data,filename)
- local luatex = data.luatex
- if not luatex then luatex = { } data.luatex = luatex end
- local indices, unicodes, multiples, internals = { }, { }, { }, { }
- local glyphs = data.glyphs
- local mapmap = data.map
- if not mapmap then
- logs.report("load otf","no map in %s",filename)
- mapmap = { }
- data.map = { map = mapmap }
- elseif not mapmap.map then
- logs.report("load otf","no unicode map in %s",filename)
- mapmap = { }
- data.map.map = mapmap
- else
- mapmap = mapmap.map
- end
- local criterium = fonts.private
- local private = fonts.private
- for index, glyph in next, glyphs do
- if index > 0 then
- local name = glyph.name
- if name then
- local unicode = glyph.unicode
- if unicode == -1 or unicode >= criterium then
- glyph.unicode = private
- indices[private] = index
- unicodes[name] = private
- internals[index] = true
- if trace_private then
- logs.report("load otf","enhance: glyph %s at index U+%04X is moved to private unicode slot U+%04X",name,index,private)
- end
- private = private + 1
- else
- indices[unicode] = index
- unicodes[name] = unicode
- end
- end
- end
- end
- -- beware: the indices table is used to initialize the tfm table
- for unicode, index in next, mapmap do
- if not internals[index] then
- local name = glyphs[index].name
- if name then
- local un = unicodes[name]
- if not un then
- unicodes[name] = unicode -- or 0
- elseif type(un) == "number" then
- if un ~= unicode then
- multiples[#multiples+1] = name
- unicodes[name] = { un, unicode }
- indices[unicode] = index
- end
- else
- local ok = false
- for u=1,#un do
- if un[u] == unicode then
- ok = true
- break
- end
- end
- if not ok then
- multiples[#multiples+1] = name
- un[#un+1] = unicode
- indices[unicode] = index
- end
- end
- end
- end
- end
- if trace_loading then
- if #multiples > 0 then
- logs.report("load otf","%s glyph are reused: %s",#multiples, concat(multiples," "))
- else
- logs.report("load otf","no glyph are reused")
- end
- end
- luatex.indices = indices
- luatex.unicodes = unicodes
- luatex.private = private
-end
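A simplified, self-contained version of the private remapping above: glyphs without a usable unicode get consecutive slots starting at a private offset. The offset and glyph data are invented; the real code starts at fonts.private, skips index 0 and also merges the font's own map:

local private = 0xF0000
local indices, unicodes = { }, { }

local glyphs = {
    [1] = { name = "a",      unicode = 0x61 },
    [2] = { name = "a.smcp", unicode = -1   }, -- no proper unicode assigned
}

for index, glyph in ipairs(glyphs) do
    if glyph.unicode == -1 then
        glyph.unicode        = private
        indices[private]     = index
        unicodes[glyph.name] = private
        private              = private + 1
    else
        indices[glyph.unicode] = index
        unicodes[glyph.name]   = glyph.unicode
    end
end

assert(unicodes["a.smcp"] == 0xF0000 and indices[0xF0000] == 2)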
-
-otf.enhancers["cleanup ttf tables"] = function(data,filename)
- local ttf_tables = data.ttf_tables
- if ttf_tables then
- for k=1,#ttf_tables do
- if ttf_tables[k].data then ttf_tables[k].data = "deleted" end
- end
- end
- data.ttf_tab_saved = nil
-end
-
-otf.enhancers["compact glyphs"] = function(data,filename)
- table.compact(data.glyphs) -- needed?
- if data.subfonts then
- for _, subfont in next, data.subfonts do
- table.compact(subfont.glyphs) -- needed?
- end
- end
-end
-
-otf.enhancers["reverse coverage"] = function(data,filename)
- -- we prefer the before lookups in a normal order
- if data.lookups then
- for _, v in next, data.lookups do
- if v.rules then
- for _, vv in next, v.rules do
- local c = vv.coverage
- if c and c.before then
- c.before = table.reverse(c.before)
- end
- end
- end
- end
- end
-end
-
-otf.enhancers["check italic correction"] = function(data,filename)
- local glyphs = data.glyphs
- local ok = false
- for index, glyph in next, glyphs do
- local ic = glyph.italic_correction
- if ic then
- if ic ~= 0 then
- glyph.italic = ic
- end
- glyph.italic_correction = nil
- ok = true
- end
- end
- -- we can use this to avoid calculations
- otf.tables.valid_fields[#otf.tables.valid_fields+1] = "has_italic"
- data.has_italic = true
-end
-
-otf.enhancers["check math"] = function(data,filename)
- if data.math then
- -- we move the math stuff into a math subtable because we then can
- -- test faster in the tfm copy
- local glyphs = data.glyphs
- local unicodes = data.luatex.unicodes
- for index, glyph in next, glyphs do
- local mk = glyph.mathkern
- local hv = glyph.horiz_variants
- local vv = glyph.vert_variants
- if mk or hv or vv then
- local math = { }
- glyph.math = math
- if mk then
- for k, v in next, mk do
- if not next(v) then
- mk[k] = nil
- end
- end
- math.kerns = mk
- glyph.mathkern = nil
- end
- if hv then
- math.horiz_variants = hv.variants
- local p = hv.parts
- if p and #p > 0 then
- for i=1,#p do
- local pi = p[i]
- pi.glyph = unicodes[pi.component] or 0
- end
- math.horiz_parts = p
- end
- local ic = hv.italic_correction
- if ic and ic ~= 0 then
- math.horiz_italic_correction = ic
- end
- glyph.horiz_variants = nil
- end
- if vv then
- local uc = unicodes[index]
- math.vert_variants = vv.variants
- local p = vv.parts
- if p and #p > 0 then
- for i=1,#p do
- local pi = p[i]
- pi.glyph = unicodes[pi.component] or 0
- end
- math.vert_parts = p
- end
- local ic = vv.italic_correction
- if ic and ic ~= 0 then
- math.vert_italic_correction = ic
- end
- glyph.vert_variants = nil
- end
- local ic = glyph.italic_correction
- if ic then
- if ic ~= 0 then
- math.italic_correction = ic
- end
- glyph.italic_correction = nil
- end
- end
- end
- end
-end
-
-otf.enhancers["share widths"] = function(data,filename)
- local glyphs = data.glyphs
- local widths = { }
- for index, glyph in next, glyphs do
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- end
- -- share width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then
- if trace_loading then
- logs.report("load otf", "most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for k, v in next, glyphs do
- if v.width == wd then
- v.width = nil
- end
- end
- data.luatex.defaultwidth = wd
- end
-end
-
--- kern: ttf has a table with kerns
-
-- Weird, as maxfirsts and maxseconds can have holes: firsts seems to be indexed, but
-- seconds can start at 2. This needs to be fixed, as getn as well as # are sort of
-- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
-- anyway).
-
---~ otf.enhancers["reorganize kerns"] = function(data,filename)
---~ local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
---~ local mkdone = false
---~ for index, glyph in next, glyphs do
---~ if glyph.kerns then
---~ local mykerns = { }
---~ for k,v in next, glyph.kerns do
---~ local vc, vo, vl = v.char, v.off, v.lookup
---~ if vc and vo and vl then -- brrr, wrong! we miss the non unicode ones
---~ local uvc = unicodes[vc]
---~ if not uvc then
---~ if trace_loading then
---~ logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
---~ end
---~ else
---~ if type(vl) ~= "table" then
---~ vl = { vl }
---~ end
---~ for l=1,#vl do
---~ local vll = vl[l]
---~ local mkl = mykerns[vll]
---~ if not mkl then
---~ mkl = { }
---~ mykerns[vll] = mkl
---~ end
---~ if type(uvc) == "table" then
---~ for u=1,#uvc do
---~ mkl[uvc[u]] = vo
---~ end
---~ else
---~ mkl[uvc] = vo
---~ end
---~ end
---~ end
---~ end
---~ end
---~ glyph.mykerns = mykerns
---~ glyph.kerns = nil -- saves space and time
---~ mkdone = true
---~ end
---~ end
---~ if trace_loading and mkdone then
---~ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
---~ end
---~ if data.kerns then
---~ if trace_loading then
---~ logs.report("load otf", "removing global 'kern' table")
---~ end
---~ data.kerns = nil
---~ end
---~ local dgpos = data.gpos
---~ if dgpos then
---~ local separator = lpeg.P(" ")
---~ local other = ((1 - separator)^0) / unicodes
---~ local splitter = lpeg.Ct(other * (separator * other)^0)
---~ for gp=1,#dgpos do
---~ local gpos = dgpos[gp]
---~ local subtables = gpos.subtables
---~ if subtables then
---~ for s=1,#subtables do
---~ local subtable = subtables[s]
---~ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
---~ if kernclass then -- the next one is quite slow
---~ local split = { } -- saves time
---~ for k=1,#kernclass do
---~ local kcl = kernclass[k]
---~ local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
---~ if type(lookups) ~= "table" then
---~ lookups = { lookups }
---~ end
---~ local maxfirsts, maxseconds = getn(firsts), getn(seconds)
---~ for _, s in next, firsts do
---~ split[s] = split[s] or lpegmatch(splitter,s)
---~ end
---~ for _, s in next, seconds do
---~ split[s] = split[s] or lpegmatch(splitter,s)
---~ end
---~ for l=1,#lookups do
---~ local lookup = lookups[l]
---~ local function do_it(fk,first_unicode)
---~ local glyph = glyphs[mapmap[first_unicode]]
---~ if glyph then
---~ local mykerns = glyph.mykerns
---~ if not mykerns then
---~ mykerns = { } -- unicode indexed !
---~ glyph.mykerns = mykerns
---~ end
---~ local lookupkerns = mykerns[lookup]
---~ if not lookupkerns then
---~ lookupkerns = { }
---~ mykerns[lookup] = lookupkerns
---~ end
---~ local baseoffset = (fk-1) * maxseconds
---~ for sk=2,maxseconds do -- we can avoid this loop with a table
---~ local sv = seconds[sk]
---~ local splt = split[sv]
---~ if splt then
---~ local offset = offsets[baseoffset + sk]
---~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk]
---~ if offset then
---~ for i=1,#splt do
---~ local second_unicode = splt[i]
---~ if tonumber(second_unicode) then
---~ lookupkerns[second_unicode] = offset
---~ else for s=1,#second_unicode do
---~ lookupkerns[second_unicode[s]] = offset
---~ end end
---~ end
---~ end
---~ end
---~ end
---~ elseif trace_loading then
---~ logs.report("load otf", "no glyph data for U+%04X", first_unicode)
---~ end
---~ end
---~ for fk=1,#firsts do
---~ local fv = firsts[fk]
---~ local splt = split[fv]
---~ if splt then
---~ for i=1,#splt do
---~ local first_unicode = splt[i]
---~ if tonumber(first_unicode) then
---~ do_it(fk,first_unicode)
---~ else for f=1,#first_unicode do
---~ do_it(fk,first_unicode[f])
---~ end end
---~ end
---~ end
---~ end
---~ end
---~ end
---~ subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables."
---~ subtable.kernclass = { }
---~ end
---~ end
---~ end
---~ end
---~ end
---~ end
-
-otf.enhancers["reorganize kerns"] = function(data,filename)
- local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes
- local mkdone = false
- local function do_it(lookup,first_unicode,kerns)
- local glyph = glyphs[mapmap[first_unicode]]
- if glyph then
- local mykerns = glyph.mykerns
- if not mykerns then
- mykerns = { } -- unicode indexed !
- glyph.mykerns = mykerns
- end
- local lookupkerns = mykerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- mykerns[lookup] = lookupkerns
- end
- for second_unicode, kern in next, kerns do
- lookupkerns[second_unicode] = kern
- end
- elseif trace_loading then
- logs.report("load otf", "no glyph data for U+%04X", first_unicode)
- end
- end
- for index, glyph in next, glyphs do
- if glyph.kerns then
- local mykerns = { }
- for k,v in next, glyph.kerns do
- local vc, vo, vl = v.char, v.off, v.lookup
- if vc and vo and vl then -- brrr, wrong! we miss the non unicode ones
- local uvc = unicodes[vc]
- if not uvc then
- if trace_loading then
- logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index)
- end
- else
- if type(vl) ~= "table" then
- vl = { vl }
- end
- for l=1,#vl do
- local vll = vl[l]
- local mkl = mykerns[vll]
- if not mkl then
- mkl = { }
- mykerns[vll] = mkl
- end
- if type(uvc) == "table" then
- for u=1,#uvc do
- mkl[uvc[u]] = vo
- end
- else
- mkl[uvc] = vo
- end
- end
- end
- end
- end
- glyph.mykerns = mykerns
- glyph.kerns = nil -- saves space and time
- mkdone = true
- end
- end
- if trace_loading and mkdone then
- logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables")
- end
- if data.kerns then
- if trace_loading then
- logs.report("load otf", "removing global 'kern' table")
- end
- data.kerns = nil
- end
- local dgpos = data.gpos
- if dgpos then
- local separator = lpeg.P(" ")
- local other = ((1 - separator)^0) / unicodes
- local splitter = lpeg.Ct(other * (separator * other)^0)
- for gp=1,#dgpos do
- local gpos = dgpos[gp]
- local subtables = gpos.subtables
- if subtables then
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- if kernclass then -- the next one is quite slow
- local split = { } -- saves time
- for k=1,#kernclass do
- local kcl = kernclass[k]
- local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local maxfirsts, maxseconds = #firsts, #seconds
- -- here we could convert split into a list of unicodes which is a bit
- -- faster but as this is only done when caching it does not save us much
- for _, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for _, s in next, seconds do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- for fk=1,#firsts do
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local kerns, baseoffset = { }, (fk-1) * maxseconds
- for sk=2,maxseconds do
- local sv = seconds[sk]
- local splt = split[sv]
- if splt then
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- local second_unicode = splt[i]
- if tonumber(second_unicode) then
- kerns[second_unicode] = offset
- else for s=1,#second_unicode do
- kerns[second_unicode[s]] = offset
- end end
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- if tonumber(first_unicode) then
- do_it(lookup,first_unicode,kerns)
- else for f=1,#first_unicode do
- do_it(lookup,first_unicode[f],kerns)
- end end
- end
- end
- end
- end
- end
- subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables."
- subtable.kernclass = { }
- end
- end
- end
- end
- end
-end
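After this enhancer each glyph that kerns carries a 'mykerns' table, indexed first by lookup name and then by the unicode of the second glyph. An example of the resulting shape (lookup name and kern values are invented):

local glyph = {
    name    = "V",
    mykerns = {
        ["pp_l_0_s"] = {
            [0x41] = -80,   -- V followed by A
            [0x2E] = -120,  -- V followed by period
        },
    },
}

print(glyph.mykerns["pp_l_0_s"][0x41]) -- -80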
-
-otf.enhancers["strip not needed data"] = function(data,filename)
- local verbose = fonts.verbose
- local int_to_uni = data.luatex.unicodes
- for k, v in next, data.glyphs do
- local d = v.dependents
- if d then v.dependents = nil end
- local a = v.altuni
- if a then v.altuni = nil end
- if verbose then
- local code = int_to_uni[k]
- -- looks like this is done twice ... bug?
- if code then
- local vu = v.unicode
- if not vu then
- v.unicode = code
- elseif type(vu) == "table" then
- if vu[#vu] == code then
- -- weird
- else
- vu[#vu+1] = code
- end
- elseif vu ~= code then
- v.unicode = { vu, code }
- end
- end
- else
- v.unicode = nil
- v.index = nil
- end
- end
- data.luatex.comment = "Glyph tables have their original index. When present, mykern tables are indexed by unicode."
- data.map = nil
- data.names = nil -- funny names for editors
- data.glyphcnt = nil
- data.glyphmax = nil
- if true then
- data.gpos = nil
- data.gsub = nil
- data.anchor_classes = nil
- end
-end
-
-otf.enhancers["migrate metadata"] = function(data,filename)
- local global_fields = otf.tables.global_fields
- local metadata = { }
- for k,v in next, data do
- if not global_fields[k] then
- metadata[k] = v
- data[k] = nil
- end
- end
- data.metadata = metadata
- -- goodies
- local pfminfo = data.pfminfo
- metadata.isfixedpitch = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose["proportion"] == "Monospaced")
- metadata.charwidth = pfminfo and pfminfo.avgwidth
-end
-
-local private_math_parameters = {
- "FractionDelimiterSize",
- "FractionDelimiterDisplayStyleSize",
-}
-
-otf.enhancers["check math parameters"] = function(data,filename)
- local mathdata = data.metadata.math
- if mathdata then
- for m=1,#private_math_parameters do
- local pmp = private_math_parameters[m]
- if not mathdata[pmp] then
- if trace_loading then
- logs.report("load otf", "setting math parameter '%s' to 0", pmp)
- end
- mathdata[pmp] = 0
- end
- end
- end
-end
-
-otf.enhancers["flatten glyph lookups"] = function(data,filename)
- for k, v in next, data.glyphs do
- local lookups = v.lookups
- if lookups then
- for kk, vv in next, lookups do
- for kkk=1,#vv do
- local vvv = vv[kkk]
- local s = vvv.specification
- if s then
- local t = vvv.type
- if t == "ligature" then
- vv[kkk] = { "ligature", s.components, s.char }
- elseif t == "alternate" then
- vv[kkk] = { "alternate", s.components }
- elseif t == "substitution" then
- vv[kkk] = { "substitution", s.variant }
- elseif t == "multiple" then
- vv[kkk] = { "multiple", s.components }
- elseif t == "position" then
- vv[kkk] = { "position", { s.x or 0, s.y or 0, s.h or 0, s.v or 0 } }
- elseif t == "pair" then
- local one, two, paired = s.offsets[1], s.offsets[2], s.paired or ""
- if one then
- if two then
- vv[kkk] = { "pair", paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- vv[kkk] = { "pair", paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- vv[kkk] = { "pair", paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- vv[kkk] = { "pair", paired }
- end
- end
- else
- if trace_loading then
- logs.report("load otf", "flattening needed, report to context list")
- end
- for a, b in next, s do
- if trace_loading and vvv[a] then
- logs.report("load otf", "flattening conflict, report to context list")
- end
- vvv[a] = b
- end
- vvv.specification = nil
- end
- end
- end
- end
- end
- end
-end
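A before/after sketch of the flattening done here for two common lookup types (glyph names invented): the verbose fontloader records become compact arrays.

local before = {
    type          = "ligature",
    specification = { components = "f i", char = "f_i" },
}
local after = { "ligature", "f i", "f_i" }

local before_sub = {
    type          = "substitution",
    specification = { variant = "a.sc" },
}
local after_sub = { "substitution", "a.sc" }

assert(after[1] == before.type and after[3] == before.specification.char)
assert(after_sub[2] == before_sub.specification.variant)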
-
-otf.enhancers["simplify glyph lookups"] = function(data,filename)
- for k, v in next, data.glyphs do
- local lookups = v.lookups
- if lookups then
- local slookups, mlookups
- for kk, vv in next, lookups do
- if #vv == 1 then
- if not slookups then
- slookups = { }
- v.slookups = slookups
- end
- slookups[kk] = vv[1]
- else
- if not mlookups then
- mlookups = { }
- v.mlookups = mlookups
- end
- mlookups[kk] = vv
- end
- end
- v.lookups = nil
- end
- end
-end
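The split performed above leaves each glyph with at most two tables: 'slookups' for lookups with a single flattened entry and 'mlookups' for lookups with several; the generic 'lookups' table is dropped. Example shape with invented lookup names:

local glyph = {
    slookups = {
        ["ss_l_3_s"] = { "substitution", "a.sc" },
    },
    mlookups = {
        ["ls_l_7_s"] = {
            { "ligature", "f i", "f_i" },
            { "ligature", "f l", "f_l" },
        },
    },
}

assert(glyph.slookups["ss_l_3_s"][1] == "substitution")
assert(#glyph.mlookups["ls_l_7_s"] == 2)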
-
-otf.enhancers["flatten anchor tables"] = function(data,filename)
- for k, v in next, data.glyphs do
- if v.anchors then
- for kk, vv in next, v.anchors do
- for kkk, vvv in next, vv do
- if vvv.x or vvv.y then
- vv[kkk] = { vvv.x or 0, vvv.y or 0 }
- else
- for kkkk=1,#vvv do
- local vvvv = vvv[kkkk]
- vvv[kkkk] = { vvvv.x or 0, vvvv.y or 0 }
- end
- end
- end
- end
- end
- end
-end
-
-otf.enhancers["flatten feature tables"] = function(data,filename)
- -- is this needed? do we still use them at all?
- for _, tag in next, otf.glists do
- if data[tag] then
- if trace_loading then
- logs.report("load otf", "flattening %s table", tag)
- end
- for k, v in next, data[tag] do
- local features = v.features
- if features then
- for kk=1,#features do
- local vv = features[kk]
- local t = { }
- local scripts = vv.scripts
- for kkk=1,#scripts do
- local vvv = scripts[kkk]
- t[vvv.script] = vvv.langs
- end
- vv.scripts = t
- end
- end
- end
- end
- end
-end
-
-otf.enhancers.patches = otf.enhancers.patches or { }
-
-otf.enhancers["patch bugs"] = function(data,filename)
- local basename = file.basename(lower(filename))
- for pattern, action in next, otf.enhancers.patches do
- if find(basename,pattern) then
- action(data,filename)
- end
- end
-end
-
--- tex features
-
-fonts.otf.enhancers["enrich with features"] = function(data,filename)
- -- later, ctx only
-end
-
-function otf.features.register(name,default)
- otf.features.list[#otf.features.list+1] = name
- otf.features.default[name] = default
-end
-
--- for context this will become a task handler
-
-function otf.set_features(tfmdata,features)
- local processes = { }
- if features and next(features) then
- local lists = { -- why local
- fonts.triggers,
- fonts.processors,
- fonts.manipulators,
- }
- local mode = tfmdata.mode or fonts.mode -- or features.mode
- local initializers = fonts.initializers
- local fi = initializers[mode]
- if fi then
- local fiotf = fi.otf
- if fiotf then
- local done = { }
- for l=1,4 do
- local list = lists[l]
- if list then
- for i=1,#list do
- local f = list[i]
- local value = features[f]
- if value and fiotf[f] then -- brr
- if not done[f] then -- so, we can move some to triggers
- if trace_features then
- logs.report("define otf","initializing feature %s to %s for mode %s for font %s",f,tostring(value),mode or 'unknown', tfmdata.fullname or 'unknown')
- end
- fiotf[f](tfmdata,value) -- can set mode (no need to pass otf)
- mode = tfmdata.mode or fonts.mode -- keep this, mode can be set local !
- local im = initializers[mode]
- if im then
- fiotf = initializers[mode].otf
- end
- done[f] = true
- end
- end
- end
- end
- end
- end
- end
- local fm = fonts.methods[mode] -- todo: without node/mode otf/...
- if fm then
- local fmotf = fm.otf
- if fmotf then
- for l=1,4 do
- local list = lists[l]
- if list then
- for i=1,#list do
- local f = list[i]
- if fmotf[f] then -- brr
- if trace_features then
- logs.report("define otf","installing feature handler %s for mode %s for font %s",f,mode or 'unknown', tfmdata.fullname or 'unknown')
- end
- processes[#processes+1] = fmotf[f]
- end
- end
- end
- end
- end
- else
- -- message
- end
- end
- return processes, features
-end
-
-function otf.otf_to_tfm(specification)
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- local format = specification.format
- local features = specification.features.normal
- local cache_id = specification.hash
- local tfmdata = containers.read(tfm.cache,cache_id)
---~ print(cache_id)
- if not tfmdata then
- local otfdata = otf.load(filename,format,sub,features and features.featurefile)
- if otfdata and next(otfdata) then
- otfdata.shared = otfdata.shared or {
- featuredata = { },
- anchorhash = { },
- initialized = false,
- }
- tfmdata = otf.copy_to_tfm(otfdata,cache_id)
- if tfmdata and next(tfmdata) then
- tfmdata.unique = tfmdata.unique or { }
- tfmdata.shared = tfmdata.shared or { } -- combine
- local shared = tfmdata.shared
- shared.otfdata = otfdata
- shared.features = features -- default
- shared.dynamics = { }
- shared.processes = { }
- shared.set_dynamics = otf.set_dynamics -- fast access and makes other modules independent
- -- this will be done later anyway, but it's convenient to have
- -- them already for fast access
- tfmdata.luatex = otfdata.luatex
- tfmdata.indices = otfdata.luatex.indices
- tfmdata.unicodes = otfdata.luatex.unicodes
- tfmdata.marks = otfdata.luatex.marks
- tfmdata.originals = otfdata.luatex.originals
- tfmdata.changed = { }
- tfmdata.has_italic = otfdata.metadata.has_italic
- if not tfmdata.language then tfmdata.language = 'dflt' end
- if not tfmdata.script then tfmdata.script = 'dflt' end
- shared.processes, shared.features = otf.set_features(tfmdata,fonts.define.check(features,otf.features.default))
- end
- end
- containers.write(tfm.cache,cache_id,tfmdata)
- end
- return tfmdata
-end
-
---~ {
---~ ['boundingbox']={ 95, -458, 733, 1449 },
---~ ['class']="base",
---~ ['name']="braceleft",
---~ ['unicode']=123,
---~ ['vert_variants']={
---~ ['italic_correction']=0,
---~ ['parts']={
---~ { ['component']="uni23A9", ['endConnectorLength']=1000, ['fullAdvance']=2546, ['is_extender']=0, ['startConnectorLength']=0, }, -- bot
---~ { ['component']="uni23AA", ['endConnectorLength']=2500, ['fullAdvance']=2501, ['is_extender']=1, ['startConnectorLength']=2500, }, -- rep
---~ { ['component']="uni23A8", ['endConnectorLength']=1000, ['fullAdvance']=4688, ['is_extender']=0, ['startConnectorLength']=1000, }, -- mid
---~ { ['component']="uni23AA", ['endConnectorLength']=2500, ['fullAdvance']=2501, ['is_extender']=1, ['startConnectorLength']=2500, }, -- rep
---~ { ['component']="uni23A7", ['endConnectorLength']=0, ['fullAdvance']=2546, ['is_extender']=0, ['startConnectorLength']=1000, }, -- top
---~ },
---~ ['variants']="braceleft braceleft.vsize1 braceleft.vsize2 braceleft.vsize3 braceleft.vsize4 braceleft.vsize5 braceleft.vsize6 braceleft.vsize7",
---~ },
---~ ['width']=793,
---~ },
-
-- the first version made a top/mid/bot extensible table, now we just pass on the variants data
--- and deal with it in the tfm scaler (there is no longer an extensible table anyway)
-
--- we cannot share descriptions as virtual fonts might extend them (ok, we could
-- use a cache with a hash)
-
-fonts.formats.dfont = "truetype"
-fonts.formats.ttc = "truetype"
-fonts.formats.ttf = "truetype"
-fonts.formats.otf = "opentype"
-
-function otf.copy_to_tfm(data,cache_id) -- we can save a copy when we reorder the tfm to unicode (nasty due to one->many)
- if data then
- local glyphs, pfminfo, metadata = data.glyphs or { }, data.pfminfo or { }, data.metadata or { }
- local luatex = data.luatex
- local unicodes = luatex.unicodes -- names to unicodes
- local indices = luatex.indices
- local characters, parameters, math_parameters, descriptions = { }, { }, { }, { }
- local designsize = metadata.designsize or metadata.design_size or 100
- if designsize == 0 then
- designsize = 100
- end
- local spaceunits, spacer = 500, "space"
- -- indices maps from unicodes to indices
- for u, i in next, indices do
- characters[u] = { } -- we need this because for instance we add protruding info and loop over characters
- descriptions[u] = glyphs[i]
- end
- -- math
- if metadata.math then
- -- parameters
- for name, value in next, metadata.math do
- math_parameters[name] = value
- end
- -- we could use a subset
- for u, char in next, characters do
- local d = descriptions[u]
- local m = d.math
- -- we have them shared because that packs nicer
- -- we could prepare the variants and keep 'm in descriptions
- if m then
- local variants, parts, c, uc = m.horiz_variants, m.horiz_parts, char, u
- if variants then
- for n in gmatch(variants,"[^ ]+") do
- local un = unicodes[n]
- if un and uc ~= un then
- c.next = un
- c = characters[un]
- uc = un
- end
- end
- c.horiz_variants = parts
- elseif parts then
- c.horiz_variants = parts
- end
- local variants, parts, c, uc = m.vert_variants, m.vert_parts, char, u
- if variants then
- for n in gmatch(variants,"[^ ]+") do
- local un = unicodes[n]
- if un and uc ~= un then
- c.next = un
- c = characters[un]
- uc = un
- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- c.vert_variants = parts
- end
- local italic_correction = m.vert_italic_correction
- if italic_correction then
- c.vert_italic_correction = italic_correction
- end
- local kerns = m.kerns
- if kerns then
- char.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- local endash, emdash, space = 0x20, 0x2014, "space" -- unicodes['space'], unicodes['emdash']
- if metadata.isfixedpitch then
- if descriptions[endash] then
- spaceunits, spacer = descriptions[endash].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and metadata.charwidth then
- spaceunits, spacer = metadata.charwidth, "charwidth"
- end
- else
- if descriptions[endash] then
- spaceunits, spacer = descriptions[endash].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and metadata.charwidth then
- spaceunits, spacer = metadata.charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = fonts.tfm.checked_filename(luatex)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local cidinfo = data.cidinfo
- local units = metadata.units_per_em or 1000
- --
- cidinfo.registry = cidinfo and cidinfo.registry or "" -- weird here, fix upstream
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- local italicangle = metadata.italicangle
- if italicangle then -- maybe also in afm _
- parameters.slant = parameters.slant - math.round(math.tan(italicangle*math.pi/180))
- end
- if metadata.isfixedpitch then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif otf.syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if pfminfo.os2_xheight and pfminfo.os2_xheight > 0 then
- parameters.x_height = pfminfo.os2_xheight
- else
- local x = 0x78 -- unicodes['x']
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- return {
- characters = characters,
- parameters = parameters,
- math_parameters = math_parameters,
- descriptions = descriptions,
- indices = indices,
- unicodes = unicodes,
- type = "real",
- direction = 0,
- boundarychar_label = 0,
- boundarychar = 65536,
- designsize = (designsize/10)*65536,
- encodingbytes = 2,
- filename = filename,
- fontname = fontname,
- fullname = fullname,
- psname = fontname or fullname,
- name = filename or fullname,
- units = units,
- format = fonts.fontformat(filename,"opentype"),
- cidinfo = cidinfo,
- ascender = abs(metadata.ascent or 0),
- descender = abs(metadata.descent or 0),
- spacer = spacer,
- italicangle = italicangle,
- }
- else
- return nil
- end
-end
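For reference, a minimal sketch (assumed values, not taken from the file) of how the spacing parameters computed above relate to the design units; with syncspace enabled, stretch and shrink are derived from the space itself:

    -- illustrative only: deriving spacing parameters from design units
    local units      = 1000            -- assumed units_per_em
    local spaceunits = units / 2       -- fallback when no endash/emdash/charwidth is available
    local parameters = {
        space         = spaceunits,
        space_stretch = spaceunits / 2, -- with otf.syncspace enabled
        space_shrink  = spaceunits / 3,
        x_height      = 2 * units / 5,  -- overridden by os2_xheight or the height of "x"
        quad          = units,
    }
    parameters.extra_space = parameters.space_shrink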
-
-otf.features.register('mathsize')
-
-function tfm.read_from_open_type(specification)
- local tfmtable = otf.otf_to_tfm(specification)
- if tfmtable then
- local otfdata = tfmtable.shared.otfdata
- tfmtable.name = specification.name
- tfmtable.sub = specification.sub
- local s = specification.size
- local m = otfdata.metadata.math
- if m then
- -- this will move to a function
- local f = specification.features
- if f then
- local f = f.normal
- if f and f.mathsize then
- local mathsize = specification.mathsize or 0
- if mathsize == 2 then
- local p = m.ScriptPercentScaleDown
- if p then
- local ps = p * specification.textsize / 100
- if trace_math then
- logs.report("define font","asked script size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
- end
- s = ps
- end
- elseif mathsize == 3 then
- local p = m.ScriptScriptPercentScaleDown
- if p then
- local ps = p * specification.textsize / 100
- if trace_math then
- logs.report("define font","asked scriptscript size: %s, used: %s (%2.2f %%)",s,ps,(ps/s)*100)
- end
- s = ps
- end
- end
- end
- end
- end
- tfmtable = tfm.scale(tfmtable,s,specification.relativeid)
- if tfm.fontname_mode == "specification" then
- -- not to be used in context !
- local specname = specification.specification
- if specname then
- tfmtable.name = specname
- if trace_defining then
- logs.report("define font","overloaded fontname: '%s'",specname)
- end
- end
- end
- fonts.logger.save(tfmtable,file.extname(specification.filename),specification)
- end
---~ print(tfmtable.fullname)
- return tfmtable
-end
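As a quick illustration of the mathsize scaling above (assumed numbers, not from the file): the requested size is replaced by ScriptPercentScaleDown (or ScriptScriptPercentScaleDown) percent of the text size.

    -- illustrative only: script size as a percentage of the text size
    local textsize = 10 * 65536          -- assumed text size in scaled points (10pt)
    local p        = 70                  -- assumed ScriptPercentScaleDown value
    local ps       = p * textsize / 100  -- 7pt for a 10pt text font
    print(ps / 65536)                    --> 7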
-
--- helpers
-
-function otf.collect_lookups(otfdata,kind,script,language)
- -- maybe store this in the font
- local sequences = otfdata.luatex.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[ss] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
-end
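A small sketch (illustrative, not part of the file) of the script/language fallback used in collect_lookups: an exact match wins, otherwise the default or wildcard entry is taken.

    -- illustrative only: resolving a feature for a given script and language
    local wildcard, default = "*", "dflt"
    local function resolve(features, script, language)
        local s = features[script] or features[default] or features[wildcard]
        return s and (s[language] or s[default] or s[wildcard])
    end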
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-oti.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-oti.lua
deleted file mode 100644
index 4cb27062623..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-oti.lua
+++ /dev/null
@@ -1,57 +0,0 @@
-if not modules then modules = { } end modules ['font-oti'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- i need to check features=yes|no also in relation to hashing
-
-local lower = string.lower
-
-local otf = fonts.otf
-
-otf.default_language = 'latn'
-otf.default_script = 'dflt'
-
-local languages = otf.tables.languages
-local scripts = otf.tables.scripts
-
-function otf.features.language(tfmdata,value)
- if value then
- value = lower(value)
- if languages[value] then
- tfmdata.language = value
- end
- end
-end
-
-function otf.features.script(tfmdata,value)
- if value then
- value = lower(value)
- if scripts[value] then
- tfmdata.script = value
- end
- end
-end
-
-function otf.features.mode(tfmdata,value)
- if value then
- tfmdata.mode = lower(value)
- end
-end
-
-fonts.initializers.base.otf.language = otf.features.language
-fonts.initializers.base.otf.script = otf.features.script
-fonts.initializers.base.otf.mode = otf.features.mode
-fonts.initializers.base.otf.method = otf.features.mode
-
-fonts.initializers.node.otf.language = otf.features.language
-fonts.initializers.node.otf.script = otf.features.script
-fonts.initializers.node.otf.mode = otf.features.mode
-fonts.initializers.node.otf.method = otf.features.mode
-
-otf.features.register("features",true) -- we always do features
-table.insert(fonts.processors,"features") -- we need a proper function for doing this
-
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otn.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otn.lua
deleted file mode 100644
index 6a6a046d8b8..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-otn.lua
+++ /dev/null
@@ -1,2688 +0,0 @@
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid, to whom we dedicate this module.
-
--- I'm in the process of cleaning up the code (which happens in another
--- file) so don't rely on things staying the same.
-
--- some day when we can jit this, we can use more functions
-
--- we can use more lpegs when lpeg is extended with function args and so
--- resolving to unicode does not gain much
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- special made testfonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- kerning is probably not yet ok for latin around disc nodes
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- remove unused tables
--- slide tail (always glue at the end so only needed once)
--- default features (per language, script)
--- cleanup kern(class) code, remove double info
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
-
---[[ldx--
-<p>This module is a bit more split up than I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification, there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.</p>
-
-<p>Because so much is possible, fonts might contain bugs and/or be made to
-work with certain renderers. These evolve over time, which may have the side
-effect that fonts suddenly behave differently.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>Because there are different interpretations possible, I will extend the code
-with more (configurable) variants. I can also add hooks for users so that they can
-write their own extensions.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode, so all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-them in the output eventually.</p>
-
-<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).</p>
-
-<p>This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.</p>
-
-<p>As with the <l n='afm'/> code, we may decide to store more information in the
-<l n='otf'/> table.</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
-results in different tables.</p>
---ldx]]--
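A minimal sketch of the private-space mapping mentioned above (the starting codepoint is an assumption, not taken from the file): unencoded glyphs get slots in a private unicode range so that TeX can still address them.

    -- illustrative only: mapping unencoded glyphs into a private area
    local private = 0xF0000  -- assumed start of the private range
    local function remap(unicode)
        if not unicode or unicode == -1 then
            private = private + 1
            return private
        end
        return unicode
    end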
-
--- action handler chainproc chainmore comment
---
--- gsub_single ok ok ok
--- gsub_multiple ok ok not implemented yet
--- gsub_alternate ok ok not implemented yet
--- gsub_ligature ok ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
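The handler/chainproc/chainmore split described above boils down to separate dispatch tables keyed by lookup type; a hedged sketch (names assumed, not the actual driver):

    -- illustrative only: dispatching on the lookup type
    local handlers, chainprocs, chainmores = { }, { }, { }
    local function apply(kind, lookuptype, start, ...)
        local h = handlers[lookuptype]
        if h then
            return h(start, kind, ...)
        end
        return start, false
    end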
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-
-local otf = fonts.otf
-local tfm = fonts.tfm
-
-local trace_lookups = false trackers.register("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false trackers.register("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false trackers.register("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false trackers.register("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false trackers.register("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false trackers.register("otf.details", function(v) trace_details = v end)
-local trace_applied = false trackers.register("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false trackers.register("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false trackers.register("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false trackers.register("otf.directions", function(v) trace_directions = v end)
-
-trackers.register("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-trackers.register("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-trackers.register("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
-trackers.register("otf.positions","otf.marks,otf.kerns,otf.cursive")
-trackers.register("otf.actions","otf.replacements,otf.positions")
-trackers.register("otf.injections","nodes.injections")
-
-trackers.register("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local set_attribute = node.set_attribute
-local has_attribute = node.has_attribute
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local split_at_space = lpeg.splitters[" "] or lpeg.Ct(lpeg.splitat(" ")) -- no trailing or multiple spaces anyway
-
-local glyph = node.id('glyph')
-local glue = node.id('glue')
-local kern = node.id('kern')
-local disc = node.id('disc')
-local whatsit = node.id('whatsit')
-
-local state = attributes.private('state')
-local markbase = attributes.private('markbase')
-local markmark = attributes.private('markmark')
-local markdone = attributes.private('markdone')
-local cursbase = attributes.private('cursbase')
-local curscurs = attributes.private('curscurs')
-local cursdone = attributes.private('cursdone')
-local kernpair = attributes.private('kernpair')
-
-local set_mark = nodes.set_mark
-local set_cursive = nodes.set_cursive
-local set_kern = nodes.set_kern
-local set_pair = nodes.set_pair
-
-local markonce = true
-local cursonce = true
-local kernonce = true
-
-local fontdata = fonts.ids
-
-otf.features.process = { }
-
--- we share some vars here, after all, we have no nested lookups and
--- less code
-
-local tfmdata = false
-local otfdata = false
-local characters = false
-local descriptions = false
-local marks = false
-local indices = false
-local unicodes = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
--- we cheat a bit and assume that a font,attr combination are kind of ranged
-
-local context_setups = fonts.define.specify.context_setups
-local context_numbers = fonts.define.specify.context_numbers
-local context_merged = fonts.define.specify.context_merged
-
--- we cannot optimize with "start = first_character(head)" because then we don't
--- know which rlmode we're in which messes up cursive handling later on
---
--- head is always a whatsit so we can safely assume that head is not changed
-
-local special_attributes = {
- init = 1,
- medi = 2,
- fina = 3,
- isol = 4
-}
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- logs.report("otf direct",...)
-end
-local function logwarning(...)
- logs.report("otf direct",...)
-end
-
-local function gref(n)
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return format("U+%04X (%s)",n,name)
- else
- return format("U+%04X",n)
- end
- elseif not n then
- return "<error in tracing>"
- else
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- num[#num+1] = format("U+%04X",ni)
- local dni = descriptions[ni]
- nam[#num] = (dni and dni.name) or "?"
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
- end
-end
-
-local function cref(kind,chainname,chainlookupname,lookupname,index)
- if index then
- return format("feature %s, chain %s, sub %s, lookup %s, index %s",kind,chainname,chainlookupname,lookupname,index)
- elseif lookupname then
- return format("feature %s, chain %s, sub %s, lookup %s",kind,chainname or "?",chainlookupname or "?",lookupname)
- elseif chainlookupname then
- return format("feature %s, chain %s, sub %s",kind,chainname or "?",chainlookupname)
- elseif chainname then
- return format("feature %s, chain %s",kind,chainname)
- else
- return format("feature %s",kind)
- end
-end
-
-local function pref(kind,lookupname)
- return format("feature %s, lookup %s",kind,lookupname)
-end
-
--- we can assume that languages that use marks are not hyphenated
--- we can also assume that at most one discretionary is present
-
-local function markstoligature(kind,lookupname,start,stop,char)
- local n = copy_node(start)
- local keep = start
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, 2, start
- return keep
-end
-
-local function toligature(kind,lookupname,start,stop,char,markflag,discfound) -- brr head
- if start ~= stop then
---~ if discfound then
---~ local lignode = copy_node(start)
---~ lignode.font = start.font
---~ lignode.char = char
---~ lignode.subtype = 2
---~ start = node.do_ligature_n(start, stop, lignode)
---~ if start.id == disc then
---~ local prev = start.prev
---~ start = start.next
---~ end
- if discfound then
- -- print("start->stop",nodes.tosequence(start,stop))
- local lignode = copy_node(start)
- lignode.font, lignode.char, lignode.subtype = start.font, char, 2
- local next, prev = stop.next, start.prev
- stop.next = nil
- lignode = node.do_ligature_n(start, stop, lignode)
- prev.next = lignode
- if next then
- next.prev = lignode
- end
- lignode.next, lignode.prev = next, prev
- start = lignode
- -- print("start->end",nodes.tosequence(start))
- else -- start is the ligature
- local deletemarks = markflag ~= "mark"
- local n = copy_node(start)
- local current
- current, start = insert_node_after(start,start,n)
- local snext = stop.next
- current.next = snext
- if snext then
- snext.prev = current
- end
- start.prev, stop.next = nil, nil
- current.char, current.subtype, current.components = char, 2, start
- local head = current
- if deletemarks then
- if trace_marks then
- while start do
- if marks[start.char] then
- logwarning("%s: remove mark %s",pref(kind,lookupname),gref(start.char))
- end
- start = start.next
- end
- end
- else
- local i = 0
- while start do
- if marks[start.char] then
- set_attribute(start,markdone,i)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
- end
- head, current = insert_node_after(head,current,copy_node(start))
- else
- i = i + 1
- end
- start = start.next
- end
- start = current.next
- while start and start.id == glyph do
- if marks[start.char] then
- set_attribute(start,markdone,i)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(start.char),i)
- end
- else
- break
- end
- start = start.next
- end
- end
- return head
- end
- else
- start.char = char
- end
- return start
-end
-
-function handlers.gsub_single(start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
- end
- start.char = replacement
- return start, true
-end
-
-local function alternative_glyph(start,alternatives,kind,chainname,chainlookupname,lookupname) -- chainname and chainlookupname optional
- local value, choice, n = featurevalue or tfmdata.shared.features[kind], nil, #alternatives -- global value, brrr
- if value == "random" then
- local r = math.random(1,n)
- value, choice = format("random, choice %s",r), alternatives[r]
- elseif value == "first" then
- value, choice = format("first, choice %s",1), alternatives[1]
- elseif value == "last" then
- value, choice = format("last, choice %s",n), alternatives[n]
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- value, choice = "default, choice 1", alternatives[1]
- elseif value > n then
- value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
- elseif value == 0 then
- value, choice = format("choice %s (no change)",value), start.char
- elseif value < 1 then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("choice %s",value), alternatives[value]
- end
- end
- if not choice then
- logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),value,gref(start.char))
- choice, value = start.char, format("no replacement instead of %s",value)
- end
- return choice, value
-end
-
-function handlers.gsub_alternate(start,kind,lookupname,alternative,sequence)
- local choice, index = alternative_glyph(start,alternative,kind,lookupname)
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",pref(kind,lookupname),gref(start.char),gref(choice),index)
- end
- start.char = choice
- return start, true
-end
-
-function handlers.gsub_multiple(start,kind,lookupname,multiple)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
- end
- start.char = multiple[1]
- if #multiple > 1 then
- for k=2,#multiple do
- local n = copy_node(start)
- n.char = multiple[k]
- local sn = start.next
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return start, true
-end
-
-function handlers.gsub_ligature(start,kind,lookupname,ligature,sequence) --or maybe pass lookup ref
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
- if marks[startchar] then
- while s do
- local id = s.id
- if id == glyph and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- local lg = ligature[1][char]
- if not lg then
- break
- else
- stop = s
- ligature = lg
- s = s.next
- end
- else
- break
- end
- else
- break
- end
- end
- if stop and ligature[2] then
- if trace_ligatures then
- local stopchar = stop.char
- start = markstoligature(kind,lookupname,start,stop,ligature[2])
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = markstoligature(kind,lookupname,start,stop,ligature[2])
- end
- return start, true
- end
- else
- local skipmark = sequence.flags[1]
- while s do
- local id = s.id
- if id == glyph and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[1][char]
- if not lg then
- break
- else
- stop = s
- ligature = lg
- s = s.next
- end
- end
- else
- break
- end
- elseif id == disc then
- discfound = true
- s = s.next
- else
- break
- end
- end
- if stop and ligature[2] then
- if trace_ligatures then
- local stopchar = stop.char
- start = toligature(kind,lookupname,start,stop,ligature[2],skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = toligature(kind,lookupname,start,stop,ligature[2],skipmark,discfound)
- end
- return start, true
- end
- end
- return start, false
-end
-
---[[ldx--
-<p>We get hits on a mark, but we're not sure if it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
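A small sketch (illustrative, not the original helper) of the explicit test just mentioned: the base glyph's description must carry anchors of the expected class before a mark can be attached.

    -- illustrative only: fetching anchors of a given class for a base glyph
    local function base_anchors(descriptions, basechar, class)
        -- class is "basechar", "baselig" or "basemark"
        local d = descriptions[basechar]
        local a = d and d.anchors
        return a and a[class]
    end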
-
-function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start, false
-end
-
-function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- local index = 1
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- index = index + 1
- while true do
- base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if marks[basechar] then
- index = index + 1
- else
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local i = has_attribute(start,markdone)
- if i then index = i end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start, false
-end
-
-function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
---~ local alreadydone = markonce and has_attribute(start,markmark)
---~ if not alreadydone then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start,true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- fonts.register_message(currentfont,basechar,"no base anchors")
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
- end
---~ elseif trace_marks and trace_details then
---~ logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
---~ end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return start,false
-end
-
-function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and has_attribute(start,cursbase)
- if not alreadydone then
- local done = false
- local startchar = start.char
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph and nxt.subtype<256 and nxt.font == currentfont do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
- end
- break
- end
- end
- end
- return start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return start, false
- end
-end
-
-function handlers.gpos_single(start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return start, false
-end
-
-function handlers.gpos_pair(start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return start, false
- else
- local prev, done = start, false
- local factor = tfmdata.factor
- while snext and snext.id == glyph and snext.subtype<256 and snext.font == currentfont do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- local krn = kerns[nextchar]
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if krn[1] == "pair" then
- local a, b = krn[3], krn[4]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- logs.report("%s: check this out (old kern stuff)",pref(kind,lookupname))
- local a, b = krn[3], krn[7]
- if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return start, done
- end
-end
-
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-them. It's not that complex to handle.</p>
---ldx]]--
-
-local chainmores = { }
-local chainprocs = { }
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- logs.report("otf subchain",...)
-end
-local function logwarning(...)
- logs.report("otf subchain",...)
-end
-
--- ['coverage']={
--- ['after']={ "r" },
--- ['before']={ "q" },
--- ['current']={ "a", "b", "c" },
--- },
--- ['lookups']={ "ls_l_1", "ls_l_1", "ls_l_1" },
-
-function chainmores.chainsub(start,stop,kind,chainname,currentcontext,cache,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
--- handled later:
---
--- function chainmores.gsub_single(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,n)
--- return chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,n)
--- end
-
-function chainmores.gsub_multiple(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,n)
- logprocess("%s: gsub_multiple not yet supported",cref(kind,chainname,chainlookupname))
- return start, false
-end
-function chainmores.gsub_alternate(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,n)
- logprocess("%s: gsub_alternate not yet supported",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
--- handled later:
---
--- function chainmores.gsub_ligature(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,n)
--- return chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,n)
--- end
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- logs.report("otf chain",...)
-end
-local function logwarning(...)
- logs.report("otf chain",...)
-end
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
-function chainprocs.chainsub(start,stop,kind,chainname,currentcontext,cache,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return start, false
-end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a bit weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
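For clarity, a hedged example of the data shape consumed by reversesub below (codepoints purely illustrative): the mapping goes straight from the current character to its replacement, without an intermediate lookup.

    -- illustrative only: reversesub replacement data
    local replacements = { [0x61] = 0x41 }          -- e.g. a -> A
    local char    = 0x61
    local newchar = replacements[char] or char      -- no separate lookup table involved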
-
-function chainprocs.reversesub(start,stop,kind,chainname,currentcontext,cache,replacements)
- local char = start.char
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- start.char = replacement
- return start, true
- else
- return start, false
- end
-end
-
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not necessarily one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we don't really do the replacement here already unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as little as needed, but that would also make the code even messier.</p>
---ldx]]--
-
-local function delete_till_stop(start,stop,ignoremarks)
- if start ~= stop then
- -- todo keep marks
- local done = false
- while not done do
- done = start == stop
- delete_node(start,start.next)
- end
- end
-end
-
---[[ldx--
-<p>Here we replace start by a single variant. First we delete the rest of the
-match.</p>
---ldx]]--
-
-function chainprocs.gsub_single(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- if not chainindex then
- delete_till_stop(start,stop) -- ,currentlookup.flags[1])
- end
- local current = start
- local subtables = currentlookup.subtables
- while current do
- if current.id == glyph then
- local currentchar = current.char
- local lookupname = subtables[1]
- local replacement = cache.gsub_single[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- current.char = replacement
- end
- end
- return start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return start, false
-end
-
-chainmores.gsub_single = chainprocs.gsub_single
-
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
---ldx]]--
-
-function chainprocs.gsub_multiple(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
- delete_till_stop(start,stop)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = cache.gsub_multiple[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- replacements = replacements[startchar]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- local sn = start.next
- for k=1,#replacements do
- if k == 1 then
- start.char = replacements[k]
- else
- local n = copy_node(start) -- maybe delete the components and such
- n.char = replacements[k]
- n.next, n.prev = sn, start
- if sn then
- sn.prev = n
- end
- start.next, start = n, n
- end
- end
- return start, true
- end
- end
- return start, false
-end
-
---[[ldx--
-<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
---ldx]]--
-
-function chainprocs.gsub_alternate(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
- -- todo: marks ?
- delete_till_stop(start,stop)
- local current = start
- local subtables = currentlookup.subtables
- while current do
- if current.id == glyph then
- local currentchar = current.char
- local lookupname = subtables[1]
- local alternatives = cache.gsub_alternate[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hits",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
- end
- else
- local choice, index = alternative_glyph(current,alternatives,kind,chainname,chainlookupname,lookupname)
- current.char = choice
- if trace_alternatives then
- logprocess("%s: replacing single %s by alternative %s (%s)",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),gref(choice),index)
- end
- end
- end
- return start, true
- elseif current == stop then
- break
- else
- current = current.next
- end
- end
- return start, false
-end
-
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
-
-function chainprocs.gsub_ligature(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = cache.gsub_ligature[lookupname]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
- else
- local s, discfound, last, nofreplacements = start.next, false, stop, 0
- while s do
- local id = s.id
- if id == disc then
- s = s.next
- discfound = true
- else
- local schar = s.char
- if marks[schar] then -- marks
- s = s.next
- else
- local lg = ligatures[1][schar]
- if not lg then
- break
- else
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = s.next
- end
- end
- end
- end
- end
- local l2 = ligatures[2]
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
- end
- end
- start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
- return start, true, nofreplacements
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
- end
- end
- end
- end
- return start, false, 0
-end
-
-chainmores.gsub_ligature = chainprocs.gsub_ligature
-
-function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = cache.gpos_mark2base[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
-function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = cache.gpos_mark2ligature[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- local index = 1
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- local basechar = base.char
- if marks[basechar] then
- index = index + 1
- while true do
- base = base.prev
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then
- basechar = base.char
- if marks[basechar] then
- index = index + 1
- else
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return start, false
- end
- end
- end
- -- todo: like marks a ligatures hash
- local i = has_attribute(start,markdone)
- if i then index = i end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
-function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
---~ local alreadydone = markonce and has_attribute(start,markmark)
---~ if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = cache.gpos_mark2mark[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- if base and base.id == glyph and base.subtype<256 and base.font == currentfont then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
- end
---~ elseif trace_marks and trace_details then
---~ logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
---~ end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return start, false
-end
-
--- ! ! ! untested ! ! !
-
-function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname)
- local alreadydone = cursonce and has_attribute(start,cursbase)
- if not alreadydone then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = cache.gpos_cursive[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph and nxt.subtype<256 and nxt.font == currentfont do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- fonts.register_message(currentfont,startchar,"no entry anchors")
- end
- break
- end
- end
- end
- return start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return start, false
- end
- end
- return start, false
-end
-
-function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,chainindex,sequence)
- -- untested
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = cache.gpos_single[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
- end
- return start, false
-end
-
--- when machines become faster i will make a shared function
-
-function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,currentlookup,chainlookupname,chainindex,sequence)
--- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = cache.gpos_pair[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local prev, done = start, false
- local factor = tfmdata.factor
- while snext and snext.id == glyph and snext.subtype<256 and snext.font == currentfont do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if krn[1] == "pair" then
- local a, b = krn[3], krn[4]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- logs.report("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[3], krn[7]
- if a and a ~= 0 then
- local k = set_kern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = set_kern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return start, done
- end
- end
- end
- return start, false
-end
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant then can be activated but with more tracing
-
-local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
-end
-
-local function normal_handle_contextchain(start,kind,chainname,contexts,sequence,cache)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
- local flags, done = sequence.flags, false
- local skipmark, skipligature, skipbase = flags[1], flags[2], flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
- local skipped = false
- for k=1,#contexts do
- local match, current, last = true, start, start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = current.id == glyph and current.subtype<256 and current.font == currentfont and seq[1][current.char]
- else
- -- todo: better space check (maybe check for glue)
- local f, l = ck[4], ck[5]
- if f == l then
- -- already a hit
- match = true
- else
- -- no need to test first hit (to be optimized)
- local n = f + 1
- last = last.next
- -- we cannot optimize for n=2 because there can be disc nodes
- -- if not someskip and n == l then
- -- -- n=2 and no skips then faster loop
- -- match = last and last.id == glyph and last.subtype<256 and last.font == currentfont and seq[n][last.char]
- -- else
- while n <= l do
- if last then
- local id = last.id
- if id == glyph then
- if last.subtype<256 and last.font == currentfont then
- local char = last.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = last.next
- elseif seq[n][char] then
- if n < l then
- last = last.next
- end
- n = n + 1
- else
- match = false break
- end
- else
- match = false break
- end
- else
- match = false break
- end
- elseif id == disc then -- what to do with kerns?
- last = last.next
- else
- match = false break
- end
- else
- match = false break
- end
- end
- -- end
- end
- if match and f > 1 then
- -- before
- local prev = start.prev
- if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph then
- if prev.subtype<256 and prev.font == currentfont then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- match = false break
- end
- else
- match = false break
- end
- else
- match = false break
- end
- elseif id == disc then
- -- skip 'm
- elseif seq[n][32] then
- n = n -1
- else
- match = false break
- end
- prev = prev.prev
- elseif seq[n][32] then
- n = n -1
- else
- match = false break
- end
- end
- elseif f == 2 then
- match = seq[1][32]
- else
-                                for n=f-1,1,-1 do
- if not seq[n][32] then
- match = false break
- end
- end
- end
- end
- if match and s > l then
- -- after
- local current = last.next
- if current then
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = current.id
- if id == glyph then
- if current.subtype<256 and current.font == currentfont then -- normal char
- local char = current.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- match = false break
- end
- else
- match = false break
- end
- else
- match = false break
- end
- elseif id == disc then
- -- skip 'm
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false break
- end
- current = current.next
- elseif seq[n][32] then
- n = n + 1
- else
- match = false break
- end
- end
- elseif s-l == 1 then
- match = seq[s][32]
- else
- for n=l+1,s do
- if not seq[n][32] then
- match = false break
- end
- end
- end
- end
- end
- if match then
- -- ck == currentcontext
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
- else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
- end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- local cp = chainprocs[chainlookup.type]
- if cp then
- start, done = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,nil,sequence)
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else
- -- actually this needs a more complex treatment for which we will use chainmores
---~ local i = 1
---~ repeat
---~ local chainlookupname = chainlookups[i]
---~ local chainlookup = lookuptable[chainlookupname]
---~ local cp = chainmores[chainlookup.type]
---~ if cp then
---~ local ok, n
---~ start, ok, n = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,i,sequence)
---~ -- messy since last can be changed !
---~ if ok then
---~ done = true
---~ start = start.next
---~ if n then
---~ -- skip next one(s) if ligature
---~ i = i + n - 1
---~ end
---~ end
---~ else
---~ logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
---~ end
---~ i = i + 1
---~ until i > nofchainlookups
-
- local i = 1
- repeat
-                    if skipped then
-                        while true do
-                            local char = start.char
-                            local ccd = descriptions[char]
-                            if ccd then
-                                local class = ccd.class
-                                if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
-                                    start = start.next
-                                else
-                                    break
-                                end
-                            else
-                                break
-                            end
-                        end
-                    end
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname]
- local cp = chainmores[chainlookup.type]
- if cp then
- local ok, n
- start, ok, n = cp(start,last,kind,chainname,ck,cache,chainlookup,chainlookupname,i,sequence)
- -- messy since last can be changed !
- if ok then
- done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
- end
- else
- logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- i = i + 1
- end
- start = start.next
- until i > nofchainlookups
-
- end
- else
- local replacements = ck[7]
- if replacements then
- start, done = chainprocs.reversesub(start,last,kind,chainname,ck,cache,replacements) -- sequence
- else
- done = true -- can be meant to be skipped
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
- end
- end
- end
- return start, done
-end
-
--- Because we want to keep this elsewhere (and because speed is less of an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
-end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
-}
-
-function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handlers.contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handlers.contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler '%s'",method)
- local handler = otf.chainhandlers[method]
- handlers.contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
- handlers.gsub_context = handlers.contextchain
- handlers.gsub_contextchain = handlers.contextchain
- handlers.gsub_reversecontextchain = handlers.contextchain
- handlers.gpos_contextchain = handlers.contextchain
- handlers.gpos_context = handlers.contextchain
-end
-
-otf.setcontextchain()
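-
--- a minimal usage sketch (editor's illustration, "myhandler" is a hypothetical
--- name): an alternative tracer can be registered in otf.chainhandlers and then
--- activated; unknown method names simply fall back to the normal handler
---
---~ otf.chainhandlers.myhandler = function(font,...)
---~     logs.report("otf process","context chain entered for font %s",font)
---~     return normal_handle_contextchain(...)
---~ end
---~ otf.setcontextchain("myhandler")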
-
-local missing = { } -- we only report once
-
-local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- logs.report("otf process",...)
-end
-local function logwarning(...)
- logs.report("otf process",...)
-end
-
-local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %s of type %s in font %s (%s)",lookup,typ,currentfont,tfmdata.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
---
--- dynamics will be isolated some day ... for the moment we catch attribute zero
--- not being set
-
-function fonts.methods.node.otf.features(head,font,attr)
- if trace_steps then
- checkstep(head)
- end
- tfmdata = fontdata[font]
- local shared = tfmdata.shared
- otfdata = shared.otfdata
- local luatex = otfdata.luatex
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- indices = tfmdata.indices
- unicodes = tfmdata.unicodes
- marks = tfmdata.marks
- anchorlookups = luatex.lookup_to_anchor
- currentfont = font
- rlmode = 0
- local featuredata = otfdata.shared.featuredata -- can be made local to closure
- local sequences = luatex.sequences
- lookuptable = luatex.lookups
- local done = false
- local script, language, s_enabled, a_enabled, dyn
- local attribute_driven = attr and attr ~= 0
- if attribute_driven then
- local features = context_setups[context_numbers[attr]] -- could be a direct list
- dyn = context_merged[attr] or 0
- language, script = features.language or "dflt", features.script or "dflt"
- a_enabled = features -- shared.features -- can be made local to the resolver
- if dyn == 2 or dyn == -2 then
- -- font based
- s_enabled = shared.features
- end
- else
- language, script = tfmdata.language or "dflt", tfmdata.script or "dflt"
- s_enabled = shared.features -- can be made local to the resolver
- dyn = 0
- end
- -- we can save some runtime by caching feature tests
- local res = resolved[font] if not res then res = { } resolved[font] = res end
- local rs = res [script] if not rs then rs = { } res [script] = rs end
- local rl = rs [language] if not rl then rl = { } rs [language] = rl end
- local ra = rl [attr] if ra == nil then ra = { } rl [attr] = ra end -- attr can be false
- -- sequences always > 1 so no need for optimization
- for s=1,#sequences do
- local pardir, txtdir, success = 0, { }, false
- local sequence = sequences[s]
- local r = ra[s] -- cache
- if r == nil then
- --
- -- this bit will move to font-ctx and become a function
- ---
- local chain = sequence.chain or 0
- local features = sequence.features
- if not features then
- -- indirect lookup, part of chain (todo: make this a separate table)
- r = false -- { false, false, chain }
- else
- local valid, attribute, kind, what = false, false
- for k,v in next, features do
- -- we can quit earlier but for the moment we want the tracing
- local s_e = s_enabled and s_enabled[k]
- local a_e = a_enabled and a_enabled[k]
- if s_e or a_e then
- local l = v[script] or v[wildcard]
- if l then
- -- not l[language] or l[default] or l[wildcard] because we want tracing
- -- only first attribute match check, so we assume simple fina's
- -- default can become a font feature itself
- if l[language] then
- valid, what = s_e or a_e, language
- -- elseif l[default] then
- -- valid, what = true, default
- elseif l[wildcard] then
- valid, what = s_e or a_e, wildcard
- end
- if valid then
- kind, attribute = k, special_attributes[k] or false
- if a_e and dyn < 0 then
- valid = false
- end
- if trace_applied then
- local typ, action = match(sequence.type,"(.*)_(.*)")
- logs.report("otf node mode",
- "%s font: %03i, dynamic: %03i, kind: %s, lookup: %3i, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
- (valid and "+") or "-",font,attr or 0,kind,s,script,language,what,typ,action,sequence.name)
- end
- break
- end
- end
- end
- end
- if valid then
- r = { valid, attribute, chain, kind }
- else
- r = false -- { valid, attribute, chain, "generic" } -- false anyway, could be flag instead of table
- end
- end
- ra[s] = r
- end
- featurevalue = r and r[1] -- todo: pass to function instead of using a global
- if featurevalue then
- local attribute, chain, typ, subtables = r[2], r[3], sequence.type, sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- local thecache = featuredata[typ] or { }
- -- we need to get rid of this slide !
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = start.id
- if id == glyph then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = thecache[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- start, success = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.prev end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- end
- else
- local handler = handlers[typ]
- local ns = #subtables
- local thecache = featuredata[typ] or { }
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then
- local lookupname = subtables[1]
- local lookupcache = thecache[lookupname]
- if not lookupcache then
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id = start.id
- if id == glyph then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
- else
- a = not attribute or has_attribute(start,state,attribute)
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
-                                -- sequence can be dropped
- local ok
- start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,1)
- if ok then
- success = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- -- elseif id == glue then
- -- if p[5] then -- chain
- -- local pc = pp[32]
- -- if pc then
- -- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
- -- if ok then
- -- done = true
- -- end
- -- if start then start = start.next end
- -- else
- -- start = start.next
- -- end
- -- else
- -- start = start.next
- -- end
- elseif id == whatsit then
- local subtype = start.subtype
- if subtype == 7 then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- insert(txtdir,dir)
- elseif dir == "-TRT" or dir == "-TLT" then
- remove(txtdir)
- end
- local d = txtdir[#txtdir]
- if d == "+TRT" then
- rlmode = -1
- elseif d == "+TLT" then
- rlmode = 1
- else
- rlmode = pardir
- end
- if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
- end
- elseif subtype == 6 then
- local dir = start.dir
- if dir == "TRT" then
- pardir = -1
- elseif dir == "TLT" then
- pardir = 1
- else
- pardir = 0
- end
- rlmode = pardir
- --~ txtdir = { }
- if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
- end
- else
- while start do
- local id = start.id
- if id == glyph then
- if start.subtype<256 and start.font == font then
- local a = has_attribute(start,0)
- if a then
- a = (a == attr) and (not attribute or has_attribute(start,state,attribute))
- else
- a = not attribute or has_attribute(start,state,attribute)
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = thecache[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- start, ok = handler(start,r[4],lookupname,lookupmatch,sequence,featuredata,i)
- if ok then
- success = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- -- elseif id == glue then
- -- if p[5] then -- chain
- -- local pc = pp[32]
- -- if pc then
- -- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
- -- if ok then
- -- done = true
- -- end
- -- if start then start = start.next end
- -- else
- -- start = start.next
- -- end
- -- else
- -- start = start.next
- -- end
- elseif id == whatsit then
- local subtype = start.subtype
- if subtype == 7 then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- insert(txtdir,dir)
- elseif dir == "-TRT" or dir == "-TLT" then
- remove(txtdir)
- end
- local d = txtdir[#txtdir]
- if d == "+TRT" then
- rlmode = -1
- elseif d == "+TLT" then
- rlmode = 1
- else
- rlmode = pardir
- end
- if trace_directions then
- logs.report("fonts","directions after textdir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
- end
- elseif subtype == 6 then
- local dir = start.dir
- if dir == "TRT" then
- pardir = -1
- elseif dir == "TLT" then
- pardir = 1
- else
- pardir = 0
- end
- rlmode = pardir
- --~ txtdir = { }
- if trace_directions then
- logs.report("fonts","directions after pardir %s: pardir=%s, txtdir=%s:%s, rlmode=%s",dir,pardir,#txtdir,txtdir[#txtdir] or "unset",rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
- end
- end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
- end
- end
- return head, done
-end
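-
--- editor's summary of the driver above: per (font, script, language, attribute)
--- combination it caches whether each sequence applies; applicable sequences then
--- walk the node list, backwards when sequence.chain < 0 (reverse chaining
--- lookups) and forwards otherwise, with rlmode tracked from par/text direction whatsits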
-
-otf.features.prepare = { }
-
--- we used to share code in the following functions but that costs a lot of
--- memory due to extensive calls to functions (easily hundreds of thousands per
--- document)
-
-local function split(replacement,original,cache,unicodes)
- -- we can cache this too, but not the same (although unicode is a unique enough hash)
- local o, t, n = { }, { }, 0
- for s in gmatch(original,"[^ ]+") do
- local us = unicodes[s]
- if type(us) == "number" then -- tonumber(us)
- o[#o+1] = us
- else
- o[#o+1] = us[1]
- end
- end
- for s in gmatch(replacement,"[^ ]+") do
- n = n + 1
- local us = unicodes[s]
- if type(us) == "number" then -- tonumber(us)
- t[o[n]] = us
- else
- t[o[n]] = us[1]
- end
- end
- return t
-end
-
-local function uncover(covers,result,cache,unicodes)
- -- lpeg hardly faster (.005 sec on mk)
- for n=1,#covers do
- local c = covers[n]
- local cc = cache[c]
- if not cc then
- local t = { }
- for s in gmatch(c,"[^ ]+") do
- local us = unicodes[s]
- if type(us) == "number" then
- t[us] = true
- else
- for i=1,#us do
- t[us[i]] = true
- end
- end
- end
- cache[c] = t
- result[#result+1] = t
- else
- result[#result+1] = cc
- end
- end
-end
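-
--- editor's illustration (glyph names are hypothetical): with
--- unicodes = { a = 0x61, b = 0x62, c = 0x63, d = 0x64 } the two helpers above
--- behave roughly as follows
---
---~ split("c d","a b",cache,unicodes)        -- { [0x61] = 0x63, [0x62] = 0x64 }
---~ uncover({ "a b" },result,cache,unicodes) -- appends { [0x61] = true, [0x62] = true } to result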
-
-local function prepare_lookups(tfmdata)
- local otfdata = tfmdata.shared.otfdata
- local featuredata = otfdata.shared.featuredata
- local anchor_to_lookup = otfdata.luatex.anchor_to_lookup
- local lookup_to_anchor = otfdata.luatex.lookup_to_anchor
- --
- local multiple = featuredata.gsub_multiple
- local alternate = featuredata.gsub_alternate
- local single = featuredata.gsub_single
- local ligature = featuredata.gsub_ligature
- local pair = featuredata.gpos_pair
- local position = featuredata.gpos_single
- local kerns = featuredata.gpos_pair
- local mark = featuredata.gpos_mark2mark
- local cursive = featuredata.gpos_cursive
- --
- local unicodes = tfmdata.unicodes -- names to unicodes
- local indices = tfmdata.indices
- local descriptions = tfmdata.descriptions
- --
- -- we can change the otf table after loading but then we need to adapt base mode
- -- as well (no big deal)
- --
- local action = {
- substitution = function(p,lookup,glyph,unicode)
- local old, new = unicode, unicodes[p[2]]
- if type(new) == "table" then
- new = new[1]
- end
- local s = single[lookup]
- if not s then s = { } single[lookup] = s end
- s[old] = new
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: substitution %s => %s",lookup,old,new)
- --~ end
- end,
- multiple = function (p,lookup,glyph,unicode)
- local old, new = unicode, { }
- local m = multiple[lookup]
- if not m then m = { } multiple[lookup] = m end
- m[old] = new
- for pc in gmatch(p[2],"[^ ]+") do
- local upc = unicodes[pc]
- if type(upc) == "number" then
- new[#new+1] = upc
- else
- new[#new+1] = upc[1]
- end
- end
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: multiple %s => %s",lookup,old,concat(new," "))
- --~ end
- end,
- alternate = function(p,lookup,glyph,unicode)
- local old, new = unicode, { }
- local a = alternate[lookup]
- if not a then a = { } alternate[lookup] = a end
- a[old] = new
- for pc in gmatch(p[2],"[^ ]+") do
- local upc = unicodes[pc]
- if type(upc) == "number" then
- new[#new+1] = upc
- else
- new[#new+1] = upc[1]
- end
- end
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: alternate %s => %s",lookup,old,concat(new,"|"))
- --~ end
- end,
- ligature = function (p,lookup,glyph,unicode)
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: ligature %s => %s",lookup,p[2],glyph.name)
- --~ end
- local first = true
- local t = ligature[lookup]
- if not t then t = { } ligature[lookup] = t end
- for s in gmatch(p[2],"[^ ]+") do
- if first then
- local u = unicodes[s]
- if not u then
- logs.report("define otf","lookup %s: ligature %s => %s ignored due to invalid unicode",lookup,p[2],glyph.name)
- break
- elseif type(u) == "number" then
- if not t[u] then
- t[u] = { { } }
- end
- t = t[u]
- else
- local tt = t
- local tu
- for i=1,#u do
- local u = u[i]
- if i==1 then
- if not t[u] then
- t[u] = { { } }
- end
- tu = t[u]
- t = tu
- else
- if not t[u] then
- tt[u] = tu
- end
- end
- end
- end
- first = false
- else
- s = unicodes[s]
- local t1 = t[1]
- if not t1[s] then
- t1[s] = { { } }
- end
- t = t1[s]
- end
- end
- t[2] = unicode
- end,
- position = function(p,lookup,glyph,unicode)
- -- not used
- local s = position[lookup]
- if not s then s = { } position[lookup] = s end
- s[unicode] = p[2] -- direct pointer to kern spec
- end,
- pair = function(p,lookup,glyph,unicode)
- local s = pair[lookup]
- if not s then s = { } pair[lookup] = s end
- local others = s[unicode]
- if not others then others = { } s[unicode] = others end
- -- todo: fast check for space
- local two = p[2]
- local upc = unicodes[two]
- if not upc then
- for pc in gmatch(two,"[^ ]+") do
- local upc = unicodes[pc]
- if type(upc) == "number" then
- others[upc] = p -- direct pointer to main table
- else
- for i=1,#upc do
- others[upc[i]] = p -- direct pointer to main table
- end
- end
- end
- elseif type(upc) == "number" then
- others[upc] = p -- direct pointer to main table
- else
- for i=1,#upc do
- others[upc[i]] = p -- direct pointer to main table
- end
- end
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: pair for U+%04X",lookup,unicode)
- --~ end
- end,
- }
- --
- for unicode, glyph in next, descriptions do
- local lookups = glyph.slookups
- if lookups then
- for lookup, p in next, lookups do
- action[p[1]](p,lookup,glyph,unicode)
- end
- end
- local lookups = glyph.mlookups
- if lookups then
- for lookup, whatever in next, lookups do
-                for i=1,#whatever do -- normally one
- local p = whatever[i]
- action[p[1]](p,lookup,glyph,unicode)
- end
- end
- end
- local list = glyph.mykerns
- if list then
- for lookup, krn in next, list do
- local k = kerns[lookup]
- if not k then k = { } kerns[lookup] = k end
- k[unicode] = krn -- ref to glyph, saves lookup
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: kern for U+%04X",lookup,unicode)
- --~ end
- end
- end
- local oanchor = glyph.anchors
- if oanchor then
- for typ, anchors in next, oanchor do -- types
- if typ == "mark" then
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local f = mark[lookup]
- if not f then f = { } mark[lookup] = f end
- f[unicode] = anchors -- ref to glyph, saves lookup
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: mark anchor %s for U+%04X",lookup,name,unicode)
- --~ end
- end
- end
- end
- elseif typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local f = cursive[lookup]
- if not f then f = { } cursive[lookup] = f end
- f[unicode] = anchors -- ref to glyph, saves lookup
- --~ if trace_lookups then
- --~ logs.report("define otf","lookup %s: exit anchor %s for U+%04X",lookup,name,unicode)
- --~ end
- end
- end
- end
- end
- end
- end
- end
-end
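-
--- editor's note on the tables built above (derived from the code, not from any
--- official documentation): single[lookup][old] = new, multiple and alternate map
--- an old unicode to an array of new ones, ligature[lookup] is a nested table
--- whose final [2] slot holds the ligature unicode, kerns and pair entries point
--- back into the glyph data, and mark/cursive store the glyph's anchor tables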
-
--- local cache = { }
-luatex = luatex or {} -- this has to change ... we need a better one
-
-local function prepare_contextchains(tfmdata)
- local otfdata = tfmdata.shared.otfdata
- local lookups = otfdata.lookups
- if lookups then
- local featuredata = otfdata.shared.featuredata
- local contextchain = featuredata.gsub_contextchain -- shared with gpos
- local reversecontextchain = featuredata.gsub_reversecontextchain -- shared with gpos
- local characters = tfmdata.characters
- local unicodes = tfmdata.unicodes
- local indices = tfmdata.indices
- local cache = luatex.covers
- if not cache then
- cache = { }
- luatex.covers = cache
- end
- --
- for lookupname, lookupdata in next, otfdata.lookups do
- local lookuptype = lookupdata.type
- if not lookuptype then
- logs.report("otf process","missing lookuptype for %s",lookupname)
- else
- local rules = lookupdata.rules
- if rules then
- local fmt = lookupdata.format
- -- contextchain[lookupname][unicode]
- if fmt == "coverage" then
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = contextchain[lookupname]
- if not contexts then
- contexts = { }
- contextchain[lookupname] = contexts
- end
- local t = { }
- for nofrules=1,#rules do -- does #rules>1 happen often?
- local rule = rules[nofrules]
- local coverage = rule.coverage
- if coverage and coverage.current then
- local current, before, after, sequence = coverage.current, coverage.before, coverage.after, { }
- if before then
- uncover(before,sequence,cache,unicodes)
- end
- local start = #sequence + 1
- uncover(current,sequence,cache,unicodes)
- local stop = #sequence
- if after then
- uncover(after,sequence,cache,unicodes)
- end
- if sequence[1] then
- t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- end
- elseif fmt == "reversecoverage" then
- if lookuptype ~= "reversesub" then
- logs.report("otf process","unsupported reverse coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = reversecontextchain[lookupname]
- if not contexts then
- contexts = { }
- reversecontextchain[lookupname] = contexts
- end
- local t = { }
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local reversecoverage = rule.reversecoverage
- if reversecoverage and reversecoverage.current then
- local current, before, after, replacements, sequence = reversecoverage.current, reversecoverage.before, reversecoverage.after, reversecoverage.replacements, { }
- if before then
- uncover(before,sequence,cache,unicodes)
- end
- local start = #sequence + 1
- uncover(current,sequence,cache,unicodes)
- local stop = #sequence
- if after then
- uncover(after,sequence,cache,unicodes)
- end
- if replacements then
- replacements = split(replacements,current[1],cache,unicodes)
- end
- if sequence[1] then
- -- this is different from normal coverage, we assume only replacements
- t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- end
- elseif fmt == "glyphs" then
- if lookuptype ~= "chainsub" and lookuptype ~= "chainpos" then
- logs.report("otf process","unsupported coverage %s for %s",lookuptype,lookupname)
- else
- local contexts = contextchain[lookupname]
- if not contexts then
- contexts = { }
- contextchain[lookupname] = contexts
- end
- local t = { }
- for nofrules=1,#rules do
- -- nearly the same as coverage so we could as well rename it
- local rule = rules[nofrules]
- local glyphs = rule.glyphs
- if glyphs and glyphs.names then
- local fore, back, names, sequence = glyphs.fore, glyphs.back, glyphs.names, { }
- if fore and fore ~= "" then
- fore = lpegmatch(split_at_space,fore)
- uncover(fore,sequence,cache,unicodes)
- end
- local start = #sequence + 1
- names = lpegmatch(split_at_space,names)
- uncover(names,sequence,cache,unicodes)
- local stop = #sequence
- if back and back ~= "" then
- back = lpegmatch(split_at_space,back)
- uncover(back,sequence,cache,unicodes)
- end
- if sequence[1] then
- t[#t+1] = { nofrules, lookuptype, sequence, start, stop, rule.lookups }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
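-
--- editor's note: every rule collected above is the "ck" array consumed by
--- normal_handle_contextchain earlier in this file: ck[1] = rule number,
--- ck[2] = lookuptype, ck[3] = coverage sequence, ck[4] and ck[5] = first and
--- last index of the current part, ck[6] = chained lookups and, for reverse
--- chains only, ck[7] = replacements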
-
-function fonts.initializers.node.otf.features(tfmdata,value)
- if true then -- value then
- if not tfmdata.shared.otfdata.shared.initialized then
- local t = trace_preparing and os.clock()
- local otfdata = tfmdata.shared.otfdata
- local featuredata = otfdata.shared.featuredata
- -- caches
- featuredata.gsub_multiple = { }
- featuredata.gsub_alternate = { }
- featuredata.gsub_single = { }
- featuredata.gsub_ligature = { }
- featuredata.gsub_contextchain = { }
- featuredata.gsub_reversecontextchain = { }
- featuredata.gpos_pair = { }
- featuredata.gpos_single = { }
- featuredata.gpos_mark2base = { }
- featuredata.gpos_mark2ligature = featuredata.gpos_mark2base
- featuredata.gpos_mark2mark = featuredata.gpos_mark2base
- featuredata.gpos_cursive = { }
- featuredata.gpos_contextchain = featuredata.gsub_contextchain
- featuredata.gpos_reversecontextchain = featuredata.gsub_reversecontextchain
- --
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- otfdata.shared.initialized = true
- if trace_preparing then
- logs.report("otf process","preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.fullname or "?")
- end
- end
- end
-end
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ott.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ott.lua
deleted file mode 100644
index c56e984981b..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-ott.lua
+++ /dev/null
@@ -1,955 +0,0 @@
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (tables)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local gsub, lower, stringformat = string.gsub, string.lower, string.format
-
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
-
-local otf = fonts.otf
-
-otf.tables = otf.tables or { }
-otf.meanings = otf.meanings or { }
-
-otf.tables.scripts = {
- ['dflt'] = 'Default',
-
- ['arab'] = 'Arabic',
- ['armn'] = 'Armenian',
- ['bali'] = 'Balinese',
- ['beng'] = 'Bengali',
- ['bopo'] = 'Bopomofo',
- ['brai'] = 'Braille',
- ['bugi'] = 'Buginese',
- ['buhd'] = 'Buhid',
- ['byzm'] = 'Byzantine Music',
- ['cans'] = 'Canadian Syllabics',
- ['cher'] = 'Cherokee',
- ['copt'] = 'Coptic',
- ['cprt'] = 'Cypriot Syllabary',
- ['cyrl'] = 'Cyrillic',
- ['deva'] = 'Devanagari',
- ['dsrt'] = 'Deseret',
- ['ethi'] = 'Ethiopic',
- ['geor'] = 'Georgian',
- ['glag'] = 'Glagolitic',
- ['goth'] = 'Gothic',
- ['grek'] = 'Greek',
- ['gujr'] = 'Gujarati',
- ['guru'] = 'Gurmukhi',
- ['hang'] = 'Hangul',
- ['hani'] = 'CJK Ideographic',
- ['hano'] = 'Hanunoo',
- ['hebr'] = 'Hebrew',
- ['ital'] = 'Old Italic',
- ['jamo'] = 'Hangul Jamo',
- ['java'] = 'Javanese',
- ['kana'] = 'Hiragana and Katakana',
- ['khar'] = 'Kharosthi',
- ['khmr'] = 'Khmer',
- ['knda'] = 'Kannada',
- ['lao' ] = 'Lao',
- ['latn'] = 'Latin',
- ['limb'] = 'Limbu',
- ['linb'] = 'Linear B',
- ['math'] = 'Mathematical Alphanumeric Symbols',
- ['mlym'] = 'Malayalam',
- ['mong'] = 'Mongolian',
- ['musc'] = 'Musical Symbols',
- ['mymr'] = 'Myanmar',
- ['nko' ] = "N'ko",
- ['ogam'] = 'Ogham',
- ['orya'] = 'Oriya',
- ['osma'] = 'Osmanya',
- ['phag'] = 'Phags-pa',
- ['phnx'] = 'Phoenician',
- ['runr'] = 'Runic',
- ['shaw'] = 'Shavian',
- ['sinh'] = 'Sinhala',
- ['sylo'] = 'Syloti Nagri',
- ['syrc'] = 'Syriac',
- ['tagb'] = 'Tagbanwa',
- ['tale'] = 'Tai Le',
- ['talu'] = 'Tai Lu',
- ['taml'] = 'Tamil',
- ['telu'] = 'Telugu',
- ['tfng'] = 'Tifinagh',
- ['tglg'] = 'Tagalog',
- ['thaa'] = 'Thaana',
- ['thai'] = 'Thai',
- ['tibt'] = 'Tibetan',
- ['ugar'] = 'Ugaritic Cuneiform',
- ['xpeo'] = 'Old Persian Cuneiform',
- ['xsux'] = 'Sumero-Akkadian Cuneiform',
- ['yi' ] = 'Yi',
-}
-
-otf.tables.languages = {
- ['dflt'] = 'Default',
-
- ['aba'] = 'Abaza',
- ['abk'] = 'Abkhazian',
- ['ady'] = 'Adyghe',
- ['afk'] = 'Afrikaans',
- ['afr'] = 'Afar',
- ['agw'] = 'Agaw',
- ['als'] = 'Alsatian',
- ['alt'] = 'Altai',
- ['amh'] = 'Amharic',
- ['ara'] = 'Arabic',
- ['ari'] = 'Aari',
- ['ark'] = 'Arakanese',
- ['asm'] = 'Assamese',
- ['ath'] = 'Athapaskan',
- ['avr'] = 'Avar',
- ['awa'] = 'Awadhi',
- ['aym'] = 'Aymara',
- ['aze'] = 'Azeri',
- ['bad'] = 'Badaga',
- ['bag'] = 'Baghelkhandi',
- ['bal'] = 'Balkar',
- ['bau'] = 'Baule',
- ['bbr'] = 'Berber',
- ['bch'] = 'Bench',
- ['bcr'] = 'Bible Cree',
- ['bel'] = 'Belarussian',
- ['bem'] = 'Bemba',
- ['ben'] = 'Bengali',
- ['bgr'] = 'Bulgarian',
- ['bhi'] = 'Bhili',
- ['bho'] = 'Bhojpuri',
- ['bik'] = 'Bikol',
- ['bil'] = 'Bilen',
- ['bkf'] = 'Blackfoot',
- ['bli'] = 'Balochi',
- ['bln'] = 'Balante',
- ['blt'] = 'Balti',
- ['bmb'] = 'Bambara',
- ['bml'] = 'Bamileke',
- ['bos'] = 'Bosnian',
- ['bre'] = 'Breton',
- ['brh'] = 'Brahui',
- ['bri'] = 'Braj Bhasha',
- ['brm'] = 'Burmese',
- ['bsh'] = 'Bashkir',
- ['bti'] = 'Beti',
- ['cat'] = 'Catalan',
- ['ceb'] = 'Cebuano',
- ['che'] = 'Chechen',
- ['chg'] = 'Chaha Gurage',
- ['chh'] = 'Chattisgarhi',
- ['chi'] = 'Chichewa',
- ['chk'] = 'Chukchi',
- ['chp'] = 'Chipewyan',
- ['chr'] = 'Cherokee',
- ['chu'] = 'Chuvash',
- ['cmr'] = 'Comorian',
- ['cop'] = 'Coptic',
- ['cos'] = 'Corsican',
- ['cre'] = 'Cree',
- ['crr'] = 'Carrier',
- ['crt'] = 'Crimean Tatar',
- ['csl'] = 'Church Slavonic',
- ['csy'] = 'Czech',
- ['dan'] = 'Danish',
- ['dar'] = 'Dargwa',
- ['dcr'] = 'Woods Cree',
- ['deu'] = 'German',
- ['dgr'] = 'Dogri',
- ['div'] = 'Divehi',
- ['djr'] = 'Djerma',
- ['dng'] = 'Dangme',
- ['dnk'] = 'Dinka',
- ['dri'] = 'Dari',
- ['dun'] = 'Dungan',
- ['dzn'] = 'Dzongkha',
- ['ebi'] = 'Ebira',
- ['ecr'] = 'Eastern Cree',
- ['edo'] = 'Edo',
- ['efi'] = 'Efik',
- ['ell'] = 'Greek',
- ['eng'] = 'English',
- ['erz'] = 'Erzya',
- ['esp'] = 'Spanish',
- ['eti'] = 'Estonian',
- ['euq'] = 'Basque',
- ['evk'] = 'Evenki',
- ['evn'] = 'Even',
- ['ewe'] = 'Ewe',
- ['fan'] = 'French Antillean',
- ['far'] = 'Farsi',
- ['fin'] = 'Finnish',
- ['fji'] = 'Fijian',
- ['fle'] = 'Flemish',
- ['fne'] = 'Forest Nenets',
- ['fon'] = 'Fon',
- ['fos'] = 'Faroese',
- ['fra'] = 'French',
- ['fri'] = 'Frisian',
- ['frl'] = 'Friulian',
- ['fta'] = 'Futa',
- ['ful'] = 'Fulani',
- ['gad'] = 'Ga',
- ['gae'] = 'Gaelic',
- ['gag'] = 'Gagauz',
- ['gal'] = 'Galician',
- ['gar'] = 'Garshuni',
- ['gaw'] = 'Garhwali',
- ['gez'] = "Ge'ez",
- ['gil'] = 'Gilyak',
- ['gmz'] = 'Gumuz',
- ['gon'] = 'Gondi',
- ['grn'] = 'Greenlandic',
- ['gro'] = 'Garo',
- ['gua'] = 'Guarani',
- ['guj'] = 'Gujarati',
- ['hai'] = 'Haitian',
- ['hal'] = 'Halam',
- ['har'] = 'Harauti',
- ['hau'] = 'Hausa',
-    ['haw'] = 'Hawaiian',
- ['hbn'] = 'Hammer-Banna',
- ['hil'] = 'Hiligaynon',
- ['hin'] = 'Hindi',
- ['hma'] = 'High Mari',
- ['hnd'] = 'Hindko',
- ['ho'] = 'Ho',
- ['hri'] = 'Harari',
- ['hrv'] = 'Croatian',
- ['hun'] = 'Hungarian',
- ['hye'] = 'Armenian',
- ['ibo'] = 'Igbo',
- ['ijo'] = 'Ijo',
- ['ilo'] = 'Ilokano',
- ['ind'] = 'Indonesian',
- ['ing'] = 'Ingush',
- ['inu'] = 'Inuktitut',
- ['iri'] = 'Irish',
- ['irt'] = 'Irish Traditional',
- ['isl'] = 'Icelandic',
- ['ism'] = 'Inari Sami',
- ['ita'] = 'Italian',
- ['iwr'] = 'Hebrew',
- ['jan'] = 'Japanese',
- ['jav'] = 'Javanese',
- ['jii'] = 'Yiddish',
- ['jud'] = 'Judezmo',
- ['jul'] = 'Jula',
- ['kab'] = 'Kabardian',
- ['kac'] = 'Kachchi',
- ['kal'] = 'Kalenjin',
- ['kan'] = 'Kannada',
- ['kar'] = 'Karachay',
- ['kat'] = 'Georgian',
- ['kaz'] = 'Kazakh',
- ['keb'] = 'Kebena',
- ['kge'] = 'Khutsuri Georgian',
- ['kha'] = 'Khakass',
- ['khk'] = 'Khanty-Kazim',
- ['khm'] = 'Khmer',
- ['khs'] = 'Khanty-Shurishkar',
- ['khv'] = 'Khanty-Vakhi',
- ['khw'] = 'Khowar',
- ['kik'] = 'Kikuyu',
- ['kir'] = 'Kirghiz',
- ['kis'] = 'Kisii',
- ['kkn'] = 'Kokni',
- ['klm'] = 'Kalmyk',
- ['kmb'] = 'Kamba',
- ['kmn'] = 'Kumaoni',
- ['kmo'] = 'Komo',
- ['kms'] = 'Komso',
- ['knr'] = 'Kanuri',
- ['kod'] = 'Kodagu',
- ['koh'] = 'Korean Old Hangul',
- ['kok'] = 'Konkani',
- ['kon'] = 'Kikongo',
- ['kop'] = 'Komi-Permyak',
- ['kor'] = 'Korean',
- ['koz'] = 'Komi-Zyrian',
- ['kpl'] = 'Kpelle',
- ['kri'] = 'Krio',
- ['krk'] = 'Karakalpak',
- ['krl'] = 'Karelian',
- ['krm'] = 'Karaim',
- ['krn'] = 'Karen',
- ['krt'] = 'Koorete',
- ['ksh'] = 'Kashmiri',
- ['ksi'] = 'Khasi',
- ['ksm'] = 'Kildin Sami',
- ['kui'] = 'Kui',
- ['kul'] = 'Kulvi',
- ['kum'] = 'Kumyk',
- ['kur'] = 'Kurdish',
- ['kuu'] = 'Kurukh',
- ['kuy'] = 'Kuy',
- ['kyk'] = 'Koryak',
- ['lad'] = 'Ladin',
- ['lah'] = 'Lahuli',
- ['lak'] = 'Lak',
- ['lam'] = 'Lambani',
- ['lao'] = 'Lao',
- ['lat'] = 'Latin',
- ['laz'] = 'Laz',
- ['lcr'] = 'L-Cree',
- ['ldk'] = 'Ladakhi',
- ['lez'] = 'Lezgi',
- ['lin'] = 'Lingala',
- ['lma'] = 'Low Mari',
- ['lmb'] = 'Limbu',
- ['lmw'] = 'Lomwe',
- ['lsb'] = 'Lower Sorbian',
- ['lsm'] = 'Lule Sami',
- ['lth'] = 'Lithuanian',
- ['ltz'] = 'Luxembourgish',
- ['lub'] = 'Luba',
- ['lug'] = 'Luganda',
- ['luh'] = 'Luhya',
- ['luo'] = 'Luo',
- ['lvi'] = 'Latvian',
- ['maj'] = 'Majang',
- ['mak'] = 'Makua',
- ['mal'] = 'Malayalam Traditional',
- ['man'] = 'Mansi',
- ['map'] = 'Mapudungun',
- ['mar'] = 'Marathi',
- ['maw'] = 'Marwari',
- ['mbn'] = 'Mbundu',
- ['mch'] = 'Manchu',
- ['mcr'] = 'Moose Cree',
- ['mde'] = 'Mende',
- ['men'] = "Me'en",
- ['miz'] = 'Mizo',
- ['mkd'] = 'Macedonian',
- ['mle'] = 'Male',
- ['mlg'] = 'Malagasy',
- ['mln'] = 'Malinke',
- ['mlr'] = 'Malayalam Reformed',
- ['mly'] = 'Malay',
- ['mnd'] = 'Mandinka',
- ['mng'] = 'Mongolian',
- ['mni'] = 'Manipuri',
- ['mnk'] = 'Maninka',
- ['mnx'] = 'Manx Gaelic',
- ['moh'] = 'Mohawk',
- ['mok'] = 'Moksha',
- ['mol'] = 'Moldavian',
- ['mon'] = 'Mon',
- ['mor'] = 'Moroccan',
- ['mri'] = 'Maori',
- ['mth'] = 'Maithili',
- ['mts'] = 'Maltese',
- ['mun'] = 'Mundari',
- ['nag'] = 'Naga-Assamese',
- ['nan'] = 'Nanai',
- ['nas'] = 'Naskapi',
- ['ncr'] = 'N-Cree',
- ['ndb'] = 'Ndebele',
- ['ndg'] = 'Ndonga',
- ['nep'] = 'Nepali',
- ['new'] = 'Newari',
- ['ngr'] = 'Nagari',
- ['nhc'] = 'Norway House Cree',
- ['nis'] = 'Nisi',
- ['niu'] = 'Niuean',
- ['nkl'] = 'Nkole',
- ['nko'] = "N'ko",
- ['nld'] = 'Dutch',
- ['nog'] = 'Nogai',
- ['nor'] = 'Norwegian',
- ['nsm'] = 'Northern Sami',
- ['nta'] = 'Northern Tai',
- ['nto'] = 'Esperanto',
- ['nyn'] = 'Nynorsk',
- ['oci'] = 'Occitan',
- ['ocr'] = 'Oji-Cree',
- ['ojb'] = 'Ojibway',
- ['ori'] = 'Oriya',
- ['oro'] = 'Oromo',
- ['oss'] = 'Ossetian',
- ['paa'] = 'Palestinian Aramaic',
- ['pal'] = 'Pali',
- ['pan'] = 'Punjabi',
- ['pap'] = 'Palpa',
- ['pas'] = 'Pashto',
- ['pgr'] = 'Polytonic Greek',
- ['pil'] = 'Pilipino',
- ['plg'] = 'Palaung',
- ['plk'] = 'Polish',
- ['pro'] = 'Provencal',
- ['ptg'] = 'Portuguese',
- ['qin'] = 'Chin',
- ['raj'] = 'Rajasthani',
- ['rbu'] = 'Russian Buriat',
- ['rcr'] = 'R-Cree',
- ['ria'] = 'Riang',
- ['rms'] = 'Rhaeto-Romanic',
- ['rom'] = 'Romanian',
- ['roy'] = 'Romany',
- ['rsy'] = 'Rusyn',
- ['rua'] = 'Ruanda',
- ['rus'] = 'Russian',
- ['sad'] = 'Sadri',
- ['san'] = 'Sanskrit',
- ['sat'] = 'Santali',
- ['say'] = 'Sayisi',
- ['sek'] = 'Sekota',
- ['sel'] = 'Selkup',
- ['sgo'] = 'Sango',
- ['shn'] = 'Shan',
- ['sib'] = 'Sibe',
- ['sid'] = 'Sidamo',
- ['sig'] = 'Silte Gurage',
- ['sks'] = 'Skolt Sami',
- ['sky'] = 'Slovak',
- ['sla'] = 'Slavey',
- ['slv'] = 'Slovenian',
- ['sml'] = 'Somali',
- ['smo'] = 'Samoan',
- ['sna'] = 'Sena',
- ['snd'] = 'Sindhi',
- ['snh'] = 'Sinhalese',
- ['snk'] = 'Soninke',
- ['sog'] = 'Sodo Gurage',
- ['sot'] = 'Sotho',
- ['sqi'] = 'Albanian',
- ['srb'] = 'Serbian',
- ['srk'] = 'Saraiki',
- ['srr'] = 'Serer',
- ['ssl'] = 'South Slavey',
- ['ssm'] = 'Southern Sami',
- ['sur'] = 'Suri',
- ['sva'] = 'Svan',
- ['sve'] = 'Swedish',
- ['swa'] = 'Swadaya Aramaic',
- ['swk'] = 'Swahili',
- ['swz'] = 'Swazi',
- ['sxt'] = 'Sutu',
- ['syr'] = 'Syriac',
- ['tab'] = 'Tabasaran',
- ['taj'] = 'Tajiki',
- ['tam'] = 'Tamil',
- ['tat'] = 'Tatar',
- ['tcr'] = 'TH-Cree',
- ['tel'] = 'Telugu',
- ['tgn'] = 'Tongan',
- ['tgr'] = 'Tigre',
- ['tgy'] = 'Tigrinya',
- ['tha'] = 'Thai',
- ['tht'] = 'Tahitian',
- ['tib'] = 'Tibetan',
- ['tkm'] = 'Turkmen',
- ['tmn'] = 'Temne',
- ['tna'] = 'Tswana',
- ['tne'] = 'Tundra Nenets',
- ['tng'] = 'Tonga',
- ['tod'] = 'Todo',
- ['trk'] = 'Turkish',
- ['tsg'] = 'Tsonga',
- ['tua'] = 'Turoyo Aramaic',
- ['tul'] = 'Tulu',
- ['tuv'] = 'Tuvin',
- ['twi'] = 'Twi',
- ['udm'] = 'Udmurt',
- ['ukr'] = 'Ukrainian',
- ['urd'] = 'Urdu',
- ['usb'] = 'Upper Sorbian',
- ['uyg'] = 'Uyghur',
- ['uzb'] = 'Uzbek',
- ['ven'] = 'Venda',
- ['vit'] = 'Vietnamese',
- ['wa' ] = 'Wa',
- ['wag'] = 'Wagdi',
- ['wcr'] = 'West-Cree',
- ['wel'] = 'Welsh',
- ['wlf'] = 'Wolof',
- ['xbd'] = 'Tai Lue',
- ['xhs'] = 'Xhosa',
- ['yak'] = 'Yakut',
- ['yba'] = 'Yoruba',
- ['ycr'] = 'Y-Cree',
- ['yic'] = 'Yi Classic',
- ['yim'] = 'Yi Modern',
- ['zhh'] = 'Chinese Hong Kong',
- ['zhp'] = 'Chinese Phonetic',
- ['zhs'] = 'Chinese Simplified',
- ['zht'] = 'Chinese Traditional',
- ['znd'] = 'Zande',
- ['zul'] = 'Zulu'
-}
-
-otf.tables.features = {
- ['aalt'] = 'Access All Alternates',
- ['abvf'] = 'Above-Base Forms',
- ['abvm'] = 'Above-Base Mark Positioning',
- ['abvs'] = 'Above-Base Substitutions',
- ['afrc'] = 'Alternative Fractions',
- ['akhn'] = 'Akhands',
- ['blwf'] = 'Below-Base Forms',
- ['blwm'] = 'Below-Base Mark Positioning',
- ['blws'] = 'Below-Base Substitutions',
- ['c2pc'] = 'Petite Capitals From Capitals',
- ['c2sc'] = 'Small Capitals From Capitals',
- ['calt'] = 'Contextual Alternates',
- ['case'] = 'Case-Sensitive Forms',
- ['ccmp'] = 'Glyph Composition/Decomposition',
- ['cjct'] = 'Conjunct Forms',
- ['clig'] = 'Contextual Ligatures',
- ['cpsp'] = 'Capital Spacing',
- ['cswh'] = 'Contextual Swash',
- ['curs'] = 'Cursive Positioning',
- ['dflt'] = 'Default Processing',
- ['dist'] = 'Distances',
- ['dlig'] = 'Discretionary Ligatures',
- ['dnom'] = 'Denominators',
- ['dtls'] = 'Dotless Forms', -- math
- ['expt'] = 'Expert Forms',
- ['falt'] = 'Final glyph Alternates',
- ['fin2'] = 'Terminal Forms #2',
- ['fin3'] = 'Terminal Forms #3',
- ['fina'] = 'Terminal Forms',
- ['flac'] = 'Flattened Accents Over Capitals', -- math
- ['frac'] = 'Fractions',
- ['fwid'] = 'Full Width',
- ['half'] = 'Half Forms',
- ['haln'] = 'Halant Forms',
- ['halt'] = 'Alternate Half Width',
- ['hist'] = 'Historical Forms',
- ['hkna'] = 'Horizontal Kana Alternates',
- ['hlig'] = 'Historical Ligatures',
- ['hngl'] = 'Hangul',
- ['hojo'] = 'Hojo Kanji Forms',
- ['hwid'] = 'Half Width',
- ['init'] = 'Initial Forms',
- ['isol'] = 'Isolated Forms',
- ['ital'] = 'Italics',
- ['jalt'] = 'Justification Alternatives',
- ['jp04'] = 'JIS2004 Forms',
- ['jp78'] = 'JIS78 Forms',
- ['jp83'] = 'JIS83 Forms',
- ['jp90'] = 'JIS90 Forms',
- ['kern'] = 'Kerning',
- ['lfbd'] = 'Left Bounds',
- ['liga'] = 'Standard Ligatures',
- ['ljmo'] = 'Leading Jamo Forms',
- ['lnum'] = 'Lining Figures',
- ['locl'] = 'Localized Forms',
- ['mark'] = 'Mark Positioning',
- ['med2'] = 'Medial Forms #2',
- ['medi'] = 'Medial Forms',
- ['mgrk'] = 'Mathematical Greek',
- ['mkmk'] = 'Mark to Mark Positioning',
- ['mset'] = 'Mark Positioning via Substitution',
- ['nalt'] = 'Alternate Annotation Forms',
- ['nlck'] = 'NLC Kanji Forms',
- ['nukt'] = 'Nukta Forms',
- ['numr'] = 'Numerators',
- ['onum'] = 'Old Style Figures',
- ['opbd'] = 'Optical Bounds',
- ['ordn'] = 'Ordinals',
- ['ornm'] = 'Ornaments',
- ['palt'] = 'Proportional Alternate Width',
- ['pcap'] = 'Petite Capitals',
- ['pnum'] = 'Proportional Figures',
- ['pref'] = 'Pre-base Forms',
- ['pres'] = 'Pre-base Substitutions',
- ['pstf'] = 'Post-base Forms',
- ['psts'] = 'Post-base Substitutions',
- ['pwid'] = 'Proportional Widths',
- ['qwid'] = 'Quarter Widths',
- ['rand'] = 'Randomize',
- ['rkrf'] = 'Rakar Forms',
- ['rlig'] = 'Required Ligatures',
- ['rphf'] = 'Reph Form',
- ['rtbd'] = 'Right Bounds',
- ['rtla'] = 'Right-To-Left Alternates',
- ['rtlm'] = 'Right To Left Math', -- math
- ['ruby'] = 'Ruby Notation Forms',
- ['salt'] = 'Stylistic Alternates',
- ['sinf'] = 'Scientific Inferiors',
- ['size'] = 'Optical Size',
- ['smcp'] = 'Small Capitals',
- ['smpl'] = 'Simplified Forms',
- ['ss01'] = 'Stylistic Set 1',
- ['ss02'] = 'Stylistic Set 2',
- ['ss03'] = 'Stylistic Set 3',
- ['ss04'] = 'Stylistic Set 4',
- ['ss05'] = 'Stylistic Set 5',
- ['ss06'] = 'Stylistic Set 6',
- ['ss07'] = 'Stylistic Set 7',
- ['ss08'] = 'Stylistic Set 8',
- ['ss09'] = 'Stylistic Set 9',
- ['ss10'] = 'Stylistic Set 10',
- ['ss11'] = 'Stylistic Set 11',
- ['ss12'] = 'Stylistic Set 12',
- ['ss13'] = 'Stylistic Set 13',
- ['ss14'] = 'Stylistic Set 14',
- ['ss15'] = 'Stylistic Set 15',
- ['ss16'] = 'Stylistic Set 16',
- ['ss17'] = 'Stylistic Set 17',
- ['ss18'] = 'Stylistic Set 18',
- ['ss19'] = 'Stylistic Set 19',
- ['ss20'] = 'Stylistic Set 20',
- ['ssty'] = 'Script Style', -- math
- ['subs'] = 'Subscript',
- ['sups'] = 'Superscript',
- ['swsh'] = 'Swash',
- ['titl'] = 'Titling',
- ['tjmo'] = 'Trailing Jamo Forms',
- ['tnam'] = 'Traditional Name Forms',
- ['tnum'] = 'Tabular Figures',
- ['trad'] = 'Traditional Forms',
- ['twid'] = 'Third Widths',
- ['unic'] = 'Unicase',
- ['valt'] = 'Alternate Vertical Metrics',
- ['vatu'] = 'Vattu Variants',
- ['vert'] = 'Vertical Writing',
- ['vhal'] = 'Alternate Vertical Half Metrics',
- ['vjmo'] = 'Vowel Jamo Forms',
- ['vkna'] = 'Vertical Kana Alternates',
- ['vkrn'] = 'Vertical Kerning',
- ['vpal'] = 'Proportional Alternate Vertical Metrics',
- ['vrt2'] = 'Vertical Rotation',
- ['zero'] = 'Slashed Zero',
-
- ['trep'] = 'Traditional TeX Replacements',
- ['tlig'] = 'Traditional TeX Ligatures',
-}
-
-otf.tables.baselines = {
- ['hang'] = 'Hanging baseline',
- ['icfb'] = 'Ideographic character face bottom edge baseline',
-    ['icft'] = 'Ideographic character face top edge baseline',
- ['ideo'] = 'Ideographic em-box bottom edge baseline',
- ['idtp'] = 'Ideographic em-box top edge baseline',
-    ['math'] = 'Mathematical centered baseline',
- ['romn'] = 'Roman baseline'
-}
-
--- can be sped up by local tables
-
-function otf.tables.to_tag(id)
- return stringformat("%4s",lower(id))
-end
-
-local function resolve(tab,id)
- if tab and id then
- id = lower(id)
- return tab[id] or tab[gsub(id," ","")] or tab['dflt'] or ''
- else
- return "unknown"
- end
-end
-
-function otf.meanings.script(id)
- return resolve(otf.tables.scripts,id)
-end
-function otf.meanings.language(id)
- return resolve(otf.tables.languages,id)
-end
-function otf.meanings.feature(id)
- return resolve(otf.tables.features,id)
-end
-function otf.meanings.baseline(id)
- return resolve(otf.tables.baselines,id)
-end
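-
--- quick illustration (editor's note):
---
---~ otf.meanings.script ("latn") -- "Latin"
---~ otf.meanings.feature("smcp") -- "Small Capitals"
---~ otf.meanings.script ("none") -- unknown tags fall back to 'dflt': "Default"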
-
-otf.tables.to_scripts = table.reverse_hash(otf.tables.scripts )
-otf.tables.to_languages = table.reverse_hash(otf.tables.languages)
-otf.tables.to_features = table.reverse_hash(otf.tables.features )
-
-local scripts = otf.tables.scripts
-local languages = otf.tables.languages
-local features = otf.tables.features
-
-local to_scripts = otf.tables.to_scripts
-local to_languages = otf.tables.to_languages
-local to_features = otf.tables.to_features
-
-for k, v in next, to_features do
- local stripped = gsub(k,"%-"," ")
- to_features[stripped] = v
- local stripped = gsub(k,"[^a-zA-Z0-9]","")
- to_features[stripped] = v
-end
-for k, v in next, to_features do
- to_features[lower(k)] = v
-end
-
-otf.meanings.checkers = {
- rand = function(v)
- return v and "random"
- end
-}
-
-local checkers = otf.meanings.checkers
-
-function otf.meanings.normalize(features)
- local h = { }
- for k,v in next, features do
- k = lower(k)
- if k == "language" or k == "lang" then
- v = gsub(lower(v),"[^a-z0-9%-]","")
- if not languages[v] then
- h.language = to_languages[v] or "dflt"
- else
- h.language = v
- end
- elseif k == "script" then
- v = gsub(lower(v),"[^a-z0-9%-]","")
- if not scripts[v] then
- h.script = to_scripts[v] or "dflt"
- else
- h.script = v
- end
- else
- if type(v) == "string" then
- local b = v:is_boolean()
- if type(b) == "nil" then
- v = tonumber(v) or lower(v)
- else
- v = b
- end
- end
- k = to_features[k] or k
- local c = checkers[k]
- h[k] = c and c(v) or v
- end
- end
- return h
-end
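-
--- a rough usage sketch (editor's note): user supplied feature tables are
--- lowercased, verbose names are mapped back to tags via to_features, and
--- script and language values are checked against the tables above
---
---~ otf.meanings.normalize { Script = "Latn", Language = "NLD", liga = "true" }
---~ -- roughly yields { script = "latn", language = "nld", liga = true }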
-
--- When I feel the need ...
-
---~ otf.tables.aat = {
---~ [ 0] = {
---~ name = "allTypographicFeaturesType",
---~ [ 0] = "allTypeFeaturesOnSelector",
---~ [ 1] = "allTypeFeaturesOffSelector",
---~ },
---~ [ 1] = {
---~ name = "ligaturesType",
---~ [0 ] = "requiredLigaturesOnSelector",
---~ [1 ] = "requiredLigaturesOffSelector",
---~ [2 ] = "commonLigaturesOnSelector",
---~ [3 ] = "commonLigaturesOffSelector",
---~ [4 ] = "rareLigaturesOnSelector",
---~ [5 ] = "rareLigaturesOffSelector",
---~ [6 ] = "logosOnSelector ",
---~ [7 ] = "logosOffSelector ",
---~ [8 ] = "rebusPicturesOnSelector",
---~ [9 ] = "rebusPicturesOffSelector",
---~ [10] = "diphthongLigaturesOnSelector",
---~ [11] = "diphthongLigaturesOffSelector",
---~ [12] = "squaredLigaturesOnSelector",
---~ [13] = "squaredLigaturesOffSelector",
---~ [14] = "abbrevSquaredLigaturesOnSelector",
---~ [15] = "abbrevSquaredLigaturesOffSelector",
---~ },
---~ [ 2] = {
---~ name = "cursiveConnectionType",
---~ [ 0] = "unconnectedSelector",
---~ [ 1] = "partiallyConnectedSelector",
---~ [ 2] = "cursiveSelector ",
---~ },
---~ [ 3] = {
---~ name = "letterCaseType",
---~ [ 0] = "upperAndLowerCaseSelector",
---~ [ 1] = "allCapsSelector ",
---~ [ 2] = "allLowerCaseSelector",
---~ [ 3] = "smallCapsSelector ",
---~ [ 4] = "initialCapsSelector",
---~ [ 5] = "initialCapsAndSmallCapsSelector",
---~ },
---~ [ 4] = {
---~ name = "verticalSubstitutionType",
---~ [ 0] = "substituteVerticalFormsOnSelector",
---~ [ 1] = "substituteVerticalFormsOffSelector",
---~ },
---~ [ 5] = {
---~ name = "linguisticRearrangementType",
---~ [ 0] = "linguisticRearrangementOnSelector",
---~ [ 1] = "linguisticRearrangementOffSelector",
---~ },
---~ [ 6] = {
---~ name = "numberSpacingType",
---~ [ 0] = "monospacedNumbersSelector",
---~ [ 1] = "proportionalNumbersSelector",
---~ },
---~ [ 7] = {
---~ name = "appleReserved1Type",
---~ },
---~ [ 8] = {
---~ name = "smartSwashType",
---~ [ 0] = "wordInitialSwashesOnSelector",
---~ [ 1] = "wordInitialSwashesOffSelector",
---~ [ 2] = "wordFinalSwashesOnSelector",
---~ [ 3] = "wordFinalSwashesOffSelector",
---~ [ 4] = "lineInitialSwashesOnSelector",
---~ [ 5] = "lineInitialSwashesOffSelector",
---~ [ 6] = "lineFinalSwashesOnSelector",
---~ [ 7] = "lineFinalSwashesOffSelector",
---~ [ 8] = "nonFinalSwashesOnSelector",
---~ [ 9] = "nonFinalSwashesOffSelector",
---~ },
---~ [ 9] = {
---~ name = "diacriticsType",
---~ [ 0] = "showDiacriticsSelector",
---~ [ 1] = "hideDiacriticsSelector",
---~ [ 2] = "decomposeDiacriticsSelector",
---~ },
---~ [10] = {
---~ name = "verticalPositionType",
---~ [ 0] = "normalPositionSelector",
---~ [ 1] = "superiorsSelector ",
---~ [ 2] = "inferiorsSelector ",
---~ [ 3] = "ordinalsSelector ",
---~ },
---~ [11] = {
---~ name = "fractionsType",
---~ [ 0] = "noFractionsSelector",
---~ [ 1] = "verticalFractionsSelector",
---~ [ 2] = "diagonalFractionsSelector",
---~ },
---~ [12] = {
---~ name = "appleReserved2Type",
---~ },
---~ [13] = {
---~ name = "overlappingCharactersType",
---~ [ 0] = "preventOverlapOnSelector",
---~ [ 1] = "preventOverlapOffSelector",
---~ },
---~ [14] = {
---~ name = "typographicExtrasType",
---~ [0 ] = "hyphensToEmDashOnSelector",
---~ [1 ] = "hyphensToEmDashOffSelector",
---~ [2 ] = "hyphenToEnDashOnSelector",
---~ [3 ] = "hyphenToEnDashOffSelector",
---~ [4 ] = "unslashedZeroOnSelector",
---~ [5 ] = "unslashedZeroOffSelector",
---~ [6 ] = "formInterrobangOnSelector",
---~ [7 ] = "formInterrobangOffSelector",
---~ [8 ] = "smartQuotesOnSelector",
---~ [9 ] = "smartQuotesOffSelector",
---~ [10] = "periodsToEllipsisOnSelector",
---~ [11] = "periodsToEllipsisOffSelector",
---~ },
---~ [15] = {
---~ name = "mathematicalExtrasType",
---~ [ 0] = "hyphenToMinusOnSelector",
---~ [ 1] = "hyphenToMinusOffSelector",
---~ [ 2] = "asteriskToMultiplyOnSelector",
---~ [ 3] = "asteriskToMultiplyOffSelector",
---~ [ 4] = "slashToDivideOnSelector",
---~ [ 5] = "slashToDivideOffSelector",
---~ [ 6] = "inequalityLigaturesOnSelector",
---~ [ 7] = "inequalityLigaturesOffSelector",
---~ [ 8] = "exponentsOnSelector",
---~ [ 9] = "exponentsOffSelector",
---~ },
---~ [16] = {
---~ name = "ornamentSetsType",
---~ [ 0] = "noOrnamentsSelector",
---~ [ 1] = "dingbatsSelector ",
---~ [ 2] = "piCharactersSelector",
---~ [ 3] = "fleuronsSelector ",
---~ [ 4] = "decorativeBordersSelector",
---~ [ 5] = "internationalSymbolsSelector",
---~ [ 6] = "mathSymbolsSelector",
---~ },
---~ [17] = {
---~ name = "characterAlternativesType",
---~ [ 0] = "noAlternatesSelector",
---~ },
---~ [18] = {
---~ name = "designComplexityType",
---~ [ 0] = "designLevel1Selector",
---~ [ 1] = "designLevel2Selector",
---~ [ 2] = "designLevel3Selector",
---~ [ 3] = "designLevel4Selector",
---~ [ 4] = "designLevel5Selector",
---~ },
---~ [19] = {
---~ name = "styleOptionsType",
---~ [ 0] = "noStyleOptionsSelector",
---~ [ 1] = "displayTextSelector",
---~ [ 2] = "engravedTextSelector",
---~ [ 3] = "illuminatedCapsSelector",
---~ [ 4] = "titlingCapsSelector",
---~ [ 5] = "tallCapsSelector ",
---~ },
---~ [20] = {
---~ name = "characterShapeType",
---~ [0 ] = "traditionalCharactersSelector",
---~ [1 ] = "simplifiedCharactersSelector",
---~ [2 ] = "jis1978CharactersSelector",
---~ [3 ] = "jis1983CharactersSelector",
---~ [4 ] = "jis1990CharactersSelector",
---~ [5 ] = "traditionalAltOneSelector",
---~ [6 ] = "traditionalAltTwoSelector",
---~ [7 ] = "traditionalAltThreeSelector",
---~ [8 ] = "traditionalAltFourSelector",
---~ [9 ] = "traditionalAltFiveSelector",
---~ [10] = "expertCharactersSelector",
---~ },
---~ [21] = {
---~ name = "numberCaseType",
---~ [ 0] = "lowerCaseNumbersSelector",
---~ [ 1] = "upperCaseNumbersSelector",
---~ },
---~ [22] = {
---~ name = "textSpacingType",
---~ [ 0] = "proportionalTextSelector",
---~ [ 1] = "monospacedTextSelector",
---~ [ 2] = "halfWidthTextSelector",
---~ [ 3] = "normallySpacedTextSelector",
---~ },
---~ [23] = {
---~ name = "transliterationType",
---~ [ 0] = "noTransliterationSelector",
---~ [ 1] = "hanjaToHangulSelector",
---~ [ 2] = "hiraganaToKatakanaSelector",
---~ [ 3] = "katakanaToHiraganaSelector",
---~ [ 4] = "kanaToRomanizationSelector",
---~ [ 5] = "romanizationToHiraganaSelector",
---~ [ 6] = "romanizationToKatakanaSelector",
---~ [ 7] = "hanjaToHangulAltOneSelector",
---~ [ 8] = "hanjaToHangulAltTwoSelector",
---~ [ 9] = "hanjaToHangulAltThreeSelector",
---~ },
---~ [24] = {
---~ name = "annotationType",
---~ [ 0] = "noAnnotationSelector",
---~ [ 1] = "boxAnnotationSelector",
---~ [ 2] = "roundedBoxAnnotationSelector",
---~ [ 3] = "circleAnnotationSelector",
---~ [ 4] = "invertedCircleAnnotationSelector",
---~ [ 5] = "parenthesisAnnotationSelector",
---~ [ 6] = "periodAnnotationSelector",
---~ [ 7] = "romanNumeralAnnotationSelector",
---~ [ 8] = "diamondAnnotationSelector",
---~ },
---~ [25] = {
---~ name = "kanaSpacingType",
---~ [ 0] = "fullWidthKanaSelector",
---~ [ 1] = "proportionalKanaSelector",
---~ },
---~ [26] = {
---~ name = "ideographicSpacingType",
---~ [ 0] = "fullWidthIdeographsSelector",
---~ [ 1] = "proportionalIdeographsSelector",
---~ },
---~ [103] = {
---~ name = "cjkRomanSpacingType",
---~ [ 0] = "halfWidthCJKRomanSelector",
---~ [ 1] = "proportionalCJKRomanSelector",
---~ [ 2] = "defaultCJKRomanSelector",
---~ [ 3] = "fullWidthCJKRomanSelector",
---~ },
---~ }
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-tfm.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-tfm.lua
deleted file mode 100644
index 560ba1cba15..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-tfm.lua
+++ /dev/null
@@ -1,741 +0,0 @@
-if not modules then modules = { } end modules ['font-tfm'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utf = unicode.utf8
-
-local next, format, match, lower, gsub = next, string.format, string.match, string.lower, string.gsub
-local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, utf.byte, table.serialize
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
-
--- tfmdata also has fast access to indices and unicodes
--- to be checked: otf -> tfm -> tfmscaled
---
--- watch out: no negative depths and no negative heights permitted in regular fonts
-
---[[ldx--
-<p>Here we only implement a few helper functions.</p>
---ldx]]--
-
-fonts = fonts or { }
-fonts.tfm = fonts.tfm or { }
-fonts.ids = fonts.ids or { }
-
-local tfm = fonts.tfm
-
-fonts.loaded = fonts.loaded or { }
-fonts.dontembed = fonts.dontembed or { }
-fonts.triggers = fonts.triggers or { } -- brrr
-fonts.initializers = fonts.initializers or { }
-fonts.initializers.common = fonts.initializers.common or { }
-
-local fontdata = fonts.ids
-local disc = node.id('disc')
-local glyph = node.id('glyph')
-local set_attribute = node.set_attribute
-
---[[ldx--
-<p>The next function encapsulates the standard <l n='tfm'/> loader as
-supplied by <l n='luatex'/>.</p>
---ldx]]--
-
-tfm.resolve_vf = true -- false
-tfm.share_base_kerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too)
-tfm.mathactions = { }
-tfm.fontname_mode = "fullpath"
-
-tfm.enhance = tfm.enhance or function() end
-
-fonts.formats.tfm = "type1" -- we need to have at least a value here
-
-function tfm.read_from_tfm(specification)
- local fname, tfmdata = specification.filename or "", nil
- if fname ~= "" then
- if trace_defining then
- logs.report("define font","loading tfm file %s at size %s",fname,specification.size)
- end
- tfmdata = font.read_tfm(fname,specification.size) -- not cached, fast enough
- if tfmdata then
- tfmdata.descriptions = tfmdata.descriptions or { }
- if tfm.resolve_vf then
- fonts.logger.save(tfmdata,file.extname(fname),specification) -- strange, why here
- fname = resolvers.findbinfile(specification.name, 'ovf')
- if fname and fname ~= "" then
- local vfdata = font.read_vf(fname,specification.size) -- not cached, fast enough
- if vfdata then
- local chars = tfmdata.characters
- for k,v in next, vfdata.characters do
- chars[k].commands = v.commands
- end
- tfmdata.type = 'virtual'
- tfmdata.fonts = vfdata.fonts
- end
- end
- end
- tfm.enhance(tfmdata,specification)
- end
- elseif trace_defining then
- logs.report("define font","loading tfm with name %s fails",specification.name)
- end
- return tfmdata
-end
-
---[[ldx--
-<p>We need to normalize the scale factor (in scaled points). This has to
-do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
-a signal for a font scaled based on the design size.</p>
---ldx]]--
-
-local factors = {
- pt = 65536.0,
- bp = 65781.8,
-}
-
-function tfm.setfactor(f)
- tfm.factor = factors[f or 'pt'] or factors.pt
-end
-
-tfm.setfactor()
-
-function tfm.scaled(scaledpoints, designsize) -- handles designsize in sp as well
- if scaledpoints < 0 then
- if designsize then
- if designsize > tfm.factor then -- or just 1000 / when? mp?
- return (- scaledpoints/1000) * designsize -- sp's
- else
- return (- scaledpoints/1000) * designsize * tfm.factor
- end
- else
- return (- scaledpoints/1000) * 10 * tfm.factor
- end
- else
- return scaledpoints
- end
-end
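-- editor's note: a small sketch (not part of the original file) of the negative-size
-- convention handled above; values are approximate because of floating point and
-- assume the default tfm.factor of 65536 (the 'pt' factor selected above)
--
--   tfm.scaled(786432, 655360)  -->  786432   explicit size in sp is passed through
--   tfm.scaled(-1000,  655360)  -->  655360   -1000 means "at the design size" (10pt, given in sp)
--   tfm.scaled(-1200,  655360)  -->  786432   -1200 means 1.2 times the design size
--   tfm.scaled(-1200,  10)      -->  786432   a design size below tfm.factor is taken as pt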
-
---[[ldx--
-<p>Before a font is passed to <l n='tex'/> we scale it. Here we also need
-to scale virtual characters.</p>
---ldx]]--
-
-function tfm.get_virtual_id(tfmdata)
- -- since we don't know the id yet, we use 0 as signal
- if not tfmdata.fonts then
- tfmdata.type = "virtual"
- tfmdata.fonts = { { id = 0 } }
- return 1
- else
- tfmdata.fonts[#tfmdata.fonts+1] = { id = 0 }
- return #tfmdata.fonts
- end
-end
-
-function tfm.check_virtual_id(tfmdata, id)
- if tfmdata and tfmdata.type == "virtual" then
- if not tfmdata.fonts or #tfmdata.fonts == 0 then
- tfmdata.type, tfmdata.fonts = "real", nil
- else
- local vfonts = tfmdata.fonts
- for f=1,#vfonts do
- local fnt = vfonts[f]
- if fnt.id and fnt.id == 0 then
- fnt.id = id
- end
- end
- end
- end
-end
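-- editor's sketch, not part of the original file: the intended use of the id-0
-- placeholder when building a virtual character (glyph numbers are illustrative)
--
--   local slot = tfm.get_virtual_id(tfmdata)        -- reserves { id = 0 } in tfmdata.fonts
--   tfmdata.characters[0x2012].commands = {
--       { "slot", slot, 0x2013 },                   -- typeset U+2013 from the font itself
--   }
--   -- once the real font id is known, every placeholder is patched in one go:
--   tfm.check_virtual_id(tfmdata, id)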
-
---[[ldx--
-<p>Beware, the boundingbox is passed as reference so we may not overwrite it
-in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
-excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
---ldx]]--
-
-fonts.trace_scaling = false
-
--- the following hack costs a bit of runtime but saves memory
---
--- basekerns are scaled and will be hashed by table id
--- sharedkerns are unscaled and are hashed by concatenated indexes
-
---~ function tfm.check_base_kerns(tfmdata)
---~ if tfm.share_base_kerns then
---~ local sharedkerns = tfmdata.sharedkerns
---~ if sharedkerns then
---~ local basekerns = { }
---~ tfmdata.basekerns = basekerns
---~ return sharedkerns, basekerns
---~ end
---~ end
---~ return nil, nil
---~ end
-
---~ function tfm.prepare_base_kerns(tfmdata)
---~ if tfm.share_base_kerns and not tfmdata.sharedkerns then
---~ local sharedkerns = { }
---~ tfmdata.sharedkerns = sharedkerns
---~ for u, chr in next, tfmdata.characters do
---~ local kerns = chr.kerns
---~ if kerns then
---~ local hash = concat(sortedkeys(kerns), " ")
---~ local base = sharedkerns[hash]
---~ if not base then
---~ sharedkerns[hash] = kerns
---~ else
---~ chr.kerns = base
---~ end
---~ end
---~ end
---~ end
---~ end
-
--- we can cache scaled characters when we are in node mode and don't have
--- protruding and expansion: hash == fullname @ size @ protruding @ expansion
--- but in practice (except for mk) the otf hash will already be enough, so it
--- makes no sense to mess up the code now
-
-local charactercache = { }
-
--- The scaler is only used for otf, afm and virtual fonts. If
--- a virtual font has italic correction, make sure to set the
--- has_italic flag. Some more flags will be added in the future.
-
-function tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
- if scaledpoints < 0 then
- scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp
- end
- local units = tfmtable.units or 1000
- local delta = scaledpoints/units -- brr, some open type fonts have 2048
- return scaledpoints, delta, units
-end
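-- editor's sketch, not part of the original file: the three values returned above for a
-- font with 2048 design units per em requested at 10pt (655360 sp)
--
--   local size, delta, units = tfm.calculate_scale({ units = 2048 }, 655360)
--   size  --> 655360        (the requested size in sp)
--   delta --> 320           (sp per design unit, 655360/2048)
--   units --> 2048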
-
-function tfm.do_scale(tfmtable, scaledpoints, relativeid)
- -- tfm.prepare_base_kerns(tfmtable) -- optimization
- local t = { } -- the new table
- local scaledpoints, delta, units = tfm.calculate_scale(tfmtable, scaledpoints, relativeid)
- t.units_per_em = units or 1000
- local hdelta, vdelta = delta, delta
- -- unicoded unique descriptions shared cidinfo characters changed parameters indices
- for k,v in next, tfmtable do
- if type(v) == "table" then
- -- print(k)
- else
- t[k] = v
- end
- end
- local extend_factor = tfmtable.extend_factor or 0
- if extend_factor ~= 0 and extend_factor ~= 1 then
- hdelta = hdelta * extend_factor
- t.extend = extend_factor * 1000
- else
- t.extend = 1000
- end
- local slant_factor = tfmtable.slant_factor or 0
- if slant_factor ~= 0 then
- t.slant = slant_factor * 1000
- else
- t.slant = 0
- end
- -- status
- local isvirtual = tfmtable.type == "virtual" or tfmtable.virtualized
- local hasmath = (tfmtable.math_parameters ~= nil and next(tfmtable.math_parameters) ~= nil) or (tfmtable.MathConstants ~= nil and next(tfmtable.MathConstants) ~= nil)
- local nodemode = tfmtable.mode == "node"
- local hasquality = tfmtable.auto_expand or tfmtable.auto_protrude
- local hasitalic = tfmtable.has_italic
- --
- t.parameters = { }
- t.characters = { }
- t.MathConstants = { }
- -- fast access
- local descriptions = tfmtable.descriptions or { }
- t.unicodes = tfmtable.unicodes
- t.indices = tfmtable.indices
- t.marks = tfmtable.marks
-t.goodies = tfmtable.goodies
-t.colorscheme = tfmtable.colorscheme
---~ t.embedding = tfmtable.embedding
- t.descriptions = descriptions
- if tfmtable.fonts then
- t.fonts = table.fastcopy(tfmtable.fonts) -- hm also at the end
- end
- local tp = t.parameters
- local mp = t.math_parameters
- local tfmp = tfmtable.parameters -- let's check for indexes
- --
- tp.slant = (tfmp.slant or tfmp[1] or 0)
- tp.space = (tfmp.space or tfmp[2] or 0)*hdelta
- tp.space_stretch = (tfmp.space_stretch or tfmp[3] or 0)*hdelta
- tp.space_shrink = (tfmp.space_shrink or tfmp[4] or 0)*hdelta
- tp.x_height = (tfmp.x_height or tfmp[5] or 0)*vdelta
- tp.quad = (tfmp.quad or tfmp[6] or 0)*hdelta
- tp.extra_space = (tfmp.extra_space or tfmp[7] or 0)*hdelta
- local protrusionfactor = (tp.quad ~= 0 and 1000/tp.quad) or 0
- local tc = t.characters
- local characters = tfmtable.characters
- local nameneeded = not tfmtable.shared.otfdata --hack
- local changed = tfmtable.changed or { } -- for base mode
- local ischanged = changed and next(changed)
- local indices = tfmtable.indices
- local luatex = tfmtable.luatex
- local tounicode = luatex and luatex.tounicode
- local defaultwidth = luatex and luatex.defaultwidth or 0
- local defaultheight = luatex and luatex.defaultheight or 0
- local defaultdepth = luatex and luatex.defaultdepth or 0
- -- experimental, sharing kerns (unscaled and scaled) saves memory
- -- local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable)
- -- loop over descriptions (afm and otf have descriptions, tfm not)
- -- there is no need (yet) to assign a value to chr.tounicode
- local scaledwidth = defaultwidth * hdelta
- local scaledheight = defaultheight * vdelta
- local scaleddepth = defaultdepth * vdelta
- local stackmath = tfmtable.ignore_stack_math ~= true
- local private = fonts.private
- local sharedkerns = { }
- for k,v in next, characters do
- local chr, description, index
- if ischanged then
- -- basemode hack
- local c = changed[k]
- if c then
- description = descriptions[c] or v
- v = characters[c] or v
- index = (indices and indices[c]) or c
- else
- description = descriptions[k] or v
- index = (indices and indices[k]) or k
- end
- else
- description = descriptions[k] or v
- index = (indices and indices[k]) or k
- end
- local width = description.width
- local height = description.height
- local depth = description.depth
- if width then width = hdelta*width else width = scaledwidth end
- if height then height = vdelta*height else height = scaledheight end
- -- if depth then depth = vdelta*depth else depth = scaleddepth end
- if depth and depth ~= 0 then
- depth = delta*depth
- if nameneeded then
- chr = {
- name = description.name,
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- else
- chr = {
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- end
- else
- -- this saves a little bit of time and memory, esp for big cjk fonts
- if nameneeded then
- chr = {
- name = description.name,
- index = index,
- height = height,
- width = width,
- }
- else
- chr = {
- index = index,
- height = height,
- width = width,
- }
- end
- end
- -- if trace_scaling then
- -- logs.report("define font","t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or k,description.index,description.name or '-',description.class or '-')
- -- end
- if tounicode then
- local tu = tounicode[index] -- nb: index!
- if tu then
- chr.tounicode = tu
- end
- end
- if hasquality then
- -- we could move these calculations elsewhere (saves calculations)
- local ve = v.expansion_factor
- if ve then
- chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere
- end
- local vl = v.left_protruding
- if vl then
- chr.left_protruding = protrusionfactor*width*vl
- end
- local vr = v.right_protruding
- if vr then
- chr.right_protruding = protrusionfactor*width*vr
- end
- end
- -- todo: hasitalic
- if hasitalic then
- local vi = description.italic or v.italic
- if vi and vi ~= 0 then
- chr.italic = vi*hdelta
- end
- end
- -- to be tested
- if hasmath then
- -- todo, just operate on descriptions.math
- local vn = v.next
- if vn then
- chr.next = vn
- --~ if v.vert_variants or v.horiz_variants then
- --~ logs.report("glyph 0x%05X has combination of next, vert_variants and horiz_variants",index)
- --~ end
- else
- local vv = v.vert_variants
- if vv then
- local t = { }
- for i=1,#vv do
- local vvi = vv[i]
- t[i] = {
- ["start"] = (vvi["start"] or 0)*vdelta,
- ["end"] = (vvi["end"] or 0)*vdelta,
- ["advance"] = (vvi["advance"] or 0)*vdelta,
- ["extender"] = vvi["extender"],
- ["glyph"] = vvi["glyph"],
- }
- end
- chr.vert_variants = t
- --~ local ic = v.vert_italic_correction
- --~ if ic then
- --~ chr.italic = ic * hdelta
- --~ print(format("0x%05X -> %s",k,chr.italic))
- --~ end
- else
- local hv = v.horiz_variants
- if hv then
- local t = { }
- for i=1,#hv do
- local hvi = hv[i]
- t[i] = {
- ["start"] = (hvi["start"] or 0)*hdelta,
- ["end"] = (hvi["end"] or 0)*hdelta,
- ["advance"] = (hvi["advance"] or 0)*hdelta,
- ["extender"] = hvi["extender"],
- ["glyph"] = hvi["glyph"],
- }
- end
- chr.horiz_variants = t
- end
- end
- end
- local vt = description.top_accent
- if vt then
- chr.top_accent = vdelta*vt
- end
- if stackmath then
- local mk = v.mathkerns
- if mk then
- local kerns = { }
- local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_right = k end
- local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_left = k end
- local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_left = k end
- local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_right = k end
- chr.mathkern = kerns -- singular
- end
- end
- end
- if not nodemode then
- local vk = v.kerns
- if vk then
- --~ if sharedkerns then
- --~ local base = basekerns[vk] -- hashed by table id, not content
- --~ if not base then
- --~ base = {}
- --~ for k,v in next, vk do base[k] = v*hdelta end
- --~ basekerns[vk] = base
- --~ end
- --~ chr.kerns = base
- --~ else
- --~ local tt = {}
- --~ for k,v in next, vk do tt[k] = v*hdelta end
- --~ chr.kerns = tt
- --~ end
- local s = sharedkerns[vk]
- if not s then
- s = { }
- for k,v in next, vk do s[k] = v*hdelta end
- sharedkerns[vk] = s
- end
- chr.kerns = s
- end
- local vl = v.ligatures
- if vl then
- if true then
- chr.ligatures = vl -- shared
- else
- local tt = { }
- for i,l in next, vl do
- tt[i] = l
- end
- chr.ligatures = tt
- end
- end
- end
- if isvirtual then
- local vc = v.commands
- if vc then
- -- we assume non scaled commands here
- -- tricky .. we need to scale pseudo math glyphs too
- -- which is why we deal with rules too
- local ok = false
- for i=1,#vc do
- local key = vc[i][1]
- if key == "right" or key == "down" then
- ok = true
- break
- end
- end
- if ok then
- local tt = { }
- for i=1,#vc do
- local ivc = vc[i]
- local key = ivc[1]
- if key == "right" then
- tt[#tt+1] = { key, ivc[2]*hdelta }
- elseif key == "down" then
- tt[#tt+1] = { key, ivc[2]*vdelta }
- elseif key == "rule" then
- tt[#tt+1] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
- else -- not comment
- tt[#tt+1] = ivc -- shared since in cache and untouched
- end
- end
- chr.commands = tt
- else
- chr.commands = vc
- end
- end
- end
- tc[k] = chr
- end
- -- t.encodingbytes, t.filename, t.fullname, t.name: elsewhere
- t.size = scaledpoints
- t.factor = delta
- t.hfactor = hdelta
- t.vfactor = vdelta
- if t.fonts then
- t.fonts = table.fastcopy(t.fonts) -- maybe we virtualize more afterwards
- end
- if hasmath then
- -- mathematics.extras.copy(t) -- can be done elsewhere if needed
- local ma = tfm.mathactions
- for i=1,#ma do
- ma[i](t,tfmtable,delta,hdelta,vdelta) -- what delta?
- end
- end
- -- needed for \high cum suis
- local tpx = tp.x_height
- if hasmath then
- if not tp[13] then tp[13] = .86*tpx end -- mathsupdisplay
- if not tp[14] then tp[14] = .86*tpx end -- mathsupnormal
- if not tp[15] then tp[15] = .86*tpx end -- mathsupcramped
- if not tp[16] then tp[16] = .48*tpx end -- mathsubnormal
- if not tp[17] then tp[17] = .48*tpx end -- mathsubcombined
- if not tp[22] then tp[22] = 0 end -- mathaxisheight
- if t.MathConstants then t.MathConstants.AccentBaseHeight = nil end -- safeguard
- end
- t.tounicode = 1
- t.cidinfo = tfmtable.cidinfo
- -- we have t.name=metricfile and t.fullname=RealName and t.filename=diskfilename
- -- when collapsing fonts, luatex looks at both t.name and t.fullname, as ttc files
- -- can have multiple subfonts
- if hasmath then
- if trace_defining then
- logs.report("define font","math enabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
- end
- else
- if trace_defining then
- logs.report("define font","math disabled for: name '%s', fullname: '%s', filename: '%s'",t.name or "noname",t.fullname or "nofullname",t.filename or "nofilename")
- end
- t.nomath, t.MathConstants = true, nil
- end
- if not t.psname then
- -- name used in pdf file as well as for selecting subfont in ttc/dfont
- t.psname = t.fontname or (t.fullname and fonts.names.cleanname(t.fullname))
- end
- if trace_defining then
- logs.report("define font","used for accesing subfont: '%s'",t.psname or "nopsname")
- logs.report("define font","used for subsetting: '%s'",t.fontname or "nofontname")
- end
---~ print(t.fontname,table.serialize(t.MathConstants))
- return t, delta
-end
-
---[[ldx--
-<p>The reason why the scaler is split, is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A days
-wasted day but an experience richer.</p>
---ldx]]--
-
-tfm.auto_cleanup = true
-
-local lastfont = nil
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
-function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one
- if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... todo
- if tfmdata.type == 'virtual' or tfmdata.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
- else
- -- for k, v in next, tfmdata.characters do
- -- if v.kerns then v.kerns = nil end
- -- end
- end
- end
-end
-
-function tfm.cleanup(tfmdata) -- we need a cleanup callback, now we miss the last one
-end
-
-function tfm.scale(tfmtable, scaledpoints, relativeid)
- local t, factor = tfm.do_scale(tfmtable, scaledpoints, relativeid)
- t.factor = factor
- t.ascender = factor*(tfmtable.ascender or 0)
- t.descender = factor*(tfmtable.descender or 0)
- t.shared = tfmtable.shared or { }
- t.unique = table.fastcopy(tfmtable.unique or {})
---~ print("scaling", t.name, t.factor) -- , tfm.hash_features(tfmtable.specification))
- tfm.cleanup(t)
- return t
-end
-
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
-
-fonts.analyzers = fonts.analyzers or { }
-fonts.analyzers.aux = fonts.analyzers.aux or { }
-fonts.analyzers.methods = fonts.analyzers.methods or { }
-fonts.analyzers.initializers = fonts.analyzers.initializers or { }
-
--- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
--- e.g. latin -> hyphenate, arab -> 1/2/3 analyze
-
--- an example analyzer (should move to font-ota.lua)
-
-local state = attributes.private('state')
-
-function fonts.analyzers.aux.setstate(head,font)
- local tfmdata = fontdata[font]
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- while current do
- local id = current.id
- if id == glyph and current.font == font then
- local d = descriptions[current.char]
- if d then
- if d.class == "mark" then
- done = true
- set_attribute(current,state,5) -- mark
- elseif n == 0 then
- first, last, n = current, current, 1
- set_attribute(current,state,1) -- init
- else
- last, n = current, n+1
- set_attribute(current,state,2) -- medi
- end
- else -- finish
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
- elseif id == disc then
- -- always in the middle
- set_attribute(current,state,2) -- medi
- last = current
- else -- finish
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- first, last, n = nil, nil, 0
- end
- current = current.next
- end
- if first and first == last then
- set_attribute(last,state,4) -- isol
- elseif last then
- set_attribute(last,state,3) -- fina
- end
- return head, done
-end
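-- editor's note, not part of the original file: for a run of four joining glyphs the
-- loop above assigns state 1 (init) to the first, 2 (medi) to the middle ones and, on
-- the closing pass, 3 (fina) to the last; an isolated glyph gets 4 (isol) and combining
-- marks get 5 (mark); a hypothetical per-script registration could then look like
--
--   fonts.analyzers.methods.arab = fonts.analyzers.aux.setstate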
-
-function tfm.replacements(tfm,value)
- -- tfm.characters[0x0022] = table.fastcopy(tfm.characters[0x201D])
- -- tfm.characters[0x0027] = table.fastcopy(tfm.characters[0x2019])
- -- tfm.characters[0x0060] = table.fastcopy(tfm.characters[0x2018])
- -- tfm.characters[0x0022] = tfm.characters[0x201D]
- tfm.characters[0x0027] = tfm.characters[0x2019]
- -- tfm.characters[0x0060] = tfm.characters[0x2018]
-end
-
--- checking
-
-function tfm.checked_filename(metadata,whatever)
- local foundfilename = metadata.foundfilename
- if not foundfilename then
- local askedfilename = metadata.filename or ""
- if askedfilename ~= "" then
- foundfilename = resolvers.findbinfile(askedfilename,"") or ""
- if foundfilename == "" then
- logs.report("fonts","source file '%s' is not found",askedfilename)
- foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
- if foundfilename ~= "" then
- logs.report("fonts","using source file '%s' (cache mismatch)",foundfilename)
- end
- end
- elseif whatever then
- logs.report("fonts","no source file for '%s'",whatever)
- foundfilename = ""
- end
- metadata.foundfilename = foundfilename
- -- logs.report("fonts","using source file '%s'",foundfilename)
- end
- return foundfilename
-end
-
--- status info
-
-statistics.register("fonts load time", function()
- return statistics.elapsedseconds(fonts)
-end)
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-xtx.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-xtx.lua
deleted file mode 100644
index 40024ec909b..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-font-xtx.lua
+++ /dev/null
@@ -1,229 +0,0 @@
-if not modules then modules = { } end modules ['font-xtx'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local texsprint, count = tex.sprint, tex.count
-local format, concat, gmatch, match, find, lower = string.format, table.concat, string.gmatch, string.match, string.find, string.lower
-local tostring, next = tostring, next
-local lpegmatch = lpeg.match
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-
---[[ldx--
-<p>Choosing a font by name and specififying its size is only part of the
-game. In order to prevent complex commands, <l n='xetex'/> introduced
-a method to pass feature information as part of the font name. At the
-risk of introducing nasty parsing and compatinility problems, this
-syntax was expanded over time.</p>
-
-<p>For the sake of users who have defined fonts using that syntax, we
-will support it, but we will provide additional methods as well.
-Normally users will not use this direct way, but use a more abstract
-interface.</p>
-
-<p>The next one is the official one. However, in the plain
-variant we need to support the crappy [] specification as
-well and that does not work too well with the general design
-of the specifier.</p>
---ldx]]--
-
---~ function fonts.define.specify.colonized(specification) -- xetex mode
---~ local list = { }
---~ if specification.detail and specification.detail ~= "" then
---~ for v in gmatch(specification.detail,"%s*([^;]+)%s*") do
---~ local a, b = match(v,"^(%S*)%s*=%s*(%S*)$")
---~ if a and b then
---~ list[a] = b:is_boolean()
---~ if type(list[a]) == "nil" then
---~ list[a] = b
---~ end
---~ else
---~ local a, b = match(v,"^([%+%-]?)%s*(%S+)$")
---~ if a and b then
---~ list[b] = a ~= "-"
---~ end
---~ end
---~ end
---~ end
---~ specification.features.normal = list
---~ return specification
---~ end
-
---~ check("oeps/BI:+a;-b;c=d")
---~ check("[oeps]/BI:+a;-b;c=d")
---~ check("file:oeps/BI:+a;-b;c=d")
---~ check("name:oeps/BI:+a;-b;c=d")
-
-local list = { }
-
-fonts.define.specify.colonized_default_lookup = "file"
-
-local function isstyle(s)
- local style = string.lower(s):split("/")
- for _,v in ipairs(style) do
- if v == "b" then
- list.style = "bold"
- elseif v == "i" then
- list.style = "italic"
- elseif v == "bi" or v == "ib" then
- list.style = "bolditalic"
- elseif v:find("^s=") then
- list.optsize = v:split("=")[2]
- elseif v == "aat" or v == "icu" or v == "gr" then
- logs.report("load font", "unsupported font option: %s", v)
- elseif not v:is_empty() then
- list.style = v:gsub("[^%a%d]", "")
- end
- end
-end
-
-fonts = fonts or { }
-fonts.otf = fonts.otf or { }
-
-local otf = fonts.otf
-
-otf.tables = otf.tables or { }
-
-otf.tables.defaults = {
- dflt = {
- "ccmp", "locl", "rlig", "liga", "clig",
- "kern", "mark", "mkmk", "itlc",
- },
- arab = {
- "ccmp", "locl", "isol", "fina", "fin2",
- "fin3", "medi", "med2", "init", "rlig",
- "calt", "liga", "cswh", "mset", "curs",
- "kern", "mark", "mkmk",
- },
- deva = {
- "ccmp", "locl", "init", "nukt", "akhn",
- "rphf", "blwf", "half", "pstf", "vatu",
- "pres", "blws", "abvs", "psts", "haln",
- "calt", "blwm", "abvm", "dist", "kern",
- "mark", "mkmk",
- },
- khmr = {
- "ccmp", "locl", "pref", "blwf", "abvf",
- "pstf", "pres", "blws", "abvs", "psts",
- "clig", "calt", "blwm", "abvm", "dist",
- "kern", "mark", "mkmk",
- },
- thai = {
- "ccmp", "locl", "liga", "kern", "mark",
- "mkmk",
- },
- hang = {
- "ccmp", "ljmo", "vjmo", "tjmo",
- },
-}
-
-otf.tables.defaults.beng = otf.tables.defaults.deva
-otf.tables.defaults.guru = otf.tables.defaults.deva
-otf.tables.defaults.gujr = otf.tables.defaults.deva
-otf.tables.defaults.orya = otf.tables.defaults.deva
-otf.tables.defaults.taml = otf.tables.defaults.deva
-otf.tables.defaults.telu = otf.tables.defaults.deva
-otf.tables.defaults.knda = otf.tables.defaults.deva
-otf.tables.defaults.mlym = otf.tables.defaults.deva
-otf.tables.defaults.sinh = otf.tables.defaults.deva
-
-otf.tables.defaults.syrc = otf.tables.defaults.arab
-otf.tables.defaults.mong = otf.tables.defaults.arab
-otf.tables.defaults.nko = otf.tables.defaults.arab
-
-otf.tables.defaults.tibt = otf.tables.defaults.khmr
-
-otf.tables.defaults.lao = otf.tables.defaults.thai
-
-local function parse_script(script)
- if otf.tables.scripts[script] then
- local dflt
- if otf.tables.defaults[script] then
- logs.report("load font", "auto-selecting default features for script: %s", script)
- dflt = otf.tables.defaults[script]
- else
- logs.report("load font", "auto-selecting default features for script: dflt (was %s)", script)
- dflt = otf.tables.defaults["dflt"]
- end
- for _,v in next, dflt do
- list[v] = "yes"
- end
- else
- logs.report("load font", "unknown script: %s", script)
- end
-end
-
-local function issome () list.lookup = fonts.define.specify.colonized_default_lookup end
-local function isfile () list.lookup = 'file' end
-local function isname () list.lookup = 'name' end
-local function thename(s) list.name = s end
-local function issub (v) list.sub = v end
-local function iskey (k,v)
- if k == "script" then
- parse_script(v)
- end
- list[k] = v
-end
-
-local function istrue (s) list[s] = true end
-local function isfalse(s) list[s] = false end
-
-local spaces = lpeg.P(" ")^0
-local namespec = (1-lpeg.S("/:("))^0 -- was: (1-lpeg.S("/: ("))^0
-local filespec = (lpeg.R("az", "AZ") * lpeg.P(":"))^-1 * (1-lpeg.S(":("))^1
-local crapspec = spaces * lpeg.P("/") * (((1-lpeg.P(":"))^0)/isstyle) * spaces
-local filename = (lpeg.P("file:")/isfile * (filespec/thename)) + (lpeg.P("[") * lpeg.P(true)/isfile * (((1-lpeg.P("]"))^0)/thename) * lpeg.P("]"))
-local fontname = (lpeg.P("name:")/isname * (namespec/thename)) + lpeg.P(true)/issome * (namespec/thename)
-local sometext = (lpeg.R("az","AZ","09") + lpeg.S("+-."))^1
-local truevalue = lpeg.P("+") * spaces * (sometext/istrue)
-local falsevalue = lpeg.P("-") * spaces * (sometext/isfalse)
-local keyvalue = lpeg.P("+") + (lpeg.C(sometext) * spaces * lpeg.P("=") * spaces * lpeg.C(sometext))/iskey
-local somevalue = sometext/istrue
-local subvalue = lpeg.P("(") * (lpeg.C(lpeg.P(1-lpeg.S("()"))^1)/issub) * lpeg.P(")") -- for Kim
-local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
-local options = lpeg.P(":") * spaces * (lpeg.P(";")^0 * option)^0
-local pattern = (filename + fontname) * subvalue^0 * crapspec^0 * options^0
-
-local normalize_meanings = fonts.otf.meanings.normalize
-
-function fonts.define.specify.colonized(specification) -- xetex mode
- list = { }
- lpegmatch(pattern,specification.specification)
- if list.style then
- specification.style = list.style
- list.style = nil
- end
- if list.optsize then
- specification.optsize = list.optsize
- list.optsize = nil
- end
- if list.name then
- if resolvers.find_file(list.name, "tfm") then
- list.lookup = "file"
- list.name = file.addsuffix(list.name, "tfm")
- elseif resolvers.find_file(list.name, "ofm") then
- list.lookup = "file"
- list.name = file.addsuffix(list.name, "ofm")
- end
-
- specification.name = list.name
- list.name = nil
- end
- if list.lookup then
- specification.lookup = list.lookup
- list.lookup = nil
- end
- if list.sub then
- specification.sub = list.sub
- list.sub = nil
- end
--- specification.features.normal = list
- specification.features.normal = normalize_meanings(list)
- return specification
-end
-
-fonts.define.register_split(":", fonts.define.specify.colonized)
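-- editor's sketch, not part of the original file: what the parser above extracts from a
-- XeTeX-style request (font name, subfont and features are illustrative, and "latn" is
-- assumed to be present in otf.tables.scripts)
--
--   "name:Latin Modern Roman(2)/I:script=latn;+smcp;-onum"
--   --> specification.lookup = "name", specification.name = "Latin Modern Roman",
--       specification.sub = "2", specification.style = "italic", and
--       specification.features.normal holding script=latn, smcp=true, onum=false plus
--       the default feature set auto-selected for the script (here the dflt list above)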
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-dum.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-dum.lua
deleted file mode 100644
index 13b73b17fff..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-dum.lua
+++ /dev/null
@@ -1,185 +0,0 @@
-if not modules then modules = { } end modules ['luat-dum'] = {
- version = 1.100,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local dummyfunction = function() end
-
-statistics = {
- register = dummyfunction,
- starttiming = dummyfunction,
- stoptiming = dummyfunction,
-}
-directives = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
-}
-trackers = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
-}
-experiments = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
-}
-storage = {
- register = dummyfunction,
- shared = { },
-}
-logs = {
- report = dummyfunction,
- simple = dummyfunction,
-}
-tasks = {
- new = dummyfunction,
- actions = dummyfunction,
- appendaction = dummyfunction,
- prependaction = dummyfunction,
-}
-callbacks = {
- register = function(n,f) return callback.register(n,f) end,
-}
-
--- we need to cheat a bit here
-
-texconfig.kpse_init = true
-
-resolvers = resolvers or { } -- no fancy file helpers used
-
-local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- dfont = "truetype fonts",
- cid = "cid maps",
- fea = "font feature files",
-}
-
-function resolvers.find_file(name,kind)
- name = string.gsub(name,"\\","/")
- kind = string.lower(kind)
- return kpse.find_file(name,(kind and kind ~= "" and (remapper[kind] or kind)) or file.extname(name,"tex"))
-end
-
-function resolvers.findbinfile(name,kind)
- if not kind or kind == "" then
- kind = file.extname(name) -- string.match(name,"%.([^%.]-)$")
- end
- return resolvers.find_file(name,(kind and remapper[kind]) or kind)
-end
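-- editor's sketch, not part of the original file: how the remapper above turns a lookup
-- into a kpathsea call (file names are illustrative)
--
--   resolvers.find_file("lmroman10-regular.otf", "otf")
--   --> kpse.find_file("lmroman10-regular.otf", "opentype fonts")
--
--   resolvers.findbinfile("texgyrepagella-regular.otf")    -- kind derived from the suffix
--   --> resolvers.find_file("texgyrepagella-regular.otf", "opentype fonts")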
-
--- Caches ... I will make a real stupid version some day when I'm in the
--- mood. After all, the generic code does not need the more advanced
--- ConTeXt features. Cached data is not shared between ConTeXt and other
--- usage as I don't want any dependency at all. Also, ConTeXt might have
--- different needs and tricks added.
-
---~ containers.usecache = true
-
-caches = { }
-
-local writable, readables = nil, { }
-
-if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
- caches.namespace = 'generic'
-end
-
-do
-
- local cachepaths
-
- if kpse.expand_var('$TEXMFCACHE') ~= '$TEXMFCACHE' then
- cachepaths = kpse.expand_var('$TEXMFCACHE')
- elseif kpse.expand_var('$TEXMFVAR') ~= '$TEXMFVAR' then
- cachepaths = kpse.expand_var('$TEXMFVAR')
- end
-
- if not cachepaths then
- cachepaths = "."
- end
-
- cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
-
- for i=1,#cachepaths do
- local done
- writable = file.join(cachepaths[i], "luatex-cache")
- writable = file.join(writable,caches.namespace)
- writable, done = dir.mkdirs(writable)
- if done then
- break
- end
- end
-
- for i=1,#cachepaths do
- if file.isreadable(cachepaths[i]) then
- readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
- end
- end
-
- if not writable then
- texio.write_nl("quiting: fix your writable cache path\n")
- os.exit()
- elseif #readables == 0 then
- texio.write_nl("quiting: fix your readable cache path\n")
- os.exit()
- elseif #readables == 1 and readables[1] == writable then
- texio.write(string.format("(using cache: %s)",writable))
- else
- texio.write(string.format("(using write cache: %s)",writable))
- texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
- end
-
-end
-
-function caches.getwritablepath(category,subcategory)
- local path = file.join(writable,category)
- lfs.mkdir(path)
- path = file.join(path,subcategory)
- lfs.mkdir(path)
- return path
-end
-
-function caches.getreadablepaths(category,subcategory)
- local t = { }
- for i=1,#readables do
- t[i] = file.join(readables[i],category,subcategory)
- end
- return t
-end
-
-local function makefullname(path,name)
- if path and path ~= "" then
- name = "temp-" .. name -- clash prevention
- return file.addsuffix(file.join(path,name),"lua")
- end
-end
-
-function caches.iswritable(path,name)
- local fullname = makefullname(path,name)
- return fullname and file.iswritable(fullname)
-end
-
-function caches.loaddata(paths,name)
- for i=1,#paths do
- local fullname = makefullname(paths[i],name)
- if fullname then
- texio.write(string.format("(load: %s)",fullname))
- local data = loadfile(fullname)
- return data and data()
- end
- end
-end
-
-function caches.savedata(path,name,data)
- local fullname = makefullname(path,name)
- if fullname then
- texio.write(string.format("(save: %s)",fullname))
- table.tofile(fullname,data,'return',false,true,false)
- end
-end
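-- editor's sketch, not part of the original file: the intended round trip through this
-- minimal cache layer (category and file names are illustrative)
--
--   local path = caches.getwritablepath("fonts", "test")   -- <cache>/luatex-cache/generic/fonts/test
--   caches.savedata(path, "demo", { hello = "world" })     -- writes .../temp-demo.lua
--   local data = caches.loaddata(caches.getreadablepaths("fonts", "test"), "demo")
--   -- data --> { hello = "world" }, provided the writable path is also readable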
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-ovr.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-ovr.lua
deleted file mode 100644
index 7bac97d0f7c..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-luat-ovr.lua
+++ /dev/null
@@ -1,46 +0,0 @@
-if not modules then modules = { } end modules ['luat-ovr'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Khaled Hosny and Elie Roux",
- copyright = "Luaotfload Development Team",
- license = "GNU GPL v2"
-}
-
-
-local write_nl, format, name = texio.write_nl, string.format, "luaotfload"
-local dummyfunction = function() end
-
-callbacks = {
- register = dummyfunction,
-}
-
-function logs.report(category,fmt,...)
- if fmt then
- write_nl('log', format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl('log', format("%s | %s",name,category))
- else
- write_nl('log', format("%s |",name))
- end
-end
-
-function logs.info(category,fmt,...)
- if fmt then
- write_nl(format("%s | %s: %s",name,category,format(fmt,...)))
- elseif category then
- write_nl(format("%s | %s",name,category))
- else
- write_nl(format("%s |",name))
- end
- io.flush()
-end
-
-function logs.simple(fmt,...)
- if fmt then
- write_nl('log', format("%s | %s",name,format(fmt,...)))
- else
- write_nl('log', format("%s |",name))
- end
-end
-
-tex.ctxcatcodes = luatexbase.catcodetables.latex
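-- editor's sketch, not part of the original file: the output format of these helpers;
-- with the fixed name "luaotfload" above,
--
--   logs.report("load font", "unknown script: %s", "klgn")
--
-- appends the line
--
--   luaotfload | load font: unknown script: klgn
--
-- to the log file; logs.info writes the same format to the terminal and logs.simple
-- omits the category part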
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-node-dum.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-node-dum.lua
deleted file mode 100644
index 9483e51fc95..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-node-dum.lua
+++ /dev/null
@@ -1,127 +0,0 @@
-if not modules then modules = { } end modules ['node-dum'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-nodes = nodes or { }
-fonts = fonts or { }
-attributes = attributes or { }
-
-local traverse_id = node.traverse_id
-local free_node = node.free
-local remove_node = node.remove
-local new_node = node.new
-
-local glyph = node.id('glyph')
-
--- fonts
-
-local fontdata = fonts.ids or { }
-
-function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.process_characters(head)
- nodes.inject_kerns(head)
- nodes.protect_glyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
-end
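-- editor's note, not part of the original file: the generic loader is expected to hang
-- this handler into luatex's node processing callbacks, roughly as sketched here (the
-- actual wiring lives in the luaotfload loader, not in this module)
--
--   callback.register("pre_linebreak_filter", nodes.simple_font_handler)
--   callback.register("hpack_filter",         nodes.simple_font_handler)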
-
-if tex.attribute[0] ~= 0 then
-
- texio.write_nl("log","!")
- texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
- texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
- texio.write_nl("log","! purposed so setting them at the TeX end might break the font handler.")
- texio.write_nl("log","!")
-
- tex.attribute[0] = 0 -- else no features
-
-end
-
-nodes.protect_glyphs = node.protect_glyphs
-nodes.unprotect_glyphs = node.unprotect_glyphs
-
-function nodes.process_characters(head)
- local usedfonts, done, prevfont = { }, false, nil
- for n in traverse_id(glyph,head) do
- local font = n.font
- if font ~= prevfont then
- prevfont = font
- local used = usedfonts[font]
- if not used then
- local tfmdata = fontdata[font]
- if tfmdata then
- local shared = tfmdata.shared -- we need to check shared, only when same features
- if shared then
- local processors = shared.processes
- if processors and #processors > 0 then
- usedfonts[font] = processors
- done = true
- end
- end
- end
- end
- end
- end
- if done then
- for font, processors in next, usedfonts do
- for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
- end
- end
- end
- return head, true
-end
-
--- helper
-
-function nodes.kern(k)
- local n = new_node("kern",1)
- n.kern = k
- return n
-end
-
-function nodes.remove(head, current, free_too)
- local t = current
- head, current = remove_node(head,current)
- if t then
- if free_too then
- free_node(t)
- t = nil
- else
- t.next, t.prev = nil, nil
- end
- end
- return head, current, t
-end
-
-function nodes.delete(head,current)
- return nodes.remove(head,current,true)
-end
-
-nodes.before = node.insert_before
-nodes.after = node.insert_after
-
--- attributes
-
-attributes.unsetvalue = -0x7FFFFFFF
-
-local numbers, last = { }, 127
-
-function attributes.private(name)
- local number = numbers[name]
- if not number then
- if last < 255 then
- last = last + 1
- end
- number = last
- numbers[name] = number
- end
- return number
-end
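-- editor's sketch, not part of the original file: repeated requests for the same name
-- reuse the same number, and numbers are handed out upwards from 128 (capped at 255)
--
--   attributes.private('state')      --> 128 on the first request
--   attributes.private('state')      --> 128 again (cached in 'numbers')
--   attributes.private('markbase')   --> 129, the next free slot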
diff --git a/Master/texmf-dist/tex/luatex/luaotfload/otfl-node-inj.lua b/Master/texmf-dist/tex/luatex/luaotfload/otfl-node-inj.lua
deleted file mode 100644
index fdea7f1f351..00000000000
--- a/Master/texmf-dist/tex/luatex/luaotfload/otfl-node-inj.lua
+++ /dev/null
@@ -1,443 +0,0 @@
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- tricky ... fonts.ids is not yet defined .. to be solved (maybe general tex ini)
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available). Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help.
-
-local next = next
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-fonts = fonts or { }
-fonts.tfm = fonts.tfm or { }
-fonts.ids = fonts.ids or { }
-
-local fontdata = fonts.ids
-
-local glyph = node.id('glyph')
-local kern = node.id('kern')
-
-local traverse_id = node.traverse_id
-local unset_attribute = node.unset_attribute
-local has_attribute = node.has_attribute
-local set_attribute = node.set_attribute
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local newkern = nodes.kern
-
-local markbase = attributes.private('markbase')
-local markmark = attributes.private('markmark')
-local markdone = attributes.private('markdone')
-local cursbase = attributes.private('cursbase')
-local curscurs = attributes.private('curscurs')
-local cursdone = attributes.private('cursdone')
-local kernpair = attributes.private('kernpair')
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- currently we do gpos/kern in a somewhat unofficial way, but when we
--- have the extra fields in glyphnodes to manipulate ht/dp/wd
--- explicitly I will provide an alternative; also, we can share
--- tables
-
--- for the moment we pass the r2l key ... volt/arabtype tests
-
-function nodes.set_cursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- set_attribute(start,cursbase,bound)
- set_attribute(nxt,curscurs,bound)
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
-end
-
-function nodes.set_pair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = has_attribute(current,kernpair)
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
- end
- return x, y, w, h -- no bound
-end
-
-function nodes.set_kern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- set_attribute(current,kernpair,bound)
- kerns[bound] = { rlmode, dx }
- return dx, bound
- else
- return 0, 0
- end
-end
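-- editor's note, not part of the original file: a worked number for the scaling above;
-- the factor passed in is the font's delta (sp per design unit), so for a 10pt font
-- with 1000 units per em (delta = 655360/1000 = 655.36) a gpos kern of 50 units becomes
-- 655.36 * 50 = 32768 sp = 0.5pt, which is what is stored in kerns[bound] and later
-- turned into an inserted kern node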
-
-function nodes.set_mark(start,base,factor,rlmode,ba,ma,index) --ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2])
- local bound = has_attribute(base,markbase)
- if bound then
- local mb = marks[bound]
- if mb then
- if not index then index = #mb + 1 end
- mb[index] = { dx, dy }
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
- return dx, dy, bound
- else
- logs.report("nodes mark", "possible problem, U+%04X is base without data (id: %s)",base.char,bound)
- end
- end
- index = index or 1
- bound = #marks + 1
- set_attribute(base,markbase,bound)
- set_attribute(start,markmark,bound)
- set_attribute(start,markdone,index)
- marks[bound] = { [index] = { dx, dy, rlmode } }
- return dx, dy, bound
-end
-
-function nodes.trace_injection(head)
- local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or ("unset")
- end
- local function report(...)
- logs.report("nodes finisher",...)
- end
- report("begin run")
- for n in traverse_id(glyph,head) do
- if n.subtype < 256 then
- local kp = has_attribute(n,kernpair)
- local mb = has_attribute(n,markbase)
- local mm = has_attribute(n,markmark)
- local md = has_attribute(n,markdone)
- local cb = has_attribute(n,cursbase)
- local cc = has_attribute(n,curscurs)
- report("char U+%05X, font=%s",n.char,n.font)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
- else
- report(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
- end
- end
- if mb then
- report(" markbase: bound=%s",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
- else
- report(" markmark: bound=%s, missing index",mm)
- end
- else
- m = m[1]
- report(" markmark: bound=%s, dx=%s, dy=%s",mm,m[1] or "?",m[2] or "?")
- end
- end
- if cb then
- report(" cursbase: bound=%s",cb)
- end
- if cc then
- local c = cursives[cc]
- report(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
- end
- end
- end
- report("end run")
-end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
-function nodes.inject_kerns(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
---~ if has_marks or has_cursives or has_kerns then
- if trace_injections then
- nodes.trace_injection(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk = false, { }, { }, { }, { }, { }, { }
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph,head) do
- if n.subtype < 256 then
- valid[#valid+1] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].marks
- end
- mk[n] = tm[n.char]
- local k = has_attribute(n,kernpair)
- if k then
---~ unset_attribute(k,kernpair)
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph,head) do
- if n.subtype < 256 then
- valid[#valid+1] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].marks
- end
- mk[n] = tm[n.char]
- end
- end
- end
- if #valid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,#valid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = has_attribute(n,cursbase)
- if p_cursbase then
- local n_curscurs = has_attribute(n,curscurs)
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,#valid do
- local p = valid[i]
- local p_markbase = has_attribute(p,markbase)
- if p_markbase then
- local mrks = marks[p_markbase]
- for n in traverse_id(glyph,p.next) do
- local n_markmark = has_attribute(n,markmark)
- if p_markbase == n_markmark then
- local index = has_attribute(n,markdone) or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- if rlmode and rlmode > 0 then
- -- new per 2010-10-06
- local k = wx[p]
- if k then -- maybe (d[1] - p.width) and/or + k[2]
- n.xoffset = p.xoffset - (p.width - d[1]) - k[2]
- else
- n.xoffset = p.xoffset - (p.width - d[1])
- end
- else
- local k = wx[p]
- if k then
- n.xoffset = p.xoffset - d[1] - k[2]
- else
- n.xoffset = p.xoffset - d[1]
- end
- end
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- end
- else
- break
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil, can be sped up when w == nil
- local rl, x, w, r2l = k[1], k[2] or 0, k[4] or 0, k[6]
- local wx = w - x
- if r2l then
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k))
- else
- insert_node_before(head,n,newkern(k))
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- nodes.trace_injection(head)
- end
- for n in traverse_id(glyph,head) do
- if n.subtype < 256 then
- local k = has_attribute(n,kernpair)
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- local r2l = kk[6]
- local wx = w - x
- if r2l then
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- else
- -- no tracing needed
- end
- return head, false
-end