if not modules then modules = { } end modules ['util-sci'] = {
    version   = 1.001,
    comment   = "companion to m-scite.mkiv",
    author    = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
    copyright = "PRAGMA ADE / ConTeXt Development Team",
    license   = "see context related readme files"
}

local gsub, sub, find = string.gsub, string.sub, string.find
local concat = table.concat
local formatters = string.formatters
local lpegmatch = lpeg.match
local setmetatableindex = table.setmetatableindex

local scite     = scite or { }
utilities.scite = scite

local report = logs.reporter("scite")

do
    local lexerroot = "c:/data/system/scite/wscite/context/lexers"
    if not lexerroot then
        lexerroot = file.dirname(resolvers.find_file("scite-context-lexer.lua"))
    end
    if lfs.isdir(lexerroot) then
        package.extraluapath(lexerroot)
        package.extraluapath(lexerroot.."/themes")
        package.extraluapath(lexerroot.."/data")
        report("using lexer root %a",lexerroot)
    else
        report("no valid lexer root")
    end
end
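
-- The search path can be extended further at runtime; a hedged sketch (the
-- location below is purely hypothetical) using the same helper as above:
--
-- package.extraluapath("/opt/context/scite/lexers")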

local knownlexers  = {
    tex  = "tex", mkiv = "tex", mkvi = "tex", mkxi = "tex", mkix = "tex", mkii = "tex", cld  = "tex",
    lua  = "lua", lfg  = "lua", lus = "lua",
    mp = "mps", mpiv = "mps", mpii = "mps",
    w = "web", ww = "web",
    c = "cpp", h = "cpp", cpp = "cpp", hpp = "cpp", cxx = "cpp", hxx = "cpp",
    xml = "xml", lmx  = "xml", ctx = "xml", xsl = "xml", xsd = "xml", rlx = "xml", css = "xml", dtd = "xml",
    bib = "bibtex",
    rme = "txt",
 -- todo: pat/hyp ori
}

lexer = nil -- main lexer, global (for the moment needed for themes)

local function loadscitelexer()
    if not lexer then
        lexer = require("scite-context-lexer")
        require("scite-context-theme") -- uses lexer
        if lexer then
            lexer.context.disablewordcheck()
        end
    end
    return lexer
end

local loadedlexers = setmetatableindex(function(t,k)
    local l = knownlexers[k] or k
    loadscitelexer()
    local v = lexer.load(formatters["scite-context-lexer-%s"](l))
    t[l] = v
    t[k] = v
    return v
end)

scite.loadedlexers   = loadedlexers
scite.knownlexers    = knownlexers
scite.loadscitelexer = loadscitelexer
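
-- A usage sketch (the suffixes are just examples): indexing loadedlexers with
-- a file suffix lazily loads the matching lexer (here scite-context-lexer-tex),
-- and aliases such as "mkiv" and "tex" end up sharing one cached instance.
--
-- local texlexer = scite.loadedlexers["mkiv"] -- triggers the load
-- assert(texlexer == scite.loadedlexers["tex"])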

local f_fore_bold  = formatters['.%s { display: inline ; font-weight: bold   ; color: #%02X%02X%02X ; }']
local f_fore_none  = formatters['.%s { display: inline ; font-weight: normal ; color: #%02X%02X%02X ; }']
local f_none_bold  = formatters['.%s { display: inline ; font-weight: bold   ; }']
local f_none_none  = formatters['.%s { display: inline ; font-weight: normal ; }']
local f_div_class  = formatters['<div class="%s">%s</div>']
local f_linenumber = formatters['<div class="linenumber">%s</div>\n']
local f_div_number = formatters['.linenumber { display: inline-block ; font-weight: normal ; width: %sem ; margin-right: 2em ; padding-right: .25em ; text-align: right ; background-color: #C7C7C7 ; }']

local replacer_regular = lpeg.replacer {
    ["<"]  = "&lt;",
    [">"]  = "&gt;",
    ["&"]  = "&amp;",
}

local linenumber  = 0
local linenumbers = { }

local replacer_numbered = lpeg.replacer {
    ["<"]  = "&lt;",
    [">"]  = "&gt;",
    ["&"]  = "&amp;",
    [lpeg.patterns.newline] = function()
        linenumber = linenumber + 1
        linenumbers[linenumber] = f_linenumber(linenumber)
        return "\n"
    end,
}
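
-- For reference (not executed): matching the numbered replacer against
--
--   "if a < b then\nreturn a & b\nend"
--
-- yields "if a &lt; b then\nreturn a &amp; b\nend" and, as a side effect,
-- increments linenumber to 2 and stores two formatted line number divs.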

local css = nil

local function exportcsslexing()
    if not css then
        loadscitelexer()
        local function black(f)
            -- black: either a single zero grey component or an all-zero rgb
            -- triplet (the original "#f == 0" test could never be true)
            return (#f == 1 and f[1] == 0) or ((f[1] == f[2]) and (f[2] == f[3]) and (f[3] == 0))
        end
        local result, r = { }, 0
        for k, v in table.sortedhash(lexer.context.styles) do
            local bold = v.bold
            local fore = v.fore
            r = r + 1
            if fore and not black(fore) then
                local cr, cg, cb = fore[1], fore[2], fore[3]
                result[r] = (bold and f_fore_bold or f_fore_none)(k,cr,cg or cr,cb or cr)
            else
                result[r] = (bold and f_none_bold or f_none_none)(k)
            end
        end
        css = concat(result,"\n")
    end
    return css
end
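
-- A sketch of the generated css, assuming a theme with a bold colored
-- "keyword" style and a plain black "default" style (the real class names and
-- colors come from scite-context-theme, so these two rules are illustrative):
--
-- .keyword { display: inline ; font-weight: bold   ; color: #00007F ; }
-- .default { display: inline ; font-weight: normal ; }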

local function exportwhites()
    return setmetatableindex(function(t,k)
        local v = find(k,"white",1,true) and true or false
        t[k] = v
        return v
    end)
end

local function exportstyled(lexer,text,numbered)
    local result = lexer.lex(lexer,text,0)
    local start  = 1
    local whites = exportwhites()
    local buffer = { }
    local b      = 0
    linenumber   = 0
    linenumbers  = { }
    local replacer = numbered and replacer_numbered or replacer_regular
    local n = #result
    for i=1,n,2 do
        local ii = i + 1
        local style = result[i]
        local position = result[ii]
        local txt = sub(text,start,position-1)
        txt = lpegmatch(replacer,txt)
        b = b + 1
        if whites[style] then
            buffer[b] = txt
        else
            buffer[b] = f_div_class(style,txt)
        end
        start = position
    end
    buffer = concat(buffer)
    return buffer, concat(linenumbers)
end
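
-- A sketch of the data flow, assuming a lexer that follows the usual
-- scintillua convention: lexer.lex returns a flat list alternating style names
-- and the position where the next token starts, e.g.
--
--   { "comment", 15, "whitespace", 16, "keyword", 21, ... }
--
-- which the loop above turns into <div class="style">...</div> snippets, while
-- the numbered replacer collects one linenumber div per line of input.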

local function exportcsslinenumber()
    return f_div_number(#tostring(linenumber)/2+1)
end
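
-- For reference (not executed): after exporting a file of, say, 120 lines the
-- last linenumber is 120, so the rule becomes
--
--   .linenumber { ... width: 2.5em ; ... }
--
-- i.e. the gutter width grows with the number of digits (#"120"/2 + 1 = 2.5).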

local htmlfile = utilities.templates.replacer([[
<?xml version="1.0"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
    <html xmlns="http://www.w3.org/1999/xhtml">
    <title>%title%</title>
    <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
    <style type="text/css"><!--
%lexingstyles%
%numberstyles%
    --></style>
    <body>
        <table style="padding:0; margin:0;">
            <tr>
                <td><pre>%linenumbers%</pre></td>
                <td><pre>%lexedcontent%</pre></td>
            </tr>
        </table>
    </body>
</html>
]])

function scite.tohtml(data,lexname,numbered,title)
    local source, lines = exportstyled(loadedlexers[lexname],data or "",numbered)
    return htmlfile {
        lexedcontent = source, -- before numberstyles
        lexingstyles = exportcsslexing(),
        numberstyles = exportcsslinenumber(),
        title        = title or "context source file",
        linenumbers  = lines,
    }
end
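
-- A usage sketch (hypothetical filenames): turn a metapost source into a
-- standalone page with line numbers.
--
-- local page = scite.tohtml(io.loaddata("figure.mp"),"mp",true,"figure.mp")
-- io.savedata("figure.html",page)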

local function maketargetname(name)
    if name then
        return file.removesuffix(name) .. "-" .. file.suffix(name) .. ".html"
    else
        return "util-sci.html"
    end
end
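
-- For example (not executed): maketargetname("strc-sec.mkiv") returns
-- "strc-sec-mkiv.html", so the original suffix stays visible in the target.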

function scite.filetohtml(filename,lexname,targetname,numbered,title)
    io.savedata(targetname or "util-sci.html",scite.tohtml(io.loaddata(filename),lexname or file.suffix(filename),numbered,title or filename))
end

function scite.css()
    return exportcsslexing() .. "\n" .. exportcsslinenumber()
end

function scite.html(data,lexname,numbered)
    return exportstyled(loadedlexers[lexname],data or "",numbered)
end
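
-- A usage sketch (the snippet is just an example): scite.html returns the
-- lexed body and the line number column as two strings, without wrapping them
-- in the page template above.
--
-- local body, numbers = scite.html("\\starttext\nHello\n\\stoptext","tex",true)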

local f_tree_entry = formatters['<a href="%s" class="dir-entry">%s</a>']

local htmlfile = utilities.templates.replacer([[
<?xml version="1.0"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
    <html xmlns="http://www.w3.org/1999/xhtml">
    <title>%title%</title>
    <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
    <style type="text/css"><!--
%styles%
    --></style>
    <body>
        <pre>
%dirlist%
        </pre>
    </body>
</html>
]])

function scite.converttree(sourceroot,targetroot,numbered)
    if lfs.isdir(sourceroot) then
        statistics.starttiming()
        local skipped = { }
        local noffiles = 0
        dir.makedirs(targetroot)
        local function scan(sourceroot,targetroot,subpath)
            local tree = { }
            for name in lfs.dir(sourceroot) do
                if name ~= "." and name ~= ".." then
                    local sourcename = file.join(sourceroot,name)
                    local targetname = file.join(targetroot,name)
                    local mode = lfs.attributes(sourcename,'mode')
                    local path = subpath and file.join(subpath,name) or name
                    if mode == 'file' then
                        local filetype   = file.suffix(sourcename)
                        local basename   = file.basename(name)
                        local targetname = maketargetname(targetname)
                        local fullname   = path -- relative name (already includes the file name), used as the page title
                        if knownlexers[filetype] then
                            report("converting file %a to %a",sourcename,targetname)
                            scite.filetohtml(sourcename,nil,targetname,numbered,fullname)
                            noffiles = noffiles + 1
                            tree[#tree+1] = f_tree_entry(file.basename(targetname),basename)
                        else
                            skipped[filetype] = true
                            report("no lexer for %a",sourcename)
                        end
                    else
                        dir.makedirs(targetname)
                        scan(sourcename,targetname,path)
                        tree[#tree+1] = f_tree_entry(file.join(name,"files.html"),name)
                    end
                end
            end
            report("saving tree in %a",targetroot)
            local htmldata = htmlfile {
                dirlist = concat(tree,"\n"),
                styles  = "",
                title   = subpath or "context dir listing", -- subpath is nil at the top level
            }
            io.savedata(file.join(targetroot,"files.html"),htmldata)
        end
        scan(sourceroot,targetroot)
        if next(skipped) then
            report("skipped filetypes: %a",table.concat(table.sortedkeys(skipped)," "))
        end
        statistics.stoptiming()
        report("conversion time for %s files: %s",noffiles,statistics.elapsedtime())
    end
end

-- scite.filetohtml("strc-sec.mkiv",nil,"e:/tmp/util-sci.html",true)
-- scite.filetohtml("syst-aux.mkiv",nil,"e:/tmp/util-sci.html",true)

-- scite.converttree("t:/texmf/tex/context","e:/tmp/html/context",true)

return scite