local gsub, sub, find = string.gsub, string.sub, string.find
local concat = table.concat
local formatters = string.formatters
local lpegmatch = lpeg.match
local setmetatableindex = table.setmetatableindex
local scite = scite or { }
utilities.scite = scite
local report = logs.reporter("scite")
local lexerroot = file.dirname(resolvers.find_file("scite-context-lexer.lua"))
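
-- Map file suffixes onto the lexers that are available; several suffixes
-- share one lexer (all ConTeXt source formats use the tex lexer, and so on).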
local knownlexers = {
tex = "tex", mkiv = "tex", mkvi = "tex", mkxi = "tex", mkix = "tex", mkii = "tex", cld = "tex",
lua = "lua", lfg = "lua", lus = "lua",
w = "web", ww = "web",
c = "cpp", h = "cpp", cpp = "cpp", hpp = "cpp", cxx = "cpp", hxx = "cpp",
xml = "xml", lmx = "xml", ctx = "xml", xsl = "xml", xsd = "xml", rlx = "xml", css = "xml", dtd = "xml",
bib = "bibtex",
rme = "txt",
-- todo: pat/hyp ori
}
lexer = nil -- main lexer, global (for the moment needed for themes)
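
-- The generic lexer and the color theme are loaded only once, relative to
-- the directory where the lexer files live.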
local function loadscitelexer()
    if not lexer then
        dir.push(lexerroot)
        lexer = dofile("scite-context-lexer.lua")
        dofile("themes/scite-context-theme.lua")
        dir.pop()
    end
    return lexer
end
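
-- Language specific lexers are loaded on demand and cached under both the
-- file suffix and the lexer name, so later lookups are plain table accesses.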
local loadedlexers = setmetatableindex(function(t,k)
    local l = knownlexers[k] or k
    dir.push(lexerroot)
    loadscitelexer()
    local v = lexer.load(formatters["scite-context-lexer-%s"](l))
    dir.pop()
    t[l] = v
    t[k] = v
    return v
end)
scite.loadedlexers = loadedlexers
scite.knownlexers = knownlexers
scite.loadscitelexer = loadscitelexer
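
-- Formatters for the CSS rules (one class per style) and the HTML snippets
-- that the exporter emits.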
local f_fore_bold = formatters['.%s { display: inline ; font-weight: bold ; color: #%s%s%s ; }']
local f_fore_none = formatters['.%s { display: inline ; font-weight: normal ; color: #%s%s%s ; }']
local f_none_bold = formatters['.%s { display: inline ; font-weight: bold ; }']
local f_none_none = formatters['.%s { display: inline ; font-weight: normal ; }']
local f_div_class = formatters['<div class="%s">%s</div>']
local f_linenumber = formatters['\n<div class="linenumber">%s</div>']
local f_div_number = formatters['.linenumber { display: inline-block ; font-weight: normal ; width: %sem ; margin-right: 2em ; padding-right: .25em ; text-align: right ; background-color: #C7C7C7 ; }']
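
-- Escape the characters that are special in HTML; the numbered variant also
-- maps every newline onto a line number div, so numbering comes for free
-- while escaping.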
local replacer_regular = lpeg.replacer {
    ["<"] = "&lt;",
    [">"] = "&gt;",
    ["&"] = "&amp;",
}

local linenumber = 0

local replacer_numbered = lpeg.replacer {
    ["<"] = "&lt;",
    [">"] = "&gt;",
    ["&"] = "&amp;",
    [lpeg.patterns.newline] = function() linenumber = linenumber + 1 return f_linenumber(linenumber) end,
}
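
-- Turn the styles of the loaded theme into CSS rules, once; plain black text
-- only needs a font weight, other colors get an explicit color property.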
local css = nil

local function exportcsslexing()
    if not css then
        loadscitelexer()
        local function black(f)
            return (f[1] == f[2]) and (f[2] == f[3]) and (f[3] == '00')
        end
        local result, r = { }, 0
        for k, v in table.sortedhash(lexer.context.styles) do
            local bold = v.bold
            local fore = v.fore
            r = r + 1
            if fore and not black(fore) then
                if bold then
                    result[r] = f_fore_bold(k,fore[1],fore[2],fore[3])
                else
                    result[r] = f_fore_none(k,fore[1],fore[2],fore[3])
                end
            else
                if bold then
                    result[r] = f_none_bold(k)
                else
                    result[r] = f_none_none(k)
                end
            end
        end
        css = concat(result,"\n")
    end
    return css
end
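
-- Styles with "white" in their name represent whitespace; their text is
-- emitted verbatim, without a wrapping div.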
local function exportwhites()
    return setmetatableindex(function(t,k)
        local v = find(k,"white") and true or false
        t[k] = v
        return v
    end)
end
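
-- The lexer returns a flat array of alternating style names and positions,
-- each position pointing one slot past the end of its slice; we walk the
-- array in pairs and wrap each slice of the text.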
local function exportstyled(lexer,text,numbered)
    local result = lexer.lex(lexer,text,0)
    local start = 1
    local whites = exportwhites()
    local buffer, b = { "<pre>" }, 1
    linenumber = 1
    local replacer = numbered and replacer_numbered or replacer_regular
    if numbered then
        b = b + 1
        buffer[b] = f_linenumber(1)
    end
    local n = #result
    for i=1,n,2 do
        local ii = i + 1
        local style = result[i]
        local position = result[ii]
        local txt = sub(text,start,position-1)
        if ii == n then
            txt = gsub(txt,"[%s]+$","")
        end
        txt = lpegmatch(replacer,txt)
        b = b + 1
        if whites[style] then
            buffer[b] = txt
        else
            buffer[b] = f_div_class(style,txt)
        end
        start = position
    end
    buffer[b+1] = "</pre>"
    buffer = concat(buffer)
    return buffer
end
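
-- The width of the line number column is derived from the number of digits
-- in the last assigned line number: half an em per digit plus some slack.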
local function exportcsslinenumber()
    return f_div_number(#tostring(linenumber)/2+1)
end
local htmlfile = utilities.templates.replacer([[
<?xml version="1.0"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<title>context util-sci web page: text</title>
<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
<style type="text/css"><!--
%lexingstyles%
%numberstyles%
--></style>
<body>
%lexedcontent%
</body>
</html>
]])
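
-- The public interface: convert a string or a file into a complete html
-- page, or fetch the styled fragment and the css separately.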
function scite.tohtml(data,lexname,numbered)
    return htmlfile {
        lexedcontent = exportstyled(loadedlexers[lexname],data or "",numbered), -- must run before exportcsslinenumber, which depends on the final linenumber
        lexingstyles = exportcsslexing(),
        numberstyles = exportcsslinenumber(),
    }
end
function scite.filetohtml(filename,lexname,targetname,numbered)
    local data = scite.tohtml(io.loaddata(filename),lexname or file.suffix(filename),numbered)
    io.savedata(targetname or "util-sci.html",data)
end
function scite.css()
    return exportcsslexing() .. "\n" .. exportcsslinenumber()
end
function scite.html(data,lexname,numbered)
    return exportstyled(loadedlexers[lexname],data or "",numbered)
end
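
-- What follows generates directory listings. Reusing the htmlfile name for a
-- second template is harmless: the functions above already captured the
-- first one in their closures.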
local f_tree_entry = formatters['<a href="%s" class="dir-entry">%s</a>']
local htmlfile = utilities.templates.replacer([[
<?xml version="1.0"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<title>context util-sci web page: text</title>
<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
<style type="text/css"><!--
%styles%
--></style>
<body>
<pre>
%dirlist%
</pre>
</body>
</html>
]])
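
-- Convert a whole tree of sources into a parallel tree of html files; every
-- directory also gets a files.html page that lists its entries.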
function scite.converttree(sourceroot,targetroot,numbered)
    if lfs.isdir(sourceroot) then
        statistics.starttiming()
        local skipped = { }
        local noffiles = 0
        dir.makedirs(targetroot)
        local function scan(sourceroot,targetroot)
            local tree = { }
            for name in lfs.dir(sourceroot) do
                if name ~= "." and name ~= ".." then
                    local sourcename = file.join(sourceroot,name)
                    local targetname = file.join(targetroot,name)
                    local mode = lfs.attributes(sourcename,'mode')
                    if mode == 'file' then
                        local filetype = file.suffix(sourcename)
                        local basename = file.basename(name)
                        local targetname = file.replacesuffix(targetname,"html")
                        if knownlexers[filetype] then
                            report("converting file %a to %a",sourcename,targetname)
                            scite.filetohtml(sourcename,nil,targetname,numbered)
                            noffiles = noffiles + 1
                            tree[#tree+1] = f_tree_entry(file.basename(targetname),basename)
                        else
                            skipped[filetype] = true
                            report("no lexer for %a",sourcename)
                        end
                    else
                        dir.makedirs(targetname)
                        scan(sourcename,targetname)
                        tree[#tree+1] = f_tree_entry(file.join(name,"files.html"),name)
                    end
                end
            end
            local treename = file.join(targetroot,"files.html")
            report("saving tree in %a",treename)
            local htmldata = htmlfile {
                dirlist = concat(tree,"\n"),
                styles = "",
            }
            io.savedata(treename,htmldata)
        end
        scan(sourceroot,targetroot)
        if next(skipped) then
            report("skipped filetypes: %a",concat(table.sortedkeys(skipped)," "))
        end
        statistics.stoptiming()
        report("conversion time for %s files: %s",noffiles,statistics.elapsedtime())
    end
end
-- scite.filetohtml("strc-sec.mkiv",nil,"e:/tmp/util-sci.html",true)
-- scite.filetohtml("syst-aux.mkiv",nil,"e:/tmp/util-sci.html",true)
-- scite.converttree("t:/texmf/tex/context","e:/tmp/html/context",true)
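
-- A minimal (hypothetical) example of embedding in an existing page: fetch
-- the css and the bare styled fragment separately:
--
-- local css  = scite.css()
-- local body = scite.html(io.loaddata("demo.lua"),"lua",false)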
return scite