--- Read a comma or tab (or other delimiter) separated file.
-- This version of a CSV reader differs from others I've seen in that it
--
-- + handles embedded newlines in fields (if they're delimited with double
--   quotes)
-- + is line-ending agnostic
-- + reads the file in blocks, so it can potentially handle large
--   files.
--
-- Of course, for such a simple format, CSV is horribly complicated, so it
-- likely gets something wrong.
-- (c) Copyright 2013-2014 Incremental IP Limited.
-- (c) Copyright 2014 Kevin Martin
-- Available under the MIT licence. See LICENSE for more information.
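--
-- Illustrative usage (assumes this file is on the package path and required
-- as "csv"; the file name and column name below are made up):
--
--   local csv = require("csv")
--   local f = assert(csv.open("data.csv", { header = true }))
--   for fields in f:lines() do
--     -- with header = true, fields is keyed by header name;
--     -- without it, fields is keyed by column number
--     print(fields["name"])
--   end
--   f:close()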
local DEFAULT_BUFFER_BLOCK_SIZE = 1024 * 1024
------------------------------------------------------------------------------
local function trim_space(s)
return s:match("^%s*(.-)%s*$")
end
local function fix_quotes(s)
-- the sub(..., -2) is to strip the trailing quote
return string.sub(s:gsub('""', '"'), 1, -2)
end
------------------------------------------------------------------------------
local column_map = {}
column_map.__index = column_map
local function normalise_string(s)
return (s:lower():gsub("[^%w%d]+", " "):gsub("^ *(.-) *$", "%1"))
end
--- Parse a list of columns.
-- The main job here is normalising column names and dealing with columns
-- for which we have more than one possible name in the header.
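--
-- The `columns` argument maps output field names to specs.  For illustration
-- (the field and header names here are invented), a spec can be:
--
--   columns = {
--     code  = "Product Code",              -- string: an alternative header name
--     qty   = tonumber,                    -- function: a transform (header must match "qty")
--     price = { names = { "Price", "Unit Price" },   -- table: name/names, transform, default
--               transform = tonumber, default = 0 },
--   }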
function column_map:new(columns)
local name_map = {}
for n, v in pairs(columns) do
local names
local t
if type(v) == "table" then
t = { transform = v.transform, default = v.default }
if v.name then
names = { normalise_string(v.name) }
elseif v.names then
names = v.names
for i, n in ipairs(names) do names[i] = normalise_string(n) end
end
else
if type(v) == "function" then
t = { transform = v }
else
t = {}
if type(v) == "string" then
names = { normalise_string(v) }
end
end
end
if not names then
names = { (n:lower():gsub("[^%w%d]+", " ")) }
end
t.name = n
for _, n in ipairs(names) do
name_map[n:lower()] = t
end
end
return setmetatable({ name_map = name_map }, column_map)
end
--- Map "virtual" columns to file columns.
-- Once we've read the header, work out which columns we're interested in and
-- what to do with them. Mostly this is about checking we've got the columns
-- we need and writing a nice complaint if we haven't.
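-- Returns nothing if none of our columns appear in the header (so the caller
-- will try the next line as a header), raises an error naming the missing
-- columns if only some are found, and returns true (setting self.index_map)
-- when the header is usable.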
function column_map:read_header(header)
local index_map = {}
-- Match the columns in the file to the columns in the name map
local found = {}
local found_any
for i, word in ipairs(header) do
word = normalise_string(word)
local r = self.name_map[word]
if r then
index_map[i] = r
found[r.name] = true
found_any = true
end
end
if not found_any then return end
-- check we found all the columns we need
local not_found = {}
for name, r in pairs(self.name_map) do
if not found[r.name] then
local nf = not_found[r.name]
if nf then
nf[#nf+1] = name
else
not_found[r.name] = { name }
end
end
end
-- If any columns are missing, assemble an error message
if next(not_found) then
local problems = {}
for k, v in pairs(not_found) do
local missing
if #v == 1 then
missing = "'"..v[1].."'"
else
missing = v[1]
for i = 2, #v - 1 do
missing = missing..", '"..v[i].."'"
end
missing = missing.." or '"..v[#v].."'"
end
problems[#problems+1] = "Couldn't find a column named "..missing
end
error(table.concat(problems, "\n"), 0)
end
self.index_map = index_map
return true
end
function column_map:transform(value, index)
local field = self.index_map[index]
if field then
if field.transform then
local ok
ok, value = pcall(field.transform, value)
if not ok then
error(("Error reading field '%s': %s"):format(field.name, value), 0)
end
end
return value or field.default, field.name
end
end
------------------------------------------------------------------------------
local file_buffer = {}
file_buffer.__index = file_buffer
function file_buffer:new(file, buffer_block_size)
return setmetatable({
file = file,
buffer_block_size = buffer_block_size or DEFAULT_BUFFER_BLOCK_SIZE,
buffer_start = 0,
buffer = "",
}, file_buffer)
end
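-- Positions passed to truncate, find and sub are absolute (1-based) offsets
-- into the file; buffer_start is the number of bytes already discarded from
-- the front, so subtracting it converts an absolute offset into an offset
-- within the current buffer string.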
--- Cut the front off the buffer if we've already read it
function file_buffer:truncate(p)
p = p - self.buffer_start
if p > self.buffer_block_size then
local remove = self.buffer_block_size *
math.floor((p-1) / self.buffer_block_size)
self.buffer = self.buffer:sub(remove + 1)
self.buffer_start = self.buffer_start + remove
end
end
--- Find something in the buffer, extending it if necessary
function file_buffer:find(pattern, init)
while true do
local first, last, capture =
self.buffer:find(pattern, init - self.buffer_start)
-- if we found nothing, or the last character is at the end of the
-- buffer (and the match could potentially be longer) then read some
-- more.
if not first or last == #self.buffer then
local s = self.file:read(self.buffer_block_size)
if not s then
if not first then
return
else
return first + self.buffer_start, last + self.buffer_start, capture
end
end
self.buffer = self.buffer..s
else
return first + self.buffer_start, last + self.buffer_start, capture
end
end
end
--- Extend the buffer so we can see more
function file_buffer:extend(offset)
local extra = offset - #self.buffer - self.buffer_start
if extra > 0 then
local size = self.buffer_block_size *
math.ceil(extra / self.buffer_block_size)
local s = self.file:read(size)
if not s then return end
self.buffer = self.buffer..s
end
end
--- Get a substring from the buffer, extending it if necessary
function file_buffer:sub(a, b)
self:extend(b)
b = b == -1 and b or b - self.buffer_start
return self.buffer:sub(a - self.buffer_start, b)
end
--- Close a file buffer
function file_buffer:close()
self.file:close()
self.file = nil
end
------------------------------------------------------------------------------
local separator_candidates = { ",", "\t", "|" }
local guess_separator_params = { record_limit = 8; }
local function try_separator(buffer, sep, f)
guess_separator_params.separator = sep
local min, max = math.huge, 0
local lines, split_lines = 0, 0
local iterator = coroutine.wrap(function() f(buffer, guess_separator_params) end)
for t in iterator do
min = math.min(min, #t)
max = math.max(max, #t)
split_lines = split_lines + (t[2] and 1 or 0)
lines = lines + 1
end
if split_lines / lines > 0.75 then
return max - min
else
return math.huge
end
end
--- If the user hasn't specified a separator, try to work out what it is.
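-- The heuristic: parse the first few records (record_limit above) with each
-- candidate separator; a candidate only qualifies if more than 75% of the
-- lines split into at least two fields, and among qualifying candidates we
-- pick the one with the smallest spread (max - min) in field counts.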
local function guess_separator(buffer, f)
local best_separator, lowest_diff = "", math.huge
for _, s in ipairs(separator_candidates) do
local ok, diff = pcall(function() return try_separator(buffer, s, f) end)
if ok and diff < lowest_diff then
best_separator = s
lowest_diff = diff
end
end
return best_separator
end
local unicode_BOMS =
{
{
length = 2,
BOMS =
{
["\254\255"] = true, -- UTF-16 big-endian
["\255\254"] = true, -- UTF-16 little-endian
}
},
{
length = 3,
BOMS =
{
["\239\187\191"] = true, -- UTF-8
}
}
}
local function find_unicode_BOM(sub)
for _, x in ipairs(unicode_BOMS) do
local code = sub(1, x.length)
if x.BOMS[code] then
return x.length
end
end
return 0
end
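-- Note that only the BOM bytes themselves are skipped; the rest of the file
-- is still read as plain bytes and is not transcoded.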
--- Iterate through the records in a file.
-- Since records might span more than one line (if there's a newline inside
-- quotes) and line-endings might not be native, we read the file in chunks
-- using a file_buffer rather than line-by-line with io.lines.
local function separated_values_iterator(buffer, parameters)
local field_start = 1
local advance
if buffer.truncate then
advance = function(n)
field_start = field_start + n
buffer:truncate(field_start)
end
else
advance = function(n)
field_start = field_start + n
end
end
local function field_sub(a, b)
b = b == -1 and b or b + field_start - 1
return buffer:sub(a + field_start - 1, b)
end
local function field_find(pattern, init)
init = init or 1
local f, l, c = buffer:find(pattern, init + field_start - 1)
if not f then return end
return f - field_start + 1, l - field_start + 1, c
end
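-- field_sub and field_find take positions relative to the start of the
-- current field (1 is the field's first character) and translate them to and
-- from absolute buffer positions using field_start.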
-- Is there some kind of Unicode BOM here?
advance(find_unicode_BOM(field_sub))
-- Start reading the file
local sep = "(["..(parameters.separator or
guess_separator(buffer, separated_values_iterator)).."\n\r])"
local line_start = 1
local line = 1
local field_count, fields, starts, nonblanks = 0, {}, {}
local header, header_read
local field_start_line, field_start_column
local record_count = 0
local function problem(message)
error(("%s:%d:%d: %s"):
format(parameters.filename, field_start_line, field_start_column,
message), 0)
end
while true do
local field_end, sep_end, this_sep
local tidy
field_start_line = line
field_start_column = field_start - line_start + 1
-- If the field is quoted, go find the other quote
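-- (a doubled quote inside a quoted field is an escaped quote, so we scan
-- with '"("?)' and stop at the first quote that isn't followed by another)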
if field_sub(1, 1) == '"' then
advance(1)
local current_pos = 0
repeat
local a, b, c = field_find('"("?)', current_pos + 1)
current_pos = b
until c ~= '"'
if not current_pos then problem("unmatched quote") end
tidy = fix_quotes
field_end, sep_end, this_sep = field_find(" *([^ ])", current_pos+1)
if this_sep and not this_sep:match(sep) then problem("unmatched quote") end
else
field_end, sep_end, this_sep = field_find(sep, 1)
tidy = trim_space
end
-- Look for the separator or a newline or the end of the file
field_end = (field_end or 0) - 1
-- Read the field, then convert all the line endings to \n, and
-- count any embedded line endings
local value = field_sub(1, field_end)
value = value:gsub("\r\n", "\n"):gsub("\r", "\n")
for nl in value:gmatch("\n()") do
line = line + 1
line_start = nl + field_start
end
value = tidy(value)
if #value > 0 then nonblanks = true end
field_count = field_count + 1
-- Insert the value into the table for this "line"
local key
if parameters.column_map and header_read then
local ok
ok, value, key = pcall(parameters.column_map.transform,
parameters.column_map, value, field_count)
if not ok then problem(value) end
elseif header then
key = header[field_count]
else
key = field_count
end
if key then
fields[key] = value
starts[key] = { line=field_start_line, column=field_start_column }
end
-- if we ended on a newline then yield the fields on this line.
if not this_sep or this_sep == "\r" or this_sep == "\n" then
if parameters.column_map and not header_read then
header_read = parameters.column_map:read_header(fields)
elseif parameters.header and not header_read then
if nonblanks or field_count > 1 then -- ignore blank lines
header = fields
header_read = true
end
else
if nonblanks or field_count > 1 then -- ignore blank lines
coroutine.yield(fields, starts)
record_count = record_count + 1
if parameters.record_limit and
record_count >= parameters.record_limit then
break
end
end
end
field_count, fields, starts, nonblanks = 0, {}, {}
end
-- If we *really* didn't find a separator then we're done.
if not sep_end then break end
-- If we ended on a newline then count it.
if this_sep == "\r" or this_sep == "\n" then
if this_sep == "\r" and field_sub(sep_end+1, sep_end+1) == "\n" then
sep_end = sep_end + 1
end
line = line + 1
line_start = field_start + sep_end
end
advance(sep_end)
end
end
------------------------------------------------------------------------------
local buffer_mt =
{
lines = function(t)
return coroutine.wrap(function()
separated_values_iterator(t.buffer, t.parameters)
end)
end,
close = function(t)
if t.buffer.close then t.buffer:close() end
end,
name = function(t)
return t.parameters.filename
end,
}
buffer_mt.__index = buffer_mt
--- Use an existing file or buffer as a stream to read csv from.
-- (A buffer is just something that looks like a string in that we can do
-- `buffer:sub()` and `buffer:find()`)
-- @return a file object
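-- For illustration: use("a,b\n1,2\n", { header = true }) reads from a string,
-- and use(nil, {}) reads from stdin.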
local function use(
buffer, -- ?string|file|buffer: the buffer to read from. If it's:
-- - a string, read from that;
-- - a file, turn it into a file_buffer;
-- - nil, read from stdin
-- otherwise assume it's already a buffer.
parameters) -- ?table: parameters controlling reading the file.
-- See README.md
parameters = parameters or {}
parameters.filename = parameters.filename or "<unknown>"
parameters.column_map = parameters.columns and
column_map:new(parameters.columns)
if not buffer then
buffer = file_buffer:new(io.stdin)
elseif io.type(buffer) == "file" then
buffer = file_buffer:new(buffer)
end
local f = { buffer = buffer, parameters = parameters }
return setmetatable(f, buffer_mt)
end
------------------------------------------------------------------------------
--- Open a file for reading as a delimited file
-- @return a file object
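-- For illustration (the file name is made up):
--   local f, err = open("data.csv", { separator = "|", header = true })
--   if not f then print(err) end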
local function open(
filename, -- string: name of the file to open
parameters) -- ?table: parameters controlling reading the file.
-- See README.md
local file, message = io.open(filename, "r")
if not file then return nil, message end
parameters = parameters or {}
parameters.filename = filename
return use(file_buffer:new(file), parameters)
end
------------------------------------------------------------------------------
local function makename(s)
local t = {}
t[#t+1] = "<(String) "
t[#t+1] = (s:gmatch("[^\n]+")() or ""):sub(1,15)
if #t[#t] > 14 then t[#t+1] = "..." end
t[#t+1] = " >"
return table.concat(t)
end
--- Open a string for reading as a delimited file
-- @return a file object
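-- For illustration:
--   local f = openstring("a,b\n1,2\n", { header = true })
--   for r in f:lines() do print(r.a, r.b) end   --> 1	2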
local function openstring(
filecontents, -- string: The contents of the delimited file
parameters) -- ?table: parameters controlling reading the file.
-- See README.md
parameters = parameters or {}
parameters.filename = parameters.filename or makename(filecontents)
parameters.buffer_size = parameters.buffer_size or #filecontents
return use(filecontents, parameters)
end
------------------------------------------------------------------------------
return { open = open, openstring = openstring, use = use }
------------------------------------------------------------------------------