----------------------------------------------------------------------
-- Metalua:  $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $
--
-- Summary: generic Lua-style lexer definition. You need this plus
-- some keyword additions to create the complete Lua lexer,
-- as is done in mlp_lexer.lua.
--
-- TODO:
--
-- * Make it possible to change the lexer on the fly. This implies the
--   ability to easily undo any pre-extracted tokens;
--
-- * Make it easy to define new flavors of strings. Replacing the
--   lexer.patterns.long_string regexp by an extensible list, with a
--   customizable token tag, would probably be enough. Maybe add:
--   + an index of capture for the regexp, that would specify
--     which capture holds the content of the string-like token;
--   + a token tag;
--   + or a string->string transformer function.
--
----------------------------------------------------------------------
--
-- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
--
-- This software is released under the MIT Licence, see licence.txt
-- for details.
--
----------------------------------------------------------------------

module ("lexer", package.seeall)

require 'metalua.runtime'

lexer = { alpha={ }, sym={ } }
lexer.__index = lexer

local debugf = function() end

----------------------------------------------------------------------
-- Patterns used by [lexer:extract] to decompose the raw string into
-- correctly tagged tokens.
----------------------------------------------------------------------
lexer.patterns = {
   spaces              = "^[ \r\n\t]*()",
   short_comment       = "^%-%-([^\n]*)()\n",
   final_short_comment = "^%-%-([^\n]*)()$",
   long_comment        = "^%-%-%[(=*)%[\n?(.-)%]%1%]()",
   long_string         = "^%[(=*)%[\n?(.-)%]%1%]()",
   number_mantissa     = {
      "^%d+%.?%d*()",
      "^%d*%.%d+()" },
   number_exponant     = "^[eE][%+%-]?%d+()",
   word                = "^([%a_][%w_]*)()"
}

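-- Example: the final "()" in each pattern is a position capture; it
-- yields the index of the first character after the match, which the
-- extractors below assign back to [self.i]. For instance:
--   ("   x"):match (lexer.patterns.spaces)             --> 4
--   ("foo42 bar"):match (lexer.patterns.word)          --> "foo42", 6
--   ("[==[hi]==]"):match (lexer.patterns.long_string)  --> "==", "hi", 12
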
----------------------------------------------------------------------
-- Take a letter [x], and return the character represented by the
-- sequence ['\\'..x], e.g. [unesc_letter "n" == "\n"].
----------------------------------------------------------------------
local function unesc_letter(x)
   local t = {
      a = "\a", b = "\b", f = "\f",
      n = "\n", r = "\r", t = "\t", v = "\v",
      ["\\"] = "\\", ["'"] = "'", ['"'] = '"' }
   return t[x] or error("Unknown escape sequence \\"..x)
end

----------------------------------------------------------------------
-- Turn the digits of an escape sequence into the corresponding
-- character, e.g. [unesc_digits("123") == string.char(123)].
----------------------------------------------------------------------
local function unesc_digits (x)
   local k, j, i = x:reverse():byte(1, 3)
   local z = _G.string.byte "0"
   return _G.string.char ((k or z) + 10*(j or z) + 100*(i or z) - 111*z)
end

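-- Worked example: [unesc_digits "65"] reverses the digits to "56", so
-- k, j, i are the bytes of '5', '6' and nil. With z = byte '0' = 48,
-- the result is (53-48) + 10*(54-48) + 100*(48-48) = 65, i.e. "A";
-- the single "- 111*z" term removes the (1+10+100)*z ASCII bias at once.
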
----------------------------------------------------------------------
-- Unescape a whole string, applying [unesc_digits] and [unesc_letter]
-- as many times as required.
----------------------------------------------------------------------
local function unescape_string (s)
   return s:gsub("\\([0-9]+)", unesc_digits):gsub("\\(.)", unesc_letter)
end

88 "skip_whitespaces_and_comments",
89 "extract_short_string", "extract_word", "extract_number",
90 "extract_long_string", "extract_symbol" }
lexer.token_metatable = {
--    __tostring = function(a)
--       return string.format ("`%s{'%s'}", a.tag, a[1])
--    end
}

----------------------------------------------------------------------
-- Really extract the next token from the raw string
-- (and update the index).
----------------------------------------------------------------------
function lexer:extract ()
   local previous_i = self.i
   local loc, eof, token = self.i

   local function tk (tag, content)
      assert (tag and content)
      local i, ln = previous_i, self.line
      -- update line numbers
      while true do
         i = self.src:find("\n", i+1, true)
         if not i then break end
         if loc and i <= loc then ln = ln+1 end
         if i <= self.i then self.line = self.line+1 else break end
      end
      local a = { tag      = tag,
                  char     = loc,
                  lineinfo = { first = ln, last = self.line },
                  line     = self.line,
                  content }
      -- FIXME [EVE] make lineinfo passing less memory consuming
      -- FIXME [Fabien] suppress line/lineinfo.line redundancy.
      if #self.attached_comments > 0 then
         a.comments = self.attached_comments
         self.attached_comments = nil
      end
      return setmetatable (a, self.token_metatable)
   end --</function tk>

   self.attached_comments = { }

   for ext_idx, extractor in ipairs(self.extractors) do
      -- printf("extractor = %s", extractor)
      local tag, content = self[extractor](self)
      -- [loc] is placed just after the leading whitespaces and comments,
      -- and the whitespace extractor is at index 1.
      if ext_idx==1 then loc = self.i end
      if tag then
         --printf("`%s{ %q }\t%i", tag, content, loc)
         return tk (tag, content)
      end
   end

   error "Can't extract anything!"
end

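-- Illustration: with a full Lua lexer (keywords registered through
-- [lexer:add]), extracting from the source "local x" yields a first
-- token of roughly this shape, with a [comments] field added when
-- comments were skipped just before it:
--   { tag = "Keyword", "local", char = 1, line = 1,
--     lineinfo = { first = 1, last = 1 } }
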
----------------------------------------------------------------------
-- Skip whitespace and comments.
-- FIXME: doesn't take into account:
-- - unterminated long comments
-- - short comments without a final \n
----------------------------------------------------------------------
function lexer:skip_whitespaces_and_comments()
   repeat -- loop as long as a space or comment chunk is found
      local _, j
      local again = false
      local last_comment_content = nil
      -- skip spaces
      self.i = self.src:match (self.patterns.spaces, self.i)
      -- skip a long comment if any
      _, last_comment_content, j = self.src:match (self.patterns.long_comment, self.i)
      if j then
         _G.table.insert(self.attached_comments,
                         {last_comment_content, self.i, j, "long"})
         self.i = j; again = true
      end
      -- skip a short comment if any
      last_comment_content, j = self.src:match (self.patterns.short_comment, self.i)
      if j then
         _G.table.insert(self.attached_comments,
                         {last_comment_content, self.i, j, "short"})
         self.i = j; again = true
      end
      if self.i>#self.src then return "Eof", "eof" end
   until not again

   if self.src:match (self.patterns.final_short_comment, self.i) then
      return "Eof", "eof" end
   --assert (not self.src:match(self.patterns.short_comment, self.i))
   --assert (not self.src:match(self.patterns.long_comment, self.i))
   -- --assert (not self.src:match(self.patterns.spaces, self.i))
end

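-- E.g. on the source "-- init\nx = 1", this records the entry
-- {" init", 1, 8, "short"} (content, start offset, end offset, kind)
-- in [self.attached_comments] before the [x] token is extracted.
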
----------------------------------------------------------------------
-- Extract a '...' or "..." short string.
----------------------------------------------------------------------
function lexer:extract_short_string()
   -- [k] is the first unread char, [self.i] points to [k] in [self.src]
   local j, k = self.i, self.src:sub (self.i,self.i)
   if k=="'" or k=='"' then
      -- scan until the closing quote, stepping over escaped characters
      repeat
         self.i = self.i + 1
         local kk = self.src:sub (self.i, self.i)
         if kk == "\\" then
            self.i = self.i + 1
            kk = self.src:sub (self.i, self.i)
         end
         if self.i > #self.src then error "Unterminated string" end
         if kk == "\r" or kk == "\n" then error "no \\n in short strings!" end
      until self.src:sub (self.i, self.i) == k
        and ( self.src:sub (self.i-1, self.i-1) ~= '\\'
              or self.src:sub (self.i-2, self.i-2) == '\\')
      self.i = self.i + 1
      return "String", unescape_string (self.src:sub (j+1,self.i-2))
   end
end

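-- E.g. on the 6-character source  "a\"b"  (a double-quoted string with
-- an escaped quote inside), the loop stops on the last quote, since its
-- predecessor 'b' is not a backslash; the returned content is
-- unescape_string [[a\"b]], i.e. the 3-character string a"b.
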
----------------------------------------------------------------------
-- Extract an identifier or keyword.
----------------------------------------------------------------------
function lexer:extract_word()
   -- Id / keyword
   local word, j = self.src:match (self.patterns.word, self.i)
   if word then
      self.i = j
      if self.alpha [word] then return "Keyword", word
      else return "Id", word end
   end
end

----------------------------------------------------------------------
-- Extract a number.
----------------------------------------------------------------------
function lexer:extract_number()
   -- Number
   local j = self.src:match (self.patterns.number_mantissa[1], self.i) or
             self.src:match (self.patterns.number_mantissa[2], self.i)
   if not j then return end
   j = self.src:match (self.patterns.number_exponant, j) or j
   local n = tonumber (self.src:sub (self.i, j-1))
   self.i = j
   return "Number", n
end

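-- E.g. on "1.5e-3 + x": mantissa pattern 1 matches "1.5" (j = 4), the
-- exponent pattern then pushes j to 7, and the token content is
-- tonumber "1.5e-3", i.e. 0.0015.
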
----------------------------------------------------------------------
-- Extract a long string.
----------------------------------------------------------------------
function lexer:extract_long_string()
   -- Long string
   local _, content, j = self.src:match (self.patterns.long_string, self.i)
   if j then self.i = j; return "String", content end
end

----------------------------------------------------------------------
-- Extract a single-char or compound symbol.
----------------------------------------------------------------------
function lexer:extract_symbol()
   local k = self.src:sub (self.i,self.i)
   local symk = self.sym [k]
   -- no compound symbol starts with [k]: return it as a single-char symbol
   if not symk then self.i = self.i+1; return "Keyword", k end
   -- try each compound symbol starting with [k]
   for _, sym in pairs (symk) do
      if sym == self.src:sub (self.i, self.i + #sym - 1) then
         self.i = self.i + #sym
         return "Keyword", sym
      end
   end
   -- single char symbol
   self.i = self.i+1
   return "Keyword", k
end

----------------------------------------------------------------------
-- Add a keyword to the list of keywords recognized by the lexer.
----------------------------------------------------------------------
function lexer:add (w, ...)
   assert(not ..., "lexer:add() takes only one arg, although possibly a table")
   if type (w) == "table" then
      for _, x in ipairs (w) do self:add (x) end
   else
      if w:match (self.patterns.word .. "$") then self.alpha [w] = true
      elseif w:match "^%p%p+$" then
         -- compound symbol: index it under its first character
         local k = w:sub (1, 1)
         local list = self.sym [k]
         if not list then list = { }; self.sym [k] = list end
         _G.table.insert (list, w)
      elseif w:match "^%p$" then return
      else error "Invalid keyword" end
   end
end

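-- Usage sketch, roughly what mlp_lexer.lua does to build the full Lua
-- lexer (keyword lists abridged):
--   local lua_lexer = lexer:clone()
--   lua_lexer:add{ "and", "break", "do", "else", "elseif", "end" }
--   lua_lexer:add{ "==", "~=", "<=", ">=", "..", "..." }
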
----------------------------------------------------------------------
-- Return the [n]th next token, without consuming it.
-- [n] defaults to 1. If it goes past the end of the stream, an EOF
-- token is returned.
----------------------------------------------------------------------
function lexer:peek (n)
   if not n then n=1 end
   if n > #self.peeked then
      for i = #self.peeked+1, n do
         self.peeked [i] = self:extract()
      end
   end
   return self.peeked [n]
end

----------------------------------------------------------------------
-- Return the [n]th next token, removing it as well as the 0..n-1
-- previous tokens. [n] defaults to 1. If it goes past the end of the
-- stream, an EOF token is returned.
----------------------------------------------------------------------
function lexer:next (n)
   if not n then n=1 end
   self:peek (n)
   local a
   for i = 1, n do
      a = _G.table.remove (self.peeked, 1)
      if a then
         debugf ("[L:%i K:%i T:%s %q]", a.line or -1, a.char or -1,
                 a.tag or '<none>', a[1]) end
      self.lastline = a.lineinfo.last
   end
   return a or eof_token
end

----------------------------------------------------------------------
-- Return an object which saves the stream's current state.
----------------------------------------------------------------------
function lexer:save () return { self.i; _G.table.cat(self.peeked) } end

----------------------------------------------------------------------
-- Restore the stream's state, as saved by method [save].
----------------------------------------------------------------------
function lexer:restore (s) self.i=s[1]; self.peeked=s[2] end

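-- Usage sketch for backtracking, with a hypothetical [try_rule]:
--   local saved = stream:save()
--   local ok, result = pcall (try_rule, stream)
--   if not ok then stream:restore (saved) end
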
----------------------------------------------------------------------
-- Resynchronize: cancel any token in self.peeked, and restart scanning
-- from the first peeked token's position.
----------------------------------------------------------------------
function lexer:sync()
   local p1 = self.peeked[1]
   if p1 then
      self.i, self.line, self.peeked = p1.char, p1.line, { }
   end
end

----------------------------------------------------------------------
-- Take over an old lexer.
----------------------------------------------------------------------
function lexer:takeover(old)
   old:sync() -- flush pending peeked tokens, so old.i/old.line are accurate
   self.i, self.line, self.src = old.i, old.line, old.src
   self.peeked = { }
   return self
end

----------------------------------------------------------------------
-- Create a new lexstream.
----------------------------------------------------------------------
function lexer:newstream (src_or_stream)
   if type(src_or_stream)=='table' then -- it's a stream
      return setmetatable({ }, self):takeover(src_or_stream)
   elseif type(src_or_stream)=='string' then -- it's a source string
      local stream = {
         src    = src_or_stream; -- The source, as a single string
         peeked = { };           -- Already peeked, but not discarded yet, tokens
         i      = 1;             -- Character offset in src
         line   = 1;             -- Current line number
      }
      setmetatable (stream, self)

      -- skip initial sharp-bang for unix scripts
      if stream.src:match "^#!" then stream.i = stream.src:find "\n" + 1 end
      return stream
   else
      assert(false, ":newstream() takes a source string or a stream, not a "..
             type(src_or_stream))
   end
end

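-- Usage sketch, assuming [lua_lexer] is a clone with the Lua keywords
-- registered via [lexer:add]:
--   local stream = lua_lexer:newstream "local x = 42"
--   stream:peek()  --> `Keyword{ "local" }, not consumed yet
--   stream:next()  --> the same token, now consumed
--   stream:next()  --> `Id{ "x" }
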
----------------------------------------------------------------------
-- If no [...] args are given, return the content of token [a] (whose
-- truth value is true) if it's a `Keyword{ }, false otherwise. If
-- [...] args are given, they must be strings: if [a] is a keyword and
-- its content is one of them, return that content (whose truth value
-- is true); otherwise return false.
----------------------------------------------------------------------
function lexer:is_keyword (a, ...)
   if not a or a.tag ~= "Keyword" then return false end
   local words = {...}
   if #words == 0 then return a[1] end
   for _, w in ipairs (words) do
      if w == a[1] then return w end
   end
   return false
end

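-- E.g., for a peeked token [tok]:
--   stream:is_keyword (tok)               --> tok[1] if [tok] is any keyword
--   stream:is_keyword (tok, "then", "do") --> the matching word, or false
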
----------------------------------------------------------------------
-- Cause an error if the next token isn't a keyword whose content
-- is listed among the [...] args (which must be strings).
----------------------------------------------------------------------
function lexer:check (...)
   local words = {...}
   local a = self:next()
   local function err ()
      error ("Got " .. tostring (a) ..
             ", expected one of these keywords: '" ..
             _G.table.concat (words,"', '") .. "'") end

   if not a or a.tag ~= "Keyword" then err () end
   if #words == 0 then return a[1] end
   for _, w in ipairs (words) do
      if w == a[1] then return w end
   end
   err ()
end

----------------------------------------------------------------------
-- Create an independent copy of the lexer, so that new keywords can be
-- registered on it without altering the original.
----------------------------------------------------------------------
function lexer:clone()
   local clone = {
      alpha = table.deep_copy(self.alpha),
      sym   = table.deep_copy(self.sym) }
   setmetatable(clone, self)
   clone.__index = clone
   return clone
end