--
-- TODO:
--
-- * Make it possible to change lexer on the fly. This implies the
--   ability to easily undo any pre-extracted tokens;
--
---
-- * Make it easy to define new flavors of strings. Replacing the
-- lexer.patterns.long_string regexp by an extensible list, with
-- customizable token tag, would probably be enough. Maybe add:
final_short_comment = "^%-%-([^\n]*)()$",
long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()",
long_string = "^%[(=*)%[\n?(.-)%]%1%]()",
- number_mantissa = {
- "^%d+%.?%d*()",
- "^%d*%.%d+()" },
- number_exponant = "^[eE][%+%-]?%d+()",
- number_hex = "^0[xX]%x+()",
- word = "^([%a_][%w_]*)()"
+ number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" },
+ number_exponant = "^[eE][%+%-]?%d+()",
+ number_hex = "^0[xX]%x+()",
+ word = "^([%a_][%w_]*)()"
}
----------------------------------------------------------------------
-- __tostring = function(a)
-- return string.format ("`%s{'%s'}",a.tag, a[1])
-- end
- }
+}
+
+lexer.lineinfo_metatable = { }
----------------------------------------------------------------------
-- Really extract next token from the raw string
local loc = self.i
local eof, token
- -- Put line info, comments and metatable arount the tag and content
+ -- Put line info, comments and metatable around the tag and content
-- provided by extractors, thus returning a complete lexer token.
-- first_line: line # at the beginning of token
-- first_column_offset: char # of the last '\n' before beginning of token
-- lineinfo entries: [1]=line, [2]=column, [3]=char, [4]=filename
local fli = { first_line, loc-first_column_offset, loc, self.src_name }
local lli = { self.line, self.i-self.column_offset-1, self.i-1, self.src_name }
+ -- Pluto barfs when the metatable is set :(
+ setmetatable(fli, lexer.lineinfo_metatable)
+ setmetatable(lli, lexer.lineinfo_metatable)
local a = { tag = tag, lineinfo = { first=fli, last=lli }, content }
if lli[2]==-1 then lli[1], lli[2] = lli[1]-1, previous_line_length-1 end
if #self.attached_comments > 0 then