1 ----------------------------------------------------------------------
\r
2 -- Metalua: $Id: gg.lua,v 1.2 2006/11/15 09:07:50 fab13n Exp $
\r
4 -- Summary: parser generator. Collection of higher order functors,
\r
5 -- which allow to build and combine parsers. Relies on a lexer
\r
6 -- that supports the same API as the one exposed in mll.lua.
\r
8 ----------------------------------------------------------------------
\r
10 -- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
\r
12 -- This software is released under the MIT Licence, see licence.txt
\r
15 ----------------------------------------------------------------------
\r
19 ----------------------------------------------------------------------
\r
21 --------------------------------------------------------------------------------
\r
25 -- Parser generators:
\r
26 -- * [gg.sequence()]
\r
27 -- * [gg.multisequence()]
\r
30 -- * [gg.onkeyword()]
\r
31 -- * [gg.optkeyword()]
\r
33 -- Other functions:
\r
34 -- * [gg.parse_error()]
\r
35 -- * [gg.make_parser()]
\r
36 -- * [gg.is_parser()]
\r
38 --------------------------------------------------------------------------------
\r
-- Declare the legacy Lua 5.1 `gg' module: every global defined below
-- becomes a field of the module table. Kept as-is for compatibility with
-- the rest of Metalua, which relies on the module()/package.seeall idiom.
module("gg", package.seeall)
\r
-------------------------------------------------------------------------------
-- parser metatable, which maps __call to method parse, and adds some
-- error tracing boilerplate.
-------------------------------------------------------------------------------
local parser_metatable = { }

--- Make every parser table directly callable: `parser(lx, ...)` delegates
-- to `parser:parse(lx, ...)`.  When the global [mlc.metabugs] flag is set,
-- the call runs unprotected (easier to debug); otherwise it is wrapped in
-- a pcall so a failure can be reported with the character offset at which
-- it occurred.
function parser_metatable.__call (parser, lx, ...)
   --printf ("Call parser %s/%s", parser.kind, parser.name or "?")
   if mlc.metabugs then
      --return parser:parse (lx, ...)
      local x = parser:parse (lx, ...)
      --printf ("Result: %s", _G.table.tostring(x, "nohash", 60))
      return x
   else
      local char = lx:peek().char
      local status, ast = pcall (parser.parse, parser, lx, ...)
      if status then return ast else
         -- NOTE(review): the error-reporting tail of this function was lost
         -- in this copy of the file; reconstructed minimally -- confirm
         -- against a pristine gg.lua.
         local msg = ast
         if msg then print(msg) end
         printf(" - (%i) in parser %s", char, parser.name or parser.kind)
         error (msg, 0)
      end
   end
end
\r
-------------------------------------------------------------------------------
-- Turn a table into a parser, mainly by setting the metatable.
-- [kind] is recorded in the parser ("sequence", "multisequence", ...);
-- a [transformers] list with an [add] method is guaranteed to exist.
-- Returns [p] itself.
-- NOTE(review): the `p.kind = kind` and `return p` lines were lost in
-- this copy and have been restored (both are required by [is_parser]
-- and [parser_metatable.__call]).
-------------------------------------------------------------------------------
function make_parser(kind, p)
   p.kind = kind
   if not p.transformers then p.transformers = { } end
   function p.transformers:add (x)
      table.insert (self, x)
   end
   setmetatable (p, parser_metatable)
   return p
end
\r
-------------------------------------------------------------------------------
-- Return true iff [x] is a parser.
-- If it's a gg-generated parser, return the name of its kind.
-------------------------------------------------------------------------------
function is_parser (x)
   -- Plain functions count as parsers; gg-made parsers are recognized by
   -- their shared metatable and report their [kind] string.
   return type(x)=="function" or getmetatable(x)==parser_metatable and x.kind
end
\r
-------------------------------------------------------------------------------
-- Parse a sequence, without applying builder nor transformers.
-- [p]'s array part mixes keywords (strings, checked and consumed but not
-- kept) and sub-parsers (whose results are collected). Returns the list
-- of sub-parser results.
-- NOTE(review): the loop header and result-list initialization were lost
-- in this copy and have been reconstructed.
-------------------------------------------------------------------------------
local function raw_parse_sequence (lx, p)
   local r = { }
   for i = 1, #p do
      local e = p[i] -- `local` added: was an accidental global
      if type(e) == "string" then
         -- Keyword element: must be present; consumed, not stored in [r].
         if not lx:is_keyword (lx:next(), e) then
            parse_error (lx, "Keyword '%s' expected", e) end
      elseif is_parser (e) then
         table.insert (r, e (lx))
      else
         gg.parse_error (lx,"Sequence `%s': element #%i is not a string "..
                         "nor a parser: %s",
                         p.name, i, table.tostring(e))
      end
   end
   return r
end
\r
-------------------------------------------------------------------------------
-- Parse a multisequence, without applying multisequence transformers.
-- The sequences are completely parsed.
-- Dispatch on the next keyword in [lx]: if it indexes an entry in
-- [sequence_table], run that sequence parser; otherwise fall back on
-- [default]; with no default either, return false.
-------------------------------------------------------------------------------
local function raw_parse_multisequence (lx, sequence_table, default)
   local seq_parser = sequence_table[lx:is_keyword(lx:peek())]
   if seq_parser then return seq_parser (lx)
   elseif default then return default (lx)
   else return false end
end
\r
-------------------------------------------------------------------------------
-- Applies all transformers listed in parser on ast.
-- Each transformer may return a replacement AST; returning nil/false
-- keeps the current one.
-------------------------------------------------------------------------------
local function transform (ast, parser)
   if parser.transformers then
      for _, t in ipairs (parser.transformers) do ast = t(ast) or ast end
   end
   return ast
end
\r
-------------------------------------------------------------------------------
-- Generate a traceable parsing error (not implemented yet).
-- Format a message with the line/char position taken from the lexer,
-- quote the offending source line with a column marker when available,
-- then raise the message as an error.
-------------------------------------------------------------------------------
function parse_error(lx, fmt, ...)
   local line = lx:peek().line or -1
   local char = lx:peek().char or -1
   local msg  = string.format("line %i, char %i: "..fmt, line, char, ...)
   -- NOTE(review): [src] was read from a line lost in this copy;
   -- presumably the lexer's source string -- confirm.
   local src = lx.src
   if char>0 and src then
      -- Scan outward from [char] to the surrounding newlines, so the
      -- whole offending source line can be quoted.
      local i, j = char, char
      while src:sub(i,i) ~= '\n' and i>=0    do i=i-1 end
      while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end
      local srcline = src:sub (i+1, j-1)
      local idx = string.rep (" ", char-i-1).."^"
      -- was `msg = printf(...)`: printf prints and returns nothing, which
      -- would clobber [msg]; string.format is clearly what was meant.
      msg = string.format("%s\n>>> %s\n>>> %s", msg, srcline, idx)
   end
   error(msg)
end
\r
148 -------------------------------------------------------------------------------
\r
150 -- Sequence parser generator
\r
152 -------------------------------------------------------------------------------
\r
155 -- * [builder]: how to build an AST out of sequence parts. let [x] be the list
\r
156 -- of subparser results (keywords are simply omitted). [builder] can be:
\r
157 -- - [nil], in which case the result of parsing is simply [x]
\r
158 -- - a string, which is then put as a tag on [x]
\r
159 -- - a function, which takes [x] as a parameter and returns an AST.
\r
161 -- * [name]: the name of the parser. Used for debug messages
\r
163 -- * [transformers]: a list of AST->AST functions, applied in order on ASTs
\r
164 -- returned by the parser.
\r
166 -- * Table-part entries corresponds to keywords (strings) and subparsers
\r
167 -- (function and callable objects).
\r
169 -- After creation, the following fields are added:
\r
170 -- * [parse] the parsing function lexer->AST
\r
171 -- * [kind] == "sequence"
\r
172 -- * [name] is set, if it wasn't in the input.
\r
174 -------------------------------------------------------------------------------
\r
function sequence (p)
   make_parser ("sequence", p)

   -------------------------------------------------------------------
   -- Parsing method
   -------------------------------------------------------------------
   function p:parse (lx)
      -- Raw parsing:
      local x = raw_parse_sequence (lx, self)
      -- Builder application: a string becomes the tag, a function or
      -- callable builds the AST, nil keeps the raw result list.
      local builder, tb = self.builder, type (self.builder)
      if tb == "string" then x.tag = builder
      elseif tb == "function" or builder and builder.__call then x = builder(x)
      elseif builder == nil then -- nothing
      else error("Invalid builder of type "..tb.." in sequence") end
      return transform (x, self)
   end

   -------------------------------------------------------------------
   -- Construction
   -------------------------------------------------------------------
   -- Try to build a proper name
   if not p.name and type(p[1])=="string" then
      p.name = p[1].." ..."
      if type(p[#p])=="string" then p.name = p.name .. " " .. p[#p] end
   elseif not p.name then
      -- NOTE(review): this guard was reconstructed from a lost line --
      -- without `elseif not p.name`, an explicitly supplied name would
      -- be clobbered by the anonymous default.
      p.name = "<anonymous>"
   end

   return p
end --</sequence>
\r
209 -------------------------------------------------------------------------------
\r
211 -- Multiple, keyword-driven, sequence parser generator
\r
213 -------------------------------------------------------------------------------
\r
214 -- in [p], useful fields are:
\r
216 -- * [transformers]: as usual
\r
218 -- * [name]: as usual
\r
220 -- * Table-part entries must be sequence parsers, or tables which can
\r
221 -- be turned into a sequence parser by [gg.sequence]. These
\r
222 -- sequences must start with a keyword, and this initial keyword
\r
223 -- must be different for each sequence. The table-part entries will
\r
224 -- be removed after [gg.multisequence] returns.
\r
226 -- * [default]: the parser to run if the next keyword in the lexer is
\r
227 -- none of the registered initial keywords. If there's no default
\r
228 -- parser and no suitable initial keyword, the multisequence parser
\r
229 -- simply returns [false].
\r
231 -- After creation, the following fields are added:
\r
233 -- * [parse] the parsing function lexer->AST
\r
235 -- * [sequences] the table of sequences, indexed by initial keywords.
\r
237 -- * [add] method takes a sequence parser or a config table for
\r
238 -- [gg.sequence], and adds/replaces the corresponding sequence
\r
239 -- parser. If the keyword was already used, the former sequence is
\r
240 -- removed and a warning is issued.
\r
242 -- * [get] method returns a sequence by its initial keyword
\r
244 -- * [kind] == "multisequence"
\r
246 -------------------------------------------------------------------------------
\r
function multisequence (p)
   make_parser ("multisequence", p)

   -------------------------------------------------------------------
   -- Add a sequence (might be just a config table for [gg.sequence])
   -------------------------------------------------------------------
   function p:add (s)
      -- compile if necessary:
      if not is_parser(s) then sequence(s) end
      if type(s[1]) ~= "string" then
         error "Invalid sequence for multiseq"
      elseif self.sequences[s[1]] then
         printf (" *** Warning: keyword %q overloaded in multisequence ***", s[1])
      end
      self.sequences[s[1]] = s
   end -- </multisequence.add>

   -------------------------------------------------------------------
   -- Get the sequence starting with this keyword. [kw :: string]
   -------------------------------------------------------------------
   function p:get (kw) return self.sequences[kw] end

   -------------------------------------------------------------------
   -- Remove the sequence starting with keyword [kw :: string]
   -------------------------------------------------------------------
   function p:del (kw)
      if not self.sequences[kw] then
         printf("*** Warning: trying to delete sequence starting "..
                "with %q from a multisequence having no such "..
                "entry ***", kw) end
      self.sequences[kw] = nil
   end

   -------------------------------------------------------------------
   -- Remove and return the sequence starting with keyword
   -- [kw :: string] (the original comment here was a copy-paste of
   -- [get]'s; [remove] differs from [del] by returning the entry).
   -------------------------------------------------------------------
   function p:remove (kw)
      local x = self.sequences[kw]
      self.sequences[kw] = nil
      return x
   end

   -------------------------------------------------------------------
   -- Parsing method
   -------------------------------------------------------------------
   function p:parse (lx)
      local x = raw_parse_multisequence (lx, self.sequences, self.default)
      return transform (x, self) end

   -------------------------------------------------------------------
   -- Construction
   -------------------------------------------------------------------
   -- Register the sequences passed to the constructor. They're going
   -- from the array part of the parser to the hash part of field
   -- [sequences].
   -- NOTE(review): the initialization of [p.sequences] was lost in this
   -- copy; it must exist before [p:add] runs, so it is restored here.
   if not p.sequences then p.sequences = { } end
   for i=1, #p do p:add (p[i]); p[i] = nil end

   -- FIXME: why is this commented out?
   --if p.default and not is_parser(p.default) then sequence(p.default) end

   return p
end --</multisequence>
\r
311 -------------------------------------------------------------------------------
\r
313 -- Expression parser generator
\r
315 -------------------------------------------------------------------------------
\r
317 -- Expression configuration relies on three tables: [prefix], [infix]
\r
318 -- and [suffix]. Moreover, the primary parser can be replaced by a
\r
319 -- table: in this case the [primary] table will be passed to
\r
320 -- [gg.multisequence] to create a parser.
\r
322 -- Each of these tables is a modified multisequence parser: the
\r
323 -- differences with respect to regular multisequence config tables are:
\r
325 -- * the builder takes specific parameters:
\r
326 -- - for [prefix], it takes the result of the prefix sequence parser,
\r
327 -- and the prefixed expression
\r
328 -- - for [infix], it takes the left-hand-side expression, the results
\r
329 -- of the infix sequence parser, and the right-hand-side expression.
\r
330 -- -   for [suffix], it takes the suffixed expression, and the result
\r
331 -- of the suffix sequence parser.
\r
333 -- * the default field is a list, with parameters:
\r
334 -- - [parser] the raw parsing function
\r
335 -- - [transformers], as usual
\r
336 -- - [prec], the operator's precedence
\r
337 -- - [assoc] for [infix] table, the operator's associativity, which
\r
338 -- can be "left", "right" or "flat" (default to left)
\r
340 -- In [p], useful fields are:
\r
341 -- * [transformers]: as usual
\r
342 -- * [name]: as usual
\r
343 -- * [primary]: the atomic expression parser, or a multisequence config
\r
344 -- table (mandatory)
\r
345 -- * [prefix]: prefix operators config table, see above.
\r
346 -- * [infix]: infix operators config table, see above.
\r
347 -- * [suffix]: suffix operators config table, see above.
\r
349 -- After creation, these fields are added:
\r
350 -- * [kind] == "expr"
\r
351 -- * [parse] as usual
\r
352 -- * each table is turned into a multisequence, and therefore has an
\r
355 -------------------------------------------------------------------------------
\r
-------------------------------------------------------------------------------
-- NOTE(review): the `function expr (p)` header line was lost in this copy
-- and has been reconstructed from the surrounding construction code.
-------------------------------------------------------------------------------
function expr (p)
   make_parser ("expr", p)

   -------------------------------------------------------------------
   -- Parsing method.
   -- In addition to the lexer, it takes an optional precedence:
   -- it won't read expressions whose precedence is lower or equal
   -- [prec].
   -------------------------------------------------------------------
   function p:parse (lx, prec)
      if not prec then prec = 0 end

      ------------------------------------------------------
      -- Extract the right parser and the corresponding
      -- options table, for (pre|in|post)fix operators.
      -- Options include prec, assoc, transformers.
      ------------------------------------------------------
      local function get_parser_info (tab)
         local p2 = tab:get (lx:is_keyword (lx:peek()))
         if p2 then -- keyword-based sequence found
            local function parser(lx) return raw_parse_sequence(lx, p2) end
            return parser, p2
         else -- Got to use the default parser
            local d = tab.default
            if d then return d.parse or d.parser, d
            else return false, false end
         end
      end

      ------------------------------------------------------
      -- Look for a prefix sequence. Multiple prefixes are
      -- handled through the recursive [p.parse] call.
      -- Notice the double-transform: one for the primary
      -- expr, and one for the one with the prefix op.
      ------------------------------------------------------
      local function handle_prefix ()
         local p2_func, p2 = get_parser_info (self.prefix)
         local op = p2_func and p2_func(lx)
         if op then -- Keyword-based sequence found
            local e = p2.builder (op, self:parse (lx, p2.prec))
            return transform (transform (e, p2), self)
         else -- No prefix found, get a primary expression
            return transform (self.primary (lx), self)
         end
      end --</expr.parse.handle_prefix>

      ------------------------------------------------------
      -- Look for an infix sequence+right-hand-side operand.
      -- Return the whole binary expression result,
      -- or false if no operator was found.
      ------------------------------------------------------
      local function handle_infix (e)
         local p2_func, p2 = get_parser_info (self.infix)
         if not p2 then return false end

         -----------------------------------------
         -- Handle flattening operators: gather all operands
         -- of the series in [list]; when a different operator
         -- is found, stop, build from [list], [transform] and
         -- return.
         -----------------------------------------
         if (not p2.prec or p2.prec>prec) and p2.assoc=="flat" then
            local pflat, list = p2, { e }
            repeat
               local op = p2_func(lx)
               if not op then break end
               table.insert (list, self:parse (lx, p2.prec))
               _, p2 = get_parser_info (self.infix)
            until p2 ~= pflat
            return transform (transform (pflat.builder (list), pflat), self)

         -----------------------------------------
         -- Handle regular infix operators: [e] the LHS is known,
         -- just gather the operator and [e2] the RHS.
         -----------------------------------------
         elseif p2.prec and p2.prec>prec or
                p2.prec==prec and p2.assoc=="right" then
            local op = p2_func(lx)
            if not op then return false end
            local e2 = self:parse (lx, p2.prec)
            return transform (transform (p2.builder (e, op, e2), p2), self)

         -----------------------------------------
         -- Check for non-associative operators, and complain if applicable.
         -----------------------------------------
         elseif p2.assoc=="none" and p2.prec==prec then
            -- was `parser_error`, which is defined nowhere in this module;
            -- the helper defined above is [parse_error].
            parse_error (lx, "non-associative operator!")

         -----------------------------------------
         -- No infix operator suitable at that precedence
         -----------------------------------------
         else return false end

      end --</expr.parse.handle_infix>

      ------------------------------------------------------
      -- Look for a suffix sequence.
      -- Return the result of suffix operator on [e],
      -- or false if no operator was found.
      ------------------------------------------------------
      local function handle_suffix (e)
         local p2_func, p2 = get_parser_info (self.suffix)
         if not p2 then return false end
         if not p2.prec or p2.prec>=prec then
            local op = p2_func(lx)
            if not op then return false end
            e = p2.builder (e, op)
            return transform (transform (e, p2), self)
         end
         return false
      end --</expr.parse.handle_suffix>

      ------------------------------------------------------
      -- Parser body: read suffix and (infix+operand)
      -- extensions as long as we're able to fetch more at
      -- this precedence level.
      -- NOTE(review): the loop construct was lost in this copy;
      -- reconstructed as a repeat/until -- confirm.
      ------------------------------------------------------
      local e = handle_prefix()
      repeat
         local x = handle_suffix (e); e = x or e
         local y = handle_infix  (e); e = y or e
      until not (x or y)

      -- No transform: it already happened in operators handling
      return e
   end --</expr.parse>

   -------------------------------------------------------------------
   -- Construction
   -------------------------------------------------------------------
   if not p.primary then p.primary=p[1]; p[1]=nil end
   -- Turn each operator table (and the primary, if needed) into a
   -- multisequence so it supports :get/:add.
   for _, t in ipairs{ "primary", "prefix", "infix", "suffix" } do
      if not p[t] then p[t] = { } end
      if not is_parser(p[t]) then multisequence(p[t]) end
   end
   function p:add(...) return self.primary:add(...) end

   return p
end --</expr>
\r
497 -------------------------------------------------------------------------------
\r
499 -- List parser generator
\r
501 -------------------------------------------------------------------------------
\r
502 -- In [p], the following fields can be provided in input:
\r
504 -- * [builder]: takes list of subparser results, returns AST
\r
505 -- * [transformers]: as usual
\r
506 -- * [name]: as usual
\r
508 -- * [terminators]: list of strings representing the keywords which
\r
509 -- might mark the end of the list. When non-empty, the list is
\r
510 -- allowed to be empty. A string is treated as a single-element
\r
511 -- table, whose element is that string, e.g. ["do"] is the same as
\r
514 -- * [separators]: list of strings representing the keywords which can
\r
515 -- separate elements of the list. When non-empty, one of these
\r
516 -- keyword has to be found between each element. Lack of a separator
\r
517 -- indicates the end of the list. A string is treated as a
\r
518 -- single-element table, whose element is that string, e.g. ["do"]
\r
519 -- is the same as [{"do"}]. If [terminators] is empty/nil, then
\r
520 -- [separators] has to be non-empty.
\r
522 -- After creation, the following fields are added:
\r
523 -- * [parse] the parsing function lexer->AST
\r
524 -- * [kind] == "list"
\r
526 -------------------------------------------------------------------------------
\r
-------------------------------------------------------------------------------
-- NOTE(review): the `function list (p)` header line was lost in this copy
-- and has been reconstructed.
-------------------------------------------------------------------------------
function list (p)
   make_parser ("list", p)

   -------------------------------------------------------------------
   -- Parsing method
   -------------------------------------------------------------------
   function p:parse (lx)

      ------------------------------------------------------
      -- Used to quickly check whether there's a terminator
      -- or a separator immediately ahead
      ------------------------------------------------------
      local function peek_is_in (keywords)
         return keywords and lx:is_keyword(lx:peek(), unpack(keywords)) end

      local x = { }

      -- if there's a terminator to start with, don't bother trying
      if not peek_is_in (self.terminators) then
         repeat table.insert (x, self.primary (lx)) -- read one element
         until
            -- First reason to stop: There's a separator list specified,
            -- and next token isn't one. Otherwise, consume it with [lx:next()]
            self.separators and not(peek_is_in (self.separators) and lx:next()) or
            -- Other reason to stop: terminator token ahead
            peek_is_in (self.terminators) or
            -- Last reason: end of file reached
            lx:peek().tag=="Eof"
      end

      -- Apply the builder. It can be a string, or a callable value,
      -- or simply nothing.
      -- NOTE(review): the dispatch scaffolding around these branches was
      -- lost in this copy; reconstructed -- confirm.
      local b = self.builder
      if b then
         if type(b)=="string" then x.tag = b -- b is a string, use it as a tag
         elseif type(b)=="function" then x=b(x)
         else
            local bmt = getmetatable(b)
            if bmt and bmt.__call then x=b(x) end
         end
      end
      return transform (x, self)
   end --</list.parse>

   -------------------------------------------------------------------
   -- Construction: normalize string terminators/separators into
   -- single-element lists, and drop empty lists.
   -------------------------------------------------------------------
   if not p.primary then p.primary = p[1]; p[1] = nil end
   if type(p.terminators) == "string" then p.terminators = { p.terminators }
   elseif p.terminators and #p.terminators == 0 then p.terminators = nil end
   if type(p.separators) == "string" then p.separators = { p.separators }
   elseif p.separators and #p.separators == 0 then p.separators = nil end

   return p
end --</list>
\r
584 -------------------------------------------------------------------------------
\r
586 -- Keyword-conditioned parser generator
\r
588 -------------------------------------------------------------------------------
\r
590 -- Only apply a parser if a given keyword is found. The result of
\r
591 -- [gg.onkeyword] parser is the result of the subparser (modulo
\r
592 -- [transformers] applications).
\r
596 -- * [name]: as usual
\r
598 -- * [transformers]: as usual
\r
600 -- * [peek]: if non-nil, the conditionning keyword is left in the lexeme
\r
601 -- stream instead of being consumed.
\r
603 -- * [primary]: the subparser.
\r
605 -- * [keywords]: list of strings representing triggering keywords.
\r
607 -- * Table-part entries can contain strings, and/or exactly one parser.
\r
608 -- Strings are put in [keywords], and the parser is put in [primary].
\r
610 -- After the call, the following fields will be set:
\r
612 -- * [parse] the parsing method
\r
613 -- * [kind] == "onkeyword"
\r
617 -------------------------------------------------------------------------------
\r
function onkeyword (p)
   make_parser ("onkeyword", p)

   -------------------------------------------------------------------
   -- Parsing method
   -------------------------------------------------------------------
   function p:parse(lx)
      if lx:is_keyword (lx:peek(), unpack(self.keywords)) then
         -- Consume the triggering keyword, unless [peek] was requested.
         if not self.peek then lx:next() end
         return transform (self.primary(lx), p)
      else return false end
   end

   -------------------------------------------------------------------
   -- Construction: sort the table-part entries -- strings go into
   -- [keywords], the single parser becomes [primary].
   -------------------------------------------------------------------
   if not p.keywords then p.keywords = { } end
   for _, x in ipairs(p) do
      if type(x)=="string" then table.insert (p.keywords, x)
      else assert (not p.primary and is_parser (x)); p.primary = x end
   end
   -- NOTE(review): the original tail (lost in this copy) may also have
   -- checked that at least one trigger keyword was provided -- confirm.

   return p
end --</onkeyword>
\r
644 -------------------------------------------------------------------------------
\r
646 -- Optional keyword consumer pseudo-parser generator
\r
648 -------------------------------------------------------------------------------
\r
650 -- This doesn't return a real parser, just a function. That function parses
\r
651 -- one of the keywords passed as parameters, and returns it. It returns
\r
652 -- [false] if no matching keyword is found.
\r
654 -------------------------------------------------------------------------------
\r
-------------------------------------------------------------------------------
-- Return a function that consumes one of the given keywords from a lexer
-- and returns it, or returns false (consuming nothing) when none matches.
-- Keywords may be passed as a list of strings or as a single table of
-- strings.
-------------------------------------------------------------------------------
function optkeyword (...)
   local args = {...}
   if type (args[1]) == "table" then
      assert (#args == 1)
      args = args[1]
   end
   for _, v in ipairs(args) do assert (type(v)=="string") end
   return function (lx)
      local x = lx:is_keyword (lx:peek(), unpack (args))
      if x then lx:next(); return x
      else return false end
   end
end
\r
670 -------------------------------------------------------------------------------
\r
672 -- Run a parser with a special lexer
\r
674 -------------------------------------------------------------------------------
\r
676 -- This doesn't return a real parser, just a function.
\r
677 -- First argument is the lexer class to be used with the parser,
\r
678 -- 2nd is the parser itself.
\r
679 -- The resulting parser returns whatever the argument parser does.
\r
681 -------------------------------------------------------------------------------
\r
function with_lexer(new_lexer, parser)

   -------------------------------------------------------------------
   -- Most gg functions take their parameters in a table, so it's
   -- better to silently accept when with_lexer{ } is called with
   -- its arguments in a list:
   -------------------------------------------------------------------
   if not parser and #new_lexer==2 and type(new_lexer[1])=='table' then
      return with_lexer(unpack(new_lexer))
   end

   -------------------------------------------------------------------
   -- Save the current lexer, switch it for the new one, run the parser,
   -- restore the previous lexer, even if the parser caused an error.
   -------------------------------------------------------------------
   return function (lx)
      local old_lexer = getmetatable(lx)
      -- NOTE(review): the lx:sync() resynchronization calls around the
      -- lexer swap were reconstructed from lines lost in this copy --
      -- confirm against a pristine gg.lua.
      lx:sync()
      setmetatable(lx, new_lexer)
      local status, result = pcall(parser, lx)
      lx:sync()
      setmetatable(lx, old_lexer)
      if status then return result else error(result) end
   end
end
\r