+++ /dev/null
-*.luac
-*.o
-*.so
-*.dylib
-*.sh
-*.dll
-*.exe
-*.a
-*.log
-*~
-.#*
-\#*
-Thumbs.db
-.DS_Store
-semantic.cache
-src/compiler/metalua
-src/lua/luac
-src/lua/lua
-src/compiler/bootstrap
-src/compiler/metalua
-distrib
-junk
-patches
-ran
+++ /dev/null
-Installation guidelines
-=======================
-
-======================================================================
-TESTING AN INSTALLATION PROCEDURE IS HARD, AND RARELY DONE RIGHT AT
-THE FIRST TRY. IF YOU EXPERIENCE INSTALLATION TROUBLES, PLEASE REPORT
-THEM, TO AVOID THEM TO FUTURE USERS. mailto:metalua@gmail.com
-======================================================================
-
-Prerequisites
--------------
-- under MS-Windows, ability to type a couple of commands in a DOS command window
-- under POSIX OSes, lua and luac executables in your path, in versions >=5.1
-
-MS-Windows
-----------
-
-- get the sources
-- cd metalua\src
-- edit make.bat to set your variables:
- * DISTRIB_BIN: where you want to put executables metalua.bat, lua.exe, luac.exe.
- This directory should be referenced in your PATH environment variable.
- * DISTRIB_LIB: where you want to put your Lua libraries. It shouldn't mess up
- an existing Lua libraries directory. This folder should be referrenced in your
- LUA_PATH environment variable.
-- run make.bat
-
-Unix
-----
-
-- get the sources
-- cd metalua/src
-- set these variables or modify them in make.sh:
- * BUILD: a directory in which metalua should be built.
- Must be writable by the user.
- * INSTALL_BIN: where metalua will be copied
- * INSTALL_LIB: where (meta)lua libs will be copied. Should be
- referenced in your LUA_PATH.
-- run ./make.sh, under your UID
-- a ./make-install.sh script should have been generated, run it. If you
- want to install it in a directory that doesn't belong to you, you
- might want to run it as root.
-- if you don't want to edit make.sh, this will do:
- DESTDIR=/opt/metalua/git-1 \
- INSTALL_BIN=/usr/local/bin \
- INSTALL_LIB=/usr/local/lib/lua/5.1 \
- ./make.sh
- If this run is successful, do:
- DESTDIR=/opt/metalua/git-1 \
- INSTALL_BIN=/usr/local/bin \
- INSTALL_LIB=/usr/local/lib/lua/5.1 \
- ./make-install.sh
- if no packaging is required, just leave out the "DESTDIR=... \" line.
- If you want metalua to reside in the same space as distribution
- supplied files in "/usr" instead of "/usr/local", you propably want to
- leave out "local/" in the paths above.
-
-Test drive
-----------
-There are some samples in metalua/src/samples, which can be run simply by
-typing in the shell `metalua samplename.mlua`. Use `metalua -h` to
-have an overview of interesting options. Among them, "-a" dumps the
-AST resulting from a compilation: that's the perfect learning tool for
-practical-oriented minds.
-
-Once you've played with the samples, and maybe written a couple of
-simple programs, I'm afraid the next step will be to RTFM :)
-
+++ /dev/null
-Metalua
-
-Copyright (c) 2006-2997 Fabien Fleutot <metalua@gmail.com>
-
-Metalua is available under the MIT licence.
-
-To compile and use Metalua, you need to have installed the following
-project, released under the MIT public licence:
-
-- Lua 5.1 <http://www.lua.org>
-
-Significant parts of the compiler borrow code from the following project,
-released under the MIT license:
-
-- Kein-Hong Man's Yueliang <http://luaforge.net/projects/yueliang>
-
-Previous versions of Metalua used to embed the following projects,
-all under MIT licence (hese dependencies have been removed to turn
-Metalua into a pure Lua project, simplifying the port to non-Unix OSes):
-
-- Tomas Guisasola's Lua Rings <http://www.keplerproject.org/rings>
-- Ben Sunshine-Hill's Pluto <http://freshmeat.net/projects/pluto>
-- Thomas Reuben's Bitlib <http://luaforge.net/projects/bitlib>
-
-MIT License
-===========
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+++ /dev/null
-# Bootstrapped makefiles are a PITA to get right, so metalua is simply generated by
-# a shell script (also available for windows).
-#
-# To customize installation directories, edit metalua/src/make.{sh,bat}
-# - on windows, change DISTRIB_LIB and DISTRIB_BIN
-# - on unix, change INSTALL_LIB and INSTALL_BIN
-#
-# DISTRIB_LIB / INSTALL_LIB can point to an existing Lua library structure, and shouldn't
-# mess it up.
-
-
-
-# Compile everything in a staging area, by default /tmp/metalua-build.
-
-all:
- cd src && ./make.sh
-
-
-
-# src/make-install.sh is generated by src/make.sh, so "make install" won't work unless
-# you called "make" first.
-
-install:
- cd src && ./make-install.sh
\ No newline at end of file
--- /dev/null
+Metalua Compiler
+================
+
+## Metalua compiler
+
+This module `metalua-compiler` depends on `metalua-parser`. Its main
+feature is to compile ASTs into Lua 5.1 bytecode, allowing to convert
+them into bytecode files and executable functions. This opens the
+following possibilities:
+
+* compiler objects generated with `require 'metalua.compiler'.new()`
+ support methods `:xxx_to_function()` and `:xxx_to_bytecode()`;
+
+* Compile-time meta-programming: use of `-{...}` splices in source
+ code, to generate code during compilation;
+
+* Some syntax extensions, such as structural pattern matching and
+ lists by comprehension;
+
+* Some AST manipulation facilities such as `treequery`, which are
+ implemented with Metalua syntax extensions.
+
+## What's new in Metalua 0.7
+
+This is a major overhaul of the compiler's architecture. Some of the
+most noteworthy changes are:
+
+* No more installation or bootstrap script. Some Metalua source files
+ have been rewritten in plain Lua, and module sources have been
+ refactored, so that if you just drop the `metalua` folder somewhere
+ in your `LUA_PATH`, it works.
+
+* The compiler can be cut in two parts:
+
+ * a parser which generates ASTs out of Lua sources, and should be
+ either portable or easily ported to Lua 5.2;
+
+ * a compiler, which can turn sources and AST into executable
+ Lua 5.1 bytecode and run it. It also supports compile-time
+ meta-programming, i.e. code included between `-{ ... }` is
+ executed during compilation, and the ASTs it produces are
+ included in the resulting bytecode.
+
+* Both parts are packaged as separate LuaRocks, `metalua-parser` and
+ `metalua-compiler` respectively, so that you can install the former
+ without the latter.
+
+* The parser is not a unique object anymore. Instead,
+ `require "metalua.compiler".new()` returns a different compiler
+ instance every time it's called. Compiler instances can be reused on
+ as many source files as wanted, but extending one instance's grammar
+ doesn't affect other compiler instances.
+
+* The included standard library has been shed. There are too many
+  standard libs in Lua, and none of them is standard enough; offering
+  yet another one, coupled with a specific compiler, can only add to
+  confusion.
+
+* Many syntax extensions, which either were arguably more code samples
+ than actual production-ready tools, or relied too heavily on the
+ removed runtime standard libraries, have been removed.
+
+* The remaining libraries and samples are:
+
+ * `metalua.compiler` converts sources into ASTs, bytecode,
+ functions, and ASTs back into sources.
+
+ * `metalua` compiles and/or executes files from the command line,
+ can start an interactive REPL session.
+
+ * `metalua.loader` adds a package loader which allows to use modules
+ written in Metalua, even from a plain Lua program.
+
+ * `metalua.treequery` is an advanced DSL allowing to search ASTs in
+ a smart way, e.g. "_search `return` statements which return a
+ `local` variable but aren't in a nested `function`_".
+
+ * `metalua.extension.comprehension` is a language extension which
+ supports lists by comprehension
+ (`even = { i for i=1, 100 if i%2==0 }`) and improved loops
+ (`for i=1, 10 for j=1,10 if i~=j do print(i,j) end`).
+
+ * `metalua.extension.match` is a language extension which offers
+ Haskell/ML structural pattern matching
+ (``match AST with `Function{ args, body } -> ... | `Number{ 0 } -> ...end``)
+
+ * **TODO Move basic extensions in a separate module.**
+
+* To remove the compilation speed penalty associated with
+ metaprogramming, when environment variable `LUA_MCACHE` or Lua
+ variable `package.mcache` is defined and LuaFileSystem is available,
+  the results of Metalua source compilations are cached. Unless the
+ source file is more recent than the latest cached bytecode file, the
+ latter is loaded instead of the former.
+
+* The LuaRocks install for the full compiler lists dependencies towards
+  Readline, LuaFileSystem, and Alt-Getopt. Those projects are
+ optional, but having them automatically installed by LuaRocks offers
+ a better user experience.
+
+* The license has changed from MIT to double license MIT + EPL. This
+ has been done in order to provide the IP guarantees expected by the
+ Eclipse Foundation, to include Metalua in Eclipse's
+ [Lua Development Tools](http://www.eclipse.org/koneki/ldt/).
--- /dev/null
+Metalua Parser
+==============
+
+`metalua-parser` is a subset of the Metalua compiler, which turns
+valid Lua source files and strings into abstract syntax trees
+(AST). This README includes a description of this AST format. People
+interested by Lua code analysis and generation are encouraged to
+produce and/or consume this format to represent ASTs.
+
+It has been designed for Lua 5.1. It hasn't been tested against
+Lua 5.2, but should be easily ported.
+
+## Usage
+
+Module `metalua.compiler` has a `new()` function, which returns a
+compiler instance. This instance has a set of methods of the form
+`:xxx_to_yyy(input)`, where `xxx` and `yyy` must be one of the
+following:
+
+* `srcfile` the name of a Lua source file;
+* `src` a string containing the Lua sources of a list of statements;
+* `lexstream` a lexical tokens stream;
+* `ast` an abstract syntax tree;
+* `bytecode` a chunk of Lua bytecode that can be loaded in a Lua 5.1
+ VM (not available if you only installed the parser);
+* `function` an executable Lua function.
+
+Compiling into bytecode or executable functions requires the whole
+Metalua compiler, not only the parser. The most frequently used
+functions are `:src_to_ast(source_string)` and
+`:srcfile_to_ast("path/to/source/file.lua")`.
+
+ mlc = require 'metalua.compiler'.new()
+ ast = mlc :src_to_ast[[ return 123 ]]
+
+A compiler instance can be reused as much as you want; it's only
+interesting to work with more than one compiler instance when you
+start extending their grammars.
+
+## Abstract Syntax Trees definition
+
+### Notation
+
+Trees are written below with some Metalua syntax sugar, which
+increases their readability. The backquote symbol introduces a `tag`,
+i.e. a string stored in the `"tag"` field of a table:
+
+* `` `Foo{ 1, 2, 3 }`` is a shortcut for `{tag="Foo", 1, 2, 3}`;
+* `` `Foo`` is a shortcut for `{tag="Foo"}`;
+* `` `Foo 123`` is a shortcut for `` `Foo{ 123 }``, and therefore
+ `{tag="Foo", 123 }`; the expression after the tag must be a literal
+ number or string.
+
+When using a Metalua interpreter or compiler, the backtick syntax is
+supported and can be used directly. Metalua's pretty-printing helpers
+also try to use backtick syntax whenever applicable.
+
+### Tree elements
+
+Tree elements are mainly categorized into statements `stat`,
+expressions `expr` and lists of statements `block`. Auxiliary
+definitions include function applications/method invocations `apply`,
+which are both valid statements and expressions, and expressions
+admissible on the left-hand side of an assignment statement `lhs`.
+
+ block: { stat* }
+
+ stat:
+ `Do{ stat* }
+ | `Set{ {lhs+} {expr+} } -- lhs1, lhs2... = e1, e2...
+ | `While{ expr block } -- while e do b end
+ | `Repeat{ block expr } -- repeat b until e
+ | `If{ (expr block)+ block? } -- if e1 then b1 [elseif e2 then b2] ... [else bn] end
+ | `Fornum{ ident expr expr expr? block } -- for ident = e, e[, e] do b end
+ | `Forin{ {ident+} {expr+} block } -- for i1, i2... in e1, e2... do b end
+ | `Local{ {ident+} {expr+}? } -- local i1, i2... = e1, e2...
+ | `Localrec{ ident expr } -- only used for 'local function'
+ | `Goto{ <string> } -- goto str
+ | `Label{ <string> } -- ::str::
+ | `Return{ <expr*> } -- return e1, e2...
+ | `Break -- break
+ | apply
+
+ expr:
+ `Nil | `Dots | `True | `False
+ | `Number{ <number> }
+ | `String{ <string> }
+ | `Function{ { `Id{ <string> }* `Dots? } block }
+ | `Table{ ( `Pair{ expr expr } | expr )* }
+ | `Op{ opid expr expr? }
+ | `Paren{ expr } -- significant to cut multiple values returns
+ | apply
+ | lhs
+
+ apply:
+ `Call{ expr expr* }
+ | `Invoke{ expr `String{ <string> } expr* }
+
+ lhs: `Id{ <string> } | `Index{ expr expr }
+
+ opid: 'add' | 'sub' | 'mul' | 'div'
+ | 'mod' | 'pow' | 'concat'| 'eq'
+ | 'lt' | 'le' | 'and' | 'or'
+ | 'not' | 'len'
+
+### Meta-data (lineinfo)
+
+
+ASTs also embed some metadata, allowing to map them to their source
+representation. That information is stored in a `"lineinfo"` field
+in each tree node, which points to the range of characters in the
+source string which represents it, and to the content of any comment
+that would appear immediately before or after that node.
+
+Lineinfo objects have two fields, `"first"` and `"last"`, describing
+respectively the beginning and the end of the subtree in the
+sources. For instance, the sub-node `` `Number{123}`` produced by parsing
+`[[return 123]]` will have `lineinfo.first` describing offset 8, and
+`lineinfo.last` describing offset 10:
+
+
+ > mlc = require 'metalua.compiler'.new()
+ > ast = mlc :src_to_ast "return 123 -- comment"
+ > print(ast[1][1].lineinfo)
+ <?|L1|C8-10|K8-10|C>
+ >
+
+A lineinfo keeps track of character offsets relative to the beginning
+of the source string/file ("K8-10" above), line numbers (L1 above; a
+lineinfo spanning several lines would read something like "L1-10"),
+columns i.e. offset within the line ("C8-10" above), and a filename if
+available (the "?" mark above indicating that we have no file name, as
+the AST comes from a string). The final "|C>" indicates that there's a
+comment immediately after the node; an initial "<C|" would have meant
+that there was a comment immediately before the node.
+
+Positions represent either the end of a token and the beginning of an
+inter-token space (`"last"` fields) or the beginning of a token, and
+the end of an inter-token space (`"first"` fields). Inter-token spaces
+might be empty. They can also contain comments, which might be useful
+to link with surrounding tokens and AST subtrees.
+
+Positions are chained with their "dual" one: a position at the
+beginning of an inter-token space keeps a reference to the position at
+the end of that inter-token space in its `"facing"` field, and
+conversely, end-of-inter-token positions keep track of the inter-token
+space beginning, also in `"facing"`. An inter-token space can be
+empty, e.g. in `"2+2"`, in which case `lineinfo==lineinfo.facing`.
+
+Comments are also kept in the `"comments"` field. If present, this
+field contains a list of comments, with a `"lineinfo"` field
+describing the span between the first and last comment. Each comment
+is represented by a list of one string, with a `"lineinfo"` describing
+the span of this comment only. Consecutive lines of `--` comments are
+considered as one comment: `"-- foo\n-- bar\n"` parses as one comment
+whose text is `"foo\nbar"`, whereas `"-- foo\n\n-- bar\n"` parses as
+two comments `"foo"` and `"bar"`.
+
+So for instance, if `f` is the AST of a function and I want to
+retrieve the comment before the function, I'd do:
+
+ f_comment = f.lineinfo.first.comments[1][1]
+
+The information in lineinfo positions, i.e. in each `"first"` and
+`"last"` field, is held in the following fields:
+
+* `"source"` the filename (optional);
+* `"offset"` the 1-based offset relative to the beginning of the string/file;
+* `"line"` the 1-based line number;
+* `"column"` the 1-based offset within the line;
+* `"facing"` the position at the opposite end of the inter-token space.
+* `"comments"` the comments in the associated inter-token space (optional).
+* `"id"` an arbitrary number, which uniquely identifies an inter-token
+ space within a given tokens stream.
+
+++ /dev/null
-README.TXT
-==========
-For installation matters, cf. INSTALL.TXT
-
-Metalua 0.5
-===========
-
-Metalua is a static metaprogramming system for Lua: a set of tools
-that let you alter the compilation process in arbitrary, powerful and
-maintainable ways. For the potential first-time users of such a
-system, a description of these tools, as implemented by Metalua,
-follows.
-
-Dynamic Parsers
----------------
-
-One of the tools is the dynamic parser, which allows a source file to
-change the grammar recognized by the parser while it's being
-parsed. Taken alone, this feature lets you make superficial syntax
-tweaks on the language. The parser is based on a parser combinator
-library called 'gg'; you should know the half dozen functions in gg
-API to do advanced things:
-
-- There are a couple of very simple combinators like gg.list,
- gg.sequence, qq.multisequence, gg.optkeyword etc. that offer a level
- of expressiveness comparable to Yacc-like parsers. For instance, if
- mlp.expr parses Lua expressions, gg.list{ mlp.expr } creates a
- parser which handles lists of Lua expressions.
-
-- Since you can create all the combinators you can think of (they're
- regular, higher-order functions), there also are combinators
- specialized for typical language tasks. In Yacc-like systems, the
- language definition quickly becomes unreadable, because all
- non-native features have to be encoded in clumsy and brittle ways.
- So if your parser won't natively let you specify infix operator
- precedence and associativity easily, tough luck for you and your
- code maintainers. With combinators OTOH, most of such useful
- functions already exist, and you can write your own without
- rewriting the parser itself. For instance, adding an infix operator
- would just look like:
-
- > mlp.expr.infix:add{ "xor", prec=40, assoc='left', builder=xor_builder }
-
- Moreover, combinators tend to produce usable error messages when fed
- with syntactically incorrect inputs. It matters, because clearly
- explaining why an invalid input is invalid is almost as important as
- compiling a valid one, for a use=able compiler.
-
-Yacc-like systems might seem simpler to adopt than combinators, as
-long as they're used on extremely simple problems. However, if you
-either try to write something non trivial, or to write a simple macro
-in a robust way, you'll need to use lots of messy tricks and hacks,
-and spend much more time getting them (approximately) right than
-that 1/2 hour required to master the regular features of gg.
-
-
-Real meta-programming
----------------------
-
-If you plan to go beyond trivial keyword-for-keyword syntax tweaks,
-what will limit you is not syntax definition, but the ability to
-manipulate source code conveniently: without the proper tools and
-abstractions, even the simplest tasks will turn into a dirty hacks
-fest, then either into a maintenance nightmare, or simply into
-abandonware. Providing an empowering framework so that you don't get
-stuck in such predicaments is Metalua's whole purpose. The central
-concept is that programs prefer to manipulate code as trees, whereas
-most developers prefer ASCII sources, so both representations must be
-freely interchangeable. The make-or-break deal is then:
-
-- To easily let users see sources as trees, as sources, or as
- combination thereof, and switch representations seamlessly.
-
-- To offer the proper libraries that won't force you to reinvent a
- square wheel will take care of the most common pitfalls and won't
- force you to resort to brittle hacks.
-
-On the former point, Lisps are at a huge advantage, their user syntax
-already being trees. But languages with casual syntax can also offer
-interchangeable tree/source views; Metalua has some quoting +{ ... }
-and anti-quoting -{ ... } operators which let you switch between both
-representations at will: internally it works on trees, but you always
-have the option to see them as quoted sources. Metalua also supports a
-slightly improved syntax for syntax trees, to improve their
-readability.
-
-Library-wise, Metalua offers a set of syntax tree manipulation tools:
-
-- Structural pattern matching, a feature traditionally found in
- compiler-writing specialized languages (and which has nothing to do
- with string regular expressions BTW), which lets you express
- advanced tree analysis operations in a compact, readable and
- efficient way. If you have to work with advanced data structures
- and you try it, you'll never go back.
-
-- The walker library allows you to perform transformations on big
- portions of programs. It lets you easily express things like:
- "replace all return statements which aren't in a nested function by
- error statements", "rename all local variables and their instances
- into unique fresh names", "list the variables which escape this
- chunk's scope", "insert a type-checking instruction into every
- assignment to variable X", etc. Most of non-trivial macros will
- require some of those global code transformations, if you really want
- them to behave correctly.
-
-- Macro hygiene, although not perfect yet in Metalua, is required if
- you want to make macro writing reasonably usable (and contrary to a
- popular belief, renaming local variables into fresh names only
- address the easiest part of the hygiene issue; cf. changelog below
- for more details).
-
-- The existing extensions are progressively refactored in more modular
- ways, so that their features can be effectively reused in other
- extensions.
-
-
-Noteworthy changes from 0.4.1 to 0.5
-====================================
-
-Simplification of the install and structure:
-
-- This release is included in Lua for Windows, so now it couldn't get simpler
- for MS-Windows users!
-
-- Metalua is written in pure Lua again, thus making it platform-independant.
- No more mandatory C libraries. Pluto interface might be back, as an option,
- in a future version, but it's not worth the install trouble involved by
- DLL dependencies.
-
-- Simpler build process, just run make.sh or make.bat depending on your OS.
-
-- Metalua libraries are now in a separate metalua/* package. This allows to
- mix them with other Lua libraries, and to use them from plain Lua programs
- if you FIXME
-
-
-Other changes:
-
-- new option -S in metalua: prints sources re-generated from AST, after macro
- expansion.
-
-- compatible with more Lua VMs: 64 bits numbers, integral numbers, big endians...
-
-- some new extensions: xloop, xmatch, improved match.
-
-- ASTs now keep track of the source that generated them (API is not
- mature though, it will be changed and broken).
-
-- improved table printer: support of a plain-Lua mode, alternative indentation
- mode for deeply-nested tables.
-
-- added a generic table serializer, which handles shared and recursive
- sub-tables correctly.
-
-- gg API has been made slightly more flexible, as a first step towards a
- comprehensive syntax support for gg grammar definition. Follow the gg-syntax
- branch on github for ongoing work.
-
-
-Noteworthy changes from 0.4 to 0.4.1
-====================================
-
-- Proper reporting of runtime errors
-- Interactive REPL loop
-- Support for 64 bits architectures
-- Update to Pluto 2.2 and Lua 5.1.3
-- Build for Visual Studio .NET
-
-Noteworthy changes from 0.3 to 0.4
-=================================
-
-- A significantly bigger code base, mostly due to more libraries:
- about 2.5KLoC for libs, 4KLoC for the compiler. However, this remains
- tiny in today's desktop computers standards. You don't have to know
- all of the system to do useful stuff with it, and since compiled
- files are Lua 5.1 compatible, you can keep the "big" system on a
- development platform, and keep a lightweight runtime for embedded or
- otherwise underpowered targets.
-
-
-- The compiler/interpreter front-end is completely rewritten. The new
- frontend program, aptly named 'Metalua', supports proper passing of
- arguments to programs, and is generally speaking much more user
- friendly than the mlc from the previous version.
-
-
-- Metalua source libraries are searched for in environmemt variable
- LUA_MPATH, distinct from LUA_PATH. This way, in an application
- that's part Lua part Metalua, you keep a natural access to the
- native Lua compiler.
-
- By convention, Metalua source files should have extension .mlua. By
- default, bytecode and plain lua files have higher precedence than
- Metalua sources, which lets you easily precompile your libraries.
-
-
-- Compilation of files are separated in different Lua Rings: this
- prevents unwanted side-effects when several files are compiled
- (This can be turned off, but shouldn't be IMO).
-
-
-- Metalua features are accessible programmatically. Library
- 'Metalua.runtime' loads only the libraries necessary to run an
- already compiled file; 'Metalua.compile' loads everything useful at
- compile-time.
-
- Transformation functions are available in a library 'mlc' that
- contains all meaningful transformation functions in the form
- 'mlc.destformat_of_sourceformat()', such as 'mlc.luacfile_of_ast()',
- 'mlc.function_of_luastring()' etc. This library has been
- significantly completed and rewritten (in Metalua) since v0.3.
-
-
-- Helper libraries have been added. For now they're in the
- distribution, at some point they should be luarocked in. These
- include:
- - Lua Rings and Pluto, duct-taped together into Springs, an improved
- Rings that lets states exchange arbitrary data instead of just
- scalars and strings. Since Pluto requires a (minor) patch to the
- VM, it can be disabled.
- - Lua bits for bytecode dumping.
- - As always, very large amounts of code borrowed from Yueliang.
- - As a commodity, I've also packaged Lua sources in.
-
-
-- Extensions to Lua standard libraries: many more features in table
- and the baselib, a couple of string features, and a package system
- which correctly handles Metalua source files.
-
-
-- Builds on Linux, OSX, Microsoft Visual Studio. Might build on mingw
- (not tested recently, patches welcome). It's easily ported to all
- systems with a full support for lua, and if possible dynamic
- libraries.
-
- The MS-windows building is based on a dirty .bat script, because
- that's pretty much the only thing you're sure to find on a win32
- computer. It uses Microsoft Visual Studio as a compiler (tested with
- VC++ 6).
-
- Notice that parts of the compiler itself are now written in Metalua,
- which means that its building now goes through a bootstrapping
- stage.
-
-
-- Structural pattern matching improvements:
- - now also handles string regular expressions: 'someregexp'/pattern
- will match if the tested term is a string accepted by the regexp,
- and on success, the list of captures done by the regexp is matched
- against pattern.
- - Matching of multiple values has been optimized
- - the default behavior when no case match is no to raise an error,
- it's the most commonly expected case in practice. Trivial to
- cancel with a final catch-all pattern.
- - generated calls to type() are now hygienic (it's been the cause of
- a puzzling bug report; again, hygiene is hard).
-
-
-- AST grammar overhaul:
- The whole point of being alpha is to fix APIs with a more relaxed
- attitude towards backward compatibility. I think and hope it's the
- last AST revision, so here is it:
- - `Let{...} is now called `Set{...}
- (Functional programmers would expect 'Let' to introduce an
- immutable binding, and assignment isn't immutable in Lua)
- - `Key{ key, value } in table literals is now written `Pair{ key, value }
- (it contained a key *and* its associated value; besides, 'Pair' is
- consistent with the name of the for-loop iterator)
- - `Method{...} is now `Invoke{...}
- (because it's a method invocation, not a method declaration)
- - `One{...} is now `Paren{...} and is properly documented
- (it's the node representing parentheses: it's necessary, since
- parentheses are sometimes meaningful in Lua)
- - Operator are simplified: `Op{ 'add', +{2}, +{2} } instead of
- `Op{ `Add, +{2}, +{2} }. Operator names match the corresponding
- metatable entries, without the leading double-underscore.
- - The operators which haven't a metatable counterpart are
- deprecated: 'ne', 'ge', 'gt'.
-
-
-- Overhaul of the code walking library:
- - the API has been simplified: the fancy predicates proved more
- cumbersome to use than a bit of pattern matching in the visitors.
- - binding identifiers are handled as a distinct AST class
- - walk.id is scope-aware, handles free and bound variables in a
- sensible way.
- - the currified API proved useless and sometimes cumbersome, it's
- been removed.
-
-
-- Hygiene: I originally planned to release a full-featured hygienic
- macro system with v0.4, but what exists remains a work in
- progress. Lua is a Lisp-1, which means unhygienic macros are very
- dangerous, and hygiene a la Scheme pretty much limits macro writing
- to a term rewriting subset of the language, which would be crippling
- to use.
-
- Note: inside hygiene, i.e. preventing macro code from capturing
- variables in user code, is trivial to address through alpha
- conversion, it's not the issue. The trickier part is outside
- hygiene, when user's binders capture globals required by the
- macro-generated code. That's the cause of pretty puzzling and hard
- to find bugs. And the *really* tricky part, which is still an open
- problem in Metalua, is when you have several levels of nesting
- between user code and macro code. For now this case has to be
- hygienized by hand.
-
- Note 2: Converge has a pretty powerful approach to hygienic macros
- in a Lisp-1 language; for reasons that would be too long to expose
- here, I don't think its approach would be the best suited to Metalua.
- But I might well be proved wrong eventually.
-
- Note 3: Redittors must have read that Paul Graham has released Arc,
- which is also a Lisp-1 with Common Lisp style macros; I expect this
- to create a bit of buzz, out of which might emerge proper solutions
- the macro hygiene problem.
-
-
-- No more need to create custom syntax for macros when you don't want
- to. Extension 'dollar' will let you declare macros in the dollar
- table, as in +{block: function dollar.MYMACRO(a, b, c) ... end},
- and use it as $MYMACRO(1, 2, 3) in your code.
-
- With this extension, you can write macros without knowing anything
- about the Metalua parser. Together with quasi-quotes and automatic
- hygiene, this will probably be the closest we can go to "macros for
- dummies" without creating an unmaintainable mess generator.
-
- Besides, it's consistent with my official position that focusing on
- superficial syntax issues is counter-productive most of the time :)
-
-
-- Lexers can be switched on the fly. This lets you change the set of
- keywords temporarily, with the new gg.with_lexer() combinator. You
- can also handle radically different syntaxes in a single file (think
- multiple-languages systems such as LuaTeX, or programs+goo as PHP).
-
-
-- Incorporation of the bug fixes reported to the mailing list and on
- the blog.
-
-
-- New samples and extensions, in various states of completion:
-
- * lists by comprehension, a la python/haskell. It includes lists
- chunking, e.g. mylist[1 ... 3, 5 ... 7]
-
- * anaphoric macros for 'if' and 'while' statements: with this
- extension, the condition of the 'if'/'while' is bound to variable
- 'it' in the body; it lets you write things like:
-
- > while file:read '*l' do print(it) end.
-
- No runtime overhead when 'it' isn't used in the body. An anaphoric
- variable should also be made accessible for functions, to let
- easily write anonymous recursive functions.
-
- * Try ... catch ... finally extension. Syntax is less than ideal,
- but the proper way to fix that is to refactor the match extension
- to improve code reuse. There would be many other great ways to
- leverage a refactored match extension, e.g. destructuring binds or
- multiple dispatch methods. To be done in the next version.
-
- * with ... do extension: it uses try/finally to make sure that
- resources will be properly closed. The only constraint on
- resources is that they have to support a :close() releasing method.
- For instance, he following code guarantees that file1 and file2
- will be closed, even if a return or an error occurs in the body.
-
- > with file1, file2 = io.open "f1.txt", io.open "f2.txt" do
- > contents = file1:read'*a' .. file2:read ;*a'
- > end
-
- * continue statement, logging facilities, ternary "?:" choice
- operator, assignments as expressions, and a couple of similarly
- tiny syntax sugar extensions.
-
-
-You might expect in next versions
-=================================
-The next versions of Metalua will provide some of the following
-improvements, in no particular order: better error reporting,
-especially at runtime (there's a patch I've been too lazy to test
-yet), support for 64 bits CPUs, better support for macro hygiene, more
-samples and extensions, an adequate test suite, refactored libraries.
-
-
-Credits
-=======
-
-I'd like to thank the people who wrote the open source code which
-makes Metalua run: the Lua team, the authors of Yueliang, Pluto, Lua
-Rings, Bitlib; and the people whose bug reports, patches and
-insightful discussions dramatically improved the global design,
-including John Belmonte, Vyacheslav Egorov, David Manura, Olivier
-Gournet, Eric Raible, Laurence Tratt, Alexander Gladysh, Ryan
-Pusztai...
--- /dev/null
+Metalua
+=======
+
+Metalua is a Lua code analysis tool, as well as a compiler for a
+superset of Lua 5.1 supporting Compile-Time Meta-Programming. It's
+separated into two LuaRocks, `metalua-parser` and
+`metalua-compiler`. The documentation of each rock can be found in
+`README-parser.md` and `README-compiler.md`.
+
+All the code in Metalua is released under dual licenses:
+
+* MIT public license (same as Lua);
+* EPL public license (same as Eclipse).
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+-- Alternative implementation of checks() in Lua. Slower than
+-- the C counterpart, but no compilation/porting concerns.
+
+checkers = { }
+
+local function check_one(expected, val)
+ if type(val)==expected then return true end
+ local mt = getmetatable(val)
+ if mt and mt.__type==expected then return true end
+ local f = checkers[expected]
+ if f and f(val) then return true end
+ return false
+end
+
+local function check_many(name, expected, val)
+ if expected=='?' then return true
+ elseif expected=='!' then return (val~=nil)
+ elseif type(expected) ~= 'string' then
+ error 'strings expected by checks()'
+ elseif val==nil and expected :sub(1,1) == '?' then return true end
+ for one in expected :gmatch "[^|?]+" do
+ if check_one(one, val) then return true end
+ end
+ return false
+end
+
+function checks(...)
+ for i, arg in ipairs{...} do
+ local name, val = debug.getlocal(2, i)
+ local success = check_many(name, arg, val)
+ if not success then
+ local fname = debug.getinfo(2, 'n').name
+ local fmt = "bad argument #%d to '%s' (%s expected, got %s)"
+ local msg = string.format(fmt, i, fname or "?", arg, type(val))
+ error(msg, 3)
+ end
+ end
+end
+
+return checks
+++ /dev/null
-This directory is intended to contain unfinished, random, poor or temporary stuff that shouldn't make it to a distro.
\ No newline at end of file
+++ /dev/null
-/* This should combine several functions into one, when they're already
- * compiled into functions. Useful when we don't have their AST, e.g.
- * to link several precompiled chunks into one.
- *
- * It currently doesn't work; meanwhile, one can use the original
- * 'luac' executable, although it doesn't handle argument passing through
- * "..." correctly.
- */
-
-#include <lua.h>
-#include <lapi.h>
-#include <lfunc.h>
-#include <lstate.h>
-#include <lstring.h>
-#include <lopcodes.h>
-#include <ldo.h>
-
-static int lua_combine( lua_State* L) {
- int n = lua_gettop( L); /* Number of functions to combine */
- if( 1 == n) {
- return 1; /* Only one function, nothing to combine */
- } else {
- int i, pc = 3*n + 1;
- Proto* f = luaF_newproto( L);
- setptvalue2s( L,L->top,f);
- incr_top( L);
- f->source = luaS_newliteral( L,"=(combiner)");
- f->maxstacksize = 2;
- f->is_vararg = VARARG_ISVARARG;
- f->code = luaM_newvector(L, pc, Instruction);
- f->sizecode = pc;
- f->p = luaM_newvector( L, n, Proto*);
- f->sizep = n;
- for( i = pc = 0; i < n; i ++) {
- int proto_idx = i-n-1;
- Proto *p = clvalue( L->top + proto_idx)->l.p;
- f->p[i] = p;
- f->code[pc++] = CREATE_ABx( OP_CLOSURE, 0, i);
- f->code[pc++] = CREATE_ABx( OP_VARARG, 1, 0);
- f->code[pc++] = CREATE_ABC( OP_CALL, 0, 0, 1);
- }
- f->code[pc++] = CREATE_ABC( OP_RETURN, 0, 1, 0);
- return 1;
- }
-}
-
-int luaopen_combine( lua_State *L) {
- lua_pushcfunction( L, lua_combine);
- lua_setglobal( L, "combine");
- return 0;
-}
+++ /dev/null
---x = dynamatch()
---x <| foo | bar -> do toto end
---table.insert(x.cases, |$| match $ with foo|bar -> toto end )
-
-
-local match_builder = mlp.stat:get "match"
-
-function dynacase_builder (d, s)
- local v = mlp.gensym()
- local m = match_builder{ v, false, { s[1], s[2], s[3] } }
- local c = `Function{ {v}, {m} }
- return `Call{ `Index{ d, "extend" }, c }
-end
-
---fixme: limiter la precedence du expr de droite
-mlp.expr.suffix:add{
- name = "dynamatch extension", prec=30,
- "<|",
- gg.list{ name = "patterns",
- primary = mlp.expr,
- separators = "|",
- terminators = { "->", "if" } },
- gg.onkeyword{ "if", mlp.expr },
- "->",
- gg.multisequence{
- { "do", mlp.block, "end", builder = |x| x[1] },
- default = { mlp.expr, builder = |x| { `Return{ x[1] } } } },
- builder = |x| dyna_builder (x[1], x[3]) }
-
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id$
---
--- Summary: Hygienic macro facility for Metalua
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
---------------------------------------------------------------------------------
---
--- =============
--- W A R N I N G
--- =============
---
--- THIS IS AN OLD NAIVE IMPLEMENTATION. IT'S PARATIAL (NO HYGIENE WRT OUTSIDE)
--- AND WRITTEN FROM SCRATCH WITH PATTERN MATCHING. MUST BE DONE WITH A WALKER.
---
--- Traditional macros carry a well-known pitfall, called variable capture:
--- when pasting a piece of source code A into another code B, if B bonds some
--- variables used by A, then the meaning of A is modified in a way probably
--- not intended by the user.
---
--- Example:
--- A = +{ n = 5 }
--- B = +{ local n=3; -{ A } }
---
--- In this example, [n] in [A] will be captured by the local variable declared
--- by [B], and this is probably a bug.
---
--- Notice that this also exists in C. Typical example:
---
--- #define swap (type, a, b) do { type tmp=a; a=b; b=tmp } while(0)
--- void f() {
--- int tmp=1, a=2;
--- swap (int, tmp, a); // won't work, [tmp] is captured in the macro
--- }
---
--- We can fix this by making sure that all local variables and parameters
--- created by [B] have fresh names. [mlp.gensym()] produces guaranteed-to-be-unique
--- variable names; we use it to replace all local var names declarations and
--- occurences in [B] by such fresh names.
---
--- Such macros which are guaranteed not to capture any variable are called
--- hygienic macros. By extension, an AST guaranteed not to contain capturing
--- variables is called an hygienic AST.
---
--- We implement here some functions which make sure that an AST is hygienic:
---
--- - [hygienize_stat (ast)] for statement AST;
--- - [hygienize_stat (ast)] for statement block AST;
--- - [hygienize_expr (ast)] for expression AST;
---
--- This sample deconstructs AST by structural pattern matching, which is
--- supported by Metalua extension "match.lua"
---
---------------------------------------------------------------------------------
-
--{ extension "match" }
-
-require "std"
-
-local clone_ctx = std.shallow_copy
-
---------------------------------------------------------------------------------
--- Tag tables: these allow [hygienize] to decide whether an AST is
--- an expression, a statement, or something which isn't changed by
--- alpha renaming.
---------------------------------------------------------------------------------
-local stat_tags = {
- Do = true, Let = true,
- While = true, Repeat = true,
- If = true, Fornum = true,
- Forin = true, Local = true,
- Localrec = true, Return = true }
-
-local expr_tags = {
- Function = true, Table = true,
- Op = true, Call = true,
- Method = true, Index = true }
-
-local neutral_tags = {
- String = true, Number = true,
- True = true, False = true,
- Dots = true, Break = true,
- Id = true }
-
---------------------------------------------------------------------------------
--- Choose the relevant [hygienize_xxx()] function according to the AST's tag
--- and the tables above.
---------------------------------------------------------------------------------
-function hygienize (ast)
- if not ast.tag then hygienize_block (ast)
- elseif neutral_tags[ast.tag] then -- pass
- elseif stat_tags[ast.tag] then hygienize_stat (ast)
- elseif expr_tags[ast.tag] then hygienize_expr (ast)
- else error "Unrecognized AST" end
- return ast
-end
-
-if mlp then
- -- Add hygienic parsers for quotes
- mlp.hexpr = hygienize `o` mlp.expr
- mlp.hstat = hygienize `o` mlp.stat
- mlp.hblock = hygienize `o` mlp.block
-end
-
---------------------------------------------------------------------------------
--- Make a statement AST hygienic. The optional [ctx] parameter is a
--- [old_name -> new_name] map, which holds variable name substitutions
--- to perform.
---------------------------------------------------------------------------------
-function hygienize_stat (ast, ctx)
- if not ctx then ctx = { } end
- match ast with
- | { ... } if not ast.tag -> hygienize_block (ast, ctx)
- | `Do{ ... } -> hygienize_block (ast, clone_ctx (ctx))
-
- | `Let{ vars, vals } ->
- hygienize_expr_list (vars, ctx)
- hygienize_expr_list (vals, ctx)
-
- | `While{ cond, block } ->
- hygienize_expr (cond, ctx)
- -- use a clone of [ctx], since the block has a separate scope
- hygienize_block (ast, clone_ctx (ctx))
-
- | `Repeat{ block, cond } ->
- -- use a clone of [ctx], since the block has a separate scope.
- -- Notice that the condition in [repeat ... until] is evaluated
- -- inside the block's scope, i.e. with [inner_ctx] rather than [ctx].
- local inner_ctx = clone_ctx (ctx)
- hygienize_block (ast, inner_ctx)
- hygienize (cond, inner_ctx)
-
- | `If{ ... } ->
- for i=1, #ast-1, 2 do
- hygienize_expr (ast[i], ctx) -- condtion
- -- each block has its own scope
- hygienize_block (ast[i+1], clone_ctx (ctx)) -- conditional block
- end
- if #ast % 2 == 1 then
- hygienize_block (ast[#ast], clone_ctx (ctx)) -- else block
- end
-
- | `Fornum{ var, ... } ->
- hygienize_expr (ast[i], ctx, 2, #ast-1) -- start, finish, step? exprs
- local inner_ctx = clone_ctx (ctx)
- alpha_rename (var, inner_ctx) -- rename local var [var] in [inner_ctx]
- hygienize_block (ast[#ast], inner_ctx)
-
- | `Forin{ vars, vals, block } ->
- hygienize_expr_list (vals, ctx)
- local inner_ctx = clone_ctx (ctx)
- alpha_rename_list (vars, inner_ctx) -- rename local vars [vars] in [inner_ctx]
- hygienize_block (block, inner_ctx)
-
- | `Local{ vars, vals } ->
- -- locals only enter in scope after their values are computed
- -- --> parse values first, then rename vars
- hygienize_expr_list (vals, ctx)
- alpha_rename_list (vars, ctx)
-
- | `Localrec{ vars, vals } ->
- -- As opposed to [`Local], vars are in scope during their values'
- -- computation --> rename before parsing values.
- alpha_rename_list (vars, ctx)
- hygienize_expr_list (vals, ctx)
-
- | `Call{ ... } | `Method{ ... } ->
- -- these are actually expr, delegate to [hygienize_expr]
- hygienize_expr (ast, ctx)
-
- | `Return{ ... } -> hygienize_expr_list (ast, ctx)
- | `Break ->
- | _ -> error ("Unknown statement "..ast.tag)
- end
-end
-
-
---------------------------------------------------------------------------------
--- Make an expression AST hygienic. The optional [ctx] parameter is a
--- [old_name -> new_name] map, which holds variable name substitutions
--- to perform.
---------------------------------------------------------------------------------
-function hygienize_expr (ast, ctx)
- if not ctx then ctx = { } end
- match ast with
- | `String{ _ } | `Number{ _ } | `True | `False | `Dots -> -- nothing
-
- | `Function{ params, block } ->
- local inner_ctx = clone_ctx (ctx)
- alpha_rename_list (params, inner_ctx)
- hygienize_block (block, inner_ctx)
-
- | `Table{ ... } ->
- for _, x in ipairs (ast) do
- match x with
- | `Key{ key, val } ->
- hygienize_expr (key, ctx)
- hygienize_expr (val, ctx)
- | _ -> hygienize (x, ctx)
- end
- end
-
- | `Id{ x } ->
- -- Check for substitutions to apply:
- local y = ctx[x]; if y then ast[1] = y end
-
- | `Op{ op, ... } ->
- hygienize_expr_list (ast, ctx, 2, #ast)
-
- -- Just dispatch to sub-expressions:
- | `Call{ func, ... }
- | `Method{ obj, `String{ name }, ... }
- | `Index{ table, key } ->
- hygienize_expr_list (ast, ctx)
- | _ -> error ("Unknown expression "..ast.tag)
- end
-end
-
---------------------------------------------------------------------------------
--- Make an statements block AST hygienic. The optional [ctx] parameter is a
--- [old_name -> new_name] map, which holds variable name substitutions
--- to perform.
---------------------------------------------------------------------------------
-function hygienize_block (ast, ctx)
- if not ctx then ctx = { } end
- table.iter ((|x| hygienize(x, ctx)), ast)
--- for i = 1, #ast do
--- hygienize_stat (ast[i], ctx)
--- end
-end
-
---------------------------------------------------------------------------------
--- Makes a shallow copy of a table. Used to make a copy of [ctx] substitution
--- tables, when entering a new scope.
---------------------------------------------------------------------------------
---[[
-function clone_ctx (ctx)
- local r = { }
- for k, v in pairs (ctx) do r[k] = v end
- return r
-end
-]]
-
---------------------------------------------------------------------------------
--- Make every expression from index [start] to [finish], in list
--- [ast], hygienic. The optional [ctx] parameter is a [old_name ->
--- new_name] map, which holds variable name substitutions to perform.
--- [start] defaults to 1, [finish] defaults to the list's size.
---------------------------------------------------------------------------------
-function hygienize_expr_list (ast, ctx, start, finish)
- for i = start or 1, finish or #ast do
- hygienize_expr (ast[i], ctx)
- end
-end
-
---------------------------------------------------------------------------------
--- Replace the identifier [var]'s name with a fresh one generated by
--- [mlp.gensym()], and store the new association in [ctx], so that the
--- calling function will be able to substitute identifier occurences with
--- its new name.
---------------------------------------------------------------------------------
-function alpha_rename (var, ctx)
- assert (var.tag == "Id")
- ctx[var[1]] = mlp.gensym()[1]
- var[1] = ctx[var[1]]
-end
-
---------------------------------------------------------------------------------
--- runs [alpha_rename] on a list of identifiers.
---------------------------------------------------------------------------------
-function alpha_rename_list (vars, ctx)
- for _, v in ipairs(vars) do alpha_rename (v, ctx) end
-end
+++ /dev/null
---------------------------------------------------------------------------------
---
--- (c) Fabien Fleutot 2007, published under the MIT license.
---
---
--- API:
--- ----
--- * freevars.block(ast)
--- * freevars.expr(ast)
--- * freevars.stat(ast)
---
---------------------------------------------------------------------------------
-
-require 'std'
-require 'walk'
-require 'freevars'
-
--{ extension 'match' }
-
---------------------------------------------------------------------------------
--- Return the string->boolean hash table of the names of all free variables
--- in 'term'. 'kind' is the name of an entry in module 'walk', presumably
--- one of 'expr', 'stat' or 'block'.
---------------------------------------------------------------------------------
-local function alpha (kind, term)
- local cfg = { expr = { }, stat = { }, block = { } }
-
- -----------------------------------------------------------------------------
- -- Monkey-patch the scope add method, so that it associates a unique name
- -- to bound vars.
- -----------------------------------------------------------------------------
- local scope = scope:new()
- function scope:add(vars)
- for v in values(vars) do self.current[v] = mlp.gensym(v) end
- end
-
- -----------------------------------------------------------------------------
- -- Check identifiers; add functions parameters to scope
- -----------------------------------------------------------------------------
- function cfg.expr.down(x)
- match x with
- | `Splice{...} -> return 'break' -- don't touch user parts
- | `Id{ name } ->
- local alpha = scope.current[name]
- if alpha then x[1] = alpha end
- | `Function{ params, _ } -> scope:push(); scope:add (params)
- | _ -> -- pass
- end
- end
-
- -----------------------------------------------------------------------------
- -- Close the function scope opened by 'down()'
- -----------------------------------------------------------------------------
- function cfg.expr.up(x)
- match x with `Function{...} -> scope:pop() | _ -> end
- end
-
- -----------------------------------------------------------------------------
- -- Create a new scope and register loop variable[s] in it
- -----------------------------------------------------------------------------
- function cfg.stat.down(x)
- match x with
- | `Splice{...} -> return 'break'
- | `Forin{ vars, ... } -> scope:push(); scope:add(vars)
- | `Fornum{ var, ... } -> scope:push(); scope:add{var}
- | `Localrec{ vars, ... } -> scope:add(vars)
- | `Repeat{ block, cond } -> -- 'cond' is in the scope of 'block'
- scope:push()
- for s in values (block) do walk.stat(cfg)(s) end -- no new scope
- walk.expr(cfg)(cond)
- scope:pop()
- return 'break' -- No automatic walking of subparts
- | _ -> -- pass
- end
- end
-
- -----------------------------------------------------------------------------
- -- Close the scopes opened by 'up()'
- -----------------------------------------------------------------------------
- function cfg.stat.up(x)
- match x with
- | `Forin{ ... } | `Fornum{ ... } -> scope:pop() -- `Repeat has no up().
- | `Local{ vars, ... } -> scope:add(vars)
- | _ -> -- pass
- end
- end
-
- -----------------------------------------------------------------------------
- -- Create a separate scope for each block
- -----------------------------------------------------------------------------
- function cfg.block.down() scope:push() end
- function cfg.block.up() scope:pop() end
-
- walk[kind](cfg)(term)
- return freevars
-end
-
---------------------------------------------------------------------------------
--- A wee bit of metatable hackery. Just couldn't resist, sorry.
---------------------------------------------------------------------------------
-freevars = setmetatable ({ scope=scope }, { __index = |_, k| |t| fv(k, t) })
+++ /dev/null
-mlp.lexer:add{ '?.', '?(', '?[' }
-
-function maybe_field_builder(e, f)
- f = f[1]
- local v = mlp.gensym()
- local block =
- +{block: local -{v} = -{e}; if not -{v}[-{f}] then -{v}[-{f}] = { } end}
- return `Stat{ block, v }
-end
-
-function maybe_dot_builder(e, f)
- return maybe_field_builder(e, {mlp.id2string(f[1])})
-end
-
--- NON, ca gere pas les multireturns. Le plus simple c'est d'avoir un support runtime.
-
-function maybe_call_builder(e, args)
- args = args[1]
- local v, w = mlp.gensym(), mlp.gensym()
- local block =
- +{block: local -{v}, -{w} = -{e}; if -{v} then (-{w})-{`Call(v, unpack(args))} end}
- return `Stat{ block, w }
-end
-
-function maybe_invoke_builder(e, s)
- local name, args = unpack(s)
- local v, w = mlp.gensym(), mlp.gensym()
- local block =
- +{block: local -{v}, -{w} = -{e}; if -{v} then (-{w})-{`Invoke(v, unpack(args))} end}
- return `Stat{ block, w }
-end
-
-mlp.expr.suffix:add{ '?.', mlp.id, builder = |e, f|
\ No newline at end of file
+++ /dev/null
-=== Random personal notes ===
-
-====================================================================
-This is my persistent *scratch*. There are random notes, in random
-languages, unstructured, out of date, generally unexploitable. Don't
-expect anything here to make sense.
-====================================================================
-
-Readme fraft
-============
-This distribution of metalua tries to favor ease of install over
-saving a couple of kilobytes; therefore it directly embeds pieces of
-useful other free software rather than letting users fetch and
-configure them, and it installs separately from a regular lua install,
-instead of messing it up.
-
-Open source code used by metalua
-
-
-Metalua current sources include (possibly slightly modified versions
-of) these open source projects:
-
-- Lua, of course.
-
-- Yueliang, a Lua compiler in written in Lua: this is
- the base of metalua's bytecode dumper.
-
-- Rings, a part of the Kelper project, which lets handle multiple Lua
- states from within Lua.
-
-- Editline, an interactive command line editor, very similar to GNU
- readline but with a more permissive licence.
-
-- bitlib for bitwise manipulations (especially useful for bytecode
- dumping)
-
-The mlc module
---------------
-This module contains all of the compilation stuff. The version
-available after compilation is heavily based on the pattern matching
-extension, so it isn't used in the code required for bootstrapping
-(which has to be in lua inter metalua. That is, then, Lua)
-
-Libraries
----------
-The mechanism of metalua libraries is exactly the same as Lua's,
-except that the environment variable names ahve been changed to allow
-peaceful cohabitation with Lua.
-
-Etancheite:
-===========
-- shell interactif:
- * separer les niveaux
- * permettre de monter/descendre d'un niveau avec des commandes
- dediees: "+:" et "-:"
-
-Il faut faire la part entre la synthese de l'AST et l'evaluation. La
-synthese de l'AST est faite en amont de mlc.splice(). Apparemment, le
-lexer est commun a tout le monde... et mlp aussi.
-
-Hygiene:
-========
-les quotes peuvent etre hygieniques (HQQ) ou pas (QQ). les QQ sont
-normales, ne font rien; ca permet de capturer des variables entre
-morceaux.
-
-Les HQQ sont attachees a un contexte, dans lequel seront reversees
-leurs variables libres. +{:hexpr(CTX): foo } va alpha renommer toutes
-les variables libres de +{foo} et les stocker dans CTX.
-
-Autre possibilite syntaxique: une +{hexpr: foo } retourne +{foo} et
-son contexte. Le contexte permet de decider comment fusionner deux
-AST. Il ne se resume pas a une substitution
-
-->Laurie:
-
-Your approach is to tell the user that an AST has a sense by itself:
-it's an autonomous piece of data that won't interfere with its
-environment. Then you re-introduce mechanisms to get some dangling
-bits back, so that you can wire the various bits (QQ and splices)
-together as you wish.
-
-Now, the point from which I'll draw my PoV: an AST is only hygienic
-relative to a context. Through gensym() we're able to craft ASTs that
-are hygienic in all normal contexts, but the notion of contexts
-continues to exist. In particular, as soon as we want to connect bits
-of AST in interesting ways, we start to need a finer-grained control
-of contexts.
-
-You offer, with $c{ } ${ } and &, ways to poke holes into contexts,
-but since you try pretend to the user that there's no context, the
-user is screwed when he does want to mess with it. He has to guess how
-those hidden contexts work, and find ways to use the tools mentionned
-above so that they have the intended effect on the stealth context.
-
-That's a good approach when there's nothing clever to do with
-contexts, and it falls down when users get out of the main use
-cases. Depending on how rare it is to need such conttext hacking
-capabilities, your approach might or might not be practical.
-
-The metalua-consistent approach is the opposite one: if there's such a
-thing as contexts, and it sometimes makes sense for (advanced) users
-to fiddle with them, then there must be first class AST context
-objects. If it can be optionally hidden in most common cases, great,
-but users aren't supposed to ignore their existence.
-
-Therefore, whenever you combine two pieces of AST, you specify how
-their context must be merged. The 2 most common defaults would be
-"don't touch anything" (non-hygienic macros) and "make sure there's no
-interference" (full hygiene, no capture).
-
-In the example we just discussed, the problem is that we have 3 AST
-(around, inside and usercode) and we want to splice them in a fancy
-way: there's only one capturing variable between around and inside,
-and it must not capture anything in usercode. You hack your way around
-to build a protective shell on usercode, then toss it with the other
-ASTs. I'd rather write that around and inside share the same context,
-and usercode has its own. Then when combining all those, the
-hygienizer will know what to do. if I wanted to combine around's and
-outside's contexts so that they'd only share "q_var", I should be able
-to express that as well.
-
-Bottom line is: I'd rather have 1st class context than implicit ones,
-which will be a hassle to interact with when I have to. KISS
-engineering at its best :)
-
-
-
-Restent a faire:
-================
-- reflechir a l'addition automatique des mots-clef par les parsers qui
- vont bien. Peut-etre qu'ils sont attaches optionnellement a un lexer,
- et que cet attachement est transmis par defaut qd on insere un
- parser ds un autre
-- notation pour la generation auto de sous-tables
-- verifier le runtime error handling patch d'EVE
-- anaphoric macros should bind 'it' to a function when it appears directly in
- it. it would allow anonymous recursive functions. However, anaphoric
- structures tend to capture each other very fast; maybe Arc has insightful
- ideas about this? (e.g. different captured names for function and other
- structs)
-
-
-Bugs a verifier:
-================
-- reentrance de in_a_quote dans mlp_meta.lua
-- refuser les flags autres qu'expr dans les splices in_a_quote
-
-il faudra encore fournir une boucle interactive, celle de lua n'est
-pas patchable. Idem pour le compilo.
-
-metalua compiler/interpreter short usage manual
-===============================================
-
-Allows to compile and/or run metalua programs, and to use interactive
-loop.
-
---compile
---run
---interactive
---output
---load-library <name>[@metalevel]
---print-ast
-
-By default, parameters are passed to all running chunks. However, if
-there is are parameters and no running chunk, the first parameter is
-taken as the running chunk.
-
-metalua --compile foo.mlua
-
-if there is an --output, all run and compiled files will be saved in
-it.
-
-- compile all files.
-- displays ASTs depending on --print-ast
-- run all files if --run
-- run loop after compile and run if -i or if no file
-- if no -a and no -o, -x is implied
-- if -x and no file, first param is a file
-
-2nd attempt
-===========
-
-Compile and/or execute metalua programs. Parameters passed to the
-compiler should be prefixed with an option flag, hinting what must be
-done with them: take tham as file names to compile, as library names
-to load, as parameters passed to the running program... When option
-flags lack, metalua tries to adopt a "Do What I Mean" approach:
-
-- if no code (no library, no literal expression and no file) is
- specified, the first flag-less parameter is taken as a file name to
- load.
-
-- if no code and no parameter is passed, an interactive loop is
- started.
-
-- if a target file is specified with --output, the program is not
- executed by default, unless a --run flag forces it to. Conversely,
- if no --output target is specified, the code is run unless ++run
- forbids it.
-
-When does it compile, when does it execute?
--------------------------------------------
-The result of the compilation is saved if there is a --output
-specifying a destination file. If not, the result is run. If both
---output and --run are specified, the result is both saved and run. If
-the compilation must be saved, it is mandatory to have at least one
-file or library.
-
-How to load chunks to compile/run
----------------------------------
-Files can be passed, with their relative paths, with --file. Libraries
-can be loaded, from standard paths, with --library. Litteral blocks of
-code can be passed with --literal.
-
-When does it launch an interactive loop?
-----------------------------------------
-When --interactive is specified, or when no chunk is loaded.
-
-
-
-
-Macro hygiene
-=============
-
-alpha should go at the top of the chunk *where the quote is used*,
-which is generally not the same as where it's defined. Ideally, it
-should go where the extension using the quote is called. So what I
-really need is an improved 'extension' statement, which handles quotes
-anhoring.
-
-A serious problem is that most quotes are inside functions and
-therefore not always evaluated in the extension() call.
-
-Let's consider two instants:
-- when the QQ is formally created for compilation (1)
-- when it's returned by a function, presumably to be spliced (2)
-
-
- If alpha-conv
-happens at QQ creation, then I lose the
-
-
-So, alpha
-conversion must happen at CT
-
-
-Extension
-=========
-The current extension() function automatically loads a runtime, even
-when empty. More control should be given. Both RT and CT parts should
-go in the same directory. Finally, RT should probably handle macro
-hygiene in a standardized way.
-
-==> extension() should be like a require(), except that it:
-- prepends 'extension.' to the module name
-- returns nil when require() returns true
-
-From there, macro alphas could be handled as follows:
-- create a common alpha at the opening of the extension. As an empty list.
-- push in in H() so that it'll be shared by all hygienizations
-- return it if there' no runtime, or return it with a require()
- statement for the RT lib.
-
-Shipping 0.4
-============
-- remove autotable: I don't use it and it isn't polished enough to be
- useful yet.
-
-- remove machine and fsm_test; or at least, put it in samples
-
-- remove mandatory platform from makefile call
-
-- H loads extension/H-runtime.mlua?
-
-
-
-README.TXT
-==========
-For installation matters, cf. INSTALL.TXT
-
-Metalua 0.4
-===========
-Metalua is a static metaprogramming system for Lua: a set of tools
-that let you alter the compilation process in arbitrary, powerful and
-maintainable ways. For the potential first-time users of such a
-system, a descripition of these tools, as implemented by metalua,
-follows.
-
-Dynamic Parsers
----------------
-One of these tools is dynamic parser: a source file can change the
-syntax recognized by the parser while it's being parsed. Taken alone,
-this feature lets you make superficial syntax tweaks on the
-language. The parser is based on a parser combinator called 'gg'. You
-should know the half dozen functions in gg API to do advanced things,
-but it means you can use and define functions that transform parsers:
-
-- There are a couple of very simple combinators like gg.list,
- gg.sequence, qq.multisequence, gg.optkeyword etc. that offer a level
- of expressiveness comparable to Yacc-like parsers. For instance, if
- mlp.expr parses Lua expressions, gg.list{ mlp.expr } creates a
- parser which handles lists of Lua expressions.
-
-- Since you can create all the combinators you can think of, there
- also are combinators specialized for typical language tasks. In
- Yacc-like systems, the language definition quickly becomes
- unreadable, because all non-native features have to be encoded in
- clumsy and brittle ways; so if your parser won't natively let you
- specify infix operator precedence and associativity easily, tough
- luck for you and your code maintainers. With combinators, this is
- abstracted away in a regular function, so you just write:
-
- > mlp.expr.infix:add{ "xor", prec=40, assoc='left', builder=xor_builder }
-
- Moreover, combinators tend to produce usable error messages when fed
- with syntactically incorrect inputs. It matters, because clearly
- explaining why an invalid input is invalid is almost as important as
- compiling a valid one, for a use=able compiler.
-
-Yacc-like systems might seem simpler to adopt than combinators, as
-long as they're used on extremely simple problems. However, if if you
-either try to write something non trivial, or to write a simple macro
-in a robust way, you'll need to use lots of messy tricks and hacks,
-and spend much more time getting them (seemingly) ritght than that 1/2
-hour required to master most of gg.
-
-
-Real meta-programming
----------------------
-If you plan to go beyond trivial keyword-for-keyword syntax tweaks,
-what will limit you is the ability to manipulate source code
-conveniently: without the proper tools and abstractions, even the
-simplest tasks will turn into a dirty hacks fest, then either into a
-nightmare, or most often into abandonware. Providing an empowering
-framework is metalua's whole purpose. The core principle is that
-programs prefer to manipulate code as trees (whereas most developers
-prefer ASCII sources). The make-or-break deal is then:
-
-- To easily let users see sources as trees, as sources, or as
- combination thereof, and switch representations seamlessly.
-
-- To offer the proper libraries, that won't force you to reinvent a
- square wheel, will take care of the most common pitfalls, won't
- force you to resort to brittle hacks.
-
-On the former point, Lisps are at a huge advantage, their user syntax
-already being trees. But languages with casual syntax can also offer
-interchangeable tree/source views; metalua has some quoting +{ ... }
-and anti-quoting -{ ... } operators which let you switch between both
-representations at will: internally it works on trees, but you always
-have the option to see them as quoted sources. Metalua also supports a
-slightly improved syntax for syntax trees, to improve their
-readability.
-
-Library-wise, metalua offers a set of syntax tree manipulation tools:
-
-- Structural pattern matching, a feature traditionally found in
- compiler-writing specialized languages (and which has nothing to do
- with string regular expressions BTW), which lets you express
- advanced tree analysis operations in a compact, readable and
- efficient way. If you regularly have to work with advanced data
- structures and you try it, you'll never go back.
-
-- The walker library allows you to perform transformations on big
- portions of programs. It lets you easily express things like:
- "replace all return statements which aren't in a nested function by
- error statements", "rename all local variables and their instances
- into unique fresh names", "list the variables which escape this
- chunk's scope", "insert a type-checking instruction into every
- assignments to variable X", etc. You can't write many non-trivial
- macros without needing to do some of those global code
- transformations.
-
-- Macro hygiene, although not perfect yet in metalua, is required if
- you want to make macro writing reasonably usable (and contrary to a
- popular belief, renaming local variables into fresh names only
- address the easiest part of the hygiene issue; cf. changelog below
- for more details).
-
-- The existing extensions are progressively refactored in more modular
- ways, so that their features can be effectively reused in other
- extensions.
-
-
-Notworthy changes since 0.3
-===========================
-
-- A significantly bigger code base, mostly due to more libraries:
- about 2.5KLoC for libs, 4KLoC for the compiler. However, this remains
- tiny in today's desktop computers standards. You don't have to know
- all of the system to do useful stuff with it, and since compiled
- files are Lua 5.1 compatible, you can keep the "big" system on a
- development platform, and keep a lightweight runtime for embedded or
- otherwise underpowered targets.
-
-
-- The compiler/interpreter front-end is completely rewritten. The new
- program, aptly named 'metalua', supports proper passing of arguments
- to programs, and is generally speaking much more user friendly than
- the mlc from the previous version.
-
-
-- Metalua source libraries are looked for in environmemt variable
- LUA_MPATH, distinct from LUA_PATH. This way, in an application
- that's part Lua part Metalua, you keep a natural access to the
- native Lua compiler.
-
- By convention, metalua source files should have extension .mlua. By
- default, bytecode and plain lua files are preferred to metalua
- sources, which lets you easily precompile your libraries.
-
-
-- Compilation of files are separated in different Lua Rings: this
- prevents unwanted side-effects when several files are compiled
- (This can be turned off, but shouldn't be IMO).
-
-
-- Metalua features are accessible programmatically. Library
- 'metalua.runtime' loads only the libraries necessary to run an
- already compiled file; 'metalua.compile' loads everything useful at
- compile-time.
-
- Transformation functions are available in a library 'mlc' that
- contains all meaningful transformation functions in the form
- 'mlc.destformat_of_sourceformat()', such as 'mlc.luacfile_of_ast()',
- 'mlc.function_of_luastring()' etc. This library has been
- significantly completed and rewritten (in metalua) since v0.3.
-
-
-- Helper libraries have been added. For now they're in the
- distribution, at some point they should be luarocked in. These
- include:
- - Lua Rings and Pluto, duct-taped together into Springs, an improved
- Rings that lets states exchange arbitrary data instead of just
- scalars and strings.
- - Lua bits for bytecode dumping.
- - As always, very large amounts of code borrowed from Yueliang.
- - As a commodity, I've also packaged Lua sources in.
-
-
-- Builds on Linux, OSX, Microsoft Visual Studio. Might build on mingw
- (not tested recently). It's easily ported to all systems with a full
- support for lua.
-
- The MS-windows building is hackish: it's driven by a batch script,
- and Pluto can't compile as a win32 DLL, so it's linked in the Lua
- VM. If you want to run your own VM, either link pluto in statically,
- or disabled separate compilation by setting environment variable
- LUA_MFAST at true. In the later case, expect puzzling behaviors when
- you load several sources containing compile-time code (==>
- precompile everything).
-
- Notice that bits of the compiler itself are now written in metalua,
- which means that its building now goes through a bootstrapping
- stage.
-
-
-- Structural pattern matching:
- - now also handles string regular expressions: 'someregexp'/pattern
- will match if the tested term is a string accepted by the regexp,
- and on success, the list of captures done by the regexp is matched
- against pattern.
- - Matching of multiple values has been optimized
- - the default behavior when no case match is no to raise an error,
- it's the most commonly expected case in practice. Trivial to
- cancel with a final catch-all pattern.
- - generated calls to type() are now hygienic (it's been the cause of
- a puzzling bug report; again, hygiene is hard).
-
-
-- AST grammar overhaul:
- The whole point of being alpha is to fix APIs with a more relaxed
- attitude towards backward compatibility. I think and hope it's the
- last AST revision, so here is it:
- - `Let{...} is now called `Set{...}
- (Functional programmers would expect 'Let' to introduce an
- immutable binding, and assignment isn't immutable in Lua)
- - `Key{ key, value } in table literals is now written `Pair{ key, value }
- (it contained a key *and* its associated value; besides, 'Pair' is
- consistent with the name of the for-loop iterator)
- - `Method{...} is now `Invoke{...}
- (because it's a method invocation, not a method declaration)
- - `One{...} is now `Paren{...} and is properly documented
- (it's the node representing parentheses: it's necessary, since
- parentheses are sometimes meaningful in Lua)
- - Operator are simplified: `Op{ 'add', +{2}, +{2} } instead of
- `Op{ `Add, +{2}, +{2} }. Operator names match the corresponding
- metatable entries, without the leading double-underscore.
- - The operators which haven't a metatable counterpart are
- deprecated: 'ne', 'ge', 'gt'.
-
-
-- Overhaul of the code walking library:
- - the API has been simplified: the fancy predicates proved more
- cumbersome to use than a bit pattern matching in the visitors
- - binding identifiers are handled as a distinct AST class
- - walk.id is scope-aware, handles free and bound variables in a
- sensible way.
- - the currified API proved useless and sometimes cumbersome, it's
- been removed.
-
-
-- Hygiene: I originally planned to release a full-featured hygienic
- macro system with v0.4, but what exists remains a work in
- progress. Lua is a Lisp-1, which means unhygienic macros are very
- dangerous, and hygiene a la Scheme pretty much limits macro writing
- to a term rewriting subset of the language, which is crippling to
- use.
-
- Note: inside hygiene, i.e. local variables created by the macro
- which might capture user's variable instances, is trivial to address
- by alpha conversion. The trickier part is outside hygiene, when
- user's binders capture globals required by the macro-generated
- code. That's the cause of pretty puzzling and hard to find bugs. And
- the *really* tricky part, which is still unsolved in metalua, is
- when you have several levels of nesting between user code and macro
- code. For now this case has to be hygienized by hand.
-
- Note 2: Converge has a pretty powerful approach of hygienic macros
- in a Lisp-1 language; for long and boringly technical reasons, I
- don't think its approch would be the best suited to metalua.
-
- Note 3: Redittors must have read that Paul Graham has released Arc,
- which is also a Lisp-1 with Common Lisp style macros; I expect this
- to create a bit of buzz, out of which might emerge proper solutions
- the macro hygiene problem.
-
-
-- No more need to create custom syntax for macros when you don't want
- to. Extension 'dollar' will let you declare macros in the dollar
- table, as in +{block: function dollar.MYMACRO(a, b, c) ... end},
- and use it as $MYMACRO(1, 2, 3) in your code.
-
- With this extension, you can write macros without knowing anything
- about the metalua parser. Together with quasi-quotes and automatic
- hygiene, this will probably be the closest we can go to "macros for
- dummies" without creating an unmaintainable mess generator.
-
-
-- Lexers can be switched on the fly. This lets you change the set of
- keywords temporarily, with the new gg.with_lexer() functor, or
- handle radically different syntaxes in a single file (think
- multiple-languages systems such as LuaTeX, or programs+goo as PHP).
-
-
-- Incorporation of the bugs listed on the mailing list and the blog.
-
-
-- New samples and extensions, in various states of completion:
-
- - lists by comprehension, a la python/haskell. It includes lists
- chunking, e.g. mylist[1 ... 3, 5 ... 7]
-
- - anaphoric macros for 'if' and 'while' statements: with this
- extension, the condition of the 'if'/'while' is bound to variable
- 'it' in the body; it lets you write things like:
- while file:read '*l' do print(it) end.
- No runtime overhead when 'it' isn't used in the body. An anaphoric
- variable should also be made accessible for functions, to let
- easily write anonymous recursive functions.
-
- - continue statement, logging facility, ternary "?:" choice operator
-
- - Try ... catch ... finally extension.
-
- - with ... do extension: it uses try/finally to make sure that
- resources will be properly closed. The only constraint on
- resources is that they have to sport a :close() releasing method.
- For instance, he following code guarantees that file1 and file2
- will be closed, even if a return or an error occurs in the body.
-
- with file1, file2 = io.open "f1.txt", io.open "f2.txt" do
- contents = file1:read'*a' .. file2:read ;*a'
- end
-
-
-Credits
-=======
-I'd like to thank the people who wrote the open source code which
-makes metalua run: the Lua team, the authors of Yueliang, Pluto, Lua
-Rings, Bitlib; the people whose bug reports, patches and insightful
-discussions dramatically improved the global design, including
-Laurence Tratt, Viacheslav Egorov, David Manura, John Belmonte, Eric
-Raible...
+++ /dev/null
-- macro hygiene, wrt inside and outside (anchoring)
-- doc: lambda isn't a multiret anymore
-- fix ^ indicator on error reports (one token too far)
-- forbid flags other than expr in +{-{flag:...}}
+++ /dev/null
--- static partial checkings for Lua.
---
--- This program checks some metalua or plain lua source code for most common
--- mistakes. Its design focuses on the ability to check plain lua code: there is
--- no need to load any extension in the module.
---
--- The current checkings include:
---
--- * Listing all free variables, and make sure they are declared.
--- * For free vars known as modules, check that indexings in them are also
--- declared.
--- * When the type of something is known, do some basic type checkings. These
--- checkings are by no means exhaustive; however, when a parameter function
--- is constant or statically declared, it's checked.
-
-
-
---[[
-Type grammar:
-
-t ::=
-| t and t
-| t or t
-| function (t, ...) return t, ... end
-| { (k=t)... }
-| table(t, t)
-| string
-| number
-| integer
-| boolean
-| userdata
-| nil
-| multi(t, ...)
-| _
-
---]]
-
-
-match function get_type
-| `Number{...} -> return +{number}
-| `String{...} -> return +{string}
-| `True|`False -> return +{boolean}
-| `Nil -> return +{nil}
-| `Dots -> return +{_}
-| `Stat{_,v} -> return get_type(v)
-| `Paren{t} -> return get_one_type(t)
-| `Call{f, ...} ->
- local ftype = get_type(f)
- match ftype with
- | `Function{ _, {`Return{result}} } -> return get_type(result)
- | `Function{ _, {`Return{...} == results} } ->
- local r2 = +{ multi() }
- for r in ivalues(results) table.insert(r2, get_type(r)) end
- return r2
- | `And{...} -> return +{_} -- not implemented
- | `Or{ a, b } -> match get_one_type(a), get_one_type(b) with
- | `Function{...}==f1, `Function{...}==f2 ->
- return `Op{ 'or', get_type(`Call{f1}), get_type(`Call{f2})}
- | `Function{...}==f, _ | _, `Function{...}==f ->
- return get_type(`Call{f})
- | _ -> return +{_}
- end
-| `Invoke{o, m, ... } == x -> return get_type(`Call{`Index{o, m}})
-| `Op{...}==o -> return get_op_type(o)
-| `Table{...}==t ->
- local r = `Table{ }
- for x in ivalues(t) do
- match x with
- | `Pair{ `String{...}==k, v } -> table.insert(r, `Pair{k, get_one_type(v)})
- | t -> table.insert(r, get_one_type(t))
- end
- end
- return r
-| `Function{...}==f ->
-| `Id{v} ->
-| `Index{t, k} -> match get_one_type(t), get_one_type(k) with
- | `Call{`Id 'table', tk, tv }, _ -> return tv
- | `Table{...}==tt, `Id 'string' ->
-
-local types_rt = require 'extension.types'
-
-function check_function(f, term)
- match get_type(term) with
- | `Function{ params, {`Return{...} == results}}, args ->
- | `And{ a, b }, args ->
- check_function(a, args)
- check_function(b, args)
- | `Or{ a, b }, args ->
- if not pcall(check_function, a, args) then check_function(b, args) end
- | `Id '_' -> -- pass
- | _ -> error ("Call to a non-function")
- end
-end
-
-function check_index(a, b, term)
- match get_type(term) with
- | `Table{}
-
-match function cfg.id.up
-| `Call{ f, ... } == x -> check_function (f, x)
-| `Index{ a, b } == x -> check_index (a, b, x)
-end
-
-
--- List free vars
-cfg.id.
\ No newline at end of file
--- /dev/null
+--*-lua-*--
+package = "metalua-compiler"
+version = "0.7.2-1"
+source = {
+ url = "git://git.eclipse.org/gitroot/koneki/org.eclipse.koneki.metalua.git",
+ tag = "v0.7.2",
+}
+
+description = {
+ summary = "Metalua's compiler: converting (Meta)lua source strings and files into executable Lua 5.1 bytecode",
+ detailed = [[
+ This is the Metalua compiler, packaged as a rock, depending
+ on the separate metalua-parser AST generating library. It
+ compiles a superset of Lua 5.1 into bytecode, which can
+ then be loaded and executed by a Lua 5.1 VM. It also allows
+ to dump ASTs back into Lua source files.
+ ]],
+ homepage = "http://git.eclipse.org/c/koneki/org.eclipse.koneki.metalua.git",
+ license = "EPL + MIT"
+}
+
+dependencies = {
+ "lua ~> 5.1", -- Lua 5.2 bytecode not supported
+ "checks >= 1.0", -- Argument type checking
+ "luafilesystem >= 1.6.2", -- Cached compilation based on file timestamps
+ "readline >= 1.3", -- Better REPL experience
+ "metalua-parser == 0.7.2", -- AST production
+}
+
+build = {
+ type="builtin",
+ modules={
+ ["metalua"] = "metalua.lua",
+ ["metalua.compiler.bytecode"] = "metalua/compiler/bytecode.lua",
+ ["metalua.compiler.globals"] = "metalua/compiler/globals.lua",
+ ["metalua.compiler.bytecode.compile"] = "metalua/compiler/bytecode/compile.lua",
+ ["metalua.compiler.bytecode.lcode"] = "metalua/compiler/bytecode/lcode.lua",
+ ["metalua.compiler.bytecode.lopcodes"] = "metalua/compiler/bytecode/lopcodes.lua",
+ ["metalua.compiler.bytecode.ldump"] = "metalua/compiler/bytecode/ldump.lua",
+ ["metalua.loader"] = "metalua/loader.lua",
+ },
+ install={lua={
+ ["metalua.treequery"] = "metalua/treequery.mlua",
+ ["metalua.compiler.ast_to_src"] = "metalua/compiler/ast_to_src.mlua",
+ ["metalua.treequery.walk"] = "metalua/treequery/walk.mlua",
+ ["metalua.extension.match"] = "metalua/extension/match.mlua",
+ ["metalua.extension.comprehension"] = "metalua/extension/comprehension.mlua",
+ ["metalua.repl"] = "metalua/repl.mlua",
+ }}
+}
+
+--[==[-- Generate file lists
+for _, ext in ipairs{ 'lua', 'mlua' } do
+ for filename in io.popen("find metalua -name '*."..ext.."'") :lines() do
+ local modname = filename :gsub ('/', '.') :gsub ('%.'..ext..'$', '')
+ print((' '):rep(8)..'["' .. modname .. '"] = "' .. filename .. '",')
+ end
+ print""
+end
+--]==]--
--- /dev/null
+--*-lua-*--
+package = "metalua-parser"
+version = "0.7.2-1"
+source = {
+ url = "git://git.eclipse.org/gitroot/koneki/org.eclipse.koneki.metalua.git",
+ tag = "v0.7.2",
+}
+description = {
+ summary = "Metalua's parser: converting Lua source strings and files into AST",
+ detailed = [[
+ This is a subset of the full Metalua compiler. It defines and generates an AST
+ format for Lua programs, which offers a nice level of abstraction to reason about
+ and manipulate Lua programs.
+ ]],
+ homepage = "http://git.eclipse.org/c/koneki/org.eclipse.koneki.metalua.git",
+ license = "EPL + MIT"
+}
+dependencies = {
+ "lua ~> 5.1",
+ "checks >= 1.0",
+}
+build = {
+ type="builtin",
+ modules={
+ ["metalua.grammar.generator"] = "metalua/grammar/generator.lua",
+ ["metalua.grammar.lexer"] = "metalua/grammar/lexer.lua",
+ ["metalua.compiler.parser"] = "metalua/compiler/parser.lua",
+ ["metalua.compiler.parser.common"] = "metalua/compiler/parser/common.lua",
+ ["metalua.compiler.parser.table"] = "metalua/compiler/parser/table.lua",
+ ["metalua.compiler.parser.ext"] = "metalua/compiler/parser/ext.lua",
+ ["metalua.compiler.parser.annot.generator"] = "metalua/compiler/parser/annot/generator.lua",
+ ["metalua.compiler.parser.annot.grammar"] = "metalua/compiler/parser/annot/grammar.lua",
+ ["metalua.compiler.parser.stat"] = "metalua/compiler/parser/stat.lua",
+ ["metalua.compiler.parser.misc"] = "metalua/compiler/parser/misc.lua",
+ ["metalua.compiler.parser.lexer"] = "metalua/compiler/parser/lexer.lua",
+ ["metalua.compiler.parser.meta"] = "metalua/compiler/parser/meta.lua",
+ ["metalua.compiler.parser.expr"] = "metalua/compiler/parser/expr.lua",
+ ["metalua.compiler"] = "metalua/compiler.lua",
+ ["metalua.pprint"] = "metalua/pprint.lua",
+ }
+}
+
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-- Survive lack of checks
+if not pcall(require, 'checks') then function package.preload.checks() function checks() end end end
+
+-- Main file for the metalua executable
+require 'metalua.loader' -- load *.mlua files
+require 'metalua.compiler.globals' -- metalua-aware loadstring, dofile etc.
+
+local alt_getopt = require 'alt_getopt'
+local pp = require 'metalua.pprint'
+local mlc = require 'metalua.compiler'
+
+local M = { }
+
+local AST_COMPILE_ERROR_NUMBER = -1
+local RUNTIME_ERROR_NUMBER = -3
+
+local alt_getopt_options = "f:l:e:o:xivaASbs"
+
+local long_opts = {
+ file='f',
+ library='l',
+ literal='e',
+ output='o',
+ run='x',
+ interactive='i',
+ verbose='v',
+ ['print-ast']='a',
+ ['print-ast-lineinfo']='A',
+ ['print-src']='S',
+ ['meta-bugs']='b',
+ ['sharp-bang']='s',
+}
+
+local chunk_options = {
+ library=1,
+ file=1,
+ literal=1
+}
+
+local usage=[[
+
+Compile and/or execute metalua programs. Parameters passed to the
+compiler should be prefixed with an option flag, hinting what must be
+done with them: take tham as file names to compile, as library names
+to load, as parameters passed to the running program... When option
+flags are absent, metalua tries to adopt a "Do What I Mean" approach:
+
+- if no code (no library, no literal expression and no file) is
+ specified, the first flag-less parameter is taken as a file name to
+ load.
+
+- if no code and no parameter is passed, an interactive loop is
+ started.
+
+- if a target file is specified with --output, the program is not
+ executed by default, unless a --run flag forces it to. Conversely,
+ if no --output target is specified, the code is run unless ++run
+ forbids it.
+]]
+
+function M.cmdline_parser(...)
+ local argv = {...}
+ local opts, optind, optarg =
+ alt_getopt.get_ordered_opts({...}, alt_getopt_options, long_opts)
+ --pp.printf("argv=%s; opts=%s, ending at %i, with optarg=%s",
+ -- argv, opts, optind, optarg)
+ local s2l = { } -- short to long option names conversion table
+ for long, short in pairs(long_opts) do s2l[short]=long end
+ local cfg = { chunks = { } }
+ for i, short in pairs(opts) do
+ local long = s2l[short]
+ if chunk_options[long] then table.insert(cfg.chunks, { tag=long, optarg[i] })
+ else cfg[long] = optarg[i] or true end
+ end
+ cfg.params = { select(optind, ...) }
+ return cfg
+end
+
+function M.main (...)
+
+ local cfg = M.cmdline_parser(...)
+
+ -------------------------------------------------------------------
+ -- Print messages if in verbose mode
+ -------------------------------------------------------------------
+ local function verb_print (fmt, ...)
+ if cfg.verbose then
+ return pp.printf ("[ "..fmt.." ]", ...)
+ end
+ end
+
+ if cfg.verbose then
+ verb_print("raw options: %s", cfg)
+ end
+
+ -------------------------------------------------------------------
+ -- If there's no chunk but there are params, interpret the first
+ -- param as a file name.
+ if not next(cfg.chunks) and next(cfg.params) then
+ local the_file = table.remove(cfg.params, 1)
+ verb_print("Param %q considered as a source file", the_file)
+ cfg.file={ the_file }
+ end
+
+ -------------------------------------------------------------------
+ -- If nothing to do, run REPL loop
+ if not next(cfg.chunks) and not cfg.interactive then
+ verb_print "Nothing to compile nor run, force interactive loop"
+ cfg.interactive=true
+ end
+
+
+ -------------------------------------------------------------------
+ -- Run if asked to, or if no --output has been given
+ -- if cfg.run==false it's been *forced* to false, don't override.
+ if not cfg.run and not cfg.output then
+ verb_print("No output file specified; I'll run the program")
+ cfg.run = true
+ end
+
+ local code = { }
+
+ -------------------------------------------------------------------
+ -- Get ASTs from sources
+
+ local last_file_idx
+ for i, x in ipairs(cfg.chunks) do
+ local compiler = mlc.new()
+ local tag, val = x.tag, x[1]
+ verb_print("Compiling %s", x)
+ local st, ast
+ if tag=='library' then
+ ast = { tag='Call',
+ {tag='Id', "require" },
+ {tag='String', val } }
+ elseif tag=='literal' then ast = compiler :src_to_ast(val)
+ elseif tag=='file' then
+ ast = compiler :srcfile_to_ast(val)
+ -- Isolate each file in a separate fenv
+ ast = { tag='Call',
+ { tag='Function', { { tag='Dots'} }, ast },
+ { tag='Dots' } }
+ ast.source = '@'..val
+ code.source = '@'..val
+ last_file_idx = i
+ else
+ error ("Bad option "..tag)
+ end
+ local valid = true -- TODO: check AST's correctness
+ if not valid then
+ pp.printf ("Cannot compile %s:\n%s", x, ast or "no msg")
+ os.exit (AST_COMPILE_ERROR_NUMBER)
+ end
+ ast.origin = x
+ table.insert(code, ast)
+ end
+ -- The last file returns the whole chunk's result
+ if last_file_idx then
+ -- transform +{ (function(...) -{ast} end)(...) }
+ -- into +{ return (function(...) -{ast} end)(...) }
+ local prv_ast = code[last_file_idx]
+ local new_ast = { tag='Return', prv_ast }
+ code[last_file_idx] = new_ast
+ end
+
+ -- Further uses of compiler won't involve AST transformations:
+ -- they can share the same instance.
+ -- TODO: reuse last instance if possible.
+ local compiler = mlc.new()
+
+ -------------------------------------------------------------------
+ -- AST printing
+ if cfg['print-ast'] or cfg['print-ast-lineinfo'] then
+ verb_print "Resulting AST:"
+ for _, x in ipairs(code) do
+ pp.printf("--- AST From %s: ---", x.source)
+ if x.origin and x.origin.tag=='File' then x=x[1][1][2][1] end
+ local pp_cfg = cfg['print-ast-lineinfo']
+ and { line_max=1, fix_indent=1, metalua_tag=1 }
+ or { line_max=1, metalua_tag=1, hide_hash=1 }
+ pp.print(x, 80, pp_cfg)
+ end
+ end
+
+ -------------------------------------------------------------------
+ -- Source printing
+ if cfg['print-src'] then
+ verb_print "Resulting sources:"
+ for _, x in ipairs(code) do
+ printf("--- Source From %s: ---", table.tostring(x.source, 'nohash'))
+ if x.origin and x.origin.tag=='File' then x=x[1][1][2] end
+ print (compiler :ast2string (x))
+ end
+ end
+
+ -- TODO: canonize/check AST
+
+ local bytecode = compiler :ast_to_bytecode (code)
+ code = nil
+
+ -------------------------------------------------------------------
+ -- Insert #!... command
+ if cfg.sharpbang then
+ local shbang = cfg.sharpbang
+ verb_print ("Adding sharp-bang directive %q", shbang)
+ if not shbang :match'^#!' then shbang = '#!' .. shbang end
+ if not shbang :match'\n$' then shbang = shbang .. '\n' end
+ bytecode = shbang .. bytecode
+ end
+
+ -------------------------------------------------------------------
+ -- Save to file
+ if cfg.output then
+ -- FIXME: handle '-'
+ verb_print ("Saving to file %q", cfg.output)
+ local file, err_msg = io.open(cfg.output, 'wb')
+ if not file then error("can't open output file: "..err_msg) end
+ file:write(bytecode)
+ file:close()
+ if cfg.sharpbang and os.getenv "OS" ~= "Windows_NT" then
+ pcall(os.execute, 'chmod a+x "'..cfg.output..'"')
+ end
+ end
+
+ -------------------------------------------------------------------
+ -- Run compiled code
+ if cfg.run then
+ verb_print "Running"
+ local f = compiler :bytecode_to_function (bytecode)
+ bytecode = nil
+ -- FIXME: isolate execution in a ring
+ -- FIXME: check for failures
+ local function print_traceback (errmsg)
+ return errmsg .. '\n' .. debug.traceback ('',2) .. '\n'
+ end
+ local function g() return f(unpack (cfg.params)) end
+ local st, msg = xpcall(g, print_traceback)
+ if not st then
+ io.stderr:write(msg)
+ os.exit(RUNTIME_ERROR_NUMBER)
+ end
+ end
+
+ -------------------------------------------------------------------
+ -- Run REPL loop
+ if cfg.interactive then
+ verb_print "Starting REPL loop"
+ require 'metalua.repl' .run()
+ end
+
+ verb_print "Done"
+
+end
+
+return M.main(...)
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+local compile = require 'metalua.compiler.bytecode.compile'
+local ldump = require 'metalua.compiler.bytecode.ldump'
+
+local M = { }
+
+M.ast_to_proto = compile.ast_to_proto
+M.proto_to_bytecode = ldump.dump_string
+M.proto_to_file = ldump.dump_file
+
+return M
\ No newline at end of file
--- /dev/null
+---------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+--
+-- Convert between various code representation formats. Atomic
+-- converters are written in extenso, others are composed automatically
+-- by chaining the atomic ones together in a closure.
+--
+-- Supported formats are:
+--
+-- * srcfile: the name of a file containing sources.
+-- * src: these sources as a single string.
+-- * lexstream: a stream of lexemes.
+-- * ast: an abstract syntax tree.
+-- * proto: a (Yueliang) struture containing a high level
+-- representation of bytecode. Largely based on the
+-- Proto structure in Lua's VM
+-- * bytecode: a string dump of the function, as taken by
+-- loadstring() and produced by string.dump().
+-- * function: an executable lua function in RAM.
+--
+--------------------------------------------------------------------------------
+
+require 'checks'
+
+local M = { }
+
+--------------------------------------------------------------------------------
+-- Order of the transformations. If 'a' is on the left of 'b', then a 'a' can
+-- be transformed into a 'b' (but not the other way around).
+-- M.sequence goes from numbers to format names, M.order goes from format
+-- names to numbers.
+--------------------------------------------------------------------------------
+M.sequence = {
+    'srcfile', 'src', 'lexstream', 'ast', 'proto', 'bytecode', 'function' }
+
+-- checks() type strings for each input format: the payload's expected
+-- type plus an optional chunk-name string. 'function' has no entry
+-- because it is never a conversion source.
+local arg_types = {
+    srcfile    = { 'string', '?string' },
+    src        = { 'string', '?string' },
+    lexstream  = { 'lexer.stream', '?string' },
+    ast        = { 'table', '?string' },
+    proto      = { 'table', '?string' },
+    bytecode   = { 'string', '?string' },
+}
+
+-- Debug-only AST sanity pass, compiled out by the `if false` guard.
+-- Flip the guard to true to warn about AST nodes lacking lineinfo.
+if false then
+    -- if defined, runs on every newly-generated AST
+    function M.check_ast(ast)
+        -- Recursively warn when a child node has no lineinfo although
+        -- its parent does (i.e. lineinfo got lost somewhere).
+        local function rec(x, n, parent)
+            if not x.lineinfo and parent.lineinfo then
+                local pp = require 'metalua.pprint'
+                pp.printf("WARNING: Missing lineinfo in child #%s `%s{...} of node at %s",
+                          n, x.tag or '', tostring(parent.lineinfo))
+            end
+            for i, child in ipairs(x) do
+                if type(child)=='table' then rec(child, i, x) end
+            end
+        end
+        rec(ast, -1, { })
+    end
+end
+
+
+-- Invert M.sequence: map each format name back to its position index.
+M.order = { }
+for index, name in ipairs(M.sequence) do M.order[name] = index end
+
+local CONV = { } -- conversion metatable __index
+
+-- Read the whole content of source file `x`.
+-- Returns the sources and the chunk name to compile them under
+-- (defaults to '@'..filename, the standard Lua convention for files).
+-- Raises an error if the file cannot be opened or read.
+function CONV :srcfile_to_src(x, name)
+    checks('metalua.compiler', 'string', '?string')
+    name = name or '@'..x
+    local f, msg = io.open (x, 'rb')
+    if not f then error(msg) end
+    local r, read_msg = f :read '*a'
+    if not r then
+        f :close() -- FIX: don't leak the handle when the read fails
+        error("Cannot read file '"..x.."': "..read_msg)
+    end
+    f :close()
+    return r, name
+end
+
+-- Tokenize source string `src` with this compiler instance's lexer.
+function CONV :src_to_lexstream(src, name)
+    checks('metalua.compiler', 'string', '?string')
+    local r = self.parser.lexer :newstream (src, name)
+    return r, name
+end
+
+-- Parse a lexeme stream into an AST; records `name` as the AST's
+-- source, and runs the optional M.check_ast debugging hook.
+function CONV :lexstream_to_ast(lx, name)
+    checks('metalua.compiler', 'lexer.stream', '?string')
+    local r = self.parser.chunk(lx)
+    r.source = name
+    if M.check_ast then M.check_ast (r) end
+    return r, name
+end
+
+-- Lazily load the bytecode back-end, which lighter Metalua
+-- distributions don't ship; the module is cached after the first
+-- successful require so pcall(require(...)) isn't repeated.
+local bytecode_compiler = nil
+local function get_bytecode_compiler()
+    if not bytecode_compiler then
+        local status, result = pcall(require, 'metalua.compiler.bytecode')
+        if status then
+            bytecode_compiler = result
+        elseif string.match(result, "not found") then
+            error "Compilation only available with full Metalua"
+        else
+            error (result)
+        end
+    end
+    return bytecode_compiler
+end
+
+-- Compile an AST into a (Yueliang) proto structure.
+function CONV :ast_to_proto(ast, name)
+    checks('metalua.compiler', 'table', '?string')
+    return get_bytecode_compiler().ast_to_proto(ast, name), name
+end
+
+-- Dump a proto structure into a bytecode string.
+function CONV :proto_to_bytecode(proto, name)
+    -- FIX: argument guard added for consistency with the other
+    -- converters (types per arg_types.proto).
+    checks('metalua.compiler', 'table', '?string')
+    return get_bytecode_compiler().proto_to_bytecode(proto), name
+end
+
+-- Load a bytecode string into an executable function.
+-- NOTE(review): unlike the other converters this returns loadstring's
+-- results directly -- i.e. nil plus an error message on invalid
+-- bytecode, and no trailing `name` result; confirm callers expect that.
+function CONV :bytecode_to_function(bc, name)
+    checks('metalua.compiler', 'string', '?string')
+    return loadstring(bc, name)
+end
+
+-- Create all sensible combinations:
+-- for every ordered pair of formats at least two steps apart, build
+-- CONV:<src>_to_<dst> by chaining the atomic single-step converters
+-- defined above (adjacent pairs j==i+1 are those atomic converters,
+-- hence j starts at i+2).
+for i=1,#M.sequence do
+    local src = M.sequence[i]
+    for j=i+2, #M.sequence do
+        local dst = M.sequence[j]
+        local dst_name = src.."_to_"..dst
+        local my_arg_types = arg_types[src]
+        local functions = { }
+        for k=i, j-1 do
+            local name = M.sequence[k].."_to_"..M.sequence[k+1]
+            local f = assert(CONV[name], name)
+            table.insert (functions, f)
+        end
+        -- The composed converter threads (value, name) through each step.
+        CONV[dst_name] = function(self, a, b)
+            checks('metalua.compiler', unpack(my_arg_types))
+            for _, f in ipairs(functions) do
+                a, b = f(self, a, b)
+            end
+            return a, b
+        end
+        --printf("Created M.%s out of %s", dst_name, table.concat(n, ', '))
+    end
+end
+
+
+--------------------------------------------------------------------------------
+-- This one goes in the "wrong" direction, cannot be composed.
+--------------------------------------------------------------------------------
+-- function -> bytecode string, delegating to string.dump.
+function CONV :function_to_bytecode(...) return string.dump(...) end
+
+-- AST -> Lua source text. The printer module is written in Metalua
+-- dialect, so the Metalua package loader must be installed first.
+function CONV :ast_to_src(...)
+    require 'metalua.loader' -- ast_to_src isn't written in plain lua
+    return require 'metalua.compiler.ast_to_src' (...)
+end
+
+-- Compiler instances carry their own parser, so grammar extensions
+-- stay local to one instance.
+local MT = { __index=CONV, __type='metalua.compiler' }
+
+function M.new()
+    local instance = setmetatable({ }, MT)
+    instance.parser = require 'metalua.compiler.parser' .new()
+    return instance
+end
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-{ extension ('match', ...) }
+
+local M = { }
+M.__index = M
+
+local pp=require 'metalua.pprint'
+
+--------------------------------------------------------------------------------
+-- Instantiate a new AST->source synthesizer
+--------------------------------------------------------------------------------
+function M.new ()
+   local self = {
+      _acc           = { },  -- Accumulates pieces of source as strings
+      current_indent = 0,    -- Current level of line indentation
+      indent_step    = "   " -- Indentation symbol, normally spaces or '\t'
+   }
+   return setmetatable (self, M)
+end
+
+--------------------------------------------------------------------------------
+-- Run a synthesizer on the `ast' arg and return the source as a string.
+-- Can also be used as a static method `M.run (ast)'; in this case,
+-- a temporary synthesizer is instantiated on the fly.
+--------------------------------------------------------------------------------
+-- Serialize `ast' into Lua source text. Also callable statically as
+-- M.run(ast): a throwaway instance is then allocated for the call.
+function M:run (ast)
+   if not ast then self, ast = M.new(), self end
+   self._acc = { } -- reset the accumulator before each run
+   self:node (ast)
+   return table.concat (self._acc)
+end
+
+--------------------------------------------------------------------------------
+-- Accumulate a piece of source file in the synthesizer.
+--------------------------------------------------------------------------------
+-- Append fragment `x' to the accumulator; nil/false fragments are
+-- silently ignored, which callers rely on for conditional pieces.
+function M:acc (x)
+   if x then self._acc[#self._acc + 1] = x end
+end
+
+--------------------------------------------------------------------------------
+-- Accumulate an indented newline.
+-- Jumps an extra line if indentation is 0, so that
+-- toplevel definitions are separated by an extra empty line.
+--------------------------------------------------------------------------------
+-- Emit an indented newline; at top level an extra blank line is
+-- emitted so toplevel definitions are visually separated.
+function M:nl ()
+   if self.current_indent == 0 then self:acc "\n"  end
+   self:acc ("\n" .. self.indent_step:rep (self.current_indent))
+end
+
+--------------------------------------------------------------------------------
+-- Increase indentation and accumulate a new line.
+--------------------------------------------------------------------------------
+function M:nlindent ()
+   self.current_indent = self.current_indent + 1
+   self:nl ()
+end
+
+--------------------------------------------------------------------------------
+-- Decrease indentation and accumulate a new line.
+-- (Emits the newline directly rather than via :nl(), so that dedenting
+-- back to level 0 doesn't insert the toplevel extra blank line.)
+--------------------------------------------------------------------------------
+function M:nldedent ()
+   self.current_indent = self.current_indent - 1
+   self:acc ("\n" .. self.indent_step:rep (self.current_indent))
+end
+
+--------------------------------------------------------------------------------
+-- Keywords, which are illegal as identifiers.
+--------------------------------------------------------------------------------
+-- Set of Lua keywords: these must never be emitted as identifiers.
+local keywords = { }
+for _, kw in ipairs{
+   "and",    "break",   "do",    "else",   "elseif",
+   "end",    "false",   "for",   "function", "if",
+   "in",     "local",   "nil",   "not",    "or",
+   "repeat", "return",  "then",  "true",   "until",
+   "while" } do
+   keywords[kw] = true
+end
+
+--------------------------------------------------------------------------------
+-- Return true iff string `id' is a legal identifier name.
+--------------------------------------------------------------------------------
+-- Return true iff string `id' is a legal identifier name.
+-- NOTE(review): string['match'] is presumably written with bracket
+-- syntax to dodge the `match` keyword added by the match extension
+-- loaded at the top of this file -- keep it that way.
+local function is_ident (id)
+   return string['match'](id, "^[%a_][%w_]*$") and not keywords[id]
+end
+
+--------------------------------------------------------------------------------
+-- Return true iff ast represents a legal function name for
+-- syntax sugar ``function foo.bar.gnat() ... end'':
+-- a series of nested string indexes, with an identifier as
+-- the innermost node.
+--------------------------------------------------------------------------------
+-- True iff `ast' is a chain of string indexes over an identifier,
+-- i.e. a legal "function foo.bar.gnat() end" name.
+local function is_idx_stack (ast)
+   match ast with
+   | `Id{ _ }                    -> return true
+   | `Index{ left, `String{ _ } } -> return is_idx_stack (left)
+   | _                           -> return false
+   end
+end
+
+--------------------------------------------------------------------------------
+-- Operator precedences, in increasing order.
+-- This is not directly used, it's used to generate op_prec below.
+--------------------------------------------------------------------------------
+-- Operator precedence groups, lowest binding first; only used to
+-- derive the op_prec lookup table below.
+local op_preprec = {
+   { "or", "and" },
+   { "lt", "le", "eq", "ne" },
+   { "concat" },
+   { "add", "sub" },
+   { "mul", "div", "mod" },
+   { "unary", "not", "len" },
+   { "pow" },
+   { "index" } }
+
+--------------------------------------------------------------------------------
+-- operator --> precedence table, generated from op_preprec.
+--------------------------------------------------------------------------------
+local op_prec = { }
+
+for prec, ops in ipairs (op_preprec) do
+   for _, op in ipairs (ops) do
+      op_prec[op] = prec
+   end
+end
+
+--------------------------------------------------------------------------------
+-- operator --> source representation.
+--------------------------------------------------------------------------------
+-- operator --> source representation (surrounding spaces included).
+local op_symbol = {
+   add    = " + ",   sub     = " - ",   mul = " * ",
+   div    = " / ",   mod     = " % ",   pow = " ^ ",
+   concat = " .. ",  eq      = " == ",  ne  = " ~= ",
+   lt     = " < ",   le      = " <= ",  ["and"] = " and ",
+   ["or"] = " or ",  ["not"] = "not ",  len = "# " }
+
+--------------------------------------------------------------------------------
+-- Accumulate the source representation of AST `node' in
+-- the synthesizer. Most of the work is done by delegating to
+-- the method having the name of the AST tag.
+-- If something can't be converted to normal sources, it's
+-- instead dumped as a `-{ ... }' splice in the source accumulator.
+--------------------------------------------------------------------------------
+function M:node (node)
+   assert (self~=M and self._acc) -- must be called on an instance, not the class
+   if node==nil then self:acc'<<error>>'; return end
+   if not node.tag then -- tagless block.
+      self:list (node, self.nl)
+   else
+      local f = M[node.tag]
+      if type (f) == "function" then -- Delegate to tag method.
+         f (self, node, unpack (node))
+      elseif type (f) == "string" then -- tag string.
+         self:acc (f)
+      else -- No appropriate method, fall back to splice dumping.
+           -- This cannot happen in a plain Lua AST.
+         self:acc " -{ "
+         -- NOTE(review): the trailing 80 is a 2nd argument to :acc,
+         -- which takes a single fragment and ignores it; it was
+         -- presumably meant as a line-width option for pp.tostring.
+         self:acc (pp.tostring (node, {metalua_tag=1, hide_hash=1}), 80)
+         self:acc " }"
+      end
+   end
+end
+
+--------------------------------------------------------------------------------
+-- Convert every node in the AST list `list' passed as 1st arg.
+-- `sep' is an optional separator to be accumulated between each list element,
+-- it can be a string or a synth method.
+-- `start' is an optional number (default == 1), indicating which is the
+-- first element of list to be converted, so that we can skip the begining
+-- of a list.
+--------------------------------------------------------------------------------
+-- Convert every node of AST list `list'. `sep' (optional) is emitted
+-- between elements: either a string fragment or a synth method.
+-- `start' (optional, default 1) skips the first list elements.
+function M:list (list, sep, start)
+   for i = start or 1, # list do
+      self:node (list[i])
+      if list[i + 1] and sep then
+         local sep_type = type (sep)
+         if     sep_type == "function" then sep (self)
+         elseif sep_type == "string"   then self:acc (sep)
+         else   error "Invalid list separator" end
+      end
+   end
+end
+
+--------------------------------------------------------------------------------
+--
+-- Tag methods.
+-- ------------
+--
+-- Specific AST node dumping methods, associated to their node kinds
+-- by their name, which is the corresponding AST tag.
+-- synth:node() is in charge of delegating a node's treatment to the
+-- appropriate tag method.
+--
+-- Such tag methods are called with the AST node as 1st arg.
+-- As a convenience, the n node's children are passed as args #2 ... n+1.
+--
+-- There are several things that could be refactored into common subroutines
+-- here: statement blocks dumping, function dumping...
+-- However, given their small size and linear execution
+-- (they basically perform series of :acc(), :node(), :list(),
+-- :nl(), :nlindent() and :nldedent() calls), it seems more readable
+-- to avoid multiplication of such tiny functions.
+--
+-- To make sense out of these, you need to know metalua's AST syntax, as
+-- found in the reference manual or in metalua/doc/ast.txt.
+--
+--------------------------------------------------------------------------------
+
+-- `Do{ ... }: explicit do ... end block.
+function M:Do (node)
+   self:acc      "do"
+   self:nlindent ()
+   self:list     (node, self.nl)
+   self:nldedent ()
+   self:acc      "end"
+end
+
+-- `Set{ lhs_list, rhs_list, annots? }: assignment statement, with
+-- special-cased pretty forms for method and function definitions.
+function M:Set (node)
+   match node with
+   | `Set{ { `Index{ lhs, `String{ method } } },
+           { `Function{ { `Id "self", ... } == params, body } } }
+         if is_idx_stack (lhs) and is_ident (method) ->
+      -- ``function foo:bar(...) ... end'' --
+      self:acc      "function "
+      self:node     (lhs)
+      self:acc      ":"
+      self:acc      (method)
+      self:acc      " ("
+      self:list     (params, ", ", 2) -- skip the implicit `self' param
+      self:acc      ")"
+      self:nlindent ()
+      self:list     (body, self.nl)
+      self:nldedent ()
+      self:acc      "end"
+
+   | `Set{ { lhs }, { `Function{ params, body } } } if is_idx_stack (lhs) ->
+      -- ``function foo(...) ... end'' --
+      self:acc      "function "
+      self:node     (lhs)
+      self:acc      " ("
+      self:list     (params, ", ")
+      self:acc      ")"
+      self:nlindent ()
+      self:list     (body, self.nl)
+      self:nldedent ()
+      self:acc      "end"
+
+   | `Set{ { `Id{ lhs1name } == lhs1, ... } == lhs, rhs }
+         if not is_ident (lhs1name) ->
+      -- ``foo, ... = ...'' when foo is *not* a valid identifier.
+      -- In that case, the spliced 1st variable must get parentheses,
+      -- to be distinguished from a statement splice.
+      -- This cannot happen in a plain Lua AST.
+      self:acc      "("
+      self:node     (lhs1)
+      self:acc      ")"
+      if lhs[2] then -- more than one lhs variable
+         self:acc   ", "
+         self:list  (lhs, ", ", 2)
+      end
+      self:acc      " = "
+      self:list     (rhs, ", ")
+
+   | `Set{ lhs, rhs } ->
+      -- ``... = ...'', no syntax sugar --
+      self:list  (lhs, ", ")
+      self:acc   " = "
+      self:list  (rhs, ", ")
+   -- NOTE(review): this annotated 3-child case sits after the plain
+   -- 2-child case above; it relies on match table patterns being
+   -- arity-exact so the earlier case can't shadow it -- confirm.
+   | `Set{ lhs, rhs, annot } ->
+      -- ``... = ...'', no syntax sugar, annotation --
+      local n = #lhs
+      for i=1,n do
+         local ell, a = lhs[i], annot[i]
+         self:node (ell)
+         if a then
+            self:acc ' #'
+            self:node(a)
+         end
+         if i~=n then self:acc ', ' end
+      end
+      self:acc   " = "
+      self:list  (rhs, ", ")
+   end
+end
+
+-- `While{ cond, body }.
+function M:While (node, cond, body)
+   self:acc      "while "
+   self:node     (cond)
+   self:acc      " do"
+   self:nlindent ()
+   self:list     (body, self.nl)
+   self:nldedent ()
+   self:acc      "end"
+end
+
+-- `Repeat{ body, cond }.
+function M:Repeat (node, body, cond)
+   self:acc      "repeat"
+   self:nlindent ()
+   self:list     (body, self.nl)
+   self:nldedent ()
+   self:acc      "until "
+   self:node     (cond)
+end
+
+-- `If{ cond1, block1, cond2, block2, ..., else_block? }: children
+-- alternate condition/block pairs, with an optional trailing else block.
+function M:If (node)
+   for i = 1, #node-1, 2 do
+      -- for each ``if/then'' and ``elseif/then'' pair --
+      local cond, body = node[i], node[i+1]
+      self:acc      (i==1 and "if " or "elseif ")
+      self:node     (cond)
+      self:acc      " then"
+      self:nlindent ()
+      self:list     (body, self.nl)
+      self:nldedent ()
+   end
+   -- odd number of children --> last one is an `else' clause --
+   if #node%2 == 1 then
+      self:acc      "else"
+      self:nlindent ()
+      self:list     (node[#node], self.nl)
+      self:nldedent ()
+   end
+   self:acc "end"
+end
+
+-- `Fornum{ var, first, last, step?, body }: numeric for loop; the body
+-- is always the last child, the step is present only with 5 children.
+function M:Fornum (node, var, first, last)
+   local body = node[#node]
+   self:acc      "for "
+   self:node     (var)
+   self:acc      " = "
+   self:node     (first)
+   self:acc      ", "
+   self:node     (last)
+   if #node==5 then -- 5 children --> child #4 is a step increment.
+      self:acc   ", "
+      self:node  (node[4])
+   end
+   self:acc      " do"
+   self:nlindent ()
+   self:list     (body, self.nl)
+   self:nldedent ()
+   self:acc      "end"
+end
+
+-- `Forin{ vars, generators, body }: generic for loop.
+function M:Forin (node, vars, generators, body)
+   self:acc      "for "
+   self:list     (vars, ", ")
+   self:acc      " in "
+   self:list     (generators, ", ")
+   self:acc      " do"
+   self:nlindent ()
+   self:list     (body, self.nl)
+   self:nldedent ()
+   self:acc      "end"
+end
+
+-- `Local{ lhs, rhs, annots? }: local declaration, with optional type
+-- annotations printed as " #annot" after each variable name.
+function M:Local (node, lhs, rhs, annots)
+   if next (lhs) then
+      self:acc "local "
+      if annots then
+         local n = #lhs
+         for i=1, n do
+            -- FIX: print the i-th variable, not the whole lhs list.
+            self:node (lhs[i])
+            local a = annots[i]
+            if a then
+               self:acc ' #'
+               self:node (a)
+            end
+            if i~=n then self:acc ', ' end
+         end
+      else
+         self:list (lhs, ", ")
+      end
+      if rhs[1] then
+         self:acc " = "
+         self:list (rhs, ", ")
+      end
+   else -- Can't create a local statement with 0 variables in plain Lua
+      -- FIX: table.tostring is not available; dump through
+      -- metalua.pprint like the other splice fallbacks in this file.
+      self:acc (pp.tostring (node, {metalua_tag=1, hide_hash=1, line_max=80}))
+   end
+end
+
+-- `Localrec{ lhs, rhs }: recursive local, pretty-printed as
+-- ``local function name() ... end'' when possible.
+function M:Localrec (node, lhs, rhs)
+   match node with
+   | `Localrec{ { `Id{name} }, { `Function{ params, body } } }
+         if is_ident (name) ->
+      -- ``local function name() ... end'' --
+      self:acc      "local function "
+      self:acc      (name)
+      self:acc      " ("
+      self:list     (params, ", ")
+      self:acc      ")"
+      self:nlindent ()
+      self:list     (body, self.nl)
+      self:nldedent ()
+      self:acc      "end"
+
+   | _ ->
+      -- Other localrec are unprintable ==> splice them --
+      -- This cannot happen in a plain Lua AST. --
+      self:acc "-{ "
+      -- FIX: table.tostring is not available; dump through
+      -- metalua.pprint like the other splice fallbacks in this file.
+      self:acc (pp.tostring (node, {metalua_tag=1, hide_hash=1, line_max=80}))
+      self:acc " }"
+   end
+end
+
+-- `Call{ f, args... }: function call; a single string or table literal
+-- argument is printed without parentheses, Lua-style.
+function M:Call (node, f)
+   -- single string or table literal arg ==> no need for parentheses. --
+   local parens
+   match node with
+   | `Call{ _, `String{_} }
+   | `Call{ _, `Table{...}} -> parens = false
+   | _ -> parens = true
+   end
+   self:node (f)
+   self:acc  (parens and " (" or  " ")
+   self:list (node, ", ", 2) -- skip `f'.
+   self:acc  (parens and ")")
+end
+
+-- `Invoke{ obj, method_name, args... }: method call obj:name(args).
+function M:Invoke (node, f, method)
+   -- single string or table literal arg ==> no need for parentheses. --
+   local parens
+   match node with
+   | `Invoke{ _, _, `String{_} }
+   | `Invoke{ _, _, `Table{...}} -> parens = false
+   | _ -> parens = true
+   end
+   self:node   (f)
+   self:acc    ":"
+   self:acc    (method[1])
+   self:acc    (parens and " (" or  " ")
+   self:list   (node, ", ", 3) -- Skip args #1 and #2, object and method name.
+   self:acc    (parens and ")")
+end
+
+-- `Return{ expr, ... }: return statement.
+function M:Return (node)
+   self:acc  "return "
+   self:list (node, ", ")
+end
+
+-- Leaf statements/expressions rendered as fixed strings.
+M.Break = "break"
+M.Nil   = "nil"
+M.False = "false"
+M.True  = "true"
+M.Dots  = "..."
+
+-- `Number{ n }: numeric literal, rendered with tostring().
+-- NOTE(review): tostring() on floats may not round-trip exactly;
+-- confirm whether lossless number printing matters here.
+function M:Number (node, n)
+   self:acc (tostring (n))
+end
+
+-- `String{ str }: string literal, %q-quoted.
+function M:String (node, str)
+   -- format "%q" prints '\n' in an impractical way IMO,
+   -- so this is fixed with the :gsub( ) call.
+   self:acc (string.format ("%q", str):gsub ("\\\n", "\\n"))
+end
+
+-- `Function{ params, body, annots? }: anonymous function expression,
+-- with optional per-parameter type annotations (" #annot").
+function M:Function (node, params, body, annots)
+   self:acc "function ("
+   if annots then
+      local n = #params
+      for i=1,n do
+         local p, a = params[i], annots[i]
+         self:node(p)
+         -- FIX: test this parameter's annotation `a', not the whole
+         -- `annots' table (which is always truthy in this branch and
+         -- made self:node(nil) emit '<<error>>' for unannotated params).
+         if a then
+            self:acc " #"
+            self:node(a)
+         end
+         if i~=n then self:acc ', ' end
+      end
+   else
+      self:list (params, ", ")
+   end
+   self:acc      ")"
+   self:nlindent ()
+   self:list     (body, self.nl)
+   self:nldedent ()
+   self:acc      "end"
+end
+
+-- `Table{ ... }: table constructor; `Pair entries become key = value
+-- (dot form for identifier keys, bracket form otherwise), other
+-- entries are plain array values. Multi-entry tables get one line
+-- per entry.
+function M:Table (node)
+   if not node[1] then self:acc "{ }" else
+      self:acc "{"
+      if #node > 1 then self:nlindent () else self:acc " " end
+      for i, elem in ipairs (node) do
+         match elem with
+         | `Pair{ `String{ key }, value } if is_ident (key) ->
+            -- ``key = value''. --
+            self:acc  (key)
+            self:acc  " = "
+            self:node (value)
+
+         | `Pair{ key, value } ->
+            -- ``[key] = value''. --
+            self:acc  "["
+            self:node (key)
+            self:acc  "] = "
+            self:node (value)
+
+         | _ ->
+            -- ``value''. --
+            self:node (elem)
+         end
+         if node [i+1] then
+            self:acc ","
+            self:nl ()
+         end
+      end
+      if #node > 1 then self:nldedent () else self:acc " " end
+      self:acc "}"
+   end
+end
+
+-- `Op{ op, a, b? }: unary or binary operator expression; operands are
+-- parenthesized when their precedence requires it.
+function M:Op (node, op, a, b)
+   -- Transform ``not (a == b)'' into ``a ~= b''. --
+   match node with
+   | `Op{ "not", `Op{ "eq", _a, _b } }
+   | `Op{ "not", `Paren{ `Op{ "eq", _a, _b } } } ->
+      op, a, b = "ne", _a, _b
+   | _ ->
+   end
+
+   if b then -- binary operator.
+      local left_paren, right_paren
+      match a with
+      | `Op{ op_a, ...} if op_prec[op] >= op_prec[op_a] -> left_paren = true
+      | _ -> left_paren = false
+      end
+
+      match b with -- FIXME: might not work with right assoc operators ^ and ..
+      | `Op{ op_b, ...} if op_prec[op] >= op_prec[op_b] -> right_paren = true
+      | _ -> right_paren = false
+      end
+
+      self:acc  (left_paren and "(")
+      self:node (a)
+      self:acc  (left_paren and ")")
+
+      self:acc  (op_symbol [op])
+
+      self:acc  (right_paren and "(")
+      self:node (b)
+      self:acc  (right_paren and ")")
+
+   else -- unary operator.
+      local paren
+      match a with
+      | `Op{ op_a, ... } if op_prec[op] >= op_prec[op_a] -> paren = true
+      | _ -> paren = false
+      end
+      self:acc  (op_symbol[op])
+      self:acc  (paren and "(")
+      self:node (a)
+      self:acc  (paren and ")")
+   end
+end
+
+-- `Paren{ expr }: explicit parentheses (value truncation in Lua).
+function M:Paren (node, content)
+   self:acc  "("
+   self:node (content)
+   self:acc  ")"
+end
+
+-- `Index{ table, key }: indexing, printed as table.key for identifier
+-- string keys and table[key] otherwise.
+-- (The `table' parameter shadows the global `table' library, which is
+-- not used in this function.)
+function M:Index (node, table, key)
+   local paren_table
+   -- Check precedence, see if parens are needed around the table --
+   match table with
+   | `Op{ op, ... } if op_prec[op] < op_prec.index -> paren_table = true
+   | _ -> paren_table = false
+   end
+
+   self:acc  (paren_table and "(")
+   self:node (table)
+   self:acc  (paren_table and ")")
+
+   match key with
+   | `String{ field } if is_ident (field) ->
+      -- ``table.key''. --
+      self:acc "."
+      self:acc (field)
+   | _ ->
+      -- ``table [key]''. --
+      self:acc  "["
+      self:node (key)
+      self:acc  "]"
+   end
+end
+
+-- `Id{ name }: identifier; non-printable names fall back to an
+-- escaped -{`Id "..."} splice (cannot happen in a plain Lua AST).
+function M:Id (node, name)
+   if is_ident (name) then
+      self:acc (name)
+   else -- Unprintable identifier, fall back to splice representation.
+        -- This cannot happen in a plain Lua AST.
+      self:acc    "-{`Id "
+      self:String (node, name)
+      self:acc    "}"
+   end
+end
+
+
+-- Type-AST leaf tags rendered as fixed strings.
+M.TDyn    = '*'
+M.TDynbar = '**'
+M.TPass   = 'pass'
+M.TField  = 'field'
+-- NOTE(review): M.TId is nil at this point (M:TId is only defined just
+-- below), so this alias assigns nil; the effective M:TIdbar is defined
+-- later in this file. Probable statement-ordering bug -- confirm.
+M.TIdbar  = M.TId
+M.TReturn = M.Return
+
+
+-- `TId{ name }: type identifier, printed verbatim.
+function M:TId (node, name) self:acc(name) end
+
+
+-- NOTE(review): this M:TCatbar is overwritten by a second, different
+-- definition of the same name later in this file; only the later one
+-- takes effect. Probable duplicate -- confirm which is intended.
+function M:TCatbar(node, te, tebar)
+   self:acc'('
+   self:node(te)
+   self:acc'|'
+   self:tebar(tebar)
+   self:acc')'
+end
+
+-- `TFunction{ params, returns }: function type "(...)->(...)".
+function M:TFunction(node, p, r)
+   self:tebar(p)
+   self:acc '->'
+   self:tebar(r)
+end
+
+-- `TTable{ default, pairs }: table type "[k=v, ...|default]"; the
+-- default part is omitted when it is a plain 'field'.
+-- (The `pairs' parameter shadows the global `pairs', unused here.)
+function M:TTable (node, default, pairs)
+   self:acc '['
+   self:list (pairs, ', ')
+   if default.tag~='TField' then
+      self:acc '|'
+      self:node (default)
+   end
+   self:acc ']'
+end
+
+-- `TPair{ key_type, value_type }: one "k=v" entry of a table type.
+function M:TPair (node, k, v)
+   self:node (k)
+   self:acc '='
+   self:node (v)
+end
+
+-- `TIdbar{ name }: type-sequence identifier, printed verbatim.
+function M:TIdbar (node, name)
+   self :acc (name)
+end
+
+-- `TCatbar{ a, b }: type-sequence concatenation "a ++ b".
+-- NOTE(review): this shadows an earlier, different M:TCatbar defined
+-- above in this file -- confirm which definition is intended.
+function M:TCatbar (node, a, b)
+   self:node(a)
+   self:acc ' ++ '
+   self:node(b)
+end
+
+-- Print a type-sequence: a tagged node directly, a list as "(t1, t2)".
+function M:tebar(node)
+   if node.tag then self:node(node) else
+      self:acc '('
+      self:list(node, ', ')
+      self:acc ')'
+   end
+end
+
+-- `TUnkbar{ name }: unknown type-sequence variable, printed "~~name".
+function M:TUnkbar(node, name)
+   self:acc '~~'
+   self:acc (name)
+end
+
+-- `TUnk{ name }: unknown type variable, printed "~name".
+function M:TUnk(node, name)
+   self:acc '~'
+   self:acc (name)
+end
+
+-- Type qualifiers: each prints as its keyword followed by the
+-- qualified type expression.
+for name, tag in pairs{ const='TConst', var='TVar', currently='TCurrently', just='TJust' } do
+   M[tag] = function(self, node, te)
+      self:acc (name..' ')
+      self:node(te)
+   end
+end
+
+-- Export a plain function (metalua lambda) wrapping the static runner.
+return (|x| M.run(x))
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+-- Facade for the bytecode back-end: groups the Yueliang-derived
+-- AST->proto compiler and the proto dumper behind one module table.
+local compile = require 'metalua.compiler.bytecode.compile'
+local ldump = require 'metalua.compiler.bytecode.ldump'
+
+local M = { }
+
+M.ast_to_proto = compile.ast_to_proto -- AST -> proto structure
+M.proto_to_bytecode = ldump.dump_string -- proto -> bytecode string
+M.proto_to_file = ldump.dump_file -- proto -> bytecode written to a file
+
+return M
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Kein-Hong Man, Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang
+-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua
+--
+-------------------------------------------------------------------------------
+
+----------------------------------------------------------------------
+--
+-- This code mainly results from the borrowing, then ruthless abuse, of
+-- Yueliang's implementation of Lua 5.0 compiler.
+--
+---------------------------------------------------------------------
+
+local pp = require 'metalua.pprint'
+
+local luaK = require 'metalua.compiler.bytecode.lcode'
+local luaP = require 'metalua.compiler.bytecode.lopcodes'
+
+-- Debug trace hook: a no-op by default; swap in a printf-like function
+-- to trace code generation.
+local debugf = function() end
+--local debugf=printf
+
+-- Statement and expression dispatch tables, presumably keyed by AST
+-- tag and populated further down this file.
+local stat = { }
+local expr = { }
+
+local M = { }
+
+-- Limits mirrored from the C implementation (llimits.h / lobject.h).
+M.MAX_INT            = 2147483645 -- INT_MAX-2 for 32-bit systems (llimits.h)
+M.MAXVARS            = 200        -- (llimits.h)
+M.MAXUPVALUES        = 32         -- (llimits.h)
+M.MAXPARAMS          = 100        -- (llimits.h)
+M.LUA_MAXPARSERLEVEL = 200        -- (llimits.h)
+
+-- vararg flags, from lobject.h
+M.VARARG_HASARG   = 1
+M.VARARG_ISVARARG = 2
+M.VARARG_NEEDSARG = 4
+
+-- Expression kinds that may yield a variable number of values.
+local multret_kinds = { VCALL = true, VVARARG = true }
+
+-- True iff expression kind `k' may produce multiple return values.
+local function hasmultret (k)
+    return multret_kinds[k] or false
+end
+
+-----------------------------------------------------------------------
+-- Some ASTs take expression lists as children; it should be
+-- acceptable to give an expression instead, and to automatically
+-- interpret it as a single element list. That's what this function
+-- does, adding a surrounding list iff needed.
+--
+-- WARNING: "Do" is the tag for chunks, which are essentially lists.
+-- Therefore, we don't listify stuffs with a "Do" tag.
+-----------------------------------------------------------------------
+-- Wrap a tagged expression AST into a one-element list; lists (and
+-- `Do chunks, which are lists with a tag) pass through unchanged.
+local function ensure_list (ast)
+    return ast.tag and ast.tag ~= "Do" and {ast} or ast end
+
+-----------------------------------------------------------------------
+-- Get a localvar structure { varname, startpc, endpc } from a
+-- (zero-based) index of active variable. The catch is: don't get
+-- confused between local index and active index.
+--
+-- locvars[x] contains { varname, startpc, endpc };
+-- actvar[i] contains the index of the variable in locvars
+-----------------------------------------------------------------------
+-- Map a (zero-based) active-variable index to its locvar record
+-- { varname, startpc, endpc }; actvar holds indexes into locvars.
+local function getlocvar (fs, i)
+    return fs.f.locvars[fs.actvar[i]]
+end
+
+-- Deactivate local variables until only `tolevel' remain active,
+-- closing each one's scope at the current pc.
+local function removevars (fs, tolevel)
+    while fs.nactvar > tolevel do
+        fs.nactvar = fs.nactvar - 1
+        -- There may be dummy locvars due to expr.Stat
+        -- FIXME: strange that they didn't disappear?!
+        local locvar = getlocvar (fs, fs.nactvar)
+        --printf("[REMOVEVARS] removing var #%i = %s", fs.nactvar,
+        --       locvar and tostringv(locvar) or "<nil>")
+        if locvar then locvar.endpc = fs.pc end
+    end
+end
+
+-----------------------------------------------------------------------
+-- [f] has a list of all its local variables, active and inactive.
+-- Some local vars can correspond to the same register, if they exist
+-- in different scopes.
+-- [fs.nlocvars] is the total number of local variables, not to be
+-- confused with [fs.nactvar] the number of variables active at the
+-- current PC.
+-- At this stage, the existence of the variable is not yet acknowledged,
+-- since [fs.nactvar] and [fs.freereg] aren't updated.
+-----------------------------------------------------------------------
+-- Record a new local variable named `varname' in the proto's locvars
+-- array and return its (zero-based) index. The variable is registered
+-- but not yet active: nactvar/freereg are untouched.
+local function registerlocalvar (fs, varname)
+    --debugf("[locvar: %s = reg %i]", varname, fs.nlocvars)
+    local idx = fs.nlocvars
+    fs.f.locvars[idx] = { varname = varname } -- LocVar record
+    fs.nlocvars = idx + 1
+    return idx
+end
+
+-----------------------------------------------------------------------
+-- update the active vars counter in [fs] by adding [nvars] of them,
+-- and sets those variables' [startpc] to the current [fs.pc].
+-- These variables were already created, but not yet counted, by
+-- new_localvar.
+-----------------------------------------------------------------------
+-- Activate `nvars' freshly registered locals: bump the active-variable
+-- counter and open each one's scope at the current pc.
+local function adjustlocalvars (fs, nvars)
+    --debugf("adjustlocalvars, nvars=%i, previous fs.nactvar=%i,"..
+    --       " #locvars=%i, #actvar=%i",
+    --       nvars, fs.nactvar, #fs.f.locvars, #fs.actvar)
+
+    fs.nactvar = fs.nactvar + nvars
+    for i = nvars, 1, -1 do
+        --printf ("adjusting actvar #%i", fs.nactvar - i)
+        getlocvar (fs, fs.nactvar - i).startpc = fs.pc
+    end
+end
+
+------------------------------------------------------------------------
+-- check whether, in an assignment to a local variable, the local variable
+-- is needed in a previous assignment (to a table). If so, save original
+-- local value in a safe place and use this safe copy in the previous
+-- assignment.
+------------------------------------------------------------------------
+-- Walk the pending left-hand-side chain `lh'; wherever an indexed
+-- assignment uses the register of `v' (about to be overwritten),
+-- redirect it to a safe copy at fs.freereg, emitting the OP_MOVE that
+-- creates the copy when at least one conflict was found.
+local function check_conflict (fs, lh, v)
+    local extra = fs.freereg  -- eventual position to save local variable
+    local conflict = false
+    while lh do
+        if lh.v.k == "VINDEXED" then
+            if lh.v.info == v.info then  -- conflict? (table register)
+                conflict = true
+                lh.v.info = extra  -- previous assignment will use safe copy
+            end
+            if lh.v.aux == v.info then  -- conflict? (key register)
+                conflict = true
+                lh.v.aux = extra  -- previous assignment will use safe copy
+            end
+        end
+        lh = lh.prev
+    end
+    if conflict then
+        luaK:codeABC (fs, "OP_MOVE", fs.freereg, v.info, 0)  -- make copy
+        luaK:reserveregs (fs, 1)
+    end
+end
+
+-----------------------------------------------------------------------
+-- Create an expdesc. To be updated when expdesc is lua-ified.
+-----------------------------------------------------------------------
+-- Initialize expdesc `e' to kind `k' with payload `i', with empty
+-- true/false patch lists.
+local function init_exp (e, k, i)
+    e.f, e.t, e.k, e.info = luaK.NO_JUMP, luaK.NO_JUMP, k, i end
+
+-----------------------------------------------------------------------
+-- Reserve the string in the constant pool, and return an expdesc
+-- referring to it.
+-----------------------------------------------------------------------
+local function codestring (fs, e, str)
+ --printf( "codestring(%s)", disp.ast(str))
+ -- Intern [str] in fs's constant pool and make [e] a "VK" expdesc
+ -- whose info field is the constant's index.
+ init_exp (e, "VK", luaK:stringK (fs, str))
+end
+
+-----------------------------------------------------------------------
+-- search for a local variable named [name] in the function being
+-- built by [fs]. Doesn't try to visit upvalues.
+-----------------------------------------------------------------------
+-- Returns the active-variable index of [name], or -1 if not found.
+local function searchvar (fs, name)
+ -- Scan active variables from innermost (highest index) outward, so
+ -- shadowing declarations win.
+ for i = fs.nactvar - 1, 0, -1 do
+ -- Because of expr.Stat, there can be some actvars which don't
+ -- correspond to any locvar. Hence the checking for locvar's
+ -- nonnilness before getting the varname.
+ local locvar = getlocvar(fs, i)
+ if locvar and name == locvar.varname then
+ --printf("Found local var: %s; i = %i", tostringv(locvar), i)
+ return i
+ end
+ end
+ return -1 -- not found
+end
+
+-----------------------------------------------------------------------
+-- create and return a new proto [f]
+-----------------------------------------------------------------------
+local function newproto ()
+ local f = {}
+ f.k = {}
+ f.sizek = 0
+ f.p = {}
+ f.sizep = 0
+ f.code = {}
+ f.sizecode = 0
+ f.sizelineinfo = 0
+ f.sizeupvalues = 0
+ f.nups = 0
+ f.upvalues = {}
+ f.numparams = 0
+ f.is_vararg = 0
+ f.maxstacksize = 0
+ f.lineinfo = {}
+ f.sizelocvars = 0
+ f.locvars = {}
+ f.lineDefined = 0
+ f.source = nil
+ return f
+end
+
+------------------------------------------------------------------------
+-- create and return a function state [new_fs] as a sub-funcstate of [fs].
+------------------------------------------------------------------------
+local function open_func (old_fs)
+ local new_fs = { }
+ new_fs.upvalues = { }
+ new_fs.actvar = { }
+ local f = newproto ()
+ new_fs.f = f
+ new_fs.prev = old_fs -- linked list of funcstates
+ new_fs.pc = 0
+ new_fs.lasttarget = -1
+ new_fs.jpc = luaK.NO_JUMP
+ new_fs.freereg = 0
+ new_fs.nk = 0
+ new_fs.h = {} -- constant table; was luaH_new call
+ new_fs.np = 0
+ new_fs.nlocvars = 0
+ new_fs.nactvar = 0
+ new_fs.bl = nil
+ new_fs.nestlevel = old_fs and old_fs.nestlevel or 0
+ f.maxstacksize = 2 -- registers 0/1 are always valid
+ new_fs.lastline = 0
+ new_fs.forward_gotos = { }
+ new_fs.labels = { }
+ return new_fs
+end
+
+------------------------------------------------------------------------
+-- Finish to set up [f] according to final state of [fs]
+------------------------------------------------------------------------
+local function close_func (fs)
+ local f = fs.f
+ --printf("[CLOSE_FUNC] remove any remaining var")
+ removevars (fs, 0)
+ luaK:ret (fs, 0, 0)
+ f.sizecode = fs.pc
+ f.sizelineinfo = fs.pc
+ f.sizek = fs.nk
+ f.sizep = fs.np
+ f.sizelocvars = fs.nlocvars
+ f.sizeupvalues = f.nups
+ assert (fs.bl == nil)
+ if next(fs.forward_gotos) then
+ local x = pp.tostring(fs.forward_gotos)
+ error ("Unresolved goto: "..x)
+ end
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+local function pushclosure(fs, func, v)
+ local f = fs.f
+ f.p [fs.np] = func.f
+ fs.np = fs.np + 1
+ init_exp (v, "VRELOCABLE", luaK:codeABx (fs, "OP_CLOSURE", 0, fs.np - 1))
+ for i = 0, func.f.nups - 1 do
+ local o = (func.upvalues[i].k == "VLOCAL") and "OP_MOVE" or "OP_GETUPVAL"
+ luaK:codeABC (fs, o, 0, func.upvalues[i].info, 0)
+ end
+end
+
+------------------------------------------------------------------------
+-- FIXME: is there a need for f=fs.f? if yes, why not always using it?
+------------------------------------------------------------------------
+local function indexupvalue(fs, name, v)
+ local f = fs.f
+ for i = 0, f.nups - 1 do
+ if fs.upvalues[i].k == v.k and fs.upvalues[i].info == v.info then
+ assert(fs.f.upvalues[i] == name)
+ return i
+ end
+ end
+ -- new one
+ f.upvalues[f.nups] = name
+ assert (v.k == "VLOCAL" or v.k == "VUPVAL")
+ fs.upvalues[f.nups] = { k = v.k; info = v.info }
+ local nups = f.nups
+ f.nups = f.nups + 1
+ return nups
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Mark the innermost enclosing block whose variables include register
+-- [level] as containing an upvalue, so that leaving that block (or
+-- breaking out of it) will emit OP_CLOSE.
+local function markupval(fs, level)
+ local bl = fs.bl
+ while bl and bl.nactvar > level do bl = bl.previous end
+ if bl then bl.upval = true end
+end
+
+
+--for debug only
+--[[
+local function bldepth(fs)
+ local i, x= 1, fs.bl
+ while x do i=i+1; x=x.previous end
+ return i
+end
+--]]
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+local function enterblock (fs, bl, isbreakable)
+ bl.breaklist = luaK.NO_JUMP
+ bl.isbreakable = isbreakable
+ bl.nactvar = fs.nactvar
+ bl.upval = false
+ bl.previous = fs.bl
+ fs.bl = bl
+ assert (fs.freereg == fs.nactvar)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+local function leaveblock (fs)
+ local bl = fs.bl
+ fs.bl = bl.previous
+ --printf("[LEAVEBLOCK] Removing vars...")
+ removevars (fs, bl.nactvar)
+ --printf("[LEAVEBLOCK] ...Vars removed")
+ if bl.upval then
+ luaK:codeABC (fs, "OP_CLOSE", bl.nactvar, 0, 0)
+ end
+ -- a block either controls scope or breaks (never both)
+ assert (not bl.isbreakable or not bl.upval)
+ assert (bl.nactvar == fs.nactvar)
+ fs.freereg = fs.nactvar -- free registers
+ luaK:patchtohere (fs, bl.breaklist)
+end
+
+
+------------------------------------------------------------------------
+-- read a list of expressions from a list of ast [astlist]
+-- starts at the [offset]th element of the list (defaults to 1)
+------------------------------------------------------------------------
+local function explist(fs, astlist, v, offset)
+ offset = offset or 1
+ if #astlist < offset then error "I don't handle empty expr lists yet" end
+ --printf("[EXPLIST] about to precompile 1st element %s", disp.ast(astlist[offset]))
+ expr.expr (fs, astlist[offset], v)
+ --printf("[EXPLIST] precompiled first element v=%s", tostringv(v))
+ for i = offset+1, #astlist do
+ luaK:exp2nextreg (fs, v)
+ --printf("[EXPLIST] flushed v=%s", tostringv(v))
+ expr.expr (fs, astlist[i], v)
+ --printf("[EXPLIST] precompiled element v=%s", tostringv(v))
+ end
+ return #astlist - offset + 1
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+local function funcargs (fs, ast, v, idx_from)
+ local args = { } -- expdesc
+ local nparams
+ if #ast < idx_from then args.k = "VVOID" else
+ explist(fs, ast, args, idx_from)
+ luaK:setmultret(fs, args)
+ end
+ assert(v.k == "VNONRELOC")
+ local base = v.info -- base register for call
+ if hasmultret(args.k) then nparams = luaK.LUA_MULTRET else -- open call
+ if args.k ~= "VVOID" then
+ luaK:exp2nextreg(fs, args) end -- close last argument
+ nparams = fs.freereg - (base + 1)
+ end
+ init_exp(v, "VCALL", luaK:codeABC(fs, "OP_CALL", base, nparams + 1, 2))
+ if ast.lineinfo then
+ luaK:fixline(fs, ast.lineinfo.first.line)
+ else
+ luaK:fixline(fs, ast.line)
+ end
+ fs.freereg = base + 1 -- call remove function and arguments and leaves
+ -- (unless changed) one result
+end
+
+------------------------------------------------------------------------
+-- calculates log value for encoding the hash portion's size
+------------------------------------------------------------------------
+local function log2(x)
+ -- math result is always one more than lua0_log2()
+ -- frexp returns a mantissa in [0.5, 1) and an exponent; ex - 1 is
+ -- exactly log2(x) for powers of two. NOTE(review): for other values
+ -- this yields ceil(log2(x)) — presumably fine for size encodings,
+ -- but confirm against callers.
+ local mn, ex = math.frexp(x)
+ return ex - 1
+end
+
+------------------------------------------------------------------------
+-- converts an integer to a "floating point byte", represented as
+-- (mmmmmxxx), where the real value is (xxx) * 2^(mmmmm)
+------------------------------------------------------------------------
+
+-- local function int2fb(x)
+-- local m = 0 -- mantissa
+-- while x >= 8 do x = math.floor((x + 1) / 2); m = m + 1 end
+-- return m * 8 + x
+-- end
+
+local function int2fb(x)
+ local e = 0 -- exponent (the mmmmm part of the floating-point byte)
+ -- Halve x (rounding up) until the mantissa is small enough to encode.
+ while x >= 16 do
+ x = math.floor ( (x+1) / 2)
+ e = e+1
+ end
+ -- Values below 8 are stored verbatim (exponent 0); otherwise pack as
+ -- (e+1)*8 + (x-8), i.e. biased exponent with the mantissa's implicit
+ -- high bit dropped.
+ if x<8 then return x
+ else return (e+1) * 8 + x - 8 end
+end
+
+
+------------------------------------------------------------------------
+-- FIXME: to be unified with singlevar
+------------------------------------------------------------------------
+local function singlevaraux(fs, n, var, base)
+--[[
+print("\n\nsinglevaraux: fs, n, var, base")
+printv(fs)
+printv(n)
+printv(var)
+printv(base)
+print("\n")
+--]]
+ if fs == nil then -- no more levels?
+ init_exp(var, "VGLOBAL", luaP.NO_REG) -- default is global variable
+ return "VGLOBAL"
+ else
+ local v = searchvar(fs, n) -- look up at current level
+ if v >= 0 then
+ init_exp(var, "VLOCAL", v)
+ if not base then
+ markupval(fs, v) -- local will be used as an upval
+ end
+ else -- not found at current level; try upper one
+ if singlevaraux(fs.prev, n, var, false) == "VGLOBAL" then
+ return "VGLOBAL" end
+ var.info = indexupvalue (fs, n, var)
+ var.k = "VUPVAL"
+ return "VUPVAL"
+ end
+ end
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Resolve [varname] into expdesc [var] as a local, upvalue or global.
+-- For globals, var.info is set to the name's constant-pool index.
+local function singlevar(fs, varname, var)
+ if singlevaraux(fs, varname, var, true) == "VGLOBAL" then
+ var.info = luaK:stringK (fs, varname) end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Declare local variable [name] as the [n]th new variable (0-based
+-- offset past fs.nactvar). The variable is registered but not active
+-- until adjustlocalvars() is called.
+local function new_localvar (fs, name, n)
+ assert (type (name) == "string")
+ if fs.nactvar + n > M.MAXVARS then error ("too many local vars") end
+ fs.actvar[fs.nactvar + n] = registerlocalvar (fs, name)
+ --printf("[NEW_LOCVAR] %i = %s", fs.nactvar+n, name)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+local function parlist (fs, ast_params)
+ local dots = (#ast_params > 0 and ast_params[#ast_params].tag == "Dots")
+ local nparams = dots and #ast_params - 1 or #ast_params
+ for i = 1, nparams do
+ assert (ast_params[i].tag == "Id", "Function parameters must be Ids")
+ new_localvar (fs, ast_params[i][1], i-1)
+ end
+ -- from [code_param]:
+ --checklimit (fs, fs.nactvar, self.M.MAXPARAMS, "parameters")
+ fs.f.numparams = fs.nactvar
+ fs.f.is_vararg = dots and M.VARARG_ISVARARG or 0
+ adjustlocalvars (fs, nparams)
+ fs.f.numparams = fs.nactvar --FIXME vararg must be taken in account
+ luaK:reserveregs (fs, fs.nactvar) -- reserve register for parameters
+end
+
+------------------------------------------------------------------------
+-- if there's more variables than expressions in an assignment,
+-- some assignments to nil are made for extraneous vars.
+-- Also handles multiret functions
+------------------------------------------------------------------------
+local function adjust_assign (fs, nvars, nexps, e)
+ local extra = nvars - nexps -- >0: values missing; <0: surplus values
+ if hasmultret (e.k) then
+ extra = extra+1 -- includes call itself
+ if extra <= 0 then extra = 0 end
+ luaK:setreturns(fs, e, extra) -- call provides the difference
+ if extra > 1 then luaK:reserveregs(fs, extra-1) end
+ else
+ if e.k ~= "VVOID" then
+ luaK:exp2nextreg(fs, e) end -- close last expression
+ -- Pad the remaining variables with nil.
+ if extra > 0 then
+ local reg = fs.freereg
+ luaK:reserveregs(fs, extra)
+ luaK:_nil(fs, reg, extra)
+ end
+ end
+end
+
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Bump the parser's nesting depth, guarding against runaway recursion.
+local function enterlevel (fs)
+ fs.nestlevel = fs.nestlevel + 1
+ assert (fs.nestlevel <= M.LUA_MAXPARSERLEVEL, "too many syntax levels")
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Undo enterlevel().
+local function leavelevel (fs)
+ fs.nestlevel = fs.nestlevel - 1
+end
+
+------------------------------------------------------------------------
+-- Parse conditions in if/then/else, while, repeat
+------------------------------------------------------------------------
+local function cond (fs, ast)
+ local v = { }
+ expr.expr(fs, ast, v) -- read condition
+ if v.k == "VNIL" then v.k = "VFALSE" end -- 'falses' are all equal here
+ luaK:goiftrue (fs, v)
+ -- Return the false-branch jump list; the caller patches it to the
+ -- statement's exit point.
+ return v.f
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Compile a tagless list of statement ASTs. Registers above the
+-- active locals are freed after each statement.
+local function chunk (fs, ast)
+ enterlevel (fs)
+ assert (not ast.tag)
+ for i=1, #ast do
+ stat.stat (fs, ast[i]);
+ fs.freereg = fs.nactvar
+ end
+ leavelevel (fs)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Compile a statement list inside its own non-breakable scoping block,
+-- so locals declared within are removed on exit.
+local function block (fs, ast)
+ local bl = {}
+ enterblock (fs, bl, false)
+ for i=1, #ast do
+ stat.stat (fs, ast[i])
+ fs.freereg = fs.nactvar
+ end
+ assert (bl.breaklist == luaK.NO_JUMP)
+ leaveblock (fs)
+end
+
+------------------------------------------------------------------------
+-- Forin / Fornum body parser
+-- [fs]
+-- [body]
+-- [base]
+-- [nvars]
+-- [isnum]
+------------------------------------------------------------------------
+local function forbody (fs, ast_body, base, nvars, isnum)
+ local bl = {} -- BlockCnt
+ adjustlocalvars (fs, 3) -- control variables
+ local prep =
+ isnum and luaK:codeAsBx (fs, "OP_FORPREP", base, luaK.NO_JUMP)
+ or luaK:jump (fs)
+ enterblock (fs, bl, false) -- loop block
+ adjustlocalvars (fs, nvars) -- scope for declared variables
+ luaK:reserveregs (fs, nvars)
+ block (fs, ast_body)
+ leaveblock (fs)
+ --luaK:patchtohere (fs, prep-1)
+ luaK:patchtohere (fs, prep)
+ local endfor =
+ isnum and luaK:codeAsBx (fs, "OP_FORLOOP", base, luaK.NO_JUMP)
+ or luaK:codeABC (fs, "OP_TFORLOOP", base, 0, nvars)
+ luaK:fixline (fs, ast_body.line) -- pretend that 'OP_FOR' starts the loop
+ luaK:patchlist (fs, isnum and endfor or luaK:jump(fs), prep + 1)
+end
+
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Compile one `Pair{ key value } field of a table constructor:
+-- emits OP_SETTABLE on the table under construction (cc.t).
+local function recfield (fs, ast, cc)
+ local reg = fs.freereg
+ local key, val = {}, {} -- expdesc
+ --FIXME: expr + exp2val = index -->
+ -- check redundancy between exp2val and exp2rk
+ cc.nh = cc.nh + 1
+ expr.expr(fs, ast[1], key);
+ luaK:exp2val (fs, key)
+ local keyreg = luaK:exp2RK (fs, key)
+ expr.expr(fs, ast[2], val)
+ local valreg = luaK:exp2RK (fs, val)
+ luaK:codeABC(fs, "OP_SETTABLE", cc.t.info, keyreg, valreg)
+ fs.freereg = reg -- free registers
+end
+
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Compile one array-part item of a table constructor into cc.v;
+-- flushing it to a register is deferred to closelistfield().
+local function listfield(fs, ast, cc)
+ expr.expr(fs, ast, cc.v)
+ assert (cc.na <= luaP.MAXARG_Bx) -- FIXME check <= or <
+ cc.na = cc.na + 1
+ cc.tostore = cc.tostore + 1
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+-- Flush the pending array item (if any) to the next register, and
+-- emit OP_SETLIST whenever a full batch of items has accumulated.
+local function closelistfield(fs, cc)
+ if cc.v.k == "VVOID" then return end -- there is no list item
+ luaK:exp2nextreg(fs, cc.v)
+ cc.v.k = "VVOID"
+ if cc.tostore == luaP.LFIELDS_PER_FLUSH then
+ luaK:setlist (fs, cc.t.info, cc.na, cc.tostore)
+ cc.tostore = 0
+ end
+end
+
+------------------------------------------------------------------------
+-- The last field might be a call to a multireturn function. In that
+-- case, we must unfold all of its results into the list.
+------------------------------------------------------------------------
+local function lastlistfield(fs, cc)
+ if cc.tostore == 0 then return end
+ if hasmultret (cc.v.k) then
+ luaK:setmultret(fs, cc.v)
+ luaK:setlist (fs, cc.t.info, cc.na, luaK.LUA_MULTRET)
+ -- do not count the multi-return expression itself: its element
+ -- count is unknown at compile time
+ cc.na = cc.na - 1
+ else
+ if cc.v.k ~= "VVOID" then luaK:exp2nextreg(fs, cc.v) end
+ luaK:setlist (fs, cc.t.info, cc.na, cc.tostore)
+ end
+end
+------------------------------------------------------------------------
+------------------------------------------------------------------------
+--
+-- Statement parsers table
+--
+------------------------------------------------------------------------
+------------------------------------------------------------------------
+
+function stat.stat (fs, ast)
+ if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end
+ --debugf (" - Statement %s", table.tostring (ast) )
+
+ if not ast.tag then chunk (fs, ast) else
+
+ local parser = stat [ast.tag]
+ if not parser then
+ error ("A statement cannot have tag `"..ast.tag) end
+ parser (fs, ast)
+ end
+ --debugf (" - /Statement `%s", ast.tag)
+end
+
+------------------------------------------------------------------------
+
+stat.Do = block
+
+------------------------------------------------------------------------
+
+-- Jump to the end of the innermost breakable (loop) block, closing
+-- any upvalues declared between here and that block.
+function stat.Break (fs, ast)
+ -- if ast.lineinfo then fs.lastline = ast.lineinfo.last.line
+ local bl, upval = fs.bl, false
+ while bl and not bl.isbreakable do
+ if bl.upval then upval = true end
+ bl = bl.previous
+ end
+ assert (bl, "no loop to break")
+ if upval then luaK:codeABC(fs, "OP_CLOSE", bl.nactvar, 0, 0) end
+ -- Target unknown yet: chain the jump on the block's breaklist,
+ -- patched when the loop is left.
+ bl.breaklist = luaK:concat(fs, bl.breaklist, luaK:jump(fs))
+end
+
+------------------------------------------------------------------------
+
+function stat.Return (fs, ast)
+ local e = {} -- expdesc
+ local first -- registers with returned values
+ local nret = #ast
+
+ if nret == 0 then first = 0
+ else
+ --printf("[RETURN] compiling explist")
+ explist (fs, ast, e)
+ --printf("[RETURN] explist e=%s", tostringv(e))
+ if hasmultret (e.k) then
+ luaK:setmultret(fs, e)
+ if e.k == "VCALL" and nret == 1 then
+ luaP:SET_OPCODE(luaK:getcode(fs, e), "OP_TAILCALL")
+ assert(luaP:GETARG_A(luaK:getcode(fs, e)) == fs.nactvar)
+ end
+ first = fs.nactvar
+ nret = luaK.LUA_MULTRET -- return all values
+ elseif nret == 1 then
+ first = luaK:exp2anyreg(fs, e)
+ else
+ --printf("* Return multiple vals in nextreg %i", fs.freereg)
+ luaK:exp2nextreg(fs, e) -- values must go to the 'stack'
+ first = fs.nactvar -- return all 'active' values
+ assert(nret == fs.freereg - first)
+ end
+ end
+ luaK:ret(fs, first, nret)
+end
+------------------------------------------------------------------------
+
+-- `Local{ {Id...} {expr...}? }: declare the locals, compile the
+-- values, then activate the variables — so the initializers cannot
+-- refer to the variables being declared.
+function stat.Local (fs, ast)
+ local names, values = ast[1], ast[2] or { }
+ for i = 1, #names do new_localvar (fs, names[i][1], i-1) end
+ local e = { }
+ if #values == 0 then e.k = "VVOID" else explist (fs, values, e) end
+ adjust_assign (fs, #names, #values, e)
+ adjustlocalvars (fs, #names)
+end
+
+------------------------------------------------------------------------
+
+-- `Localrec: like `Local, but the variable is activated *before* its
+-- value is compiled, so the value (typically a function) can refer to
+-- itself recursively.
+function stat.Localrec (fs, ast)
+ assert(#ast[1]==1 and #ast[2]==1, "Multiple letrecs not implemented yet")
+ local ast_var, ast_val, e_var, e_val = ast[1][1], ast[2][1], { }, { }
+ new_localvar (fs, ast_var[1], 0)
+ init_exp (e_var, "VLOCAL", fs.freereg)
+ luaK:reserveregs (fs, 1)
+ adjustlocalvars (fs, 1)
+ expr.expr (fs, ast_val, e_val)
+ luaK:storevar (fs, e_var, e_val)
+ -- debug info: variable only becomes observable after initialization
+ getlocvar (fs, fs.nactvar-1).startpc = fs.pc
+end
+
+------------------------------------------------------------------------
+
+function stat.If (fs, ast)
+ local astlen = #ast
+ -- Degenerate case #1: no statement
+ if astlen==0 then return block(fs, { }) end
+ -- Degenerate case #2: only an else statement
+ if astlen==1 then return block(fs, ast[1]) end
+
+ local function test_then_block (fs, test, body)
+ local condexit = cond (fs, test);
+ block (fs, body)
+ return condexit
+ end
+
+ local escapelist = luaK.NO_JUMP
+
+ local flist = test_then_block (fs, ast[1], ast[2]) -- 'then' statement
+ for i = 3, #ast - 1, 2 do -- 'elseif' statement
+ escapelist = luaK:concat( fs, escapelist, luaK:jump(fs))
+ luaK:patchtohere (fs, flist)
+ flist = test_then_block (fs, ast[i], ast[i+1])
+ end
+ if #ast % 2 == 1 then -- 'else' statement
+ escapelist = luaK:concat(fs, escapelist, luaK:jump(fs))
+ luaK:patchtohere(fs, flist)
+ block (fs, ast[#ast])
+ else
+ escapelist = luaK:concat(fs, escapelist, flist)
+ end
+ luaK:patchtohere(fs, escapelist)
+end
+
+------------------------------------------------------------------------
+
+function stat.Forin (fs, ast)
+ local vars, vals, body = ast[1], ast[2], ast[3]
+ -- imitating forstat:
+ local bl = { }
+ enterblock (fs, bl, true)
+ -- imitating forlist:
+ local e, base = { }, fs.freereg
+ new_localvar (fs, "(for generator)", 0)
+ new_localvar (fs, "(for state)", 1)
+ new_localvar (fs, "(for control)", 2)
+ for i = 1, #vars do new_localvar (fs, vars[i][1], i+2) end
+ explist (fs, vals, e)
+ adjust_assign (fs, 3, #vals, e)
+ luaK:checkstack (fs, 3)
+ forbody (fs, body, base, #vars, false)
+ -- back to forstat:
+ leaveblock (fs)
+end
+
+------------------------------------------------------------------------
+
+function stat.Fornum (fs, ast)
+
+ local function exp1 (ast_e)
+ local e = { }
+ expr.expr (fs, ast_e, e)
+ luaK:exp2nextreg (fs, e)
+ end
+ -- imitating forstat:
+ local bl = { }
+ enterblock (fs, bl, true)
+ -- imitating fornum:
+ local base = fs.freereg
+ new_localvar (fs, "(for index)", 0)
+ new_localvar (fs, "(for limit)", 1)
+ new_localvar (fs, "(for step)", 2)
+ new_localvar (fs, ast[1][1], 3)
+ exp1 (ast[2]) -- initial value
+ exp1 (ast[3]) -- limit
+ if #ast == 5 then exp1 (ast[4]) else -- default step = 1
+ luaK:codeABx(fs, "OP_LOADK", fs.freereg, luaK:numberK(fs, 1))
+ luaK:reserveregs(fs, 1)
+ end
+ forbody (fs, ast[#ast], base, 1, true)
+ -- back to forstat:
+ leaveblock (fs)
+end
+
+------------------------------------------------------------------------
+function stat.Repeat (fs, ast)
+ local repeat_init = luaK:getlabel (fs)
+ local bl1, bl2 = { }, { }
+ enterblock (fs, bl1, true)
+ enterblock (fs, bl2, false)
+ chunk (fs, ast[1])
+ local condexit = cond (fs, ast[2])
+ if not bl2.upval then
+ leaveblock (fs)
+ luaK:patchlist (fs, condexit, repeat_init)
+ else
+ stat.Break (fs)
+ luaK:patchtohere (fs, condexit)
+ leaveblock (fs)
+ luaK:patchlist (fs, luaK:jump (fs), repeat_init)
+ end
+ leaveblock (fs)
+end
+
+------------------------------------------------------------------------
+
+-- `While{ cond body }: test the condition at the loop top, jump back
+-- after the body, and patch the condition's false-exit past the loop.
+function stat.While (fs, ast)
+ local whileinit = luaK:getlabel (fs)
+ local condexit = cond (fs, ast[1])
+ local bl = { }
+ enterblock (fs, bl, true)
+ block (fs, ast[2])
+ luaK:patchlist (fs, luaK:jump (fs), whileinit)
+ leaveblock (fs)
+ luaK:patchtohere (fs, condexit);
+end
+
+------------------------------------------------------------------------
+
+-- FIXME: it's cumbersome to write this in this semi-recursive way.
+function stat.Set (fs, ast)
+ local ast_lhs, ast_vals, e = ast[1], ast[2], { }
+
+ --print "\n\nSet ast_lhs ast_vals:"
+ --print(disp.ast(ast_lhs))
+ --print(disp.ast(ast_vals))
+
+ local function let_aux (lhs, nvars)
+ local legal = { VLOCAL=1, VUPVAL=1, VGLOBAL=1, VINDEXED=1 }
+ --printv(lhs)
+ if not legal [lhs.v.k] then
+ error ("Bad lhs expr: "..pp.tostring(ast_lhs))
+ end
+ if nvars < #ast_lhs then -- this is not the last lhs
+ local nv = { v = { }, prev = lhs }
+ expr.expr (fs, ast_lhs [nvars+1], nv.v)
+ if nv.v.k == "VLOCAL" then check_conflict (fs, lhs, nv.v) end
+ let_aux (nv, nvars+1)
+ else -- this IS the last lhs
+ explist (fs, ast_vals, e)
+ if #ast_vals < nvars then
+ adjust_assign (fs, nvars, #ast_vals, e)
+ elseif #ast_vals > nvars then
+ adjust_assign (fs, nvars, #ast_vals, e)
+ fs.freereg = fs.freereg - #ast_vals + nvars
+ else -- #ast_vals == nvars (and we're at last lhs)
+ luaK:setoneret (fs, e) -- close last expression
+ luaK:storevar (fs, lhs.v, e)
+ return -- avoid default
+ end
+ end
+ init_exp (e, "VNONRELOC", fs.freereg - 1) -- default assignment
+ luaK:storevar (fs, lhs.v, e)
+ end
+
+ local lhs = { v = { }, prev = nil }
+ expr.expr (fs, ast_lhs[1], lhs.v)
+ let_aux( lhs, 1)
+end
+
+------------------------------------------------------------------------
+
+-- Function call used as a statement: compile the call, then patch its
+-- C operand to 1 so that all results are discarded.
+function stat.Call (fs, ast)
+ local v = { }
+ expr.Call (fs, ast, v)
+ luaP:SETARG_C (luaK:getcode(fs, v), 1)
+end
+
+------------------------------------------------------------------------
+
+-- Method invocation used as a statement; results are discarded, as in
+-- stat.Call.
+function stat.Invoke (fs, ast)
+ local v = { }
+ expr.Invoke (fs, ast, v)
+ --FIXME: didn't check that, just copied from stat.Call
+ luaP:SETARG_C (luaK:getcode(fs, v), 1)
+end
+
+
+-- Stub: goto patching is handled directly in stat.Label / stat.Goto
+-- below; this function is currently unused. NOTE(review): dead code,
+-- candidate for removal.
+local function patch_goto (fs, src, dst)
+
+end
+
+
+------------------------------------------------------------------------
+-- Goto/Label data:
+-- fs.labels :: string => { nactvar :: int; pc :: int }
+-- fs.forward_gotos :: string => list(int)
+--
+-- fs.labels goes from label ids to the number of active variables at
+-- the label's PC, and that PC
+--
+-- fs.forward_gotos goes from label ids to the list of the PC where
+-- some goto wants to jump to this label. Since gotos are actually made
+-- up of two instructions OP_CLOSE and OP_JMP, it's the first instruction's
+-- PC that's stored in fs.forward_gotos
+--
+-- Note that backward gotos aren't stored: since their destination is known
+-- when they're compiled, their target is directly set.
+------------------------------------------------------------------------
+
+------------------------------------------------------------------------
+-- Set a Label to jump to with Goto
+------------------------------------------------------------------------
+function stat.Label (fs, ast)
+ local label_id = ast[1]
+ if type(label_id)=='table' then label_id=label_id[1] end
+ -- printf("Label %s at PC %i", label_id, fs.pc)
+ -------------------------------------------------------------------
+ -- Register the label, so that future gotos can use it.
+ -------------------------------------------------------------------
+ if fs.labels [label_id] then error "Duplicate label in function"
+ else fs.labels [label_id] = { pc = fs.pc; nactvar = fs.nactvar } end
+ local gotos = fs.forward_gotos [label_id]
+ if gotos then
+ ----------------------------------------------------------------
+ -- Patch forward gotos which were targetting this label.
+ ----------------------------------------------------------------
+ for _, goto_pc in ipairs(gotos) do
+ local close_instr = fs.f.code[goto_pc]
+ local jmp_instr = fs.f.code[goto_pc+1]
+ local goto_nactvar = luaP:GETARG_A (close_instr)
+ if fs.nactvar < goto_nactvar then
+ luaP:SETARG_A (close_instr, fs.nactvar) end
+ luaP:SETARG_sBx (jmp_instr, fs.pc - goto_pc - 2)
+ end
+ ----------------------------------------------------------------
+ -- Gotos are patched, they can be forgotten about (when the
+ -- function will be finished, it will be checked that all gotos
+ -- have been patched, by checking that forward_goto is empty).
+ ----------------------------------------------------------------
+ fs.forward_gotos[label_id] = nil
+ end
+end
+
+------------------------------------------------------------------------
+-- jumps to a label set with stat.Label.
+-- Argument must be a String or an Id
+-- FIXME/optim: get rid of useless OP_CLOSE when nactvar doesn't change.
+-- This must be done both here for backward gotos, and in
+-- stat.Label for forward gotos.
+------------------------------------------------------------------------
+function stat.Goto (fs, ast)
+ local label_id = ast[1]
+ if type(label_id)=='table' then label_id=label_id[1] end
+ -- printf("Goto %s at PC %i", label_id, fs.pc)
+ local label = fs.labels[label_id]
+ if label then
+ ----------------------------------------------------------------
+ -- Backward goto: the label already exists, so I can get its
+ -- nactvar and address directly. nactvar is used to close
+ -- upvalues if we get out of scoping blocks by jumping.
+ ----------------------------------------------------------------
+ if fs.nactvar > label.nactvar then
+ luaK:codeABC (fs, "OP_CLOSE", label.nactvar, 0, 0) end
+ local offset = label.pc - fs.pc - 1
+ luaK:codeAsBx (fs, "OP_JMP", 0, offset)
+ else
+ ----------------------------------------------------------------
+ -- Forward goto: will be patched when the matching label is
+ -- found, forward_gotos[label_id] keeps the PC of the CLOSE
+ -- instruction just before the JMP. [stat.Label] will use it to
+ -- patch the OP_CLOSE and the OP_JMP.
+ ----------------------------------------------------------------
+ if not fs.forward_gotos[label_id] then
+ fs.forward_gotos[label_id] = { } end
+ table.insert (fs.forward_gotos[label_id], fs.pc)
+ luaK:codeABC (fs, "OP_CLOSE", fs.nactvar, 0, 0)
+ luaK:codeAsBx (fs, "OP_JMP", 0, luaK.NO_JUMP)
+ end
+end
+
+------------------------------------------------------------------------
+------------------------------------------------------------------------
+--
+-- Expression parsers table
+--
+------------------------------------------------------------------------
+------------------------------------------------------------------------
+
+function expr.expr (fs, ast, v)
+ if type(ast) ~= "table" then
+ error ("Expr AST expected, got "..pp.tostring(ast)) end
+
+ if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end
+
+ --debugf (" - Expression %s", table.tostring (ast))
+ local parser = expr[ast.tag]
+ if parser then parser (fs, ast, v)
+ elseif not ast.tag then
+ error ("No tag in expression "..
+ pp.tostring(ast, {line_max=80, hide_hash=1, metalua_tag=1}))
+ else
+ error ("No parser for node `"..ast.tag) end
+ --debugf (" - /Expression `%s", ast.tag)
+end
+
+------------------------------------------------------------------------
+
+-- Literal expressions: simple constant kinds; no code is emitted yet.
+function expr.Nil (fs, ast, v) init_exp (v, "VNIL", 0) end
+function expr.True (fs, ast, v) init_exp (v, "VTRUE", 0) end
+function expr.False (fs, ast, v) init_exp (v, "VFALSE", 0) end
+function expr.String (fs, ast, v) codestring (fs, v, ast[1]) end
+function expr.Number (fs, ast, v)
+ init_exp (v, "VKNUM", 0)
+ v.nval = ast[1] -- numeric value travels in nval, not in info
+end
+
+-- Parenthesized expression: truncate multiple results to exactly one.
+function expr.Paren (fs, ast, v)
+ expr.expr (fs, ast[1], v)
+ luaK:setoneret (fs, v)
+end
+
+-- '...' expression. Only legal in vararg functions.
+function expr.Dots (fs, ast, v)
+ assert (fs.f.is_vararg ~= 0, "No vararg in this function")
+ -- NEEDSARG flag is set if and only if the function is a vararg,
+ -- but no vararg has been used yet in its code. Clear it on first
+ -- use of '...' (cf. lparser.c: is_vararg &= ~VARARG_NEEDSARG).
+ -- BUGFIX: the original tested '<' (inverted condition) and assigned
+ -- to the misspelled field 'is_varag', so the flag was never cleared.
+ if fs.f.is_vararg >= M.VARARG_NEEDSARG then
+ fs.f.is_vararg = fs.f.is_vararg - M.VARARG_NEEDSARG end
+ init_exp (v, "VVARARG", luaK:codeABC (fs, "OP_VARARG", 0, 1, 0))
+end
+
+------------------------------------------------------------------------
+
+function expr.Table (fs, ast, v)
+ local pc = luaK:codeABC(fs, "OP_NEWTABLE", 0, 0, 0)
+ local cc = { v = { } , na = 0, nh = 0, tostore = 0, t = v } -- ConsControl
+ init_exp (v, "VRELOCABLE", pc)
+ init_exp (cc.v, "VVOID", 0) -- no value (yet)
+ luaK:exp2nextreg (fs, v) -- fix it at stack top (for gc)
+ for i = 1, #ast do
+ assert(cc.v.k == "VVOID" or cc.tostore > 0)
+ closelistfield(fs, cc);
+ (ast[i].tag == "Pair" and recfield or listfield) (fs, ast[i], cc)
+ end
+ lastlistfield(fs, cc)
+
+ -- Configure [OP_NEWTABLE] dimensions
+ luaP:SETARG_B(fs.f.code[pc], int2fb(cc.na)) -- set initial array size
+ luaP:SETARG_C(fs.f.code[pc], int2fb(cc.nh)) -- set initial table size
+ --printv(fs.f.code[pc])
+end
+
+
+------------------------------------------------------------------------
+
+function expr.Function (fs, ast, v)
+ if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end
+
+ local new_fs = open_func(fs)
+ if ast.lineinfo then
+ new_fs.f.lineDefined, new_fs.f.lastLineDefined =
+ ast.lineinfo.first.line, ast.lineinfo.last.line
+ end
+ parlist (new_fs, ast[1])
+ chunk (new_fs, ast[2])
+ close_func (new_fs)
+ pushclosure(fs, new_fs, v)
+end
+
+------------------------------------------------------------------------
+
+function expr.Op (fs, ast, v)
+ -- Compile `Op{ opname, a [, b] }: two elements means a unary operator,
+ -- three a binary one; anything else is a malformed node.
+ if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end
+ local op, nargs = ast[1], #ast
+
+ if nargs == 2 then -- unary operator
+ expr.expr (fs, ast[2], v)
+ luaK:prefix (fs, op, v)
+ return
+ end
+
+ if nargs ~= 3 then error "Wrong arg number" end
+
+ -- binary operator: left operand, infix fixup, right operand, postfix
+ local rhs = { }
+ expr.expr (fs, ast[2], v)
+ luaK:infix (fs, op, v)
+ expr.expr (fs, ast[3], rhs)
+ luaK:posfix (fs, op, v, rhs)
+end
+
+------------------------------------------------------------------------
+
+function expr.Call (fs, ast, v)
+ -- Compile `Call{ f, args... }: place the callee on the next free
+ -- register, then funcargs() compiles the arguments (from ast index 2
+ -- onwards) and emits OP_CALL.
+ expr.expr (fs, ast[1], v)
+ luaK:exp2nextreg (fs, v)
+ funcargs(fs, ast, v, 2)
+ --debugf("after expr.Call: %s, %s", v.k, luaP.opnames[luaK:getcode(fs, v).OP])
+end
+
+------------------------------------------------------------------------
+-- `Invoke{ table key args }
+function expr.Invoke (fs, ast, v)
+ -- Compile a method invocation: OP_SELF (via luaK:_self) fetches the
+ -- object and the method into two consecutive registers, then funcargs()
+ -- compiles the arguments (from ast index 3) and emits OP_CALL.
+ expr.expr (fs, ast[1], v)
+ luaK:dischargevars (fs, v)
+ local key = { }
+ codestring (fs, key, ast[2][1])
+ luaK:_self (fs, v, key)
+ funcargs (fs, ast, v, 3)
+end
+
+------------------------------------------------------------------------
+
+function expr.Index (fs, ast, v)
+ -- Compile `Index{ t, k }: leaves v as a VINDEXED expdesc (table in a
+ -- register, key as an RK operand in v.aux). Only binary indexing is
+ -- supported; longer `Index nodes abort compilation.
+ if #ast ~= 2 then
+ print"\n\nBAD INDEX AST:"
+ pp.print(ast)
+ error "generalized indexes not implemented" end
+
+ if ast.lineinfo then fs.lastline = ast.lineinfo.last.line end
+
+ --assert(fs.lastline ~= 0, ast.tag)
+
+ expr.expr (fs, ast[1], v)
+ luaK:exp2anyreg (fs, v)
+
+ local k = { }
+ expr.expr (fs, ast[2], k)
+ luaK:exp2val (fs, k)
+ luaK:indexed (fs, v, k)
+end
+
+------------------------------------------------------------------------
+
+function expr.Id (fs, ast, v)
+ -- Resolve an `Id{ name } node: singlevar classifies the name as a
+ -- local, an upvalue or a global, and fills in the expdesc v accordingly.
+ assert (ast.tag == "Id")
+ local name = ast[1]
+ singlevar (fs, name, v)
+end
+
+------------------------------------------------------------------------
+
+function expr.Stat (fs, ast, v)
+ -- Compile `Stat{ block, expr }: run a statement block in its own scope,
+ -- then evaluate `expr' as the value of the whole construct, leaving the
+ -- result in a fresh register just above the caller's live stack.
+ --printf(" * Stat: %i actvars, first freereg is %i", fs.nactvar, fs.freereg)
+ --printf(" actvars: %s", table.tostring(fs.actvar))
+
+ -- Protect temporary stack values by pretending they are local
+ -- variables. Local vars are in registers 0 ... fs.nactvar-1,
+ -- and temporary unnamed variables in fs.nactvar ... fs.freereg-1
+ local save_nactvar = fs.nactvar
+
+ -- Eventually, the result should go on top of stack *after all
+ -- `Stat{ } related computation and string usage is over. The index
+ -- of this destination register is kept here:
+ local dest_reg = fs.freereg
+
+ -- There might be variables in actvar whose register is > nactvar,
+ -- and therefore will not be protected by the "nactvar := freereg"
+ -- trick. Indeed, `Local only increases nactvar after the variable
+ -- content has been computed. Therefore, in
+ -- "local foo = -{`Stat{...}}", variable foo will be messed up by
+ -- the compilation of `Stat.
+ -- FIX: save the active variables at indices >= nactvar in
+ -- save_actvar, and restore them after `Stat has been computed.
+ --
+ -- I use a while rather than for loops and length operators because
+ -- fs.actvar is a 0-based array...
+ local save_actvar = { } do
+ local i = fs.nactvar
+ while true do
+ local v = fs.actvar[i]
+ if not v then break end
+ --printf("save half-baked actvar %s at index %i", table.tostring(v), i)
+ save_actvar[i] = v
+ i=i+1
+ end
+ end
+
+ fs.nactvar = fs.freereg -- Now temp unnamed registers are protected
+ enterblock (fs, { }, false)
+ chunk (fs, ast[1])
+ expr.expr (fs, ast[2], v)
+ luaK:exp2nextreg (fs, v)
+ leaveblock (fs)
+ luaK:exp2reg (fs, v, dest_reg)
+
+ -- Reserve the newly allocated stack level
+ -- Puzzled note: here was written "fs.freereg = fs.freereg+1".
+ -- I'm pretty sure it should rather be dest_reg+1, but maybe
+ -- both are equivalent?
+ fs.freereg = dest_reg+1
+
+ -- Restore nactvar, so that intermediate stacked value stop
+ -- being protected.
+ --printf(" nactvar back from %i to %i", fs.nactvar, save_nactvar)
+ fs.nactvar = save_nactvar
+
+ -- restore messed-up unregistered local vars
+ for i, j in pairs(save_actvar) do
+ --printf(" Restoring actvar %i", i)
+ fs.actvar[i] = j
+ end
+ --printf(" * End of Stat")
+end
+
+------------------------------------------------------------------------
+-- Main function: ast --> proto
+------------------------------------------------------------------------
+function M.ast_to_proto (ast, source)
+ -- Compile a whole AST into a function prototype. The top-level chunk is
+ -- always compiled as a vararg function, like stock Lua main chunks.
+ -- Returns the prototype plus the (possibly nil) source name.
+ local fs = open_func (nil)
+ fs.f.is_vararg = M.VARARG_ISVARARG
+ chunk (fs, ast)
+ close_func (fs)
+ -- sanity checks: the top-level function has no enclosing function,
+ -- no upvalues, and all nesting has been unwound
+ assert (fs.prev == nil)
+ assert (fs.f.nups == 0)
+ assert (fs.nestlevel == 0)
+ if source then fs.f.source = source end
+ return fs.f, source
+end
+
+return M
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2005-2013 Kein-Hong Man, Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang
+-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua
+--
+-------------------------------------------------------------------------------
+
+--[[--------------------------------------------------------------------
+
+ $Id$
+
+ lcode.lua
+ Lua 5 code generator in Lua
+ This file is part of Yueliang.
+
+ Copyright (c) 2005 Kein-Hong Man <khman@users.sf.net>
+ The COPYRIGHT file describes the conditions
+ under which this software may be distributed.
+
+ See the ChangeLog for more information.
+
+------------------------------------------------------------------------
+
+ [FF] Slightly modified, mainly to produce Lua 5.1 bytecode.
+
+----------------------------------------------------------------------]]
+
+--[[--------------------------------------------------------------------
+-- Notes:
+-- * one function manipulates a pointer argument with a simple data type
+-- (which can't be emulated by a table — that would be ambiguous); it now
+-- returns that value instead:
+-- luaK:concat(fs, l1, l2)
+-- * some function parameters changed to boolean, additional code
+-- translates boolean back to 1/0 for instruction fields
+-- * Added:
+-- luaK:ttisnumber(o) (from lobject.h)
+-- luaK:nvalue(o) (from lobject.h)
+-- luaK:setnilvalue(o) (from lobject.h)
+-- luaK:setsvalue(o) (from lobject.h)
+-- luaK:setnvalue(o) (from lobject.h)
+-- luaK:sethvalue(o) (from lobject.h)
+----------------------------------------------------------------------]]
+
+local luaP = require 'metalua.compiler.bytecode.lopcodes'
+
+-- Debug trace hook; disabled by default (no-op).
+local function debugf() end
+
+-- luaK: the code-generation methods, ported from Lua 5.1's lcode.c.
+local luaK = { }
+
+luaK.MAXSTACK = 250 -- (llimits.h, used in lcode.lua)
+luaK.LUA_MULTRET = -1 -- (lua.h)
+
+------------------------------------------------------------------------
+-- Marks the end of a patch list. It is an invalid value both as an absolute
+-- address, and as a list link (would link an element to itself).
+------------------------------------------------------------------------
+luaK.NO_JUMP = -1
+
+--FF 5.1
+function luaK:isnumeral(e)
+ -- True iff `e' is a bare numeric constant with no pending jump lists
+ -- (mirrors isnumeral() in lcode.c).
+ -- FIX: the original tested e.t twice; the second conjunct must test e.f.
+ return e.k=="VKNUM" and e.t==self.NO_JUMP and e.f==self.NO_JUMP
+end
+
+------------------------------------------------------------------------
+-- emulation of TObject macros (these are from lobject.h)
+-- * TObject is a table since lcode passes references around
+-- * tt member field removed, using Lua's type() instead
+------------------------------------------------------------------------
+-- TObject emulation: a "TObject" is any table carrying a `value' field.
+function luaK:ttisnumber(o)
+ -- nil-safe numeric test on a TObject's payload
+ if not o then return false end
+ return type(o.value) == "number"
+end
+
+function luaK:nvalue(o) return o.value end
+
+function luaK:setnilvalue(o) o.value = nil end
+
+function luaK:setsvalue(o, s) o.value = s end
+
+-- number and table payloads are stored exactly like string ones
+luaK.setnvalue = luaK.setsvalue
+luaK.sethvalue = luaK.setsvalue
+
+------------------------------------------------------------------------
+-- Fetch the instruction denoted by expdesc `e' (e.info is a pc index).
+------------------------------------------------------------------------
+function luaK:getcode(fs, e)
+ local code = fs.f.code
+ return code[e.info]
+end
+
+------------------------------------------------------------------------
+-- Emit an instruction whose signed Bx (sBx) operand is biased into the
+-- unsigned Bx field, as the bytecode format requires.
+------------------------------------------------------------------------
+function luaK:codeAsBx(fs, o, A, sBx)
+ local biased = sBx + luaP.MAXARG_sBx
+ return self:codeABx(fs, o, A, biased)
+end
+
+------------------------------------------------------------------------
+-- Does expdesc `e' carry pending true/false jump lists?
+------------------------------------------------------------------------
+function luaK:hasjumps(e)
+ return not (e.t == e.f)
+end
+
+------------------------------------------------------------------------
+-- FF updated 5.1
+------------------------------------------------------------------------
+function luaK:_nil(fs, from, n)
+ -- Load nil into registers from .. from+n-1. Peephole: try to widen an
+ -- immediately preceding OP_LOADNIL that covers an adjacent/overlapping
+ -- range, and emit nothing at all at function start (registers are
+ -- already nil there).
+ if fs.pc > fs.lasttarget then -- no jumps to current position?
+ if fs.pc == 0 then return end --function start, positions are already clean
+ local previous = fs.f.code[fs.pc - 1]
+ if luaP:GET_OPCODE(previous) == "OP_LOADNIL" then
+ local pfrom = luaP:GETARG_A(previous)
+ local pto = luaP:GETARG_B(previous)
+ if pfrom <= from and from <= pto + 1 then -- can connect both?
+ if from + n - 1 > pto then
+ luaP:SETARG_B(previous, from + n - 1)
+ end
+ return
+ end
+ end
+ end
+ self:codeABC(fs, "OP_LOADNIL", from, from + n - 1, 0) -- else no optimization
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:jump(fs)
+ -- Emit an unconditional jump with no target yet and return its pc so it
+ -- can be patched later. Jumps pending on the current position (fs.jpc)
+ -- are chained onto the new jump instead of being resolved here.
+ local jpc = fs.jpc -- save list of jumps to here
+ fs.jpc = self.NO_JUMP
+ local j = self:codeAsBx(fs, "OP_JMP", 0, self.NO_JUMP)
+ return self:concat(fs, j, jpc) -- keep them on hold
+end
+
+--FF 5.1
+function luaK:ret (fs, first, nret)
+ -- Emit OP_RETURN returning `nret' values starting at register `first'
+ -- (nret == LUA_MULTRET means "up to stack top", encoded as B == 0).
+ -- FIX: call through self rather than the luaK global, for consistency
+ -- with every other method in this module.
+ self:codeABC (fs, "OP_RETURN", first, nret+1, 0)
+end
+
+
+------------------------------------------------------------------------
+-- Emit a test instruction followed by a conditional jump; returns the
+-- pc of the jump so callers can chain or patch it.
+------------------------------------------------------------------------
+function luaK:condjump(fs, op, A, B, C)
+ self:codeABC(fs, op, A, B, C)
+ return self:jump(fs)
+end
+
+------------------------------------------------------------------------
+-- Patch the jump at `pc' so that it lands on absolute position `dest'.
+------------------------------------------------------------------------
+function luaK:fixjump(fs, pc, dest)
+ assert(dest ~= self.NO_JUMP)
+ local offset = dest - (pc + 1)
+ if math.abs(offset) > luaP.MAXARG_sBx then
+ error("control structure too long")
+ end
+ luaP:SETARG_sBx(fs.f.code[pc], offset)
+end
+
+------------------------------------------------------------------------
+-- returns current 'pc' and marks it as a jump target (to avoid wrong
+-- optimizations with consecutive instructions not in the same basic block).
+------------------------------------------------------------------------
+function luaK:getlabel(fs)
+ local here = fs.pc
+ fs.lasttarget = here
+ return here
+end
+
+------------------------------------------------------------------------
+-- Follow one link of a jump list: returns the absolute pc of the next
+-- element, or NO_JUMP at the end (a jump pointing at itself ends a list).
+------------------------------------------------------------------------
+function luaK:getjump(fs, pc)
+ local offset = luaP:GETARG_sBx(fs.f.code[pc])
+ if offset ~= self.NO_JUMP then
+ return pc + 1 + offset -- turn relative offset into absolute position
+ end
+ return self.NO_JUMP -- end of list
+end
+
+------------------------------------------------------------------------
+-- Return the instruction that actually controls the jump at `pc': the
+-- preceding instruction when it is a test (OpModeT), else the jump itself.
+------------------------------------------------------------------------
+function luaK:getjumpcontrol(fs, pc)
+ local code = fs.f.code
+ if pc >= 1 then
+ local prev = code[pc - 1]
+ if luaP:testOpMode(luaP:GET_OPCODE(prev), "OpModeT") then
+ return prev
+ end
+ end
+ return code[pc]
+end
+
+------------------------------------------------------------------------
+-- check whether list has any jump that do not produce a value
+-- (or produce an inverted value)
+------------------------------------------------------------------------
+--FF updated 5.1
+function luaK:need_value(fs, list, cond)
+ -- Walk the jump list: a jump "produces a value" only when it is
+ -- controlled by an OP_TESTSET with no destination register and the
+ -- expected condition; return true as soon as one jump does not.
+ while list ~= self.NO_JUMP do
+ local i = self:getjumpcontrol(fs, list)
+ if luaP:GET_OPCODE(i) ~= "OP_TESTSET" or
+ luaP:GETARG_A(i) ~= luaP.NO_REG or
+ luaP:GETARG_C(i) ~= cond then
+ return true
+ end
+ list = self:getjump(fs, list)
+ end
+ return false -- not found
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+--FF updated 5.1
+function luaK:patchtestreg(fs, node, reg)
+ -- Redirect the value produced by the OP_TESTSET controlling jump `node'
+ -- into register `reg'; when no register is wanted (or it already holds
+ -- the value), downgrade the instruction to a plain OP_TEST.
+ -- Returns false when the controlling instruction is not an OP_TESTSET.
+ assert(reg) -- safety check: this parameter was added relative to the 5.0 port
+ local i = self:getjumpcontrol(fs, node)
+ if luaP:GET_OPCODE(i) ~= "OP_TESTSET" then
+ return false end -- cannot patch other instructions
+ if reg ~= luaP.NO_REG and reg ~= luaP:GETARG_B(i) then
+ luaP:SETARG_A(i, reg)
+ else
+ -- no register to put value or register already has the value
+ luaP:SET_OPCODE(i, "OP_TEST")
+ luaP:SETARG_A(i, luaP:GETARG_B(i))
+ luaP:SETARG_B(i, 0)
+ -- NOTE(review): the next line writes back the value it just read -- a
+ -- no-op kept from the original port; lcode.c leaves C unchanged too.
+ luaP:SETARG_C(i, luaP:GETARG_C(i))
+ end
+ return true
+end
+
+--FF added 5.1
+function luaK:removevalues (fs, list)
+ -- Strip the produced values from every jump in `list' (turn each
+ -- controlling TESTSET into a plain TEST).
+ while list ~= self.NO_JUMP do
+ self:patchtestreg (fs, list, luaP.NO_REG)
+ list = self:getjump (fs, list)
+ end
+end
+
+------------------------------------------------------------------------
+-- FF updated 5.1
+------------------------------------------------------------------------
+function luaK:patchlistaux(fs, list, vtarget, reg, dtarget)
+ -- Patch every jump in `list': value-producing jumps go to `vtarget'
+ -- with their value stored in `reg'; the others go to `dtarget'.
+ while list ~= self.NO_JUMP do
+ local _next = self:getjump(fs, list)
+ if self:patchtestreg (fs, list, reg) then
+ self:fixjump(fs, list, vtarget)
+ else
+ self:fixjump (fs, list, dtarget)
+ end
+ list = _next
+ end
+end
+
+------------------------------------------------------------------------
+-- Resolve all jumps pending on the current position (fs.jpc) so they
+-- land on the instruction about to be emitted.
+------------------------------------------------------------------------
+function luaK:dischargejpc(fs)
+ self:patchlistaux(fs, fs.jpc, fs.pc, luaP.NO_REG, fs.pc)
+ fs.jpc = self.NO_JUMP
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:patchlist(fs, list, target)
+ -- Send every jump in `list' to `target'. Jumps aimed at the current
+ -- position are deferred through the pending-jump (jpc) mechanism.
+ if target == fs.pc then
+ return self:patchtohere(fs, list)
+ end
+ assert(target < fs.pc)
+ self:patchlistaux(fs, list, target, luaP.NO_REG, target)
+end
+
+------------------------------------------------------------------------
+-- Chain `list' onto the jumps pending on the next instruction to be
+-- emitted, marking the current position as a jump target.
+------------------------------------------------------------------------
+function luaK:patchtohere(fs, list)
+ self:getlabel(fs)
+ fs.jpc = self:concat(fs, fs.jpc, list)
+end
+
+------------------------------------------------------------------------
+-- * l1 was a pointer, now l1 is returned and callee assigns the value
+------------------------------------------------------------------------
+function luaK:concat(fs, l1, l2)
+ -- Append jump list l2 at the end of jump list l1; returns the head of
+ -- the combined list (the C original mutated l1 through a pointer).
+ if l2 == self.NO_JUMP then return l1 -- unchanged
+ elseif l1 == self.NO_JUMP then
+ return l2 -- changed
+ else
+ -- walk l1 to its last element, then link it to l2
+ local list = l1
+ local _next = self:getjump(fs, list)
+ while _next ~= self.NO_JUMP do -- find last element
+ list = _next
+ _next = self:getjump(fs, list)
+ end
+ self:fixjump(fs, list, l2)
+ end
+ return l1 -- unchanged
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:checkstack(fs, n)
+ -- Grow the recorded maximum stack size if `n' more slots above the
+ -- current frontier would exceed it; hard error past MAXSTACK.
+ local needed = fs.freereg + n
+ if needed <= fs.f.maxstacksize then return end
+ if needed >= luaK.MAXSTACK then
+ error("function or expression too complex")
+ end
+ fs.f.maxstacksize = needed
+end
+
+------------------------------------------------------------------------
+-- Allocate `n' consecutive registers on top of the stack.
+------------------------------------------------------------------------
+function luaK:reserveregs(fs, n)
+ self:checkstack(fs, n)
+ fs.freereg = fs.freereg + n
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:freereg(fs, reg)
+ -- Release register `reg' if it is a temporary: neither a constant index
+ -- (ISK) nor one holding an active local. Temporaries must be freed in
+ -- LIFO order, which the assert enforces.
+ if luaP:ISK (reg) or reg < fs.nactvar then return end
+ fs.freereg = fs.freereg - 1
+ assert(reg == fs.freereg,
+ string.format("reg=%i, fs.freereg=%i", reg, fs.freereg))
+end
+
+------------------------------------------------------------------------
+-- Release the register owned by expdesc `e', when it owns one.
+------------------------------------------------------------------------
+function luaK:freeexp(fs, e)
+ if e.k ~= "VNONRELOC" then return end
+ self:freereg(fs, e.info)
+end
+
+------------------------------------------------------------------------
+-- k is a constant, v is... what?
+-- fs.h is a hash value --> index in f.k
+------------------------------------------------------------------------
+-- * luaH_get, luaH_set deleted; direct table access used instead
+-- * luaO_rawequalObj deleted in first assert
+-- * setobj2n deleted in assignment of v to f.k table
+------------------------------------------------------------------------
+--FF radically updated, not completely understood
+function luaK:addk(fs, k, v)
+ -- Intern constant `v' (keyed by `k') in the prototype's constant table
+ -- f.k, using fs.h as a value -> index cache. Returns the 0-based index
+ -- of the constant.
+ local idx = fs.h[k.value]
+ local f = fs.f
+-- local oldsize = f.sizek
+ if self:ttisnumber (idx) then
+ -- cache hit: reuse the existing slot
+ --TODO this assert currently FAILS
+ --assert(fs.f.k[self:nvalue(idx)] == v)
+ return self:nvalue(idx)
+ else -- constant not found; create a new entry
+ do
+ local t = type (v.value)
+ assert(t=="nil" or t=="string" or t=="number" or t=="boolean")
+ end
+ --debugf("[const: k[%i] = %s ]", fs.nk, tostringv(v.value))
+ fs.f.k[fs.nk] = v
+ -- remember the new slot in the cache as a TObject wrapping the index
+ fs.h[k.value] = { }
+ self:setnvalue(fs.h[k.value], fs.nk)
+ local nk = fs.nk
+ fs.nk = fs.nk+1
+ return nk
+ end
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:stringK(fs, s)
+ -- Intern string constant `s'; returns its index in f.k.
+ assert (type(s)=="string")
+ local obj = {} -- TObject
+ self:setsvalue(obj, s)
+ return self:addk(fs, obj, obj)
+end
+
+------------------------------------------------------------------------
+-- Intern numeric constant `r'; returns its index in f.k.
+------------------------------------------------------------------------
+function luaK:numberK(fs, r)
+ assert (type(r)=="number")
+ local obj = {} -- TObject
+ self:setnvalue(obj, r)
+ return self:addk(fs, obj, obj)
+end
+
+------------------------------------------------------------------------
+-- Intern boolean constant `r'; returns its index in f.k.
+------------------------------------------------------------------------
+function luaK:boolK(fs, r)
+ assert (type(r)=="boolean")
+ local obj = {} -- TObject
+ self:setnvalue(obj, r)
+ return self:addk(fs, obj, obj)
+end
+
+------------------------------------------------------------------------
+-- Intern the nil constant. nil cannot be a table key, so the per-proto
+-- cache table fs.h itself stands in as the cache key.
+------------------------------------------------------------------------
+function luaK:nilK(fs)
+ local key, val = {}, {} -- TObjects
+ self:setnilvalue(val)
+ self:sethvalue(key, fs.h) -- cannot use nil as key; instead use table itself
+ return self:addk(fs, key, val)
+end
+
+
+--FF 5.1
+function luaK:setreturns (fs, e, nresults)
+ -- Make multi-valued expression `e' (open call or vararg) produce
+ -- `nresults' values; LUA_MULTRET means "all of them" (encoded as 0).
+ if e.k == "VCALL" then -- expression is an open function call?
+ luaP:SETARG_C(self:getcode(fs, e), nresults + 1)
+ elseif e.k == "VVARARG" then
+ luaP:SETARG_B (self:getcode (fs, e), nresults + 1)
+ luaP:SETARG_A (self:getcode (fs, e), fs.freereg)
+ self:reserveregs (fs, 1)
+ end
+end
+
+--FF 5.1
+function luaK:setmultret (fs, e)
+ -- Ask `e' for all of its results.
+ self:setreturns (fs, e, self.LUA_MULTRET)
+end
+
+--FF 5.1
+function luaK:setoneret (fs, e)
+ -- Restrict multi-valued expression `e' to exactly one result, fixing
+ -- up its expdesc kind accordingly.
+ if e.k == "VCALL" then -- expression is an open function call?
+ e.k = "VNONRELOC"
+ e.info = luaP:GETARG_A(self:getcode(fs, e))
+ elseif e.k == "VVARARG" then
+ luaP:SETARG_B (self:getcode (fs, e), 2)
+ e.k = "VRELOCABLE"
+ end
+end
+
+
+------------------------------------------------------------------------
+--FF deprecated in 5.1
+------------------------------------------------------------------------
+function luaK:setcallreturns(fs, e, nresults)
+ -- Dead code: superseded by setreturns/setoneret above and kept only
+ -- for reference -- the leading assert makes any call fail immediately.
+ assert (false, "setcallreturns deprecated")
+ --print "SCR:"
+ --printv(e)
+ --printv(self:getcode(fs, e))
+ if e.k == "VCALL" then -- expression is an open function call?
+ luaP:SETARG_C(self:getcode(fs, e), nresults + 1)
+ if nresults == 1 then -- 'regular' expression?
+ e.k = "VNONRELOC"
+ e.info = luaP:GETARG_A(self:getcode(fs, e))
+ end
+ elseif e.k == "VVARARG" then
+ --printf("Handle vararg return on expr %s, whose code is %s",
+ -- tostringv(e), tostringv(self:getcode(fs, e)))
+ if nresults == 1 then
+ luaP:SETARG_B (self:getcode (fs, e), 2)
+ e.k = "VRELOCABLE"
+--FIXME: why no SETARG_A???
+ else
+ luaP:SETARG_B (self:getcode (fs, e), nresults + 1)
+ luaP:SETARG_A (self:getcode (fs, e), fs.freereg)
+ self:reserveregs (fs, 1)
+ --printf("Now code is %s", tostringv(self:getcode(fs, e)))
+ end
+ end
+end
+
+------------------------------------------------------------------------
+-- Emit the code extracting the value of a variable reference (local,
+-- upvalue, global or indexed access) so that expdesc `e' denotes a
+-- plain value; multi-valued expressions are narrowed to one result.
+------------------------------------------------------------------------
+function luaK:dischargevars(fs, e)
+--printf("\ndischargevars\n")
+ local k = e.k
+ if k == "VLOCAL" then
+ e.k = "VNONRELOC"
+ elseif k == "VUPVAL" then
+ e.info = self:codeABC(fs, "OP_GETUPVAL", 0, e.info, 0)
+ e.k = "VRELOCABLE"
+ elseif k == "VGLOBAL" then
+ e.info = self:codeABx(fs, "OP_GETGLOBAL", 0, e.info)
+ e.k = "VRELOCABLE"
+ elseif k == "VINDEXED" then
+ self:freereg(fs, e.aux)
+ self:freereg(fs, e.info)
+ e.info = self:codeABC(fs, "OP_GETTABLE", 0, e.info, e.aux)
+ e.k = "VRELOCABLE"
+ elseif k == "VCALL" or k == "VVARARG" then
+ self:setoneret(fs, e)
+ else
+ -- there is one value available (somewhere)
+ end
+--printf("\n/dischargevars\n")
+end
+
+------------------------------------------------------------------------
+-- Emit an OP_LOADBOOL loading `b' into register A (with optional skip
+-- `jump'), marking it as a jump target since patch lists may land on it.
+------------------------------------------------------------------------
+function luaK:code_label(fs, A, b, jump)
+ self:getlabel(fs) -- those instructions may be jump targets
+ local pc = self:codeABC(fs, "OP_LOADBOOL", A, b, jump)
+ return pc
+end
+
+------------------------------------------------------------------------
+-- FF updated 5.1
+------------------------------------------------------------------------
+function luaK:discharge2reg(fs, e, reg)
+ -- Generate code moving the value of expression `e' into register `reg'
+ -- (jump lists are not handled here; see exp2reg for that).
+ self:dischargevars(fs, e)
+ local k = e.k
+ if k == "VNIL" then
+ self:_nil(fs, reg, 1)
+ elseif k == "VFALSE" or k == "VTRUE" then
+ self:codeABC(fs, "OP_LOADBOOL", reg, (e.k == "VTRUE") and 1 or 0, 0)
+ elseif k == "VKNUM" then
+ self:codeABx (fs, "OP_LOADK", reg, self:numberK(fs, e.nval))
+ elseif k == "VK" then
+ self:codeABx(fs, "OP_LOADK", reg, e.info)
+ elseif k == "VRELOCABLE" then
+ -- relocatable instruction: just set its destination register
+ local pc = self:getcode(fs, e)
+ luaP:SETARG_A(pc, reg)
+ elseif k == "VNONRELOC" then
+ if reg ~= e.info then
+ self:codeABC(fs, "OP_MOVE", reg, e.info, 0)
+ end
+ else
+ assert(e.k == "VVOID" or e.k == "VJMP")
+ return -- nothing to do...
+ end
+ e.info = reg
+ e.k = "VNONRELOC"
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:discharge2anyreg(fs, e)
+ -- Make sure `e' sits in some register, allocating a fresh one if needed.
+ if e.k == "VNONRELOC" then return end
+ self:reserveregs(fs, 1)
+ self:discharge2reg(fs, e, fs.freereg - 1)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:exp2reg(fs, e, reg)
+ -- Put the value of `e' -- including the outcome of its pending
+ -- true/false jump lists -- into register `reg', closing those lists.
+ self:discharge2reg(fs, e, reg)
+ if e.k == "VJMP" then
+ e.t = self:concat(fs, e.t, e.info) -- put this jump in 't' list
+ end
+ if self:hasjumps(e) then
+ local final -- position after whole expression
+ local p_f = self.NO_JUMP -- position of an eventual LOAD false
+ local p_t = self.NO_JUMP -- position of an eventual LOAD true
+ if self:need_value(fs, e.t, 1) or self:need_value(fs, e.f, 0) then
+ -- some jump does not carry its value: materialize booleans
+ local fj = self.NO_JUMP -- first jump (over LOAD ops.)
+ if e.k ~= "VJMP" then fj = self:jump(fs) end
+ p_f = self:code_label(fs, reg, 0, 1)
+ p_t = self:code_label(fs, reg, 1, 0)
+ self:patchtohere(fs, fj)
+ end
+ final = self:getlabel(fs)
+ self:patchlistaux(fs, e.f, final, reg, p_f)
+ self:patchlistaux(fs, e.t, final, reg, p_t)
+ end
+ e.f, e.t = self.NO_JUMP, self.NO_JUMP
+ e.info = reg
+ e.k = "VNONRELOC"
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:exp2nextreg(fs, e)
+ -- Put the value of `e' on the first free register of the stack.
+ self:dischargevars(fs, e)
+ --[FF] Already in place (added for expr.Stat)
+ if e.k == "VNONRELOC" and e.info == fs.freereg then
+ --printf("Expression already in next reg %i: %s", fs.freereg, tostringv(e))
+ return end
+ self:freeexp(fs, e)
+ self:reserveregs(fs, 1)
+ self:exp2reg(fs, e, fs.freereg - 1)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:exp2anyreg(fs, e)
+ -- Put the value of `e' in some register (reusing its current one when
+ -- possible) and return that register's index.
+ --printf("exp2anyregs(e=%s)", tostringv(e))
+ self:dischargevars(fs, e)
+ if e.k == "VNONRELOC" then
+ if not self:hasjumps(e) then -- exp is already in a register
+ return e.info
+ end
+ if e.info >= fs.nactvar then -- reg. is not a local?
+ self:exp2reg(fs, e, e.info) -- put value on it
+ return e.info
+ end
+ end
+ self:exp2nextreg(fs, e) -- default
+ return e.info
+end
+
+------------------------------------------------------------------------
+-- Make `e' denote a single value; a register is only required when
+-- pending jump lists must be resolved.
+------------------------------------------------------------------------
+function luaK:exp2val(fs, e)
+ if self:hasjumps(e) then
+ self:exp2anyreg(fs, e)
+ else
+ self:dischargevars(fs, e)
+ end
+end
+
+------------------------------------------------------------------------
+-- FF updated 5.1
+------------------------------------------------------------------------
+function luaK:exp2RK(fs, e)
+ -- Encode `e' as an RK operand: a constant index (RKASK-tagged) when the
+ -- constant fits in the instruction field, otherwise a register index.
+ self:exp2val(fs, e)
+ local k = e.k
+ if k=="VNIL" or k=="VTRUE" or k=="VFALSE" or k=="VKNUM" then
+ if fs.nk <= luaP.MAXINDEXRK then
+ -- intern the literal and reference it as a constant
+ if k=="VNIL" then e.info = self:nilK(fs)
+ elseif k=="VKNUM" then e.info = self:numberK (fs, e.nval)
+ else e.info = self:boolK(fs, e.k=="VTRUE") end
+ e.k = "VK"
+ return luaP:RKASK(e.info)
+ end
+ elseif k == "VK" then
+ if e.info <= luaP.MAXINDEXRK then -- constant fit in argC?
+ return luaP:RKASK (e.info)
+ end
+ end
+ -- not a constant in the right range: put it in a register
+ return self:exp2anyreg(fs, e)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:storevar(fs, var, exp)
+ -- Emit the assignment `var = exp' for a local, upvalue, global or
+ -- indexed destination, releasing the registers used by `exp'.
+ --print("STOREVAR")
+ --printf("var=%s", tostringv(var))
+ --printf("exp=%s", tostringv(exp))
+
+ local k = var.k
+ if k == "VLOCAL" then
+ -- locals: write directly into the variable's register
+ self:freeexp(fs, exp)
+ self:exp2reg(fs, exp, var.info)
+ return
+ elseif k == "VUPVAL" then
+ local e = self:exp2anyreg(fs, exp)
+ self:codeABC(fs, "OP_SETUPVAL", e, var.info, 0)
+ elseif k == "VGLOBAL" then
+ --printf("store global, exp=%s", tostringv(exp))
+ local e = self:exp2anyreg(fs, exp)
+ self:codeABx(fs, "OP_SETGLOBAL", e, var.info)
+ elseif k == "VINDEXED" then
+ local e = self:exp2RK(fs, exp)
+ self:codeABC(fs, "OP_SETTABLE", var.info, var.aux, e)
+ else
+ assert(0) -- invalid var kind to store
+ end
+ self:freeexp(fs, exp)
+ --print("/STOREVAR")
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:_self(fs, e, key)
+ -- Emit OP_SELF for a method call `e:key(...)': loads the method and the
+ -- receiver into two consecutive fresh registers; `e' ends up denoting
+ -- the function register.
+ self:exp2anyreg(fs, e)
+ self:freeexp(fs, e)
+ local func = fs.freereg
+ self:reserveregs(fs, 2)
+ self:codeABC(fs, "OP_SELF", func, e.info, self:exp2RK(fs, key))
+ self:freeexp(fs, key)
+ e.info = func
+ e.k = "VNONRELOC"
+end
+
+------------------------------------------------------------------------
+-- FF updated 5.1
+------------------------------------------------------------------------
+function luaK:invertjump(fs, e)
+ -- Negate the condition of the comparison controlling jump `e' by
+ -- flipping its A operand (only valid for EQ/LT/LE-style tests).
+ --printf("invertjump on jump instruction #%i", e.info)
+ --printv(self:getcode(fs, e))
+ local pc = self:getjumpcontrol(fs, e.info)
+ assert(luaP:testOpMode(luaP:GET_OPCODE(pc), "OpModeT") and
+ luaP:GET_OPCODE(pc) ~= "OP_TESTSET" and
+ luaP:GET_OPCODE(pc) ~= "OP_TEST")
+ --printf("Before invert:")
+ --printv(pc)
+ luaP:SETARG_A(pc, (luaP:GETARG_A(pc) == 0) and 1 or 0)
+ --printf("After invert:")
+ --printv(pc)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:jumponcond(fs, e, cond)
+ -- Emit a jump taken when `e' has boolean value `cond'; returns the pc
+ -- of the jump. Peephole: `not x' becomes a direct inverted OP_TEST.
+ if e.k == "VRELOCABLE" then
+ local ie = self:getcode(fs, e)
+ if luaP:GET_OPCODE(ie) == "OP_NOT" then
+ fs.pc = fs.pc - 1 -- remove previous OP_NOT
+ return self:condjump(fs, "OP_TEST", luaP:GETARG_B(ie), 0,
+ cond and 0 or 1)
+ end
+ -- else go through
+ end
+ self:discharge2anyreg(fs, e)
+ self:freeexp(fs, e)
+ return self:condjump(fs, "OP_TESTSET", luaP.NO_REG, e.info, cond and 1 or 0)
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:goiftrue(fs, e)
+ -- Arrange for control to continue when `e' is true: constants that are
+ -- always true emit nothing, always-false ones an unconditional jump,
+ -- the rest a conditional jump chained onto e's false list.
+ local pc -- pc of last jump
+ self:dischargevars(fs, e)
+ local k = e.k
+ if k == "VK" or k == "VTRUE" or k == "VKNUM" then
+ pc = self.NO_JUMP -- always true; do nothing
+ elseif k == "VFALSE" then
+ pc = self:jump(fs) -- always jump
+ elseif k == "VJMP" then
+ self:invertjump(fs, e)
+ pc = e.info
+ else
+ pc = self:jumponcond(fs, e, false)
+ end
+ e.f = self:concat(fs, e.f, pc) -- insert last jump in 'f' list
+ self:patchtohere(fs, e.t)
+ e.t = self.NO_JUMP
+end
+
+------------------------------------------------------------------------
+-- Dual of goiftrue: arrange for control to continue when `e' is false.
+------------------------------------------------------------------------
+function luaK:goiffalse(fs, e)
+ local pc -- pc of last jump
+ self:dischargevars(fs, e)
+ local k = e.k
+ if k == "VNIL" or k == "VFALSE"then
+ pc = self.NO_JUMP -- always false; do nothing
+ elseif k == "VTRUE" then
+ pc = self:jump(fs) -- always jump
+ elseif k == "VJMP" then
+ pc = e.info
+ else
+ pc = self:jumponcond(fs, e, true)
+ end
+ e.t = self:concat(fs, e.t, pc) -- insert last jump in 't' list
+ self:patchtohere(fs, e.f)
+ e.f = self.NO_JUMP
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
+function luaK:codenot(fs, e)
+ -- Compile `not e': constants are negated at compile time, jumps are
+ -- inverted, anything else gets an OP_NOT; the true/false jump lists of
+ -- `e' are swapped and stripped of their values.
+ self:dischargevars(fs, e)
+ local k = e.k
+ if k == "VNIL" or k == "VFALSE" then
+ e.k = "VTRUE"
+ elseif k == "VK" or k == "VKNUM" or k == "VTRUE" then
+ e.k = "VFALSE"
+ elseif k == "VJMP" then
+ self:invertjump(fs, e)
+ elseif k == "VRELOCABLE" or k == "VNONRELOC" then
+ self:discharge2anyreg(fs, e)
+ self:freeexp(fs, e)
+ e.info = self:codeABC(fs, "OP_NOT", 0, e.info, 0)
+ e.k = "VRELOCABLE"
+ else
+ assert(0) -- cannot happen
+ end
+ -- interchange true and false lists
+ e.f, e.t = e.t, e.f
+ self:removevalues(fs, e.f)
+ self:removevalues(fs, e.t)
+end
+
+------------------------------------------------------------------------
+-- Turn `t' into a VINDEXED expdesc: key `k' is encoded as an RK operand
+-- and stored in t.aux, for a later OP_GETTABLE/OP_SETTABLE.
+------------------------------------------------------------------------
+function luaK:indexed(fs, t, k)
+ local rk = self:exp2RK(fs, k)
+ t.aux = rk
+ t.k = "VINDEXED"
+end
+
+--FF 5.1
+function luaK:constfolding (op, e1, e2)
+ -- Try to fold an arithmetic operation over constant operands at compile
+ -- time (mirrors constfolding() in lcode.c). On success the result is
+ -- stored back into e1.nval and true is returned; on failure false is
+ -- returned and the caller must emit code instead. For unary operators
+ -- the caller passes a dummy e2 with nval = 0.
+ if not self:isnumeral(e1) or not self:isnumeral(e2) then return false end
+ -- FIX: dropped the unused local `e' from the original declaration.
+ local v1, v2, r = e1.nval, e2 and e2.nval, nil
+ if op == "OP_ADD" then r = v1+v2
+ elseif op == "OP_SUB" then r = v1-v2
+ elseif op == "OP_MUL" then r = v1*v2
+ elseif op == "OP_DIV" then if v2==0 then return false end r = v1/v2
+ elseif op == "OP_MOD" then if v2==0 then return false end r = v1%v2
+ elseif op == "OP_POW" then r = v1^v2
+ elseif op == "OP_UNM" then r = -v1
+ elseif op == "OP_LEN" then return false -- no constant folding for `#'
+ else assert (false, "Unknown numeric value") end
+ e1.nval = r
+ return true
+end
+
+--FF 5.1
+function luaK:codearith (fs, op, e1, e2)
+ -- Emit an arithmetic/length instruction over e1 (and e2 for binary
+ -- ops), unless the whole expression folds to a constant.
+ if self:constfolding (op, e1, e2) then return else
+ local o1 = self:exp2RK (fs, e1)
+ local o2 = 0
+ -- unary operators take no second RK operand
+ if op ~= "OP_UNM" and op ~= "OP_LEN" then
+ o2 = self:exp2RK (fs, e2) end
+ self:freeexp(fs, e2)
+ self:freeexp(fs, e1)
+ e1.info = self:codeABC (fs, op, 0, o1, o2)
+ e1.k = "VRELOCABLE"
+ end
+end
+
+--FF 5.1
+function luaK:codecomp (fs, op, cond, e1, e2)
+ -- Emit a comparison + conditional jump; `cond' == false encodes the
+ -- negated pseudo-ops (ne/gt/ge). LT/LE are negated by swapping operands.
+ assert (type (cond) == "boolean")
+ local o1 = self:exp2RK (fs, e1)
+ local o2 = self:exp2RK (fs, e2)
+ self:freeexp (fs, e2)
+ self:freeexp (fs, e1)
+ if not cond and op ~= "OP_EQ" then
+ -- exchange args and use the positive condition instead
+ local temp = o1; o1=o2; o2=temp cond = true end
+ e1.info = self:condjump (fs, op, cond and 1 or 0, o1, o2)
+ e1.k = "VJMP"
+end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
function luaK:prefix (fs, op, e)
  -- Apply unary operator [op] ("unm" | "not" | "len") to expression [e].
  -- A dummy numeric second operand lets unary ops reuse codearith's
  -- binary code path (mirrors the fake expdesc in lcode.c).
  local dummy = { t = self.NO_JUMP, f = self.NO_JUMP, k = "VKNUM", nval = 0 }
  if op == "unm" then
    -- non-numeric constants cannot stay as VK operands for OP_UNM
    if e.k == "VK" then self:exp2anyreg (fs, e) end
    self:codearith (fs, "OP_UNM", e, dummy)
  elseif op == "len" then
    self:exp2anyreg (fs, e) -- never folded: operand must be in a register
    self:codearith (fs, "OP_LEN", e, dummy)
  elseif op == "not" then
    self:codenot (fs, e)
  else
    assert (false, "Unknown unary operator")
  end
end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
function luaK:infix (fs, op, v)
  -- Process the left operand [v] when the infix operator [op] is seen,
  -- before the right operand is parsed.
  if op == "concat" then
    self:exp2nextreg(fs, v) -- operand must be on the 'stack'
  elseif op == "and" then
    self:goiftrue(fs, v)
  elseif op == "or" then
    self:goiffalse(fs, v)
  elseif not self:isnumeral (v) then
    -- arithmetic/comparison operand: numerals stay as-is so constant
    -- folding can still apply; anything else goes to RK form
    self:exp2RK(fs, v)
  end
end
+
+------------------------------------------------------------------------
+--
+-- grep "ORDER OPR" if you change these enums
+------------------------------------------------------------------------
-- Maps metalua binary-operator names to VM arithmetic opcodes; used by
-- posfix to dispatch without the C-style arithmetic on enum values.
luaK.arith_opc = { -- done as a table lookup instead of a calc
  add = "OP_ADD",
  sub = "OP_SUB",
  mul = "OP_MUL",
  mod = "OP_MOD",
  div = "OP_DIV",
  pow = "OP_POW",
  len = "OP_LEN", -- unary; handled via prefix, kept here for completeness
  ["not"] = "OP_NOT" -- unary as well; posfix never dispatches on it
}
-- Maps comparison-operator names to {opcode, polarity}; cond=false
-- entries are rewritten by codecomp (operand swap / polarity flip).
luaK.test_opc = { -- was ops[] in the codebinop function
  eq = {opc="OP_EQ", cond=true},
  lt = {opc="OP_LT", cond=true},
  le = {opc="OP_LE", cond=true},

  -- Pseudo-ops, with no metatable equivalent:
  ne = {opc="OP_EQ", cond=false},
  gt = {opc="OP_LT", cond=false},
  ge = {opc="OP_LE", cond=false}
}
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
function luaK:posfix(fs, op, e1, e2)
  -- Finish the binary expression [e1 op e2] once both operands have been
  -- parsed; the combined result ends up in e1.
  if op == "and" then
    assert(e1.t == self.NO_JUMP) -- list must be closed
    self:dischargevars(fs, e2)
    -- merge e1's false-jumps into e2's: the whole expr fails if e1 does
    e2.f = self:concat(fs, e2.f, e1.f)
    for k,v in pairs(e2) do e1[k]=v end -- *e1 = *e2
  elseif op == "or" then
    assert(e1.f == self.NO_JUMP) -- list must be closed
    self:dischargevars(fs, e2)
    -- merge e1's true-jumps into e2's: the whole expr succeeds if e1 does
    e2.t = self:concat(fs, e2.t, e1.t)
    for k,v in pairs(e2) do e1[k]=v end -- *e1 = *e2
  elseif op == "concat" then
    self:exp2val(fs, e2)
    -- fold chained '..' into a single OP_CONCAT over a register range:
    -- if e2 is already a concat starting right after e1's register,
    -- just widen its B argument instead of emitting a new instruction
    if e2.k == "VRELOCABLE"
      and luaP:GET_OPCODE(self:getcode(fs, e2)) == "OP_CONCAT" then
      assert(e1.info == luaP:GETARG_B(self:getcode(fs, e2)) - 1)
      self:freeexp(fs, e1)
      luaP:SETARG_B(self:getcode(fs, e2), e1.info)
      e1.k = "VRELOCABLE"; e1.info = e2.info
    else
      self:exp2nextreg(fs, e2)
      self:codearith (fs, "OP_CONCAT", e1, e2)
    end
  else
    -- arithmetic or comparison: dispatch through the lookup tables above
    local opc = self.arith_opc[op]
    if opc then self:codearith (fs, opc, e1, e2) else
      opc = self.test_opc[op] or error ("Unknown operator "..op)
      self:codecomp (fs, opc.opc, opc.cond, e1, e2)
    end
  end
end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
function luaK:fixline(fs, line)
  -- Overwrite the debug line number of the most recently emitted
  -- instruction (at pc-1).  A nil [line] is tolerated and recorded as 0.
  fs.f.lineinfo[fs.pc - 1] = line or 0
end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
-- Append instruction [i] to the function's code vector, record its
-- source line for debug info, and return the pc at which it was emitted.
function luaK:code(fs, i, line)
  if not line then
    line = 0 -- tolerate callers that pass no line; patched up below
    --print(debug.traceback "line == nil")
  end
  local f = fs.f

  do -- print it: compile-time trace of every emitted instruction
    local params = { }
    for _,x in ipairs{"A","B","Bx", "sBx", "C"} do
      if i[x] then table.insert (params, string.format ("%s=%i", x, i[x])) end
    end
    debugf ("[code:\t%s\t%s]", luaP.opnames[i.OP], table.concat (params, ", "))
  end

  self:dischargejpc(fs) -- 'pc' will change

  f.code[fs.pc] = i
  f.lineinfo[fs.pc] = line

  -- fall back to the last known source line when none was supplied
  if line == 0 then
    f.lineinfo[fs.pc] = fs.lastline
    if fs.lastline == 0 then
      --print(debug.traceback())
    end
  end

  -- NOTE(review): 42 is an arbitrary placeholder ensuring no instruction
  -- keeps line 0, not a meaningful source line -- confirm upstream intent
  if f.lineinfo[fs.pc] == 0 then
    f.lineinfo[fs.pc] = 42
  end

  local pc = fs.pc
  fs.pc = fs.pc + 1
  return pc
end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
-- Emit an iABC-format instruction with opcode [o] and args a/b/c;
-- returns the pc of the emitted instruction.
function luaK:codeABC(fs, o, a, b, c)
  assert(luaP:getOpMode(o) == "iABC", o.." is not an ABC operation")
  --assert getbmode(o) ~= opargn or b == 0
  --assert getcmode(o) ~= opargn or c == 0
  --FF
  --return self:code(fs, luaP:CREATE_ABC(o, a, b, c), fs.ls.lastline)
  return self:code(fs, luaP:CREATE_ABC(o, a, b, c), fs.lastline)
end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
-- Emit an iABx- or iAsBx-format instruction with opcode [o], arg [a]
-- and combined B/C argument [bc]; returns its pc.
function luaK:codeABx(fs, o, a, bc)
  assert(luaP:getOpMode(o) == "iABx" or luaP:getOpMode(o) == "iAsBx")
  --assert getcmode(o) == opargn
  --FF
  --return self:code(fs, luaP:CREATE_ABx(o, a, bc), fs.ls.lastline)
  return self:code(fs, luaP:CREATE_ABx(o, a, bc), fs.lastline)
end
+
+------------------------------------------------------------------------
+--
+------------------------------------------------------------------------
-- Emit OP_SETLIST storing [tostore] stacked values (0/LUA_MULTRET means
-- "all values up to top") into the table held in register [base].
-- [c] is the 1-based flush-batch index (see SETLIST in lopcodes).
function luaK:setlist (fs, base, nelems, tostore)
  local c = math.floor ((nelems-1) / luaP.LFIELDS_PER_FLUSH + 1)
  local b = tostore == self.LUA_MULTRET and 0 or tostore
  assert (tostore ~= 0)
  if c <= luaP.MAXARG_C then self:codeABC (fs, "OP_SETLIST", base, b, c)
  else
    -- batch index too large for field C: the reference VM emits it as a
    -- raw trailing code word.  NOTE(review): a bare number is passed
    -- where self:code expects an instruction table -- looks broken for
    -- very large table constructors; marked FIXME upstream, confirm.
    self:codeABC (fs, "OP_SETLIST", base, b, 0)
    self:code (fs, c, fs.lastline)--FIXME
  end
  fs.freereg = base + 1 -- free registers that held the list values
end
+
+return luaK
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2005-2013 Kein-Hong Man, Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang
+-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua
+--
+-------------------------------------------------------------------------------
+
+--[[--------------------------------------------------------------------
+
+ ldump.lua
+ Save bytecodes in Lua
+ This file is part of Yueliang.
+
+ Copyright (c) 2005 Kein-Hong Man <khman@users.sf.net>
+ The COPYRIGHT file describes the conditions
+ under which this software may be distributed.
+
+------------------------------------------------------------------------
+
+ [FF] Slightly modified, mainly to produce Lua 5.1 bytecode.
+
+----------------------------------------------------------------------]]
+
+--[[--------------------------------------------------------------------
+-- Notes:
+-- * LUA_NUMBER (double), byte order (little endian) and some other
+-- header values hard-coded; see other notes below...
+-- * One significant difference is that instructions are still in table
+-- form (with OP/A/B/C/Bx fields) and luaP:Instruction() is needed to
+-- convert them into 4-char strings
+-- * Deleted:
+-- luaU:DumpVector: folded into DumpLines, DumpCode
+-- * Added:
+-- luaU:endianness() (from lundump.c)
+-- luaU:make_setS: create a chunk writer that writes to a string
+-- luaU:make_setF: create a chunk writer that writes to a file
+-- (lua.h contains a typedef for a Chunkwriter pointer, and
+-- a Lua-based implementation exists, writer() in lstrlib.c)
+-- luaU:from_double(x): encode double value for writing
+-- luaU:from_int(x): encode integer value for writing
+-- (error checking is limited for these conversion functions)
+-- (double conversion does not support denormals or NaNs)
+-- luaU:ttype(o) (from lobject.h)
+----------------------------------------------------------------------]]
+
+local luaP = require 'metalua.compiler.bytecode.lopcodes'
+
local M = { }

-- Sniff the bytecode header of the *running* VM by dumping a trivial
-- function: chunks produced by this dumper then match the host's own
-- native format (endianness, type sizes, number representation).
local format = { }
format.header = string.dump(function()end):sub(1, 12)
-- header bytes 7..12 (after the "\27Lua" signature, version and format
-- bytes) describe the platform properties of the dumping VM
format.little_endian, format.int_size,
format.size_t_size, format.instr_size,
format.number_size, format.integral = format.header:byte(7, 12)
format.little_endian = format.little_endian~=0
format.integral = format.integral ~=0

-- the float encoder below only handles IEEE754 doubles
assert(format.integral or format.number_size==8, "Number format not supported by dumper")
assert(format.little_endian, "Big endian architectures not supported by dumper")

--requires luaP
local luaU = { }
M.luaU = luaU

luaU.format = format

-- constants used by dumper (Lua basic type tags)
luaU.LUA_TNIL = 0
luaU.LUA_TBOOLEAN = 1
luaU.LUA_TNUMBER = 3 -- (all in lua.h)
luaU.LUA_TSTRING = 4
luaU.LUA_TNONE = -1
+
+-- definitions for headers of binary files
+--luaU.LUA_SIGNATURE = "\27Lua" -- binary files start with "<esc>Lua"
+--luaU.VERSION = 81 -- 0x50; last format change was in 5.0
+--luaU.FORMAT_VERSION = 0 -- 0 is official version. yeah I know I'm a liar.
+
+-- a multiple of PI for testing native format
+-- multiplying by 1E7 gives non-trivial integer values
+--luaU.TEST_NUMBER = 3.14159265358979323846E7
+
+--[[--------------------------------------------------------------------
+-- Additional functions to handle chunk writing
+-- * to use make_setS and make_setF, see test_ldump.lua elsewhere
+----------------------------------------------------------------------]]
+
+------------------------------------------------------------------------
+-- works like the lobject.h version except that TObject used in these
+-- scripts only has a 'value' field, no 'tt' field (native types used)
+------------------------------------------------------------------------
function luaU:ttype(o)
  -- Map the native Lua type of o.value onto the corresponding LUA_T*
  -- tag; any other type yields LUA_TNONE ("not dumpable").
  local t = type(o.value)
  if t == "nil" then return self.LUA_TNIL
  elseif t == "boolean" then return self.LUA_TBOOLEAN
  elseif t == "number" then return self.LUA_TNUMBER
  elseif t == "string" then return self.LUA_TSTRING
  else return self.LUA_TNONE
  end
end
+
+------------------------------------------------------------------------
+-- create a chunk writer that writes to a string
+-- * returns the writer function and a table containing the string
+-- * to get the final result, look in buff.data
+------------------------------------------------------------------------
function luaU:make_setS()
  -- Build a chunk writer that accumulates into a string; the final
  -- result is read from buff.data after the dump completes.
  local buff = { data = "" }
  local function writer(s, b) -- chunk writer; [b] is the buffer table
    if s then b.data = b.data .. s end
  end
  return writer, buff
end
+
+------------------------------------------------------------------------
+-- create a chunk writer that writes to a file
+-- * returns the writer function and a table containing the file handle
+-- * if a nil is passed, then writer should close the open file
+------------------------------------------------------------------------
function luaU:make_setF(filename)
  -- Build a chunk writer targeting file [filename]; calling the writer
  -- with a nil chunk closes the handle.  Returns nil if the open fails.
  local buff = { h = io.open(filename, "wb") }
  if not buff.h then return nil end
  local function writer(s, b) -- chunk writer; [b] holds the handle
    local h = b.h
    if not h then return end
    if not s then h:close() else h:write(s) end
  end
  return writer, buff
end
+
-----------------------------------------------------------------------
-- converts a IEEE754 double number to an 8-byte little-endian string
-- * luaU:from_double() and luaU:from_int() are from ChunkBake project
-- * supports +/- Infinity, but not denormals or NaNs
-----------------------------------------------------------------------
function luaU:from_double(x)
  -- Split off the lowest byte of v, returning (v >> 8, byte-as-char).
  -- Uses '%' instead of math.mod: math.mod is a Lua 5.0 leftover only
  -- present in 5.1 builds compiled with LUA_COMPAT_MOD.
  local function grab_byte(v)
    return math.floor(v / 256), string.char(math.floor(v) % 256)
  end
  local sign = 0
  if x < 0 then sign = 1; x = -x end
  local mantissa, exponent = math.frexp(x)
  if x == 0 then -- zero
    mantissa, exponent = 0, 0
  elseif x == 1/0 then -- +/- infinity
    mantissa, exponent = 0, 2047
  else
    -- widen the mantissa to 53 significant bits and bias the exponent
    mantissa = (mantissa * 2 - 1) * math.ldexp(0.5, 53)
    exponent = exponent + 1022
  end
  local v, byte = "" -- convert to bytes
  x = mantissa
  for i = 1,6 do
    x, byte = grab_byte(x); v = v..byte -- 47:0
  end
  x, byte = grab_byte(exponent * 16 + x); v = v..byte -- 55:48
  x, byte = grab_byte(sign * 128 + x); v = v..byte -- 63:56
  return v
end
+
-----------------------------------------------------------------------
-- converts a number to a little-endian integer string of [size] bytes
-- * input value assumed to not overflow, can be signed/unsigned
-- * negative values are encoded in two's complement
-----------------------------------------------------------------------
function luaU:from_int(x, size)
  -- '%' replaces math.mod, which stock Lua 5.1 no longer provides
  -- (it exists only behind LUA_COMPAT_MOD)
  local v = ""
  x = math.floor(x)
  if x >= 0 then
    for i = 1, size do
      v = v..string.char(x % 256); x = math.floor(x / 256)
    end
  else -- x < 0: two's complement = complement each byte, propagate +1
    x = -x
    local carry = 1
    for i = 1, size do
      local c = 255 - (x % 256) + carry
      if c == 256 then c = 0; carry = 1 else carry = 0 end
      v = v..string.char(c); x = math.floor(x / 256)
    end
  end
  return v
end
+
+--[[--------------------------------------------------------------------
+-- Functions to make a binary chunk
+-- * many functions have the size parameter removed, since output is
+-- in the form of a string and some sizes are implicit or hard-coded
+-- * luaU:DumpVector has been deleted (used in DumpCode & DumpLines)
+----------------------------------------------------------------------]]
+
------------------------------------------------------------------------
-- dump a block of literal bytes (no size prefix, unlike DumpString)
------------------------------------------------------------------------
function luaU:DumpLiteral(s, D) self:DumpBlock(s, D) end

--[[--------------------------------------------------------------------
-- struct DumpState:
-- L -- lua_State (not used in this script)
-- write -- lua_Chunkwriter (chunk writer function)
-- data -- void* (chunk writer context or data already written)
----------------------------------------------------------------------]]

------------------------------------------------------------------------
-- dumps a block of bytes through the DumpState's writer
-- * lua_unlock(D.L), lua_lock(D.L) deleted
------------------------------------------------------------------------
function luaU:DumpBlock(b, D) D.write(b, D.data) end

------------------------------------------------------------------------
-- dumps a single byte (y must fit string.char's 0..255 range)
------------------------------------------------------------------------
function luaU:DumpByte(y, D)
  self:DumpBlock(string.char(y), D)
end

------------------------------------------------------------------------
-- dumps a signed integer of size `format.int_size` (for int)
------------------------------------------------------------------------
function luaU:DumpInt(x, D)
  self:DumpBlock(self:from_int(x, format.int_size), D)
end

------------------------------------------------------------------------
-- dumps an unsigned integer of size `format.size_t_size` (for size_t)
------------------------------------------------------------------------
function luaU:DumpSize(x, D)
  self:DumpBlock(self:from_int(x, format.size_t_size), D)
end
+
+------------------------------------------------------------------------
+-- dumps a LUA_NUMBER; can be an int or double depending on the VM.
+------------------------------------------------------------------------
function luaU:DumpNumber(x, D)
  -- LUA_NUMBER is either an integral type or an IEEE754 double,
  -- depending on the host VM header sniffed at load time.
  local bytes
  if format.integral then
    bytes = self:from_int(x, format.number_size)
  else
    bytes = self:from_double(x)
  end
  self:DumpBlock(bytes, D)
end
+
+------------------------------------------------------------------------
+-- dumps a Lua string
+------------------------------------------------------------------------
function luaU:DumpString(s, D)
  -- nil is dumped as size 0; otherwise the dumped size counts the
  -- trailing '\0' appended per the C string convention
  if s == nil then
    self:DumpSize(0, D)
    return
  end
  local z = s .. "\0"
  self:DumpSize(#z, D)
  self:DumpBlock(z, D)
end
+
+------------------------------------------------------------------------
+-- dumps instruction block from function prototype
+------------------------------------------------------------------------
function luaU:DumpCode(f, D)
  -- instruction vector: count first, then each instruction packed into
  -- a 4-byte little-endian word (code array is 0-based)
  self:DumpInt(f.sizecode, D)
  for pc = 0, f.sizecode - 1 do
    self:DumpBlock(luaP:Instruction(f.code[pc]), D)
  end
end
+
+------------------------------------------------------------------------
+-- dumps local variable names from function prototype
+------------------------------------------------------------------------
-- dumps local variable debug records: one (name, startpc, endpc) each.
function luaU:DumpLocals(f, D)
  local n = f.sizelocvars
  self:DumpInt(n, D)
  for i = 0, n - 1 do
    -- Dirty temporary fix:
    -- `Stat{ } keeps properly count of the number of local vars,
    -- but fails to keep score of their debug info (names).
    -- It therefore might happen that #f.localvars < f.sizelocvars, or
    -- that a variable's startpc and endpc fields are left unset.
    -- FIXME: This might not be needed anymore, check the bug report
    -- by J. Belmonte.
    -- NOTE(review): breaking early after the count [n] was already
    -- dumped writes fewer records than announced -- confirm the loader
    -- tolerates this before relying on it.
    local var = f.locvars[i]
    if not var then break end
    -- printf("[DUMPLOCALS] dumping local var #%i = %s", i, table.tostring(var))
    self:DumpString(var.varname, D)
    self:DumpInt(var.startpc or 0, D)
    self:DumpInt(var.endpc or 0, D)
  end
end
+
+------------------------------------------------------------------------
+-- dumps line information from function prototype
+------------------------------------------------------------------------
function luaU:DumpLines(f, D)
  -- line-number vector: one int per instruction (0-based, was DumpVector)
  local count = f.sizelineinfo
  self:DumpInt(count, D)
  for pc = 0, count - 1 do
    self:DumpInt(f.lineinfo[pc], D)
  end
end
+
+------------------------------------------------------------------------
+-- dump upvalue names from function prototype
+------------------------------------------------------------------------
function luaU:DumpUpvalues(f, D)
  -- debug info: upvalue names, indexed 0-based like all proto vectors
  local count = f.sizeupvalues
  self:DumpInt(count, D)
  for idx = 0, count - 1 do
    self:DumpString(f.upvalues[idx], D)
  end
end
+
+------------------------------------------------------------------------
+-- dump constant pool from function prototype
+-- * nvalue(o) and tsvalue(o) macros removed
+------------------------------------------------------------------------
-- dumps the constant pool: count, then a (type byte, payload) pair per
-- entry; nvalue(o)/tsvalue(o) macros replaced by the plain o.value field
function luaU:DumpConstants(f, D)
  local n = f.sizek
  self:DumpInt(n, D)
  for i = 0, n - 1 do
    local o = f.k[i] -- TObject
    local tt = self:ttype(o)
    assert (tt >= 0) -- LUA_TNONE (-1) would mean a non-dumpable constant
    self:DumpByte(tt, D)
    if tt == self.LUA_TNUMBER then
      self:DumpNumber(o.value, D)
    elseif tt == self.LUA_TSTRING then
      self:DumpString(o.value, D)
    elseif tt == self.LUA_TBOOLEAN then
      self:DumpByte (o.value and 1 or 0, D)
    elseif tt == self.LUA_TNIL then
      -- nil carries no payload: the type byte alone encodes it
    else
      assert(false) -- cannot happen
    end
  end
end
+
+
-- dumps nested function prototypes, recursing through DumpFunction
function luaU:DumpProtos (f, D)
  local count = f.sizep
  assert (count)
  self:DumpInt(count, D)
  for idx = 0, count - 1 do
    self:DumpFunction(f.p[idx], f.source, D)
  end
end
+
-- dumps the debug section: line info, then locals, then upvalue names
-- (this order is fixed by the chunk format)
function luaU:DumpDebug(f, D)
  self:DumpLines(f, D)
  self:DumpLocals(f, D)
  self:DumpUpvalues(f, D)
end
+
+
+------------------------------------------------------------------------
+-- dump child function prototypes from function prototype
+--FF completely reworked for 5.1 format
+------------------------------------------------------------------------
-- Dump one function prototype [f]: header fields, code, constants,
-- nested protos, then debug info.  [p] is the parent's source name;
-- when it matches f.source the name is dumped as nil to save space.
function luaU:DumpFunction(f, p, D)
  -- print "Dumping function:"
  -- table.print(f, 60)

  local source = f.source
  if source == p then source = nil end
  self:DumpString(source, D)
  self:DumpInt(f.lineDefined, D)
  -- NOTE(review): 42 is a placeholder for a missing lastLineDefined,
  -- not a real source line -- confirm upstream intent
  self:DumpInt(f.lastLineDefined or 42, D)
  self:DumpByte(f.nups, D)
  self:DumpByte(f.numparams, D)
  self:DumpByte(f.is_vararg, D)
  self:DumpByte(f.maxstacksize, D)
  self:DumpCode(f, D)
  self:DumpConstants(f, D)
  self:DumpProtos( f, D)
  self:DumpDebug(f, D)
end
+
+------------------------------------------------------------------------
+-- dump Lua header section (some sizes hard-coded)
+--FF: updated for version 5.1
+------------------------------------------------------------------------
function luaU:DumpHeader(D)
  -- emit the 12-byte header captured from the running VM (see [format]);
  -- guarantees the chunk matches the host's native layout
  self:DumpLiteral(format.header, D)
end
+
+------------------------------------------------------------------------
+-- dump function as precompiled chunk
+-- * w, data are created from make_setS, make_setF
+--FF: suppressed extraneous [L] param
+------------------------------------------------------------------------
function luaU:dump (Main, w, data)
  -- Dump main prototype [Main] as a precompiled chunk through writer [w]
  -- with writer context [data] (see make_setS / make_setF).
  local D = { write = w, data = data } -- DumpState
  self:DumpHeader(D)
  self:DumpFunction(Main, nil, D)
  -- final call with a nil chunk lets file-based writers close their file
  D.write(nil, D.data)
end
+
+------------------------------------------------------------------------
+-- find byte order (from lundump.c)
+-- * hard-coded to little-endian
+------------------------------------------------------------------------
function luaU:endianness()
  -- kept for API compatibility with lundump; 1 == little-endian, the
  -- only byte order this dumper supports (asserted at load time above)
  return 1
end
+
-- Serialize prototype [proto] and return the bytecode as a string.
-- FIXME: ugly concat-base generation in [make_setS], bufferize properly!
function M.dump_string (proto)
  local write_fn, acc = luaU:make_setS()
  luaU:dump (proto, write_fn, acc)
  return acc.data
end
+
-- Serialize prototype [proto] and write the bytecode to [filename].
-- Raises a descriptive error if the file cannot be opened (the original
-- crashed with "attempt to index a nil value" on file:write instead).
-- FIXME: [make_setS] sucks, perform synchronous file writing
-- Now unused
function M.dump_file (proto, filename)
  local writer, buff = luaU:make_setS()
  luaU:dump (proto, writer, buff)
  local file = assert (io.open (filename, "wb"))
  file:write (buff.data)
  file:close ()
  --if UNIX_SHARPBANG then os.execute ("chmod a+x "..filename) end
end
+
+return M
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2005-2013 Kein-Hong Man, Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Kein-Hong Man - Initial implementation for Lua 5.0, part of Yueliang
+-- Fabien Fleutot - Port to Lua 5.1, integration with Metalua
+--
+-------------------------------------------------------------------------------
+
+--[[--------------------------------------------------------------------
+
+ $Id$
+
+ lopcodes.lua
+ Lua 5 virtual machine opcodes in Lua
+ This file is part of Yueliang.
+
+ Copyright (c) 2005 Kein-Hong Man <khman@users.sf.net>
+ The COPYRIGHT file describes the conditions
+ under which this software may be distributed.
+
+ See the ChangeLog for more information.
+
+------------------------------------------------------------------------
+
+ [FF] Slightly modified, mainly to produce Lua 5.1 bytecode.
+
+----------------------------------------------------------------------]]
+
+--[[--------------------------------------------------------------------
+-- Notes:
+-- * an Instruction is a table with OP, A, B, C, Bx elements; this
+-- should allow instruction handling to work with doubles and ints
+-- * Added:
+-- luaP:Instruction(i): convert field elements to a 4-char string
+-- luaP:DecodeInst(x): convert 4-char string into field elements
+-- * WARNING luaP:Instruction outputs instructions encoded in little-
+-- endian form and field size and positions are hard-coded
+----------------------------------------------------------------------]]
+
-- no-op stub: compile-time tracing is disabled in this build
local function debugf() end

local luaP = { }

--[[
===========================================================================
  We assume that instructions are unsigned numbers.
  All instructions have an opcode in the first 6 bits.
  Instructions can have the following fields:
  'A' : 8 bits
  'B' : 9 bits
  'C' : 9 bits
  'Bx' : 18 bits ('B' and 'C' together)
  'sBx' : signed Bx

  A signed argument is represented in excess K; that is, the number
  value is the unsigned value minus K. K is exactly the maximum value
  for that argument (so that -max is represented by 0, and +max is
  represented by 2*max), which is half the maximum for the corresponding
  unsigned argument.
===========================================================================
--]]

luaP.OpMode = {"iABC", "iABx", "iAsBx"} -- basic instruction format

------------------------------------------------------------------------
-- size and position of opcode arguments.
-- * WARNING size and position is hard-coded elsewhere in this script
------------------------------------------------------------------------
luaP.SIZE_C = 9
luaP.SIZE_B = 9
luaP.SIZE_Bx = luaP.SIZE_C + luaP.SIZE_B
luaP.SIZE_A = 8

luaP.SIZE_OP = 6

luaP.POS_C = luaP.SIZE_OP
luaP.POS_B = luaP.POS_C + luaP.SIZE_C
luaP.POS_Bx = luaP.POS_C
luaP.POS_A = luaP.POS_B + luaP.SIZE_B

--FF from 5.1
-- BITRK is the high bit of a B/C operand: set means "constant-table
-- index", clear means "register index"
luaP.BITRK = 2^(luaP.SIZE_B - 1)
-- true if B/C operand [x] denotes a constant rather than a register
function luaP:ISK(x) return x >= self.BITRK end
luaP.MAXINDEXRK = luaP.BITRK - 1
-- mark constant index [x] as a constant operand (idempotent)
function luaP:RKASK(x)
  if x < self.BITRK then return x+self.BITRK else return x end
end
+
+
+
+------------------------------------------------------------------------
+-- limits for opcode arguments.
+-- we use (signed) int to manipulate most arguments,
+-- so they must fit in BITS_INT-1 bits (-1 for sign)
+------------------------------------------------------------------------
+-- removed "#if SIZE_Bx < BITS_INT-1" test, assume this script is
+-- running on a Lua VM with double or int as LUA_NUMBER
+
luaP.MAXARG_Bx = math.ldexp(1, luaP.SIZE_Bx) - 1 -- 2^18 - 1 = 262143
luaP.MAXARG_sBx = math.floor(luaP.MAXARG_Bx / 2) -- 'sBx' is signed (excess-K)

luaP.MAXARG_A = math.ldexp(1, luaP.SIZE_A) - 1 -- 2^8 - 1 = 255
luaP.MAXARG_B = math.ldexp(1, luaP.SIZE_B) - 1 -- 2^9 - 1 = 511
luaP.MAXARG_C = math.ldexp(1, luaP.SIZE_C) - 1 -- 2^9 - 1 = 511
+
+-- creates a mask with 'n' 1 bits at position 'p'
+-- MASK1(n,p) deleted
+-- creates a mask with 'n' 0 bits at position 'p'
+-- MASK0(n,p) deleted
+
+--[[--------------------------------------------------------------------
+ Visual representation for reference:
+
+ 31 | | | 0 bit position
+ +-----+-----+-----+----------+
+ | B | C | A | Opcode | iABC format
+ +-----+-----+-----+----------+
+ - 9 - 9 - 8 - 6 - field sizes
+ +-----+-----+-----+----------+
+ | [s]Bx | A | Opcode | iABx | iAsBx format
+ +-----+-----+-----+----------+
+----------------------------------------------------------------------]]
+
+------------------------------------------------------------------------
+-- the following macros help to manipulate instructions
+-- * changed to a table object representation, very clean compared to
+-- the [nightmare] alternatives of using a number or a string
+------------------------------------------------------------------------
+
-- these accept or return opcodes in the form of string names
function luaP:GET_OPCODE(i) return self.ROpCode[i.OP] end
function luaP:SET_OPCODE(i, o) i.OP = self.OpCode[o] end

-- plain field accessors: instructions are tables, not packed integers
function luaP:GETARG_A(i) return i.A end
function luaP:SETARG_A(i, u) i.A = u end

function luaP:GETARG_B(i) return i.B end
function luaP:SETARG_B(i, b) i.B = b end

function luaP:GETARG_C(i) return i.C end
function luaP:SETARG_C(i, b) i.C = b end

function luaP:GETARG_Bx(i) return i.Bx end
function luaP:SETARG_Bx(i, b) i.Bx = b end

-- sBx is stored excess-MAXARG_sBx inside the unsigned Bx field
function luaP:GETARG_sBx(i) return i.Bx - self.MAXARG_sBx end
function luaP:SETARG_sBx(i, b) i.Bx = b + self.MAXARG_sBx end

-- build fresh instruction tables in iABC / iABx form
function luaP:CREATE_ABC(o,a,b,c)
  return {OP = self.OpCode[o], A = a, B = b, C = c}
end

function luaP:CREATE_ABx(o,a,bc)
  return {OP = self.OpCode[o], A = a, Bx = bc}
end
+
+------------------------------------------------------------------------
+-- Bit shuffling stuffs
+------------------------------------------------------------------------
+
if false and pcall (require, 'bit') then
  ------------------------------------------------------------------------
  -- Return a 4-char string little-endian encoded form of an instruction
  -- (bit-library variant, deliberately disabled: branch guard is false)
  ------------------------------------------------------------------------
  function luaP:Instruction(i)
    --FIXME
  end
else
  ------------------------------------------------------------------------
  -- Version without bit manipulation library.
  ------------------------------------------------------------------------
  -- powers of two, so that p2[n+1] == 2^n
  local p2 = {1,2,4,8,16,32,64,128,256, 512, 1024, 2048, 4096}
  -- keeps [n] bits from [x]
  local function keep (x, n) return x % p2[n+1] end
  -- shifts bits of [x] [n] places to the right
  local function srb (x,n) return math.floor (x / p2[n+1]) end
  -- shifts bits of [x] [n] places to the left
  local function slb (x,n) return x * p2[n+1] end

  ------------------------------------------------------------------------
  -- Return a 4-char string little-endian encoded form of an instruction.
  -- Accepts either OP/A/B/C or OP/A/Bx form.  Fix: B/C are now derived
  -- into locals for iABx instructions; the original wrote them back into
  -- the instruction table, mutating its argument as a side effect.
  ------------------------------------------------------------------------
  function luaP:Instruction(i)
    local a, b, c = i.A, i.B, i.C
    -- derive B/C from Bx when needed, without touching [i]
    if i.Bx then c = keep (i.Bx, 9); b = srb (i.Bx, 9) end
    -- c0 = 6 opcode bits + 2 LSB of A (flushed to MSB)
    local c0 = i.OP + slb (keep (a, 2), 6)
    -- c1 = 6 MSB of A + 2 LSB of C (flushed to MSB)
    local c1 = srb (a, 2) + slb (keep (c, 2), 6)
    -- c2 = 7 MSB of C + 1 LSB of B (flushed to MSB)
    local c2 = srb (c, 2) + slb (keep (b, 1), 7)
    -- c3 = 8 MSB of B
    local c3 = srb (b, 1)
    return string.char(c0, c1, c2, c3)
  end
end
------------------------------------------------------------------------
-- decodes a 4-char little-endian string into an instruction struct
------------------------------------------------------------------------
function luaP:DecodeInst(x)
  -- stub: decoding is not needed by the dumper; fails loudly if reached
  error "Not implemented"
end

------------------------------------------------------------------------
-- invalid register that fits in 8 bits
------------------------------------------------------------------------
luaP.NO_REG = luaP.MAXARG_A -- 255: sentinel for "no register"
+
+------------------------------------------------------------------------
+-- R(x) - register
+-- Kst(x) - constant (in constant table)
+-- RK(x) == if x < MAXSTACK then R(x) else Kst(x-MAXSTACK)
+------------------------------------------------------------------------
+
+------------------------------------------------------------------------
+-- grep "ORDER OP" if you change these enums
+------------------------------------------------------------------------
+
--[[--------------------------------------------------------------------
Lua virtual machine opcodes (enum OpCode):
NOTE: this reference table still describes the Lua 5.0 opcode set (it
lists TFORPREP and SETLISTO, and omits MOD/LEN/TESTSET/FORPREP/VARARG);
the actual 5.1 enum used by this module is generated from the name list
further below.
------------------------------------------------------------------------
name args description
------------------------------------------------------------------------
+OP_MOVE A B R(A) := R(B)
+OP_LOADK A Bx R(A) := Kst(Bx)
+OP_LOADBOOL A B C R(A) := (Bool)B; if (C) PC++
+OP_LOADNIL A B R(A) := ... := R(B) := nil
+OP_GETUPVAL A B R(A) := UpValue[B]
+OP_GETGLOBAL A Bx R(A) := Gbl[Kst(Bx)]
+OP_GETTABLE A B C R(A) := R(B)[RK(C)]
+OP_SETGLOBAL A Bx Gbl[Kst(Bx)] := R(A)
+OP_SETUPVAL A B UpValue[B] := R(A)
+OP_SETTABLE A B C R(A)[RK(B)] := RK(C)
+OP_NEWTABLE A B C R(A) := {} (size = B,C)
+OP_SELF A B C R(A+1) := R(B); R(A) := R(B)[RK(C)]
+OP_ADD A B C R(A) := RK(B) + RK(C)
+OP_SUB A B C R(A) := RK(B) - RK(C)
+OP_MUL A B C R(A) := RK(B) * RK(C)
+OP_DIV A B C R(A) := RK(B) / RK(C)
+OP_POW A B C R(A) := RK(B) ^ RK(C)
+OP_UNM A B R(A) := -R(B)
+OP_NOT A B R(A) := not R(B)
+OP_CONCAT A B C R(A) := R(B).. ... ..R(C)
+OP_JMP sBx PC += sBx
+OP_EQ A B C if ((RK(B) == RK(C)) ~= A) then pc++
+OP_LT A B C if ((RK(B) < RK(C)) ~= A) then pc++
+OP_LE A B C if ((RK(B) <= RK(C)) ~= A) then pc++
+OP_TEST A B C if (R(B) <=> C) then R(A) := R(B) else pc++
+OP_CALL A B C R(A), ... ,R(A+C-2) := R(A)(R(A+1), ... ,R(A+B-1))
+OP_TAILCALL A B C return R(A)(R(A+1), ... ,R(A+B-1))
+OP_RETURN A B return R(A), ... ,R(A+B-2) (see note)
+OP_FORLOOP A sBx R(A)+=R(A+2); if R(A) <?= R(A+1) then PC+= sBx
+OP_TFORLOOP A C R(A+2), ... ,R(A+2+C) := R(A)(R(A+1), R(A+2));
+ if R(A+2) ~= nil then pc++
+OP_TFORPREP A sBx if type(R(A)) == table then R(A+1):=R(A), R(A):=next;
+ PC += sBx
+OP_SETLIST A Bx R(A)[Bx-Bx%FPF+i] := R(A+i), 1 <= i <= Bx%FPF+1
+OP_SETLISTO A Bx (see note)
+OP_CLOSE A close all variables in the stack up to (>=) R(A)
+OP_CLOSURE A Bx R(A) := closure(KPROTO[Bx], R(A), ... ,R(A+n))
+----------------------------------------------------------------------]]
+
+luaP.opnames = {} -- opcode names
+luaP.OpCode = {} -- lookup name -> number
+luaP.ROpCode = {} -- lookup number -> name
+
+local i = 0
+for v in string.gfind([[
+MOVE -- 0
+LOADK
+LOADBOOL
+LOADNIL
+GETUPVAL
+GETGLOBAL -- 5
+GETTABLE
+SETGLOBAL
+SETUPVAL
+SETTABLE
+NEWTABLE -- 10
+SELF
+ADD
+SUB
+MUL
+DIV -- 15
+MOD
+POW
+UNM
+NOT
+LEN -- 20
+CONCAT
+JMP
+EQ
+LT
+LE -- 25
+TEST
+TESTSET
+CALL
+TAILCALL
+RETURN -- 30
+FORLOOP
+FORPREP
+TFORLOOP
+SETLIST
+CLOSE -- 35
+CLOSURE
+VARARG
+]], "[%a]+") do
+ local n = "OP_"..v
+ luaP.opnames[i] = v
+ luaP.OpCode[n] = i
+ luaP.ROpCode[i] = n
+ i = i + 1
+end
+luaP.NUM_OPCODES = i
+
+--[[
+===========================================================================
+ Notes:
+ (1) In OP_CALL, if (B == 0) then B = top. C is the number of returns - 1,
+ and can be 0: OP_CALL then sets 'top' to last_result+1, so
+ next open instruction (OP_CALL, OP_RETURN, OP_SETLIST) may use 'top'.
+
+ (2) In OP_RETURN, if (B == 0) then return up to 'top'
+
+ (3) For comparisons, B specifies what conditions the test should accept.
+
+ (4) All 'skips' (pc++) assume that next instruction is a jump
+
+ (5) OP_SETLISTO is used when the last item in a table constructor is a
+ function, so the number of elements set is up to top of stack
+===========================================================================
+--]]
+
+------------------------------------------------------------------------
+-- masks for instruction properties
+------------------------------------------------------------------------
+-- was enum OpModeMask:
+-- each value is a 1-based character position into the 7-char strings
+-- of luaP.opmodes (see the legend above that table)
+luaP.OpModeBreg = 2 -- B is a register
+luaP.OpModeBrk = 3 -- B is a register/constant
+luaP.OpModeCrk = 4 -- C is a register/constant
+luaP.OpModesetA = 5 -- instruction set register A
+luaP.OpModeK = 6 -- Bx is a constant
+luaP.OpModeT = 1 -- operator is a test
+
+------------------------------------------------------------------------
+-- get opcode mode, e.g. "iABC"
+-- * m is an opcode name string, e.g. "OP_MOVE"
+-- * digit 7 of the opmodes string selects the layout; self.OpMode is
+--   presumably the layout lookup table, defined elsewhere -- TODO confirm
+------------------------------------------------------------------------
+function luaP:getOpMode(m)
+ --printv(m)
+ --printv(self.OpCode[m])
+ --printv(self.opmodes [self.OpCode[m]+1])
+ return self.OpMode[tonumber(string.sub(self.opmodes[self.OpCode[m] + 1], 7, 7))]
+end
+
+------------------------------------------------------------------------
+-- test an instruction property flag
+-- * m is an opcode name string, e.g. "OP_MOVE"
+-- * b is a string, e.g. "OpModeBreg"
+-- returns true iff the flag character at position self[b] is "1"
+------------------------------------------------------------------------
+function luaP:testOpMode(m, b)
+ return (string.sub(self.opmodes[self.OpCode[m] + 1], self[b], self[b]) == "1")
+end
+
+-- number of list items to accumulate before a SETLIST instruction
+-- (must be a power of 2)
+-- * used in lparser, lvm, ldebug, ltests
+luaP.LFIELDS_PER_FLUSH = 50 --FF updated to match 5.1 -- NOTE(review): 50 is not a power of 2; 5.1 uses 50, the power-of-2 rule is a 5.0 leftover
+
+-- luaP_opnames[] is set above, as the luaP.opnames table
+-- opmode(t,b,bk,ck,sa,k,m) deleted
+
+--[[--------------------------------------------------------------------
+ Legend for luaP:opmodes:
+ 1 T -> T (is a test?)
+ 2 B -> B is a register
+ 3 b -> B is an RK register/constant combination
+ 4 C -> C is an RK register/constant combination
+ 5 A -> register A is set by the opcode
+ 6 K -> Bx is a constant
+ 7 m -> 1 if iABC layout,
+ 2 if iABx layout,
+ 3 if iAsBx layout
+----------------------------------------------------------------------]]
+
+luaP.opmodes = { -- indexed 1..NUM_OPCODES; entry i describes opcode i-1
+-- TBbCAKm opcode
+ "0100101", -- OP_MOVE 0
+ "0000112", -- OP_LOADK
+ "0000101", -- OP_LOADBOOL
+ "0100101", -- OP_LOADNIL
+ "0000101", -- OP_GETUPVAL
+ "0000112", -- OP_GETGLOBAL 5
+ "0101101", -- OP_GETTABLE
+ "0000012", -- OP_SETGLOBAL
+ "0000001", -- OP_SETUPVAL
+ "0011001", -- OP_SETTABLE
+ "0000101", -- OP_NEWTABLE 10
+ "0101101", -- OP_SELF
+ "0011101", -- OP_ADD
+ "0011101", -- OP_SUB
+ "0011101", -- OP_MUL
+ "0011101", -- OP_DIV 15
+ "0011101", -- OP_MOD
+ "0011101", -- OP_POW
+ "0100101", -- OP_UNM
+ "0100101", -- OP_NOT
+ "0100101", -- OP_LEN 20
+ "0101101", -- OP_CONCAT
+ "0000003", -- OP_JMP
+ "1011001", -- OP_EQ
+ "1011001", -- OP_LT
+ "1011001", -- OP_LE 25
+ "1000101", -- OP_TEST
+ "1100101", -- OP_TESTSET
+ "0000001", -- OP_CALL
+ "0000001", -- OP_TAILCALL
+ "0000001", -- OP_RETURN 30
+ "0000003", -- OP_FORLOOP
+ "0000103", -- OP_FORPREP
+ "1000101", -- OP_TFORLOOP
+ "0000001", -- OP_SETLIST
+ "0000001", -- OP_CLOSE 35
+ "0000102", -- OP_CLOSURE
+ "0000101" -- OP_VARARG
+}
+
+return luaP
\ No newline at end of file
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+--*-lua-*-----------------------------------------------------------------------
+-- Override Lua's default compilation functions, so that they support Metalua
+-- rather than only plain Lua
+--------------------------------------------------------------------------------
+
+local mlc = require 'metalua.compiler'
+
+local M = { }
+
+-- Original versions, kept so callers can still reach plain-Lua behavior
+local original_lua_versions = {
+ load = load,
+ loadfile = loadfile,
+ loadstring = loadstring,
+ dofile = dofile }
+
+local lua_loadstring = loadstring
+local lua_loadfile = loadfile
+
+-- Compile a Metalua source string into a function.
+-- Precompiled bytecode chunks are delegated to the original loadstring.
+function M.loadstring(str, name)
+ if type(str) ~= 'string' then error 'string expected' end
+ if str:match '^\027LuaQ' then return lua_loadstring(str) end -- "\027LuaQ" is the Lua 5.1 bytecode signature
+ local n = str:match '^#![^\n]*\n()' -- strip an optional leading shebang line
+ if n then str=str:sub(n, -1) end
+ -- FIXME: handle erroneous returns (return nil + error msg)
+ return mlc.new():src_to_function(str, name)
+end
+
+-- Load a Metalua source file; returns nil + message on I/O error.
+function M.loadfile(filename)
+ local f, err_msg = io.open(filename, 'rb')
+ if not f then return nil, err_msg end
+ local success, src = pcall( f.read, f, '*a')
+ pcall(f.close, f) -- best-effort close; read error is reported below
+ if success then return M.loadstring (src, '@'..filename)
+ else return nil, src end
+end
+
+-- Load from a reader function, as per standard load(): f() is called
+-- repeatedly and must return string pieces, then nil/false when done.
+function M.load(f, name)
+ local acc = { }
+ while true do
+ local x = f()
+ if not x then break end
+ assert(type(x)=='string', "function passed to load() must return strings")
+ table.insert(acc, x)
+ end
+ return M.loadstring(table.concat(acc)) -- NOTE(review): 'name' is accepted but not forwarded to loadstring -- TODO confirm this is intended
+end
+
+-- Compile and run a source string; raises on compile failure.
+function M.dostring(src)
+ local f, msg = M.loadstring(src)
+ if not f then error(msg) end
+ return f()
+end
+
+-- Compile and run a source file; raises on compile/open failure.
+function M.dofile(name)
+ local f, msg = M.loadfile(name)
+ if not f then error(msg) end
+ return f()
+end
+
+-- Export replacement functions as globals
+for name, f in pairs(M) do _G[name] = f end -- overrides load/loadfile/loadstring/dofile, adds dostring
+
+-- To be done *after* exportation
+M.lua = original_lua_versions
+
+return M
\ No newline at end of file
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+-- Export all public APIs from sub-modules, squashed into a flat namespace
+
+local MT = { __type='metalua.compiler.parser' }
+
+-- Sub-modules applied to each fresh parser, in this order
+local MODULE_REL_NAMES = { "annot.grammar", "expr", "meta", "misc",
+ "stat", "table", "ext" }
+
+-- Build a fresh parser instance: a new lexer plus every extender
+-- module applied once (extensions[] guards against double application).
+local function new()
+ local M = {
+ lexer = require "metalua.compiler.parser.lexer" ();
+ extensions = { } }
+ for _, rel_name in ipairs(MODULE_REL_NAMES) do
+ local abs_name = "metalua.compiler.parser."..rel_name
+ local extender = require (abs_name)
+ if not M.extensions[abs_name] then
+ if type (extender) == 'function' then extender(M) end
+ M.extensions[abs_name] = extender
+ end
+ end
+ return setmetatable(M, MT)
+end
+
+return { new = new }
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+require 'checks'
+local gg = require 'metalua.grammar.generator'
+local M = { }
+
+-- Wrap a primary parser so it accepts an optional "# annotation" suffix;
+-- an annotated result is wrapped in an `Annot{ primary, annot } node.
+-- * a_type names which annotation parser to use, e.g. 'te'
+function M.opt(mlc, primary, a_type)
+ checks('table', 'table|function', 'string')
+ return gg.sequence{
+ primary,
+ gg.onkeyword{ "#", function() return assert(mlc.annot[a_type]) end },
+ builder = function(x)
+ local t, annot = unpack(x)
+ return annot and { tag='Annot', t, annot } or t
+ end }
+end
+
+-- split a list of "foo" and "`Annot{foo, annot}" into a list of "foo"
+-- and a list of "annot".
+-- No annot list is returned if none of the elements were annotated.
+function M.split(lst)
+ local x, a, some = { }, { }, false
+ for i, p in ipairs(lst) do
+ if p.tag=='Annot' then
+ some, x[i], a[i] = true, unpack(p)
+ else x[i] = p end
+ end
+ if some then return x, a else return lst end -- lst returned unchanged when no annotations found
+end
+
+return M
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+local gg = require 'metalua.grammar.generator'
+
+-- Extender: installs the type-annotation grammar (M.annot) on parser M.
+return function(M)
+ local _M = gg.future(M)
+ M.lexer :add '->'
+ local A = { }
+ local _A = gg.future(A)
+ M.annot = A
+
+ -- Type identifier: Lua keywords such as `"nil"` allowed.
+ function M.annot.tid(lx)
+ local w = lx :next()
+ local t = w.tag
+ if t=='Keyword' and w[1] :match '^[%a_][%w_]*$' or w.tag=='Id'
+ then return {tag='TId'; lineinfo=w.lineinfo; w[1]}
+ else return gg.parse_error (lx, 'tid expected') end
+ end
+
+ -- Maps field-annotation keywords to their AST tags
+ local field_types = { var='TVar'; const='TConst';
+ currently='TCurrently'; field='TField' }
+
+ -- TODO check lineinfo
+ -- Type of a table field: "field", or var/const/currently followed by a type expr.
+ function M.annot.tf(lx)
+ local tk = lx:next()
+ local w = tk[1]
+ local tag = field_types[w]
+ if not tag then error ('Invalid field type '..w)
+ elseif tag=='TField' then return {tag='TField'} else
+ local te = M.te(lx)
+ return {tag=tag; te}
+ end
+ end
+
+ -- Comma/semicolon-separated list of type exprs, inside parentheses
+ M.annot.tebar_content = gg.list{
+ name = 'tebar content',
+ primary = _A.te,
+ separators = { ",", ";" },
+ terminators = ")" }
+
+ -- Type-expression tuple: "(te, te, ...)" or a single te
+ M.annot.tebar = gg.multisequence{
+ name = 'annot.tebar',
+ --{ '*', builder = 'TDynbar' }, -- maybe not user-available
+ { '(', _A.tebar_content, ')',
+ builder = function(x) return x[1] end },
+ { _A.te }
+ }
+
+ -- Type expression: identifier, dynamic '*', table type, or function type
+ M.annot.te = gg.multisequence{
+ name = 'annot.te',
+ { _A.tid, builder=function(x) return x[1] end },
+ { '*', builder = 'TDyn' },
+ { "[",
+ gg.list{
+ primary = gg.sequence{
+ _M.expr, "=", _A.tf,
+ builder = 'TPair'
+ },
+ separators = { ",", ";" },
+ terminators = { "]", "|" } },
+ gg.onkeyword{ "|", _A.tf },
+ "]",
+ builder = function(x)
+ local fields, other = unpack(x)
+ return { tag='TTable', other or {tag='TField'}, fields } -- default type for unlisted keys is TField
+ end }, -- "[ ... ]"
+ { '(', _A.tebar_content, ')', '->', '(', _A.tebar_content, ')',
+ builder = function(x)
+ local p, r = unpack(x)
+ return {tag='TFunction', p, r }
+ end } }
+
+ -- Type statement: "return <tebar>" or the "pass" identifier
+ M.annot.ts = gg.multisequence{
+ name = 'annot.ts',
+ { 'return', _A.tebar_content, builder='TReturn' },
+ { _A.tid, builder = function(x)
+ if x[1][1]=='pass' then return {tag='TPass'}
+ else error "Bad statement type" end
+ end } }
+
+-- TODO: add parsers for statements:
+-- #return tebar
+-- #alias = te
+-- #ell = tf
+--[[
+ M.annot.stat_annot = gg.sequence{
+ gg.list{ primary=_A.tid, separators='.' },
+ '=',
+ XXX??,
+ builder = 'Annot' }
+--]]
+
+ return M.annot
+end
\ No newline at end of file
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+-- Shared common parser table. It will be filled by parser.init(),
+-- and every other module will be able to call its elements at runtime.
+--
+-- If the table was directly created in parser.init, a circular
+-- dependency would be created: parser.init depends on other modules to fill the table,
+-- so other modules can't simultaneously depend on it.
+
+return { }
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-------------------------------------------------------------------------------
+--
+-- Exported API:
+-- * [mlp.expr()]
+-- * [mlp.expr_list()]
+-- * [mlp.func_val()]
+--
+-------------------------------------------------------------------------------
+
+local pp = require 'metalua.pprint'
+local gg = require 'metalua.grammar.generator'
+local annot = require 'metalua.compiler.parser.annot.generator'
+
+-- Extender: installs the expression grammar (M.expr and helpers) on parser M.
+return function(M)
+ local _M = gg.future(M)
+ local _table = gg.future(M, 'table')
+ local _meta = gg.future(M, 'meta') -- TODO move to ext?
+ local _annot = gg.future(M, 'annot') -- TODO move to annot
+
+ --------------------------------------------------------------------------------
+ -- Non-empty expression list. Actually, this isn't used here, but that's
+ -- handy to give to users.
+ --------------------------------------------------------------------------------
+ M.expr_list = gg.list{ primary=_M.expr, separators="," }
+
+ --------------------------------------------------------------------------------
+ -- Helpers for function applications / method applications
+ --------------------------------------------------------------------------------
+ M.func_args_content = gg.list{
+ name = "function arguments",
+ primary = _M.expr,
+ separators = ",",
+ terminators = ")" }
+
+ -- Used to parse methods
+ M.method_args = gg.multisequence{
+ name = "function argument(s)",
+ { "{", _table.content, "}" },
+ { "(", _M.func_args_content, ")", builder = unpack },
+ { "+{", _meta.quote_content, "}" },
+ -- TODO lineinfo?
+ function(lx) local r = M.opt_string(lx); return r and {r} or { } end } -- single string literal argument, e.g. obj:m"str"
+
+ --------------------------------------------------------------------------------
+ -- [func_val] parses a function, from opening parameters parenthesis to
+ -- "end" keyword included. Used for anonymous functions as well as
+ -- function declaration statements (both local and global).
+ --
+ -- It's wrapped in a [_func_val] eta expansion, so that when expr
+ -- parser uses the latter, they will notice updates of [func_val]
+ -- definitions.
+ --------------------------------------------------------------------------------
+ M.func_params_content = gg.list{
+ name="function parameters",
+ gg.multisequence{ { "...", builder = "Dots" }, annot.opt(M, _M.id, 'te') },
+ separators = ",", terminators = {")", "|"} }
+
+ -- TODO move to annot
+ M.func_val = gg.sequence{
+ name = "function body",
+ "(", _M.func_params_content, ")", _M.block, "end",
+ builder = function(x)
+ local params, body = unpack(x)
+ local annots, some = { }, false
+ for i, p in ipairs(params) do
+ if p.tag=='Annot' then
+ params[i], annots[i], some = p[1], p[2], true
+ else annots[i] = false end
+ end
+ if some then return { tag='Function', params, body, annots } -- annots list only kept when at least one param is annotated
+ else return { tag='Function', params, body } end
+ end }
+
+ local func_val = function(lx) return M.func_val(lx) end
+
+ --------------------------------------------------------------------------------
+ -- Default parser for primary expressions
+ --------------------------------------------------------------------------------
+ function M.id_or_literal (lx)
+ local a = lx:next()
+ if a.tag~="Id" and a.tag~="String" and a.tag~="Number" then
+ local msg
+ if a.tag=='Eof' then
+ msg = "End of file reached when an expression was expected"
+ elseif a.tag=='Keyword' then
+ msg = "An expression was expected, and `"..a[1]..
+ "' can't start an expression"
+ else
+ msg = "Unexpected expr token " .. pp.tostring (a)
+ end
+ gg.parse_error (lx, msg)
+ end
+ return a
+ end
+
+
+ --------------------------------------------------------------------------------
+ -- Builder generator for operators. Wouldn't be worth it if "|x|" notation
+ -- were allowed, but then lua 5.1 wouldn't compile it
+ --------------------------------------------------------------------------------
+
+ -- opf1 = |op| |_,a| `Op{ op, a }
+ local function opf1 (op) return
+ function (_,a) return { tag="Op", op, a } end end
+
+ -- opf2 = |op| |a,_,b| `Op{ op, a, b }
+ local function opf2 (op) return
+ function (a,_,b) return { tag="Op", op, a, b } end end
+
+ -- opf2r = |op| |a,_,b| `Op{ op, b, a } -- (args reversed)
+ local function opf2r (op) return
+ function (a,_,b) return { tag="Op", op, b, a } end end
+
+ local function op_ne(a, _, b)
+ -- This version allows to remove the "ne" operator from the AST definition.
+ -- However, it doesn't always produce the exact same bytecode as Lua 5.1.
+ return { tag="Op", "not",
+ { tag="Op", "eq", a, b, lineinfo= {
+ first = a.lineinfo.first, last = b.lineinfo.last } } }
+ end
+
+
+ --------------------------------------------------------------------------------
+ --
+ -- complete expression
+ --
+ --------------------------------------------------------------------------------
+
+ -- FIXME: set line number. In [expr] transformers probably
+ M.expr = gg.expr {
+ name = "expression",
+ primary = gg.multisequence{
+ name = "expr primary",
+ { "(", _M.expr, ")", builder = "Paren" },
+ { "function", _M.func_val, builder = unpack },
+ { "-{", _meta.splice_content, "}", builder = unpack },
+ { "+{", _meta.quote_content, "}", builder = unpack },
+ { "nil", builder = "Nil" },
+ { "true", builder = "True" },
+ { "false", builder = "False" },
+ { "...", builder = "Dots" },
+ { "{", _table.content, "}", builder = unpack },
+ _M.id_or_literal },
+
+ infix = { -- precedences follow the Lua 5.1 operator table
+ name = "expr infix op",
+ { "+", prec = 60, builder = opf2 "add" },
+ { "-", prec = 60, builder = opf2 "sub" },
+ { "*", prec = 70, builder = opf2 "mul" },
+ { "/", prec = 70, builder = opf2 "div" },
+ { "%", prec = 70, builder = opf2 "mod" },
+ { "^", prec = 90, builder = opf2 "pow", assoc = "right" },
+ { "..", prec = 40, builder = opf2 "concat", assoc = "right" },
+ { "==", prec = 30, builder = opf2 "eq" },
+ { "~=", prec = 30, builder = op_ne },
+ { "<", prec = 30, builder = opf2 "lt" },
+ { "<=", prec = 30, builder = opf2 "le" },
+ { ">", prec = 30, builder = opf2r "lt" },
+ { ">=", prec = 30, builder = opf2r "le" },
+ { "and",prec = 20, builder = opf2 "and" },
+ { "or", prec = 10, builder = opf2 "or" } },
+
+ prefix = {
+ name = "expr prefix op",
+ { "not", prec = 80, builder = opf1 "not" },
+ { "#", prec = 80, builder = opf1 "len" },
+ { "-", prec = 80, builder = opf1 "unm" } },
+
+ suffix = { -- indexing, calls and method invocations
+ name = "expr suffix op",
+ { "[", _M.expr, "]", builder = function (tab, idx)
+ return {tag="Index", tab, idx[1]} end},
+ { ".", _M.id, builder = function (tab, field)
+ return {tag="Index", tab, _M.id2string(field[1])} end },
+ { "(", _M.func_args_content, ")", builder = function(f, args)
+ return {tag="Call", f, unpack(args[1])} end },
+ { "{", _table.content, "}", builder = function (f, arg)
+ return {tag="Call", f, arg[1]} end},
+ { ":", _M.id, _M.method_args, builder = function (obj, post)
+ local m_name, args = unpack(post)
+ return {tag="Invoke", obj, _M.id2string(m_name), unpack(args)} end},
+ { "+{", _meta.quote_content, "}", builder = function (f, arg)
+ return {tag="Call", f, arg[1] } end },
+ default = { name="opt_string_arg", parse = _M.opt_string, builder = function(f, arg)
+ return {tag="Call", f, arg } end } } }
+ return M
+end
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+--
+-- Non-Lua syntax extensions
+--
+--------------------------------------------------------------------------------
+
+local gg = require 'metalua.grammar.generator'
+
+-- Extender: installs Metalua's non-Lua syntax extensions on parser M.
+return function(M)
+
+ local _M = gg.future(M)
+
+ ---------------------------------------------------------------------------
+ -- Algebraic Datatypes
+ ----------------------------------------------------------------------------
+ local function adt (lx)
+ local node = _M.id (lx)
+ local tagval = node[1]
+ -- tagkey = `Pair{ `String "key", `String{ -{tagval} } }
+ local tagkey = { tag="Pair", {tag="String", "tag"}, {tag="String", tagval} }
+ if lx:peek().tag == "String" or lx:peek().tag == "Number" then
+ -- TODO support boolean literals
+ return { tag="Table", tagkey, lx:next() }
+ elseif lx:is_keyword (lx:peek(), "{") then
+ local x = M.table.table (lx)
+ table.insert (x, 1, tagkey)
+ return x
+ else return { tag="Table", tagkey } end
+ end
+
+ M.adt = gg.sequence{ "`", adt, builder = unpack }
+
+ M.expr.primary :add(M.adt)
+
+ ----------------------------------------------------------------------------
+ -- Anonymous lambda
+ ----------------------------------------------------------------------------
+ M.lambda_expr = gg.sequence{
+ "|", _M.func_params_content, "|", _M.expr,
+ builder = function (x)
+ local li = x[2].lineinfo
+ return { tag="Function", x[1],
+ { {tag="Return", x[2], lineinfo=li }, lineinfo=li } } -- body is a single implicit return of the expr
+ end }
+
+ M.expr.primary :add (M.lambda_expr)
+
+ --------------------------------------------------------------------------------
+ -- Allows to write "a `f` b" instead of "f(a, b)". Taken from Haskell.
+ --------------------------------------------------------------------------------
+ function M.expr_in_backquotes (lx) return M.expr(lx, 35) end -- 35=limited precedence
+ M.expr.infix :add{ name = "infix function",
+ "`", _M.expr_in_backquotes, "`", prec = 35, assoc="left",
+ builder = function(a, op, b) return {tag="Call", op[1], a, b} end }
+
+ --------------------------------------------------------------------------------
+ -- C-style op+assignments
+ -- TODO: no protection against side-effects in LHS vars.
+ --------------------------------------------------------------------------------
+ local function op_assign(kw, op)
+ local function rhs(a, b) return { tag="Op", op, a, b } end
+ local function f(a,b)
+ if #a ~= #b then gg.parse_error "assymetric operator+assignment" end
+ local right = { }
+ local r = { tag="Set", a, right }
+ for i=1, #a do right[i] = { tag="Op", op, a[i], b[i] } end
+ return r
+ end
+ M.lexer :add (kw)
+ M.assignments[kw] = f
+ end
+
+ -- Register +=, -=, *=, /= as assignment keywords
+ local ops = { add='+='; sub='-='; mul='*='; div='/=' }
+ for ast_op_name, keyword in pairs(ops) do op_assign(keyword, ast_op_name) end
+
+ return M
+end
\ No newline at end of file
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2014 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+----------------------------------------------------------------------
+-- Generate a new lua-specific lexer, derived from the generic lexer.
+----------------------------------------------------------------------
+
+local generic_lexer = require 'metalua.grammar.lexer'
+
+-- Factory: returns a fresh Lua-specific lexer (clone of the generic one
+-- with the Lua keyword/operator set registered).
+return function()
+ local lexer = generic_lexer.lexer :clone()
+
+ local keywords = {
+ "and", "break", "do", "else", "elseif",
+ "end", "false", "for", "function",
+ "goto", -- Lua5.2
+ "if",
+ "in", "local", "nil", "not", "or", "repeat",
+ "return", "then", "true", "until", "while",
+ "...", "..", "==", ">=", "<=", "~=",
+ "::", -- Lua5.2
+ "+{", "-{" } -- Metalua
+
+ for _, w in ipairs(keywords) do lexer :add (w) end
+
+ return lexer
+end
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2014 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-- Compile-time metaprogramming features: splicing ASTs generated during compilation,
+-- AST quasi-quoting helpers.
+
+local gg = require 'metalua.grammar.generator'
+
+return function(M)
+ local _M = gg.future(M)
+ M.meta={ }
+ local _MM = gg.future(M.meta)
+
+ --------------------------------------------------------------------------------
+ -- External splicing: compile an AST into a chunk, load and evaluate
+ -- that chunk, and replace the chunk by its result (which must also be
+ -- an AST).
+ --------------------------------------------------------------------------------
+
+ -- TODO: that's not part of the parser
+ function M.meta.eval (ast)
+ -- TODO: should there be one mlc per splice, or per parser instance?
+ local mlc = require 'metalua.compiler'.new()
+ local f = mlc :ast_to_function (ast, '=splice')
+ local result=f(M) -- splices act on the current parser
+ return result
+ end
+
+ ----------------------------------------------------------------------------
+ -- Going from an AST to an AST representing that AST
+ -- the only hash-part key being lifted is `"tag"`.
+ -- Doesn't lift subtrees protected inside a `Splice{ ... }.
+ -- e.g. change `Foo{ 123 } into
+ -- `Table{ `Pair{ `String "tag", `String "foo" }, `Number 123 }
+ ----------------------------------------------------------------------------
+ local function lift (t)
+ --print("QUOTING:", table.tostring(t, 60,'nohash'))
+ local cases = { }
+ function cases.table (t)
+ local mt = { tag = "Table" }
+ --table.insert (mt, { tag = "Pair", quote "quote", { tag = "True" } })
+ if t.tag == "Splice" then
+ assert (#t==1, "Invalid splice")
+ local sp = t[1]
+ return sp
+ elseif t.tag then
+ table.insert (mt, { tag="Pair", lift "tag", lift(t.tag) })
+ end
+ for _, v in ipairs (t) do
+ table.insert (mt, lift(v))
+ end
+ return mt
+ end
+ function cases.number (t) return { tag = "Number", t, quote = true } end
+ function cases.string (t) return { tag = "String", t, quote = true } end
+ function cases.boolean (t) return { tag = t and "True" or "False", t, quote = true } end
+ local f = cases [type(t)]
+ if f then return f(t) else error ("Cannot quote an AST containing "..tostring(t)) end
+ end
+ M.meta.lift = lift
+
+ --------------------------------------------------------------------------------
+ -- when this variable is false, code inside [-{...}] is compiled and
+ -- avaluated immediately. When it's true (supposedly when we're
+ -- parsing data inside a quasiquote), [-{foo}] is replaced by
+ -- [`Splice{foo}], which will be unpacked by [quote()].
+ --------------------------------------------------------------------------------
+ local in_a_quote = false
+
+ --------------------------------------------------------------------------------
+ -- Parse the inside of a "-{ ... }"
+ --------------------------------------------------------------------------------
+ function M.meta.splice_content (lx)
+ -- optional "name:" prefix selects which parser reads the content,
+ -- e.g. -{stat: ...}; the default is the expression parser
+ local parser_name = "expr"
+ if lx:is_keyword (lx:peek(2), ":") then
+ local a = lx:next()
+ lx:next() -- skip ":"
+ assert (a.tag=="Id", "Invalid splice parser name")
+ parser_name = a[1]
+ end
+ -- TODO FIXME running a new parser with the old lexer?!
+ local parser = require 'metalua.compiler.parser'.new()
+ local ast = parser [parser_name](lx)
+ if in_a_quote then -- only prevent quotation in this subtree
+ --printf("SPLICE_IN_QUOTE:\n%s", _G.table.tostring(ast, "nohash", 60))
+ return { tag="Splice", ast }
+ else -- convert in a block, eval, replace with result
+ -- outside of a quote, the splice content is compiled and executed
+ -- immediately; normalize it to a block before evaluation
+ if parser_name == "expr" then ast = { { tag="Return", ast } }
+ elseif parser_name == "stat" then ast = { ast }
+ elseif parser_name ~= "block" then
+ error ("splice content must be an expr, stat or block") end
+ --printf("EXEC THIS SPLICE:\n%s", _G.table.tostring(ast, "nohash", 60))
+ return M.meta.eval (ast)
+ end
+ end
+
+ -- full "-{ ... }" splice parser (NOTE(review): _MM is presumably a
+ -- gg.future proxy declared earlier in this file — confirm)
+ M.meta.splice = gg.sequence{ "-{", _MM.splice_content, "}", builder=unpack }
+
+ --------------------------------------------------------------------------------
+ -- Parse the inside of a "+{ ... }"
+ --------------------------------------------------------------------------------
+ function M.meta.quote_content (lx)
+ local parser
+ if lx:is_keyword (lx:peek(2), ":") then -- +{parser: content }
+ local parser_name = M.id(lx)[1]
+ parser = M[parser_name]
+ lx:next() -- skip ":"
+ else -- +{ content }
+ parser = M.expr
+ end
+
+ -- Save and restore the flag (rather than resetting it to false) so that
+ -- nested quotes leave the outer quoting state intact.
+ local prev_iq = in_a_quote
+ in_a_quote = true
+ --print("IN_A_QUOTE")
+ local content = parser (lx)
+ local q_content = M.meta.lift (content)
+ in_a_quote = prev_iq
+ return q_content
+ end
+
+ return M
+end
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-------------------------------------------------------------------------------
+--
+-- Summary: metalua parser, miscellaneous utility functions.
+--
+-------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+--
+-- Exported API:
+-- * [mlp.fget()]
+-- * [mlp.id()]
+-- * [mlp.opt_id()]
+-- * [mlp.id_list()]
+-- * [mlp.string()]
+-- * [mlp.opt_string()]
+-- * [mlp.id2string()]
+--
+--------------------------------------------------------------------------------
+
+local gg = require 'metalua.grammar.generator'
+
+-- TODO: replace splice-aware versions with naive ones, move extensions into ./meta
+
+return function(M)
+ local _M = gg.future(M)
+
+--[[ metaprog-free versions:
+ function M.id(lx)
+ if lx:peek().tag~='Id' then gg.parse_error(lx, "Identifier expected")
+ else return lx:next() end
+ end
+
+ function M.opt_id(lx)
+ -- NOTE(review): comparison fixed; this commented draft returned the
+ -- token precisely when it was *not* an identifier.
+ if lx:peek().tag=='Id' then return lx:next() else return false end
+ end
+
+ function M.string(lx)
+ if lx:peek().tag~='String' then gg.parse_error(lx, "String expected")
+ else return lx:next() end
+ end
+
+ function M.opt_string(lx)
+ -- NOTE(review): comparison fixed here too (was inverted).
+ if lx:peek().tag=='String' then return lx:next() else return false end
+ end
+
+ --------------------------------------------------------------------------------
+ -- Converts an identifier into a string. Hopefully one day it'll handle
+ -- splices gracefully, but that proves quite tricky.
+ --------------------------------------------------------------------------------
+ function M.id2string (id)
+ if id.tag == "Id" then id.tag = "String"; return id
+ else error ("Identifier expected: "..table.tostring(id, 'nohash')) end
+ end
+--]]
+
+ --------------------------------------------------------------------------------
+ -- Try to read an identifier (possibly as a splice), or return [false] if no
+ -- id is found.
+ --------------------------------------------------------------------------------
+ function M.opt_id (lx)
+ local a = lx:peek();
+ if lx:is_keyword (a, "-{") then
+ -- spliced identifier: the splice must produce an `Id (or stay a `Splice
+ -- while inside a quasi-quote)
+ local v = M.meta.splice(lx)
+ if v.tag ~= "Id" and v.tag ~= "Splice" then
+ gg.parse_error(lx, "Bad id splice")
+ end
+ return v
+ elseif a.tag == "Id" then return lx:next()
+ else return false end
+ end
+
+ --------------------------------------------------------------------------------
+ -- Mandatory reading of an id: causes an error if it can't read one.
+ --------------------------------------------------------------------------------
+ function M.id (lx)
+ return M.opt_id (lx) or gg.parse_error(lx,"Identifier expected")
+ end
+
+ --------------------------------------------------------------------------------
+ -- Common helper function
+ --------------------------------------------------------------------------------
+ M.id_list = gg.list { primary = _M.id, separators = "," }
+
+ --------------------------------------------------------------------------------
+ -- Converts an identifier into a string. Hopefully one day it'll handle
+ -- splices gracefully, but that proves quite tricky.
+ --------------------------------------------------------------------------------
+ function M.id2string (id)
+ --print("id2string:", disp.ast(id))
+ if id.tag == "Id" then id.tag = "String"; return id
+ elseif id.tag == "Splice" then
+ -- NOTE(review): everything after this error() is unreachable dead code,
+ -- kept as a sketch of the intended splice support.
+ error ("id2string on splice not implemented")
+ -- Evaluating id[1] will produce `Id{ xxx },
+ -- and we want it to produce `String{ xxx }.
+ -- The following is the plain notation of:
+ -- +{ `String{ `Index{ `Splice{ -{id[1]} }, `Number 1 } } }
+ return { tag="String", { tag="Index", { tag="Splice", id[1] },
+ { tag="Number", 1 } } }
+ else error ("Identifier expected: "..table.tostring(id, 'nohash')) end
+ end
+
+ --------------------------------------------------------------------------------
+ -- Read a string, possibly spliced, or return an error if it can't
+ --------------------------------------------------------------------------------
+ function M.string (lx)
+ local a = lx:peek()
+ if lx:is_keyword (a, "-{") then
+ local v = M.meta.splice(lx)
+ if v.tag ~= "String" and v.tag ~= "Splice" then
+ gg.parse_error(lx,"Bad string splice")
+ end
+ return v
+ elseif a.tag == "String" then return lx:next()
+ else error "String expected" end
+ end
+
+ --------------------------------------------------------------------------------
+ -- Try to read a string, or return false if it can't. No splice allowed.
+ --------------------------------------------------------------------------------
+ function M.opt_string (lx)
+ return lx:peek().tag == "String" and lx:next()
+ end
+
+ --------------------------------------------------------------------------------
+ -- Chunk reader: block + Eof
+ --------------------------------------------------------------------------------
+ function M.skip_initial_sharp_comment (lx)
+ -- Dirty hack: I'm happily fondling lexer's private parts
+ -- FIXME: redundant with lexer:newstream()
+ lx :sync()
+ -- skip a leading "#!..." shebang line, if the source starts with one
+ local i = lx.src:match ("^#.-\n()", lx.i)
+ if i then
+ lx.i = i
+ lx.column_offset = i
+ lx.line = lx.line and lx.line + 1 or 1
+ end
+ end
+
+ -- Parse a whole compilation unit: optional shebang, then a block, then Eof.
+ local function chunk (lx)
+ if lx:peek().tag == 'Eof' then
+ return { } -- handle empty files
+ else
+ M.skip_initial_sharp_comment (lx)
+ local chunk = M.block (lx)
+ if lx:peek().tag ~= "Eof" then
+ gg.parse_error(lx, "End-of-file expected")
+ end
+ return chunk
+ end
+ end
+
+ -- chunk is wrapped in a sequence so that it has a "transformer" field.
+ M.chunk = gg.sequence { chunk, builder = unpack }
+
+ return M
+end
\ No newline at end of file
--- /dev/null
+------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-------------------------------------------------------------------------------
+--
+-- Summary: metalua parser, statement/block parser. This is part of the
+-- definition of module [mlp].
+--
+-------------------------------------------------------------------------------
+
+-------------------------------------------------------------------------------
+--
+-- Exports API:
+-- * [mlp.stat()]
+-- * [mlp.block()]
+-- * [mlp.for_header()]
+--
+-------------------------------------------------------------------------------
+
+local lexer = require 'metalua.grammar.lexer'
+local gg = require 'metalua.grammar.generator'
+
+local annot = require 'metalua.compiler.parser.annot.generator'
+
+--------------------------------------------------------------------------------
+-- List of all keywords that indicate the end of a statement block. Users are
+-- likely to extend this list when designing extensions.
+--------------------------------------------------------------------------------
+
+
+return function(M)
+ local _M = gg.future(M)
+
+ M.block_terminators = { "else", "elseif", "end", "until", ")", "}", "]" }
+
+ -- FIXME: this must be handled from within GG!!!
+ -- FIXME: there's no :add method in the list anyway. Added by gg.list?!
+ function M.block_terminators :add(x)
+ if type (x) == "table" then for _, y in ipairs(x) do self :add (y) end
+ else table.insert (self, x) end
+ end
+
+ ----------------------------------------------------------------------------
+ -- list of statements, possibly followed by semicolons
+ ----------------------------------------------------------------------------
+ M.block = gg.list {
+ name = "statements block",
+ terminators = M.block_terminators,
+ primary = function (lx)
+ -- FIXME use gg.optkeyword()
+ local x = M.stat (lx)
+ if lx:is_keyword (lx:peek(), ";") then lx:next() end
+ return x
+ end }
+
+ ----------------------------------------------------------------------------
+ -- Helper function for "return <expr_list>" parsing.
+ -- Called when parsing return statements.
+ -- The specific test for initial ";" is because it's not a block terminator,
+ -- so without it gg.list would choke on "return ;" statements.
+ -- We don't make a modified copy of block_terminators because this list
+ -- is sometimes modified at runtime, and the return parser would get out of
+ -- sync if it was relying on a copy.
+ ----------------------------------------------------------------------------
+ local return_expr_list_parser = gg.multisequence{
+ { ";" , builder = function() return { } end },
+ default = gg.list {
+ _M.expr, separators = ",", terminators = M.block_terminators } }
+
+
+ local for_vars_list = gg.list{
+ name = "for variables list",
+ primary = _M.id,
+ separators = ",",
+ terminators = "in" }
+
+ ----------------------------------------------------------------------------
+ -- for header, between [for] and [do] (exclusive).
+ -- Return the `Forxxx{...} AST, without the body element (the last one).
+ ----------------------------------------------------------------------------
+ function M.for_header (lx)
+ local vars = M.id_list(lx)
+ if lx :is_keyword (lx:peek(), "=") then
+ -- numeric for: "for v = first, last [, step]"
+ if #vars ~= 1 then
+ gg.parse_error (lx, "numeric for only accepts one variable")
+ end
+ lx:next() -- skip "="
+ local exprs = M.expr_list (lx)
+ if #exprs < 2 or #exprs > 3 then
+ gg.parse_error (lx, "numeric for requires 2 or 3 boundaries")
+ end
+ return { tag="Fornum", vars[1], unpack (exprs) }
+ else
+ -- generic for: "for v1, v2, ... in expr_list"
+ if not lx :is_keyword (lx :next(), "in") then
+ gg.parse_error (lx, '"=" or "in" expected in for loop')
+ end
+ local exprs = M.expr_list (lx)
+ return { tag="Forin", vars, exprs }
+ end
+ end
+
+ ----------------------------------------------------------------------------
+ -- Function def parser helper: id ( . id ) *
+ ----------------------------------------------------------------------------
+ local function fn_builder (list)
+ -- fold {a, b, c} into `Index{ `Index{ a, "b" }, "c" }, keeping lineinfo
+ local acc = list[1]
+ local first = acc.lineinfo.first
+ for i = 2, #list do
+ local index = M.id2string(list[i])
+ local li = lexer.new_lineinfo(first, index.lineinfo.last)
+ acc = { tag="Index", acc, index, lineinfo=li }
+ end
+ return acc
+ end
+ local func_name = gg.list{ _M.id, separators = ".", builder = fn_builder }
+
+ ----------------------------------------------------------------------------
+ -- Function def parser helper: ( : id )?
+ ----------------------------------------------------------------------------
+ local method_name = gg.onkeyword{ name = "method invocation", ":", _M.id,
+ transformers = { function(x) return x and x.tag=='Id' and M.id2string(x) end } }
+
+ ----------------------------------------------------------------------------
+ -- Function def builder
+ ----------------------------------------------------------------------------
+ local function funcdef_builder(x)
+ local name, method, func = unpack(x)
+ if method then
+ -- "function a.b:m(...)": index the name by the method string and
+ -- prepend an implicit 'self' parameter to the function's param list
+ name = { tag="Index", name, method,
+ lineinfo = {
+ first = name.lineinfo.first,
+ last = method.lineinfo.last } }
+ table.insert (func[1], 1, {tag="Id", "self"})
+ end
+ local r = { tag="Set", {name}, {func} }
+ r[1].lineinfo = name.lineinfo
+ r[2].lineinfo = func.lineinfo
+ return r
+ end
+
+
+ ----------------------------------------------------------------------------
+ -- if statement builder
+ ----------------------------------------------------------------------------
+ local function if_builder (x)
+ -- flattens into `If{ cond1, block1, cond2, block2, ..., [else_block] }
+ local cond_block_pairs, else_block, r = x[1], x[2], {tag="If"}
+ local n_pairs = #cond_block_pairs
+ for i = 1, n_pairs do
+ local cond, block = unpack(cond_block_pairs[i])
+ r[2*i-1], r[2*i] = cond, block
+ end
+ if else_block then table.insert(r, #r+1, else_block) end
+ return r
+ end
+
+ --------------------------------------------------------------------------------
+ -- produce a list of (expr,block) pairs
+ --------------------------------------------------------------------------------
+ local elseifs_parser = gg.list {
+ gg.sequence { _M.expr, "then", _M.block , name='elseif parser' },
+ separators = "elseif",
+ terminators = { "else", "end" }
+ }
+
+ -- expression optionally followed by a "#annotation" suffix
+ local annot_expr = gg.sequence {
+ _M.expr,
+ gg.onkeyword{ "#", gg.future(M, 'annot').tf },
+ builder = function(x)
+ local e, a = unpack(x)
+ if a then return { tag='Annot', e, a }
+ else return e end
+ end }
+
+ local annot_expr_list = gg.list {
+ primary = annot.opt(M, _M.expr, 'tf'), separators = ',' }
+
+ ------------------------------------------------------------------------
+ -- assignments and calls: statements that don't start with a keyword
+ ------------------------------------------------------------------------
+ local function assign_or_call_stat_parser (lx)
+ local e = annot_expr_list (lx)
+ local a = lx:is_keyword(lx:peek())
+ local op = a and M.assignments[a]
+ -- TODO: refactor annotations
+ if op then
+ --FIXME: check that [e] is a LHS
+ lx :next()
+ local annots
+ e, annots = annot.split(e)
+ local v = M.expr_list (lx)
+ -- op is either an AST tag string (e.g. "Set") or a builder function
+ if type(op)=="string" then return { tag=op, e, v, annots }
+ else return op (e, v) end
+ else
+ -- no assignment operator: the expression must stand as a statement,
+ -- which Lua only allows for function/method calls
+ assert (#e > 0)
+ if #e > 1 then
+ gg.parse_error (lx,
+ "comma is not a valid statement separator; statement can be "..
+ "separated by semicolons, or not separated at all")
+ elseif e[1].tag ~= "Call" and e[1].tag ~= "Invoke" then
+ local typename
+ if e[1].tag == 'Id' then
+ typename = '("'..e[1][1]..'") is an identifier'
+ elseif e[1].tag == 'Op' then
+ typename = "is an arithmetic operation"
+ else typename = "is of type '"..(e[1].tag or "<list>").."'" end
+ gg.parse_error (lx,
+ "This expression %s; "..
+ "a statement was expected, and only function and method call "..
+ "expressions can be used as statements", typename);
+ end
+ return e[1]
+ end
+ end
+
+ M.local_stat_parser = gg.multisequence{
+ -- local function <name> <func_val>
+ { "function", _M.id, _M.func_val, builder =
+ function(x)
+ local vars = { x[1], lineinfo = x[1].lineinfo }
+ local vals = { x[2], lineinfo = x[2].lineinfo }
+ return { tag="Localrec", vars, vals }
+ end },
+ -- local <id_list> ( = <expr_list> )?
+ default = gg.sequence{
+ gg.list{
+ primary = annot.opt(M, _M.id, 'tf'),
+ separators = ',' },
+ gg.onkeyword{ "=", _M.expr_list },
+ builder = function(x)
+ local annotated_left, right = unpack(x)
+ local left, annotations = annot.split(annotated_left)
+ return {tag="Local", left, right or { }, annotations }
+ end } }
+
+ ------------------------------------------------------------------------
+ -- statement
+ ------------------------------------------------------------------------
+ M.stat = gg.multisequence {
+ name = "statement",
+ { "do", _M.block, "end", builder =
+ function (x) return { tag="Do", unpack (x[1]) } end },
+ { "for", _M.for_header, "do", _M.block, "end", builder =
+ function (x) x[1][#x[1]+1] = x[2]; return x[1] end },
+ { "function", func_name, method_name, _M.func_val, builder=funcdef_builder },
+ { "while", _M.expr, "do", _M.block, "end", builder = "While" },
+ { "repeat", _M.block, "until", _M.expr, builder = "Repeat" },
+ { "local", _M.local_stat_parser, builder = unpack },
+ { "return", return_expr_list_parser, builder =
+ function(x) x[1].tag='Return'; return x[1] end },
+ { "break", builder = function() return { tag="Break" } end },
+ { "-{", gg.future(M, 'meta').splice_content, "}", builder = unpack },
+ { "if", gg.nonempty(elseifs_parser), gg.onkeyword{ "else", M.block }, "end",
+ builder = if_builder },
+ default = assign_or_call_stat_parser }
+
+ M.assignments = {
+ ["="] = "Set"
+ }
+
+ -- Register a new assignment operator: v is either an AST tag string or a
+ -- builder function(lhs_list, rhs_list) -> AST (see assign_or_call_stat_parser).
+ function M.assignments:add(k, v) self[k] = v end
+
+ return M
+end
\ No newline at end of file
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+--
+-- Exported API:
+-- * [M.table_bracket_field()]
+-- * [M.table_field()]
+-- * [M.table_content()]
+-- * [M.table()]
+--
+-- KNOWN BUG: doesn't handle final ";" or "," before final "}"
+--
+--------------------------------------------------------------------------------
+
+local gg = require 'metalua.grammar.generator'
+
+return function(M)
+
+ M.table = { }
+ local _table = gg.future(M.table)
+ local _expr = gg.future(M).expr
+
+ --------------------------------------------------------------------------------
+ -- `[key] = value` table field definition
+ --------------------------------------------------------------------------------
+ M.table.bracket_pair = gg.sequence{ "[", _expr, "]", "=", _expr, builder = "Pair" }
+
+ --------------------------------------------------------------------------------
+ -- table element parser: list value, `id = value` pair or `[value] = value` pair.
+ --------------------------------------------------------------------------------
+ function M.table.element (lx)
+ if lx :is_keyword (lx :peek(), "[") then return M.table.bracket_pair(lx) end
+ local e = M.expr (lx)
+ -- no "=" after the expression: plain array-part value
+ if not lx :is_keyword (lx :peek(), "=") then return e end
+ lx :next(); -- skip the "="
+ local key = M.id2string(e) -- will fail on non-identifiers
+ local val = M.expr(lx)
+ local r = { tag="Pair", key, val }
+ r.lineinfo = { first = key.lineinfo.first, last = val.lineinfo.last }
+ return r
+ end
+
+ -----------------------------------------------------------------------------
+ -- table constructor, without enclosing braces; returns a full table object
+ -----------------------------------------------------------------------------
+ M.table.content = gg.list {
+ -- eta expansion to allow patching the element definition
+ primary = _table.element,
+ separators = { ",", ";" },
+ terminators = "}",
+ builder = "Table" }
+
+ --------------------------------------------------------------------------------
+ -- complete table constructor including [{...}]
+ --------------------------------------------------------------------------------
+ -- TODO beware, stat and expr use only table.content, this can't be patched.
+ M.table.table = gg.sequence{ "{", _table.content, "}", builder = unpack }
+
+ return M
+end
\ No newline at end of file
--- /dev/null
+-{ extension ('match', ...) }
+
+-- $-macros: user code registers named macros in M.register, then `$name`
+-- or `$name(args...)` in source is expanded at compile time.
+
+local gg = require 'metalua.grammar.generator'
+
+local M = { }
+
+-- Macro registry: maps a macro name to either a function (called to produce
+-- the replacement AST) or a constant AST table.
+M.register = { }
+
+-- Expand a parsed `$...` expression:
+-- * `Call{`Id{name}, args...}: call the registered function with the args;
+-- * `Id{name}: call the registered function with no args, or return the
+--   registered constant AST directly.
+local function dollar_builder(e)
+ match e with
+ | `Call{ `Id{name}, ... } ->
+ local entry = M.register[name] or error ("No macro "..name.." registered")
+ return entry(select(2, unpack(e)))
+ | `Id{name} ->
+ -- Fixed: this branch used to read the undefined global `dollar`
+ -- instead of M.register.
+ local entry = M.register[name] or error ("No macro "..name.." registered")
+ match type(entry) with
+ | 'function' -> return entry()
+ | 'table' -> return entry -- constant AST
+ | t -> error ("Invalid macro type "..t)
+ end
+ | _ -> error "Invalid $macro, '$' must be followed by an identifier or function call"
+ end
+end
+
+-- Plug the `$` prefix operator and the `$` statement into grammar module
+-- `mlp`. Fixed: the parameter used to shadow the module table M (breaking
+-- dollar_builder's registry lookups), and the grammar tables were then
+-- accessed through a gg.future proxy, which doesn't expose `.prefix`/`:add`;
+-- the future proxy is now only used for the lazy _M.expr reference.
+function M.extend(mlp)
+ local _M = gg.future(mlp)
+ mlp.expr.prefix :add {
+ '$', prec = 100, builder = |_, x| dollar_builder(x) }
+ mlp.stat :add{
+ '$', _M.expr, builder = |x| dollar_builder(x[1]) }
+end
+
+return M
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+--
+-- This extension implements list comprehensions, similar to Haskell and
+-- Python syntax, to easily describe lists.
+--
+-- * x[a ... b] is the list { x[a], x[a+1], ..., x[b] }
+-- * { f()..., b } contains all the elements returned by f(), then b
+-- (allows to expand list fields other than the last one)
+-- * list comprehensions a la python, with "for" and "if" suffixes:
+-- {i+10*j for i=1,3 for j=1,3 if i~=j} is { 21, 31, 12, 32, 13, 23 }
+--
+-------------------------------------------------------------------------------
+
+-{ extension ("match", ...) }
+
+-- Feature flags for the individual sub-extensions; the assert below checks
+-- the one dependency between them.
+local SUPPORT_IMPROVED_LOOPS = true
+local SUPPORT_IMPROVED_INDEXES = false -- depends on deprecated table.isub
+local SUPPORT_CONTINUE = true
+local SUPPORT_COMP_LISTS = true
+
+assert (SUPPORT_IMPROVED_LOOPS or not SUPPORT_CONTINUE,
+ "Can't support 'continue' without improved loop headers")
+
+local gg = require 'metalua.grammar.generator'
+local Q = require 'metalua.treequery'
+
+-- "expr ..." table suffix: wrap the element so the table builder expands
+-- every value the expression returns, not just the first one.
+local function dots_list_suffix_builder (x) return `DotsSuffix{ x } end
+
+-- "for" comprehension suffix: append the new loop header to an existing
+-- `Comp node, or create a `Comp{ element, { header } } on first use.
+local function for_list_suffix_builder (list_element, suffix)
+ local new_header = suffix[1]
+ match list_element with
+ | `Comp{ _, acc } -> table.insert (acc, new_header); return list_element
+ | _ -> return `Comp{ list_element, { new_header } }
+ end
+end
+
+-- "if" comprehension suffix: same accumulation as "for", with the
+-- condition wrapped in an `If header node.
+local function if_list_suffix_builder (list_element, suffix)
+ local new_header = `If{ suffix[1] }
+ match list_element with
+ | `Comp{ _, acc } -> table.insert (acc, new_header); return list_element
+ | _ -> return `Comp{ list_element, { new_header } }
+ end
+end
+
+-- Builds a statement from a table element, which adds this element to
+-- a table `t`, potentially thanks to an alias `tinsert` to
+-- `table.insert`.
+-- @param core the part around which the loops are built.
+-- either `DotsSuffix{expr}, `Pair{ expr } or a plain expression
+-- @param list comprehension suffixes, in the order in which they appear
+-- either `Forin{ ... } or `Fornum{ ...} or `If{ ... }. In each case,
+-- it misses a last child node as its body.
+-- @param t a variable containing the table to fill
+-- @param tinsert a variable containing `table.insert`.
+--
+-- @return fill a statement which fills empty table `t` with the denoted element
+local function comp_list_builder(core, list, t, tinsert)
+ local filler
+ -- 1 - Build the loop's core: if it has suffix "...", every elements of the
+ -- multi-return must be inserted, hence the extra [for] loop.
+ match core with
+ | `DotsSuffix{ element } ->
+ local x = gg.gensym()
+ filler = +{stat: for _, -{x} in pairs{ -{element} } do (-{tinsert})(-{t}, -{x}) end }
+ | `Pair{ key, value } ->
+ --filler = +{ -{t}[-{key}] = -{value} }
+ filler = `Set{ { `Index{ t, key } }, { value } }
+ | _ -> filler = +{ (-{tinsert})(-{t}, -{core}) }
+ end
+
+ -- 2 - Stack the `if` and `for` control structures, from outside to inside.
+ -- This is done in a destructive way for the elements of [list].
+ for i = #list, 1, -1 do
+ -- each header node receives the previous filler as its body block
+ table.insert (list[i], {filler})
+ filler = list[i]
+ end
+
+ return filler
+end
+
+-- Build the AST for a literal table's content: if no element carries a
+-- comprehension or "..." suffix, keep the plain `Table node; otherwise emit
+-- a `Stat block which fills a fresh gensym'd table imperatively.
+local function table_content_builder (list)
+ local special = false -- Does the table need a special builder?
+ for _, element in ipairs(list) do
+ local etag = element.tag
+ if etag=='Comp' or etag=='DotsSuffix' then special=true; break end
+ end
+ if not special then list.tag='Table'; return list end
+
+ -- aliases for the result table and table.insert, shared by all fillers
+ local t, tinsert = gg.gensym 'table', gg.gensym 'table_insert'
+ local filler_block = { +{stat: local -{t}, -{tinsert} = { }, table.insert } }
+ for _, element in ipairs(list) do
+ local filler
+ match element with
+ | `Comp{ core, comp } -> filler = comp_list_builder(core, comp, t, tinsert)
+ | _ -> filler = comp_list_builder(element, { }, t, tinsert)
+ end
+ table.insert(filler_block, filler)
+ end
+ return `Stat{ filler_block, t }
+end
+
+
+--------------------------------------------------------------------------------
+-- Back-end for improved index operator.
+local function index_builder(a, suffix)
+ match suffix[1] with
+ -- Single index, no range: keep the native semantics
+ | { { e, false } } -> return `Index{ a, e }
+ -- Either a range, or multiple indexes, or both
+ | ranges ->
+ -- flatten every "e" / "e ... f" item into (from, to) pairs of
+ -- arguments for a table.isub call
+ local r = `Call{ +{table.isub}, a }
+ local function acc (x,y) table.insert (r,x); table.insert (r,y) end
+ for _, seq in ipairs (ranges) do
+ match seq with
+ | { e, false } -> acc(e,e)
+ | { e, f } -> acc(e,f)
+ end
+ end
+ return r
+ end
+end
+
+-------------------------------------------------------------------
+-- Find continue statements in a loop body, change them into goto
+-- end-of-body.
+local function transform_continue_statements(body)
+ -- collect `Continue nodes belonging to this loop only (not to nested loops)
+ local continue_statements = Q(body)
+ :if_unknown() -- tolerate unknown 'Continue' statements
+ :not_under ('Forin', 'Fornum', 'While', 'Repeat')
+ :filter ('Continue')
+ :list()
+ if next(continue_statements) then
+ -- append a label at the end of the body, and rewrite each continue
+ -- in place into a goto targeting it
+ local continue_label = gg.gensym 'continue' [1]
+ table.insert(body, `Label{ continue_label })
+ for _, statement in ipairs(continue_statements) do
+ statement.tag = 'Goto'
+ statement[1] = continue_label
+ end
+ return true
+ else return false end
+end
+
+-------------------------------------------------------------------------------
+-- Back-end for loops with a multi-element header
+local function loop_builder(x)
+ local first, elements, body = unpack(x)
+
+ -- Change continue statements into gotos.
+ if SUPPORT_CONTINUE then transform_continue_statements(body) end
+
+ -------------------------------------------------------------------
+ -- If it's a regular loop, don't bloat the code
+ if not next(elements) then
+ table.insert(first, body)
+ return first
+ end
+
+ -------------------------------------------------------------------
+ -- There's no reason to treat the first element in a special way
+ table.insert(elements, 1, first)
+
+ -------------------------------------------------------------------
+ -- Change breaks into gotos that escape all loops at once.
+ local exit_label = nil
+ local function break_to_goto(break_node)
+ -- the exit label is created lazily, so loops without breaks don't get one
+ if not exit_label then exit_label = gg.gensym 'break' [1] end
+ break_node = break_node or { }
+ break_node.tag = 'Goto'
+ break_node[1] = exit_label
+ return break_node
+ end
+ Q(body)
+ :not_under('Function', 'Forin', 'Fornum', 'While', 'Repeat')
+ :filter('Break')
+ :foreach (break_to_goto)
+
+ -------------------------------------------------------------------
+ -- Compile all headers elements, from last to first.
+ -- invariant: `body` is a block (not a statement)
+ local result = body
+ for i = #elements, 1, -1 do
+ local e = elements[i]
+ match e with
+ | `If{ cond } ->
+ result = { `If{ cond, result } }
+ | `Until{ cond } ->
+ result = +{block: if -{cond} then -{break_to_goto()} else -{result} end }
+ | `While{ cond } ->
+ if i==1 then result = { `While{ cond, result } } -- top-level while
+ else result = +{block: if -{cond} then -{result} else -{break_to_goto()} end } end
+ | `Forin{ ... } | `Fornum{ ... } ->
+ table.insert (e, result); result={e}
+ | _-> require'metalua.pprint'.printf("Bad loop header element %s", e)
+ end
+ end
+
+
+ -------------------------------------------------------------------
+ -- If some breaks had to be changed into gotos, insert the label
+ if exit_label then result = { result, `Label{ exit_label } } end
+
+ return result
+end
+
+
+--------------------------------------------------------------------------------
+-- Improved "[...]" index operator:
+-- * support for multi-indexes ("foo[bar, gnat]")
+-- * support for ranges ("foo[bar ... gnat]")
+--------------------------------------------------------------------------------
+local function extend(M)
+
+ local _M = gg.future(M)
+
+ if SUPPORT_COMP_LISTS then
+ -- support for "for" / "if" comprehension suffixes in literal tables
+ local original_table_element = M.table.element
+ M.table.element = gg.expr{ name="table cell",
+ primary = original_table_element,
+ suffix = { name="table cell suffix",
+ { "...", builder = dots_list_suffix_builder },
+ { "for", _M.for_header, builder = for_list_suffix_builder },
+ { "if", _M.expr, builder = if_list_suffix_builder } } }
+ M.table.content.builder = table_content_builder
+ end
+
+ if SUPPORT_IMPROVED_INDEXES then
+ -- Support for ranges and multiple indices in bracket suffixes
+ M.expr.suffix:del '['
+ M.expr.suffix:add{ name="table index/range",
+ "[", gg.list{
+ gg.sequence { _M.expr, gg.onkeyword{ "...", _M.expr } } ,
+ separators = { ",", ";" } },
+ "]", builder = index_builder }
+ end
+
+ if SUPPORT_IMPROVED_LOOPS then
+ -- replace the native "for"/"while" statements with versions which
+ -- accept a list of extra loop-header suffixes before "do"
+ local original_for_header = M.for_header
+ M.stat :del 'for'
+ M.stat :del 'while'
+
+ M.loop_suffix = gg.multisequence{
+ { 'while', _M.expr, builder = |x| `Until{ `Op{ 'not', x[1] } } },
+ { 'until', _M.expr, builder = |x| `Until{ x[1] } },
+ { 'if', _M.expr, builder = |x| `If{ x[1] } },
+ { 'for', original_for_header, builder = |x| x[1] } }
+
+ M.loop_suffix_list = gg.list{ _M.loop_suffix, terminators='do' }
+
+ M.stat :add{
+ 'for', original_for_header, _M.loop_suffix_list, 'do', _M.block, 'end',
+ builder = loop_builder }
+
+ M.stat :add{
+ 'while', _M.expr, _M.loop_suffix_list, 'do', _M.block, 'end',
+ builder = |x| loop_builder{ `While{x[1]}, x[2], x[3] } }
+ end
+
+ if SUPPORT_CONTINUE then
+ -- register 'continue' as a keyword and as a statement
+ M.lexer :add 'continue'
+ M.stat :add{ 'continue', builder='Continue' }
+ end
+end
+
+return extend
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-------------------------------------------------------------------------------
+--
+-- Glossary:
+--
+-- * term_seq: the tested stuff, a sequence of terms
+-- * pattern_element: might match one term of a term seq. Represented
+-- as expression ASTs.
+-- * pattern_seq: might match a term_seq
+-- * pattern_group: several pattern seqs, one of them might match
+-- the term seq.
+-- * case: pattern_group * guard option * block
+-- * match_statement: tested term_seq * case list
+--
+-- Hence a complete match statement is a:
+--
+-- { list(expr), list{ list(list(expr)), expr or false, block } }
+--
+-- Implementation hints
+-- ====================
+--
+-- The implementation is made as modular as possible, so that parts
+-- can be reused in other extensions. The privileged way to share
+-- contextual information across functions is through the 'cfg' table
+-- argument. Its fields include:
+--
+-- * code: code generated from pattern. A pattern_(element|seq|group)
+-- is compiled as a sequence of instructions which will jump to
+-- label [cfg.on_failure] if the tested term doesn't match.
+--
+-- * on_failure: name of the label where the code will jump if the
+-- pattern doesn't match
+--
+-- * locals: names of local variables used by the pattern. This
+-- includes bound variables, and temporary variables used to
+-- destructure tables. Names are stored as keys of the table,
+-- values are meaningless.
+--
+-- * after_success: label where the code must jump after a pattern
+-- succeeded to capture a term, and the guard succeeded if there is
+-- any, and the conditional block has run.
+--
+-- * ntmp: number of temporary variables used to destructure tables
+-- in the current case.
+--
+-- Code generation is performed by acc_xxx() functions, which accumulate
+-- code in cfg.code:
+--
+-- * acc_test(test, cfg) will generate a jump to cfg.on_failure
+-- *when the test returns TRUE*
+--
+-- * acc_stat accumulates a statement
+--
+-- * acc_assign accumulates an assignment statement, and makes sure that
+-- the LHS variable is registered as local in cfg.locals.
+--
+-------------------------------------------------------------------------------
+
+-- TODO: hygiene wrt type()
+-- TODO: cfg.ntmp isn't reset as often as it could. I'm not even sure
+-- the corresponding locals are declared.
+
+
+local gg = require 'metalua.grammar.generator'
+local pp = require 'metalua.pprint'
+
+----------------------------------------------------------------------
+-- This would have been best done through library 'metalua.walk',
+-- but walk depends on match, so we have to break the dependency.
+-- It replaces all instances of `...' in `ast' with `term', unless
+-- it appears in a function.
+----------------------------------------------------------------------
+-- Replace every `Dots node reachable from [ast] with [term], without
+-- descending into nested `Function bodies (their `...' belongs to them).
+-- When [term]=='ambiguous' (i.e. `...' was captured more than once on the
+-- pattern side), any use of `...' on the right-hand side is an error.
+-- Mutates [ast] in place; the return value is not meaningful.
+local function replace_dots (ast, term)
+    local function rec (node)
+        for i, child in ipairs(node) do
+            if type(child)~="table" then -- pass
+            elseif child.tag=='Dots' then
+                if term=='ambiguous' then
+                    error ("You can't use `...' on the right of a match case when it appears "..
+                           "more than once on the left")
+                else node[i] = term end
+            elseif child.tag=='Function' then
+                -- BUGFIX: this used to `return nil', which aborted the
+                -- traversal of the *siblings* following a nested function,
+                -- leaving their `...' unsubstituted. We must only skip the
+                -- function's own body, i.e. not recurse into it.
+            else rec(child) end
+        end
+    end
+    return rec(ast)
+end
+
+-- Base name shared by all destructuring temporaries; taken from gensym so
+-- it can never collide with a user identifier.
+local tmpvar_base = gg.gensym 'submatch.' [1]
+
+-- Return a fresh `Id node for the next destructuring temporary of the
+-- current pattern; cfg.ntmp counts how many are live (reset per pattern).
+local function next_tmpvar(cfg)
+    assert (cfg.ntmp, "No cfg.ntmp imbrication level in the match compiler")
+    cfg.ntmp = cfg.ntmp+1
+    return `Id{ tmpvar_base .. cfg.ntmp }
+end
+
+-- Code accumulators
+-- acc_stat: append statement [x] to cfg.code.
+local acc_stat = |x,cfg| table.insert (cfg.code, x)
+-- acc_test: append "if x then goto cfg.on_failure end" — i.e. the jump is
+-- taken when the *mismatch* test [x] evaluates to true.
+local acc_test = |x,cfg| acc_stat(+{stat: if -{x} then -{`Goto{cfg.on_failure}} end}, cfg)
+-- lhs :: `Id{ string }
+-- rhs :: expr
+-- Append "lhs = rhs" and record lhs as a local of the current case.
+local function acc_assign (lhs, rhs, cfg)
+    assert(lhs.tag=='Id')
+    cfg.locals[lhs[1]] = true
+    acc_stat (`Set{ {lhs}, {rhs} }, cfg)
+end
+
+-- AST tags whose nodes can be compared against the tested term with ~=.
+local literal_tags = { String=1, Number=1, True=1, False=1, Nil=1 }
+
+-- pattern :: `Id{ string }
+-- term :: expr
+-- Compile an identifier pattern: "_" matches anything; an already-bound
+-- name is checked for equality (non-linear pattern); a free name is bound
+-- to the tested term.
+local function id_pattern_element_builder (pattern, term, cfg)
+    assert (pattern.tag == "Id")
+    if pattern[1] == "_" then
+        -- "_" is used as a dummy var ==> no assignment, no == checking
+        cfg.locals._ = true
+    elseif cfg.locals[pattern[1]] then
+        -- This var is already bound ==> test for equality
+        acc_test (+{ -{term} ~= -{pattern} }, cfg)
+    else
+        -- Free var ==> bind it, and remember it for later linearity checking
+        acc_assign (pattern, term, cfg)
+        cfg.locals[pattern[1]] = true
+    end
+end
+
+-- mutually recursive with table_pattern_element_builder
+local pattern_element_builder
+
+-- pattern :: pattern and `Table{ }
+-- term :: expr
+-- Compile a table pattern: check the term is a table, destructure each
+-- key/value sub-pattern, and either check arity or capture the tail as
+-- `...' when the pattern ends with a `Dots.
+local function table_pattern_element_builder (pattern, term, cfg)
+    local seen_dots, len = false, 0
+    acc_test (+{ type( -{term} ) ~= "table" }, cfg)
+    for i = 1, #pattern do
+        local key, sub_pattern
+        if pattern[i].tag=="Pair" then -- Explicit key/value pair
+            key, sub_pattern = unpack (pattern[i])
+            assert (literal_tags[key.tag], "Invalid key")
+        else -- Implicit key
+            len, key, sub_pattern = len+1, `Number{ len+1 }, pattern[i]
+        end
+
+        -- '...' can only appear in final position
+        -- Could be fixed actually...
+        assert (not seen_dots, "Wrongly placed `...' ")
+
+        if sub_pattern.tag == "Id" then
+            -- Optimization: save a useless [ v(n+1)=v(n).key ]
+            id_pattern_element_builder (sub_pattern, `Index{ term, key }, cfg)
+            if sub_pattern[1] ~= "_" then
+                acc_test (+{ -{sub_pattern} == nil }, cfg)
+            end
+        elseif sub_pattern.tag == "Dots" then
+            -- Remember where the capture is, and that arity checking shouldn't occur
+            seen_dots = true
+        else
+            -- Business as usual:
+            local v2 = next_tmpvar(cfg)
+            acc_assign (v2, `Index{ term, key }, cfg)
+            pattern_element_builder (sub_pattern, v2, cfg)
+            -- TODO: restore ntmp?
+        end
+    end
+    if seen_dots then -- remember how to retrieve `...'
+        -- FIXME: check, but there might be cases where the variable -{term}
+        -- will be overridden in contrived tables.
+        -- ==> save it now, and clean the setting statement if unused
+        if cfg.dots_replacement then cfg.dots_replacement = 'ambiguous'
+        else cfg.dots_replacement = +{ select (-{`Number{len}}, unpack(-{term})) } end
+    else -- Check arity
+        acc_test (+{ #-{term} ~= -{`Number{len}} }, cfg)
+    end
+end
+
+-- mutually recursive with pattern_element_builder; forward-declared so the
+-- dispatcher below can reference them. NOTE: their later definitions must
+-- *assign* these locals (plain "function name(...)", no fresh 'local'),
+-- otherwise the upvalues captured below would remain nil.
+local eq_pattern_element_builder, regexp_pattern_element_builder
+
+-- Concatenate code in [cfg.code], that will jump to label
+-- [cfg.on_failure] if [pattern] doesn't match [term]. [pattern]
+-- should be an identifier, or at least cheap to compute and
+-- side-effects free.
+--
+-- pattern :: pattern_element
+-- term :: expr
+-- Dispatch on the pattern's AST tag: literal, identifier, "/" regexp
+-- pattern, "==" conjunction pattern, or table destructuring.
+function pattern_element_builder (pattern, term, cfg)
+    if literal_tags[pattern.tag] then
+        acc_test (+{ -{term} ~= -{pattern} }, cfg)
+    elseif "Id" == pattern.tag then
+        id_pattern_element_builder (pattern, term, cfg)
+    elseif "Op" == pattern.tag and "div" == pattern[1] then
+        regexp_pattern_element_builder (pattern, term, cfg)
+    elseif "Op" == pattern.tag and "eq" == pattern[1] then
+        eq_pattern_element_builder (pattern, term, cfg)
+    elseif "Table" == pattern.tag then
+        table_pattern_element_builder (pattern, term, cfg)
+    else
+        error ("Invalid pattern at "..
+               tostring(pattern.lineinfo)..
+               ": "..pp.tostring(pattern, {hide_hash=true}))
+    end
+end
+
+-- Compile "pat1 == pat2": the same term must match both sub-patterns.
+-- Assigns the forward-declared local above (no 'local' keyword here).
+-- cfg.ntmp is restored between the two so both start from the same
+-- temporary-variable level.
+function eq_pattern_element_builder (pattern, term, cfg)
+    local _, pat1, pat2 = unpack (pattern)
+    local ntmp_save = cfg.ntmp
+    pattern_element_builder (pat1, term, cfg)
+    cfg.ntmp = ntmp_save
+    pattern_element_builder (pat2, term, cfg)
+end
+
+-- pattern :: `Op{ 'div', string, list{`Id string} or `Id{ string }}
+-- term :: expr
+-- Compile "regexp / sub_pattern": the term must be a string; it is matched
+-- against the Lua pattern [regexp], and the resulting capture list is then
+-- destructured by [sub_pattern].
+--
+-- BUGFIX 1: this definition must *assign* the forward-declared local (see
+-- "local eq_pattern_element_builder, regexp_pattern_element_builder"
+-- above). The previous "local function" form declared a brand-new local,
+-- so the upvalue captured by pattern_element_builder stayed nil and every
+-- "/" pattern crashed with "attempt to call a nil value".
+-- BUGFIX 2: the generated code called string.strmatch, which does not
+-- exist in stock Lua; the standard library function is string.match.
+function regexp_pattern_element_builder (pattern, term, cfg)
+    local op, regexp, sub_pattern = unpack(pattern)
+
+    -- Sanity checks --
+    assert (op=='div', "Don't know what to do with that op in a pattern")
+    assert (regexp.tag=="String",
+            "Left hand side operand for '/' in a pattern must be "..
+            "a literal string representing a regular expression")
+    if sub_pattern.tag=="Table" then
+        for _, x in ipairs(sub_pattern) do
+            assert (x.tag=="Id" or x.tag=='Dots',
+                    "Right hand side operand for '/' in a pattern must be "..
+                    "a list of identifiers")
+        end
+    else
+        assert (sub_pattern.tag=="Id",
+                "Right hand side operand for '/' in a pattern must be "..
+                "an identifier or a list of identifiers")
+    end
+
+    -- Regexp patterns can only match strings
+    acc_test (+{ type(-{term}) ~= 'string' }, cfg)
+    -- put all captures in a list
+    local capt_list = +{ { string.match(-{term}, -{regexp}) } }
+    -- save them in a var_n for recursive decomposition
+    local v2 = next_tmpvar(cfg)
+    acc_stat (+{stat: local -{v2} = -{capt_list} }, cfg)
+    -- was capture successful?
+    acc_test (+{ not next (-{v2}) }, cfg)
+    pattern_element_builder (sub_pattern, v2, cfg)
+end
+
+
+-- Compile a whole pattern sequence against [term_seq]: jumps to
+-- [cfg.on_failure] if pattern_seq doesn't match term_seq.
+-- Also resets the per-alternative state (bound locals, tmp var counter).
+local function pattern_seq_builder (pattern_seq, term_seq, cfg)
+    if #pattern_seq ~= #term_seq then error ("Bad seq arity") end
+    cfg.locals = { } -- reset bound variables between alternatives
+    for i=1, #pattern_seq do
+        cfg.ntmp = 1 -- reset the tmp var generator
+        pattern_element_builder(pattern_seq[i], term_seq[i], cfg)
+    end
+end
+
+--------------------------------------------------
+-- Compile one "case": several alternative pattern sequences, an optional
+-- guard, and a body block. Generated shape:
+-- for each case i:
+-- pattern_seq_builder_i:
+-- * on failure, go to on_failure_i
+-- * on success, go to on_success
+-- label on_success:
+-- block
+-- goto after_success
+-- label on_failure_i
+--------------------------------------------------
+local function case_builder (case, term_seq, cfg)
+    local patterns_group, guard, block = unpack(case)
+    local on_success = gg.gensym 'on_success' [1]
+    for i = 1, #patterns_group do
+        local pattern_seq = patterns_group[i]
+        -- each alternative gets its own failure label
+        cfg.on_failure = gg.gensym 'match_fail' [1]
+        cfg.dots_replacement = false
+        pattern_seq_builder (pattern_seq, term_seq, cfg)
+        if i<#patterns_group then
+            acc_stat (`Goto{on_success}, cfg)
+            acc_stat (`Label{cfg.on_failure}, cfg)
+        end
+    end
+    acc_stat (`Label{on_success}, cfg)
+    -- a failing guard falls through to the last alternative's failure label
+    if guard then acc_test (+{not -{guard}}, cfg) end
+    if cfg.dots_replacement then
+        replace_dots (block, cfg.dots_replacement)
+    end
+    block.tag = 'Do'
+    acc_stat (block, cfg)
+    acc_stat (`Goto{cfg.after_success}, cfg)
+    acc_stat (`Label{cfg.on_failure}, cfg)
+end
+
+-- Compile a whole match statement: x == { tested term list, case list }.
+-- Returns a `Do block: tested terms copied into gensym'd locals, then the
+-- compiled cases in order, then a runtime mismatch error, then the shared
+-- after_success label.
+-- NOTE(review): after_success is kept as a full `Id node here, whereas the
+-- case labels use the bare [1] string — confirm `Goto/`Label accept both.
+local function match_builder (x)
+    local term_seq, cases = unpack(x)
+    local cfg = {
+        code = `Do{ },
+        after_success = gg.gensym "_after_success" }
+
+
+    -- Some sharing issues occur when modifying term_seq,
+    -- so it's replaced by a copy new_term_seq.
+    -- TODO: clean that up, and re-suppress the useless copies
+    -- (cf. remarks about capture bug below).
+    local new_term_seq = { }
+
+    local match_locals
+
+    -- Make sure that all tested terms are variables or literals
+    for i=1, #term_seq do
+        local t = term_seq[i]
+        -- Capture problem: the following would compile wrongly:
+        -- `match x with x -> end'
+        -- Temporary workaround: suppress the condition, so that
+        -- all external variables are copied into unique names.
+        --if t.tag ~= 'Id' and not literal_tags[t.tag] then
+        local v = gg.gensym 'v'
+        if not match_locals then match_locals = `Local{ {v}, {t} } else
+            table.insert(match_locals[1], v)
+            table.insert(match_locals[2], t)
+        end
+        new_term_seq[i] = v
+        --end
+    end
+    term_seq = new_term_seq
+
+    if match_locals then acc_stat(match_locals, cfg) end
+
+    for i=1, #cases do
+        -- each case compiles into its own sub-config, so its locals can be
+        -- declared at the top of its own `Do block below
+        local case_cfg = {
+            after_success = cfg.after_success,
+            code = `Do{ }
+            -- locals = { } -- unnecessary, done by pattern_seq_builder
+        }
+        case_builder (cases[i], term_seq, case_cfg)
+        if next (case_cfg.locals) then
+            local case_locals = { }
+            table.insert (case_cfg.code, 1, `Local{ case_locals, { } })
+            for v, _ in pairs (case_cfg.locals) do
+                table.insert (case_locals, `Id{ v })
+            end
+        end
+        acc_stat(case_cfg.code, cfg)
+    end
+    -- no case matched: raise a runtime error with the source position
+    local li = `String{tostring(cases.lineinfo)}
+    acc_stat(+{error('mismatch at '..-{li})}, cfg)
+    acc_stat(`Label{cfg.after_success}, cfg)
+    return cfg.code
+end
+
+----------------------------------------------------------------------
+-- Syntactical front-end
+----------------------------------------------------------------------
+
+-- Register the "match ... with ... end" statement into parser table [M].
+local function extend(M)
+
+    local _M = gg.future(M)
+
+    -- 'checks' validates extend's argument type (checks library convention)
+    checks('metalua.compiler.parser')
+    M.lexer:add{ "match", "with", "->" }
+    M.block.terminators:add "|"
+
+    -- "pat1, pat2 | pat3 [if guard] -> block", separated by "|"
+    local match_cases_list_parser = gg.list{ name = "match cases list",
+        gg.sequence{ name = "match case",
+            gg.list{ name = "match case patterns list",
+                primary = _M.expr_list,
+                separators = "|",
+                terminators = { "->", "if" } },
+            gg.onkeyword{ "if", _M.expr, consume = true },
+            "->",
+            _M.block },
+        separators = "|",
+        terminators = "end" }
+
+    M.stat:add{ name = "match statement",
+        "match",
+        _M.expr_list,
+        "with", gg.optkeyword "|",
+        match_cases_list_parser,
+        "end",
+        -- x[2] is the optional leading "|", hence x[1] and x[3]
+        builder = |x| match_builder{ x[1], x[3] } }
+end
+
+return extend
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+require 'metalua.extension.match'
+
+module ('spmatch', package.seeall)
+
+require 'metalua.walk.id'
+
+----------------------------------------------------------------------
+-- Back-end for statements
+-- "match function ..." and "local match function...".
+-- Tag must be either "Localrec" or "Set".
+-- NOTE(review): this legacy module relies on globals 'mlp' and
+-- 'match_builder' being provided by the metalua environment; the match
+-- extension above keeps match_builder local — confirm this file still
+-- loads in the current code base.
+----------------------------------------------------------------------
+named_match_function_builder = |tag| function (x)
+    local func_name, _, cases = unpack(x)
+    -- arity = number of patterns in the first case's first alternative;
+    -- all generated parameters arg.1..arg.N come from it.
+    local arity = #cases[1][1][1]
+    if arity==0 then
+        -- NOTE(review): arity==0 means an *empty pattern list*, yet the
+        -- message speaks of a missing case — confirm intended wording.
+        error "There must be at least 1 case in match function"
+    end
+    local args = { }
+    for i=1, arity do args[i] = mlp.gensym("arg."..i) end
+    local body = match_builder{args, cases}
+    return { tag=tag, {func_name}, { `Function{ args, {body} } } }
+end
+
+-- Get rid of the former parser, it will be blended in a multiseq:
+mlp.stat:del 'match'
+
+----------------------------------------------------------------------
+-- "match function", "match ... with"
+-- NOTE(review): 'match_cases_list_parser' is referenced as a global here,
+-- but the match extension above declares it as a local — this legacy file
+-- likely predates that refactoring; confirm before reuse.
+----------------------------------------------------------------------
+mlp.stat:add{ 'match',
+    gg.multisequence{
+
+        ----------------------------------------------------------------
+        -- Shortcut for declaration of functions containing only a match:
+        -- "function f($1) match $1 with $2 end end" can be written:
+        -- "match function f $2 end"
+        ----------------------------------------------------------------
+        { 'function', mlp.expr, gg.optkeyword '|',
+          match_cases_list_parser, 'end',
+          builder = named_match_function_builder 'Set' },
+
+        ----------------------------------------------------------------
+        -- Reintroduce the original match statement:
+        ----------------------------------------------------------------
+        default = gg.sequence{
+            mlp.expr_list, 'with', gg.optkeyword '|',
+            match_cases_list_parser, 'end',
+            builder = |x| match_builder{ x[1], x[3] } } } }
+
+----------------------------------------------------------------------
+-- Shortcut: "local match function f $cases end" translates to:
+-- "local function f($args) match $args with $cases end end"
+----------------------------------------------------------------------
+mlp.stat:get'local'[2]:add{
+    'match', 'function', mlp.expr, gg.optkeyword '|',
+    match_cases_list_parser, 'end',
+    builder = named_match_function_builder 'Localrec' }
+
+----------------------------------------------------------------------
+-- "match...with" expressions and "match function..."
+----------------------------------------------------------------------
+mlp.expr:add{ 'match', builder = |x| x[1], gg.multisequence{
+
+    ----------------------------------------------------------------
+    -- Anonymous match functions:
+    -- "function ($1) match $1 with $2 end end" can be written:
+    -- "match function $2 end"
+    ----------------------------------------------------------------
+    { 'function', gg.optkeyword '|',
+      match_cases_list_parser,
+      'end',
+      builder = function(x)
+          local _, cases = unpack(x)
+          local v = mlp.gensym()
+          local body = match_builder{v, cases}
+          return `Function{ {v}, {body} }
+      end },
+
+    ----------------------------------------------------------------
+    -- match expressions: you can put a match where an expression
+    -- is expected. The case bodies are then expected to be
+    -- expressions, not blocks.
+    ----------------------------------------------------------------
+    default = gg.sequence{
+        mlp.expr_list, 'with', gg.optkeyword '|',
+        gg.list{ name = "match cases list",
+            gg.sequence{ name = "match expr case",
+                gg.list{ name = "match expr case patterns list",
+                    primary = mlp.expr_list,
+                    separators = "|",
+                    terminators = { "->", "if" } },
+                gg.onkeyword{ "if", mlp.expr, consume = true },
+                "->",
+                mlp.expr }, -- Notice: expression, not block!
+            separators = "|" },
+        -- Notice: no "end" keyword!
+        builder = function (x)
+            local tested_term_seq, _, cases = unpack(x)
+            local v = mlp.gensym 'match_expr'
+            -- Replace expressions with blocks: each case body becomes an
+            -- assignment to the shared result variable v.
+            for _, case in ipairs (cases) do
+                local body = case[3]
+                case[3] = { `Set{ {v}, {body} } }
+            end
+            local m = match_builder { tested_term_seq, cases }
+            -- `Stat wraps the statement list into an expression yielding v
+            return `Stat{ { `Local{{v}}; m }, v }
+        end } } }
+
+-- Compile "bind patterns = values": destructure [values] through
+-- [patterns], raising a runtime error on mismatch.
+-- x :: { pattern list, value list }
+-- Returns (code, decl_list): the statement list, and the `Id nodes of the
+-- variables the pattern binds (for the caller to declare local if needed).
+-- NOTE(review): relies on globals 'mlp', 'pattern_seq_builder' and
+-- 'walk_id' being provided by the legacy metalua environment.
+function bind (x)
+    local patterns, values = unpack(x)
+
+    -------------------------------------------------------------------
+    -- Generate pattern code: "bind vars = vals" translates to:
+    -- do
+    -- pattern matching code, goto 'fail' on mismatch
+    -- goto 'success'
+    -- label 'fail': error "..."
+    -- label success
+    -- end
+    -- vars is the set of variables used by the pattern
+    -------------------------------------------------------------------
+    local code, vars do
+        local match_cfg = {
+            on_failure = mlp.gensym 'mismatch' [1],
+            locals = { },
+            code = { } }
+        pattern_seq_builder(patterns, values, match_cfg)
+        local on_success = mlp.gensym 'on_success' [1]
+        code = {
+            match_cfg.code;
+            `Goto{ on_success };
+            `Label{ match_cfg.on_failure };
+            +{error "bind error"};
+            `Label{ on_success } }
+        vars = match_cfg.locals
+    end
+
+    -------------------------------------------------------------------
+    -- variables that actually appear in the pattern:
+    -------------------------------------------------------------------
+    local vars_in_pattern do
+        vars_in_pattern = { }
+        local walk_cfg = { id = { } }
+        function walk_cfg.id.free(v) vars_in_pattern[v[1]]=true end
+        walk_id.expr_list(walk_cfg, patterns)
+    end
+
+    -------------------------------------------------------------------
+    -- temp variables that are generated for destructuring,
+    -- but aren't explicitly typed by the user. These must be made
+    -- local.
+    -------------------------------------------------------------------
+    local vars_not_in_pattern do
+        vars_not_in_pattern = { }
+        for k, _ in pairs(vars) do
+            if not vars_in_pattern[k] then
+                vars_not_in_pattern[k] = true
+            end
+        end
+    end
+
+    -------------------------------------------------------------------
+    -- Declare the temp variables as local to the statement.
+    -------------------------------------------------------------------
+    if next(vars_not_in_pattern) then
+        local loc = { }
+        for k, _ in pairs(vars_not_in_pattern) do
+            table.insert (loc, `Id{k})
+        end
+        table.insert (code, 1, `Local{ loc, { } })
+    end
+
+    -------------------------------------------------------------------
+    -- Transform the set of pattern variable names into a list of `Id{}
+    -------------------------------------------------------------------
+    local decl_list do
+        decl_list = { }
+        for k, _ in pairs(vars_in_pattern) do
+            table.insert (decl_list, `Id{k})
+        end
+    end
+
+    return code, decl_list
+end
+
+-- "local bind patterns = values": declare the pattern's variables as
+-- fresh locals, then run the generated destructuring code.
+function local_bind (x)
+    local statements, bound_vars = bind (x)
+    return { `Local{ bound_vars, { } }; statements }
+end
+
+-- "bind patterns = values" at statement level: the pattern's variables
+-- are assigned (not declared), and the generated code is wrapped in a
+-- do...end block so its temporaries stay scoped.
+function non_local_bind (x)
+    local statements = bind (x)
+    statements.tag = 'Do'
+    return statements
+end
+
+----------------------------------------------------------------------
+-- Syntax front-end
+----------------------------------------------------------------------
+mlp.lexer:add 'bind'
+
+----------------------------------------------------------------------
+-- bind patterns = vars
+----------------------------------------------------------------------
+mlp.stat:add{ 'bind', mlp.expr_list, '=', mlp.expr_list,
+              builder = non_local_bind }
+
+----------------------------------------------------------------------
+-- local bind patterns = vars
+-- Some monkey-patching of "local ..." must take place
+-- (the 'local' statement is a multisequence; entry [2] is its keyword
+-- dispatch table, extended here with the 'bind' keyword).
+----------------------------------------------------------------------
+mlp.stat:get'local'[2]:add{ 'bind', mlp.expr_list, '=', mlp.expr_list,
+                            builder = local_bind }
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+--
+-- Summary: parser generator. Collection of higher order functors,
+-- which allow one to build and combine parsers. Relies on a lexer
+-- that supports the same API as the one exposed in mll.lua.
+--
+--------------------------------------------------------------------------------
+
+--------------------------------------------------------------------------------
+--
+-- Exported API:
+--
+-- Parser generators:
+-- * [gg.sequence()]
+-- * [gg.multisequence()]
+-- * [gg.expr()]
+-- * [gg.list()]
+-- * [gg.onkeyword()]
+-- * [gg.optkeyword()]
+--
+-- Other functions:
+-- * [gg.parse_error()]
+-- * [gg.make_parser()]
+-- * [gg.is_parser()]
+--
+--------------------------------------------------------------------------------
+
+local M = { }
+
+local lexer = require 'metalua.grammar.lexer'
+
+--------------------------------------------------------------------------------
+-- Symbol generator: [gensym()] returns a guaranteed-to-be-unique identifier.
+-- The main purpose is to avoid variable capture in macros.
+--
+-- If a string is passed as an argument, this string will be part of the
+-- id name (helpful for macro debugging)
+--------------------------------------------------------------------------------
+local gensymidx = 0
+
+-- Produce a fresh, guaranteed-unique identifier node `Id{ ".<n>.<arg>" }.
+-- The leading dot cannot appear in a user-written identifier, so
+-- macro-generated variables can never capture user variables.
+-- [arg] is an optional string mixed into the name for macro debugging.
+function M.gensym (arg)
+    gensymidx = gensymidx + 1
+    local unique_name = string.format(".%i.%s", gensymidx, arg or "")
+    return { tag="Id", unique_name }
+end
+
+
+-------------------------------------------------------------------------------
+-- parser metatable, which maps __call to method parse, and adds some
+-- error tracing boilerplate.
+-------------------------------------------------------------------------------
+-- Parser metatable: makes every parser table directly callable by
+-- routing [parser(lx, ...)] to [parser:parse(lx, ...)].
+local parser_metatable = { }
+
+function parser_metatable :__call (lx, ...)
+    return self :parse (lx, ...)
+end
+
+-------------------------------------------------------------------------------
+-- Turn a table into a parser, mainly by setting the metatable.
+-------------------------------------------------------------------------------
+-- Promote table [p] into a parser of kind [kind]: record the kind, give
+-- it a transformer list with an :add method, and attach the metatable
+-- that makes the table callable (routing to :parse). Returns [p].
+function M.make_parser(kind, p)
+    p.kind = kind
+    p.transformers = p.transformers or { }
+    p.transformers.add = function (self, x) table.insert (self, x) end
+    return setmetatable (p, parser_metatable)
+end
+
+-------------------------------------------------------------------------------
+-- Return true iff [x] is a parser.
+-- If it's a gg-generated parser, return the name of its kind.
+-------------------------------------------------------------------------------
+-- Return a truthy value iff [x] can be used as a parser: plain functions
+-- yield true; gg-generated parsers yield their kind string ("sequence",
+-- "multisequence", ...); anything else yields a falsy value.
+function M.is_parser (x)
+    return type(x)=="function" or getmetatable(x)==parser_metatable and x.kind
+end
+
+-------------------------------------------------------------------------------
+-- Parse a sequence, without applying builder nor transformers.
+-------------------------------------------------------------------------------
+-- Parse sequence [p] against lexer [lx], without applying builder nor
+-- transformers. String elements are consumed as mandatory keywords;
+-- parser elements contribute their result to the returned list [r].
+local function raw_parse_sequence (lx, p)
+    local r = { }
+    for i=1, #p do
+        local e=p[i]
+        if type(e) == "string" then
+            local kw = lx :next()
+            if not lx :is_keyword (kw, e) then
+                M.parse_error(
+                    lx, "A keyword was expected, probably `%s'.", e)
+            end
+        elseif M.is_parser (e) then
+            table.insert (r, e(lx))
+        else -- Invalid parser definition, this is *not* a parsing error
+            -- BUGFIX: table.tostring is a metalua extension this module
+            -- does not require; if it is absent, the diagnostic itself
+            -- crashed with "attempt to call a nil value". Fall back to
+            -- plain tostring() so the real error is always reported.
+            local dump = table.tostring or tostring
+            error(string.format(
+                "Sequence `%s': element #%i is neither a string nor a parser: %s",
+                p.name, i, dump(e)))
+        end
+    end
+    return r
+end
+
+-------------------------------------------------------------------------------
+-- Parse a multisequence, without applying multisequence transformers.
+-- The sequences are completely parsed.
+-------------------------------------------------------------------------------
+-- Parse a multisequence, without applying multisequence transformers:
+-- dispatch on the next keyword in [lx]; if no registered sequence starts
+-- with it, fall back to [default], and return false when neither applies.
+local function raw_parse_multisequence (lx, sequence_table, default)
+    local next_kw = lx :is_keyword (lx :peek())
+    local chosen = sequence_table[next_kw]
+    if chosen then
+        return chosen (lx)
+    elseif default then
+        return default (lx)
+    else
+        return false
+    end
+end
+
+-------------------------------------------------------------------------------
+-- Applies all transformers listed in parser on ast.
+-------------------------------------------------------------------------------
+-- Apply every transformer registered on [parser] to [ast] (a transformer
+-- returning nil/false leaves the AST unchanged), then stamp the result
+-- with lineinfo spanning [fli..lli] unless it already carries exactly it.
+local function transform (ast, parser, fli, lli)
+    local xforms = parser.transformers
+    if xforms then
+        for _, t in ipairs (xforms) do ast = t(ast) or ast end
+    end
+    if type(ast) == 'table' then
+        local li = ast.lineinfo
+        if not (li and li.first==fli and li.last==lli) then
+            ast.lineinfo = lexer.new_lineinfo (fli, lli)
+        end
+    end
+    return ast
+end
+
+-------------------------------------------------------------------------------
+-- Generate a traceable parsing error (tracing not implemented yet)
+-------------------------------------------------------------------------------
+-- Raise a parsing error formatted with [fmt]/varargs, prefixed with the
+-- lexer's current source position and, when the source text is available,
+-- followed by the offending line with a caret under the error column.
+function M.parse_error(lx, fmt, ...)
+    local li = lx:lineinfo_left()
+    local file, line, column, offset, positions
+    if li then
+        file, line, column, offset = li.source, li.line, li.column, li.offset
+        positions = { first = li, last = li }
+    else
+        line, column, offset = -1, -1, -1
+    end
+
+    local msg = string.format("line %i, char %i: "..fmt, line, column, ...)
+    if file and file~='?' then msg = "file "..file..", "..msg end
+
+    local src = lx.src
+    if offset>0 and src then
+        -- Scan backward/forward to the enclosing newlines to extract the
+        -- offending source line.
+        -- NOTE(review): the backward guard is i>=0, although Lua string
+        -- indices start at 1; sub(0,0)=="" so the loop still terminates
+        -- correctly at start-of-string — confirm this is intentional.
+        local i, j = offset, offset
+        while src:sub(i,i) ~= '\n' and i>=0 do i=i-1 end
+        while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end
+        local srcline = src:sub (i+1, j-1)
+        local idx = string.rep (" ", column).."^"
+        msg = string.format("%s\n>>> %s\n>>> %s", msg, srcline, idx)
+    end
+    --lx :kill()
+    error(msg)
+end
+
+-------------------------------------------------------------------------------
+--
+-- Sequence parser generator
+--
+-------------------------------------------------------------------------------
+-- Input fields:
+--
+-- * [builder]: how to build an AST out of sequence parts. let [x] be the list
+-- of subparser results (keywords are simply omitted). [builder] can be:
+-- - [nil], in which case the result of parsing is simply [x]
+-- - a string, which is then put as a tag on [x]
+-- - a function, which takes [x] as a parameter and returns an AST.
+--
+-- * [name]: the name of the parser. Used for debug messages
+--
+-- * [transformers]: a list of AST->AST functions, applied in order on ASTs
+-- returned by the parser.
+--
+-- * Table-part entries corresponds to keywords (strings) and subparsers
+-- (function and callable objects).
+--
+-- After creation, the following fields are added:
+-- * [parse] the parsing function lexer->AST
+-- * [kind] == "sequence"
+-- * [name] is set, if it wasn't in the input.
+--
+-------------------------------------------------------------------------------
+-- Turn config table [p] into a sequence parser (see the header comment
+-- above for the accepted fields: builder, name, transformers, and the
+-- array part mixing keywords and sub-parsers). Returns [p].
+function M.sequence (p)
+    M.make_parser ("sequence", p)
+
+    -------------------------------------------------------------------
+    -- Parsing method
+    -------------------------------------------------------------------
+    function p:parse (lx)
+
+        -- Raw parsing:
+        local fli = lx:lineinfo_right()
+        local seq = raw_parse_sequence (lx, self)
+        local lli = lx:lineinfo_left()
+
+        -- Builder application: a string tags the result list, a callable
+        -- replaces it, nil leaves the raw list as-is.
+        -- NOTE(review): `builder.__call` only detects callables that store
+        -- __call directly on the object, not via its metatable — confirm.
+        local builder, tb = self.builder, type (self.builder)
+        if tb == "string" then seq.tag = builder
+        elseif tb == "function" or builder and builder.__call then seq = builder(seq)
+        elseif builder == nil then -- nothing
+        else error ("Invalid builder of type "..tb.." in sequence") end
+        seq = transform (seq, self, fli, lli)
+        assert (not seq or seq.lineinfo)
+        return seq
+    end
+
+    -------------------------------------------------------------------
+    -- Construction
+    -------------------------------------------------------------------
+    -- Try to build a proper name from the sequence's keywords
+    if p.name then
+        -- don't touch existing name
+    elseif type(p[1])=="string" then -- find name based on 1st keyword
+        if #p==1 then p.name=p[1]
+        elseif type(p[#p])=="string" then
+            p.name = p[1] .. " ... " .. p[#p]
+        else p.name = p[1] .. " ..." end
+    else -- can't find a decent name
+        p.name = "unnamed_sequence"
+    end
+
+    return p
+end --</sequence>
+
+
+-------------------------------------------------------------------------------
+--
+-- Multiple, keyword-driven, sequence parser generator
+--
+-------------------------------------------------------------------------------
+-- in [p], useful fields are:
+--
+-- * [transformers]: as usual
+--
+-- * [name]: as usual
+--
+-- * Table-part entries must be sequence parsers, or tables which can
+-- be turned into a sequence parser by [gg.sequence]. These
+-- sequences must start with a keyword, and this initial keyword
+-- must be different for each sequence. The table-part entries will
+-- be removed after [gg.multisequence] returns.
+--
+-- * [default]: the parser to run if the next keyword in the lexer is
+-- none of the registered initial keywords. If there's no default
+-- parser and no suitable initial keyword, the multisequence parser
+-- simply returns [false].
+--
+-- After creation, the following fields are added:
+--
+-- * [parse] the parsing function lexer->AST
+--
+-- * [sequences] the table of sequences, indexed by initial keywords.
+--
+-- * [add] method takes a sequence parser or a config table for
+-- [gg.sequence], and adds/replaces the corresponding sequence
+-- parser. If the keyword was already used, the former sequence is
+-- removed and a warning is issued.
+--
+-- * [get] method returns a sequence by its initial keyword
+--
+-- * [kind] == "multisequence"
+--
+-------------------------------------------------------------------------------
function M.multisequence (p)
    M.make_parser ("multisequence", p)

    -------------------------------------------------------------------
    -- Add a sequence (might be just a config table for [gg.sequence]).
    -- A keyword-initiated sequence is registered under its keyword in
    -- [self.sequences]; at most one sequence without an initial keyword
    -- is accepted, and becomes [self.default].
    -------------------------------------------------------------------
    function p :add (s)
        -- compile if necessary:
        -- (the initial keyword is remembered before compilation, since
        -- [gg.sequence] mutates [s] in place; its result needn't be kept.)
        local keyword = type(s)=='table' and s[1]
        if type(s)=='table' and not M.is_parser(s) then M.sequence(s) end
        if M.is_parser(s)~='sequence' or type(keyword)~='string' then
            if self.default then -- two defaults
                error ("In a multisequence parser, all but one sequences "..
                       "must start with a keyword")
            else self.default = s end -- first default
        else
            if self.sequences[keyword] then -- duplicate keyword
                -- TODO: warn that initial keyword `keyword` is overloaded in multiseq
            end
            self.sequences[keyword] = s
        end
    end -- </multisequence.add>

    -------------------------------------------------------------------
    -- Get the sequence starting with this keyword. [kw :: string]
    -- Returns nil when no sequence is registered under [kw].
    -------------------------------------------------------------------
    function p :get (kw) return self.sequences [kw] end

    -------------------------------------------------------------------
    -- Remove the sequence starting with keyword [kw :: string];
    -- returns the removed sequence, or nil if there was none.
    -------------------------------------------------------------------
    function p :del (kw)
        if not self.sequences[kw] then
            -- TODO: warn that we try to delete a non-existent entry
        end
        local removed = self.sequences[kw]
        self.sequences[kw] = nil
        return removed
    end

    -------------------------------------------------------------------
    -- Parsing method: dispatch on the next keyword through
    -- [raw_parse_multisequence], then apply [self]'s transformers.
    -------------------------------------------------------------------
    function p :parse (lx)
        local fli = lx:lineinfo_right()
        local x = raw_parse_multisequence (lx, self.sequences, self.default)
        local lli = lx:lineinfo_left()
        return transform (x, self, fli, lli)
    end

    -------------------------------------------------------------------
    -- Construction
    -------------------------------------------------------------------
    -- Register the sequences passed to the constructor. They're going
    -- from the array part of the parser to the hash part of field
    -- [sequences].
    p.sequences = { }
    for i=1, #p do p :add (p[i]); p[i] = nil end

    -- FIXME: commented out in the original -- why? [p:add] already
    -- compiles a raw default table, so this looks redundant.
    --if p.default and not is_parser(p.default) then sequence(p.default) end
    return p
end --</multisequence>
+
+
+-------------------------------------------------------------------------------
+--
+-- Expression parser generator
+--
+-------------------------------------------------------------------------------
+--
+-- Expression configuration relies on three tables: [prefix], [infix]
+-- and [suffix]. Moreover, the primary parser can be replaced by a
+-- table: in this case the [primary] table will be passed to
+-- [gg.multisequence] to create a parser.
+--
+-- Each of these tables is a modified multisequence parser: the
+-- differences with respect to regular multisequence config tables are:
+--
+-- * the builder takes specific parameters:
+-- - for [prefix], it takes the result of the prefix sequence parser,
+-- and the prefixed expression
+-- - for [infix], it takes the left-hand-side expression, the results
+-- of the infix sequence parser, and the right-hand-side expression.
+-- - for [suffix], it takes the suffixed expression, and the result
+-- of the suffix sequence parser.
+--
+-- * the default field is a list, with parameters:
+-- - [parser] the raw parsing function
+-- - [transformers], as usual
+-- - [prec], the operator's precedence
+-- - [assoc] for [infix] table, the operator's associativity, which
+-- can be "left", "right" or "flat" (default to left)
+--
+-- In [p], useful fields are:
+-- * [transformers]: as usual
+-- * [name]: as usual
+-- * [primary]: the atomic expression parser, or a multisequence config
+-- table (mandatory)
+-- * [prefix]: prefix operators config table, see above.
+-- * [infix]: infix operators config table, see above.
+-- * [suffix]: suffix operators config table, see above.
+--
+-- After creation, these fields are added:
+-- * [kind] == "expr"
+-- * [parse] as usual
+-- * each table is turned into a multisequence, and therefore has an
+-- [add] method
+--
+-------------------------------------------------------------------------------
function M.expr (p)
    M.make_parser ("expr", p)

    -------------------------------------------------------------------
    -- parser method.
    -- In addition to the lexer, it takes an optional precedence:
    -- it won't read expressions whose precedence is lower or equal
    -- to [prec].
    -------------------------------------------------------------------
    function p :parse (lx, prec)
        prec = prec or 0

        ------------------------------------------------------
        -- Extract the right parser and the corresponding
        -- options table, for (pre|in|suf)fix operators.
        -- Options include prec, assoc, transformers.
        -- Returns false, false when no parser applies.
        ------------------------------------------------------
        local function get_parser_info (tab)
            local p2 = tab :get (lx :is_keyword (lx :peek()))
            if p2 then -- keyword-based sequence found
                local function parser(lx) return raw_parse_sequence(lx, p2) end
                return parser, p2
            else -- Got to use the default parser
                local d = tab.default
                if d then return d.parse or d.parser, d
                else return false, false end
            end
        end

        ------------------------------------------------------
        -- Look for a prefix sequence. Multiple prefixes are
        -- handled through the recursive [p.parse] call.
        -- Notice the double-transform: one for the primary
        -- expr, and one for the one with the prefix op.
        ------------------------------------------------------
        local function handle_prefix ()
            local fli = lx :lineinfo_right()
            local p2_func, p2 = get_parser_info (self.prefix)
            local op = p2_func and p2_func (lx)
            if op then -- Keyword-based sequence found
                local ili = lx :lineinfo_right() -- Intermediate LineInfo
                local e = p2.builder (op, self :parse (lx, p2.prec))
                local lli = lx :lineinfo_left()
                return transform (transform (e, p2, ili, lli), self, fli, lli)
            else -- No prefix found, get a primary expression
                local e = self.primary(lx)
                local lli = lx :lineinfo_left()
                return transform (e, self, fli, lli)
            end
        end --</expr.parse.handle_prefix>

        ------------------------------------------------------
        -- Look for an infix sequence+right-hand-side operand.
        -- Return the whole binary expression result,
        -- or false if no operator was found.
        ------------------------------------------------------
        local function handle_infix (e)
            local p2_func, p2 = get_parser_info (self.infix)
            if not p2 then return false end

            -----------------------------------------
            -- Handle flattening operators: gather all operands
            -- of the series in [list]; when a different operator
            -- is found, stop, build from [list], [transform] and
            -- return.
            -----------------------------------------
            if (not p2.prec or p2.prec>prec) and p2.assoc=="flat" then
                local fli = lx:lineinfo_right()
                local pflat, list = p2, { e }
                repeat
                    local op = p2_func(lx)
                    if not op then break end
                    table.insert (list, self:parse (lx, p2.prec))
                    local _ -- We only care about checking that p2==pflat
                    _, p2 = get_parser_info (self.infix)
                until p2 ~= pflat
                local e2 = pflat.builder (list)
                local lli = lx:lineinfo_left()
                return transform (transform (e2, pflat, fli, lli), self, fli, lli)

            -----------------------------------------
            -- Handle regular infix operators: [e] the LHS is known,
            -- just gather the operator and [e2] the RHS.
            -- Result goes in [e3]. Note that equal precedence only
            -- continues the climb for right-associative operators.
            -----------------------------------------
            elseif p2.prec and p2.prec>prec or
                   p2.prec==prec and p2.assoc=="right" then
                local fli = e.lineinfo.first -- lx:lineinfo_right()
                local op = p2_func(lx)
                if not op then return false end
                local e2 = self:parse (lx, p2.prec)
                local e3 = p2.builder (e, op, e2)
                local lli = lx:lineinfo_left()
                return transform (transform (e3, p2, fli, lli), self, fli, lli)

            -----------------------------------------
            -- Check for non-associative operators, and complain if applicable.
            -----------------------------------------
            elseif p2.assoc=="none" and p2.prec==prec then
                M.parse_error (lx, "non-associative operator!")

            -----------------------------------------
            -- No infix operator suitable at that precedence
            -----------------------------------------
            else return false end

        end --</expr.parse.handle_infix>

        ------------------------------------------------------
        -- Look for a suffix sequence.
        -- Return the result of suffix operator on [e],
        -- or false if no operator was found.
        -- [fli] is taken from [e.lineinfo.first] so the result's
        -- lineinfo covers the suffixed expression, not just the
        -- suffix itself.
        ------------------------------------------------------
        local function handle_suffix (e)
            local p2_func, p2 = get_parser_info (self.suffix)
            if not p2 then return false end
            if not p2.prec or p2.prec>=prec then
                --local fli = lx:lineinfo_right()
                local fli = e.lineinfo.first
                local op = p2_func(lx)
                if not op then return false end
                local lli = lx:lineinfo_left()
                e = p2.builder (e, op)
                e = transform (transform (e, p2, fli, lli), self, fli, lli)
                return e
            end
            return false
        end --</expr.parse.handle_suffix>

        ------------------------------------------------------
        -- Parser body: read suffix and (infix+operand)
        -- extensions as long as we're able to fetch more at
        -- this precedence level.
        -- (NB: in Lua, locals declared in a repeat body remain
        -- in scope in the until condition.)
        ------------------------------------------------------
        local e = handle_prefix()
        repeat
            local x = handle_suffix (e); e = x or e
            local y = handle_infix (e); e = y or e
        until not (x or y)

        -- No transform: it already happened in operators handling
        return e
    end --</expr.parse>

    -------------------------------------------------------------------
    -- Construction: each operator table (and primary) is compiled
    -- into a multisequence; [p:add] extends the primary parser.
    -------------------------------------------------------------------
    if not p.primary then p.primary=p[1]; p[1]=nil end
    for _, t in ipairs{ "primary", "prefix", "infix", "suffix" } do
        if not p[t] then p[t] = { } end
        if not M.is_parser(p[t]) then M.multisequence(p[t]) end
    end
    function p:add(...) return self.primary:add(...) end
    return p
end --</expr>
+
+
+-------------------------------------------------------------------------------
+--
+-- List parser generator
+--
+-------------------------------------------------------------------------------
+-- In [p], the following fields can be provided in input:
+--
+-- * [builder]: takes list of subparser results, returns AST
+-- * [transformers]: as usual
+-- * [name]: as usual
+--
+-- * [terminators]: list of strings representing the keywords which
+-- might mark the end of the list. When non-empty, the list is
+-- allowed to be empty. A string is treated as a single-element
+-- table, whose element is that string, e.g. ["do"] is the same as
+-- [{"do"}].
+--
+-- * [separators]: list of strings representing the keywords which can
+-- separate elements of the list. When non-empty, one of these
+-- keyword has to be found between each element. Lack of a separator
+-- indicates the end of the list. A string is treated as a
+-- single-element table, whose element is that string, e.g. ["do"]
+-- is the same as [{"do"}]. If [terminators] is empty/nil, then
+-- [separators] has to be non-empty.
+--
+-- After creation, the following fields are added:
+-- * [parse] the parsing function lexer->AST
+-- * [kind] == "list"
+--
+-------------------------------------------------------------------------------
function M.list (p)
    M.make_parser ("list", p)

    -------------------------------------------------------------------
    -- Parsing method
    -------------------------------------------------------------------
    function p :parse (lx)

        ------------------------------------------------------
        -- Used to quickly check whether there's a terminator
        -- or a separator immediately ahead
        ------------------------------------------------------
        local function peek_is_in (keywords)
            return keywords and lx:is_keyword(lx:peek(), unpack(keywords)) end

        local x = { }
        local fli = lx :lineinfo_right()

        -- if there's a terminator to start with, don't bother trying.
        -- An empty list is only legal when [terminators] is specified.
        local is_empty_list = self.terminators and (peek_is_in (self.terminators) or lx:peek().tag=="Eof")
        if not is_empty_list then
            repeat
                local item = self.primary(lx)
                table.insert (x, item) -- read one element
            until
                -- There's a separator list specified, and next token isn't in it.
                -- Otherwise, consume it with [lx:next()]
                self.separators and not(peek_is_in (self.separators) and lx:next()) or
                -- Terminator token ahead
                peek_is_in (self.terminators) or
                -- Last reason: end of file reached
                lx:peek().tag=="Eof"
        end

        local lli = lx:lineinfo_left()

        -- Apply the builder. It can be a string (used as a tag), or a
        -- callable value (function or table with __call), or simply
        -- nothing, in which case the raw element list is returned.
        local b = self.builder
        if b then
            if type(b)=="string" then x.tag = b -- b is a string, use it as a tag
            elseif type(b)=="function" then x=b(x)
            else
                local bmt = getmetatable(b)
                if bmt and bmt.__call then x=b(x) end
            end
        end
        return transform (x, self, fli, lli)
    end --</list.parse>

    -------------------------------------------------------------------
    -- Construction: normalize [terminators] and [separators] -- a lone
    -- string becomes a one-element list, an empty list becomes nil.
    -------------------------------------------------------------------
    if not p.primary then p.primary = p[1]; p[1] = nil end
    if type(p.terminators) == "string" then p.terminators = { p.terminators }
    elseif p.terminators and #p.terminators == 0 then p.terminators = nil end
    if type(p.separators) == "string" then p.separators = { p.separators }
    elseif p.separators and #p.separators == 0 then p.separators = nil end

    return p
end --</list>
+
+
+-------------------------------------------------------------------------------
+--
+-- Keyword-conditioned parser generator
+--
+-------------------------------------------------------------------------------
+--
+-- Only apply a parser if a given keyword is found. The result of
+-- [gg.onkeyword] parser is the result of the subparser (modulo
+-- [transformers] applications).
+--
+-- lineinfo: the keyword is *not* included in the boundaries of the
+-- resulting lineinfo. A review of all usages of gg.onkeyword() in the
+-- implementation of metalua has shown that it was the appropriate choice
+-- in every case.
+--
+-- Input fields:
+--
+-- * [name]: as usual
+--
+-- * [transformers]: as usual
+--
+-- * [peek]: if non-nil, the conditioning keyword is left in the lexeme
+-- stream instead of being consumed.
+--
+-- * [primary]: the subparser.
+--
+-- * [keywords]: list of strings representing triggering keywords.
+--
+-- * Table-part entries can contain strings, and/or exactly one parser.
+-- Strings are put in [keywords], and the parser is put in [primary].
+--
+-- After the call, the following fields will be set:
+--
+-- * [parse] the parsing method
+-- * [kind] == "onkeyword"
+-- * [primary]
+-- * [keywords]
+--
+-------------------------------------------------------------------------------
function M.onkeyword (p)
    M.make_parser ("onkeyword", p)

    -------------------------------------------------------------------
    -- Parsing method: if the next token is one of the trigger
    -- keywords, consume it (unless [self.peek] is set) and run
    -- [self.primary]; otherwise return false without touching the
    -- stream.
    -------------------------------------------------------------------
    function p :parse (lx)
        if lx :is_keyword (lx:peek(), unpack(self.keywords)) then
            local fli = lx:lineinfo_right()
            if not self.peek then lx:next() end
            local content = self.primary (lx)
            local lli = lx:lineinfo_left()
            -- The keyword is deliberately excluded from the result's
            -- lineinfo: prefer the boundaries carried by [content].
            local li = content.lineinfo or { }
            fli, lli = li.first or fli, li.last or lli
            -- Fix: pass [self] rather than the [p] upvalue, for
            -- consistency with every other parse method in this module
            -- (they only differ if the method is copied to another table).
            return transform (content, self, fli, lli)
        else return false end
    end

    -------------------------------------------------------------------
    -- Construction: strings in the table-part are trigger keywords,
    -- the single non-string entry must be the primary parser.
    -------------------------------------------------------------------
    if not p.keywords then p.keywords = { } end
    for _, x in ipairs(p) do
        if type(x)=="string" then table.insert (p.keywords, x)
        else assert (not p.primary and M.is_parser (x)); p.primary = x end
    end
    assert (next (p.keywords), "Missing trigger keyword in gg.onkeyword")
    assert (p.primary, 'no primary parser in gg.onkeyword')
    return p
end --</onkeyword>
+
+
+-------------------------------------------------------------------------------
+--
-- Optional keyword consumer pseudo-parser generator
+--
+-------------------------------------------------------------------------------
+--
+-- This doesn't return a real parser, just a function. That function parses
+-- one of the keywords passed as parameters, and returns it. It returns
+-- [false] if no matching keyword is found.
+--
+-- Notice that tokens returned by lexer already carry lineinfo, therefore
+-- there's no need to add them, as done usually through transform() calls.
+-------------------------------------------------------------------------------
function M.optkeyword (...)
    -- Accept either a list of string arguments, or a single table
    -- holding them (the usual gg calling convention).
    local keywords = {...}
    local first = keywords[1]
    if type (first) == "table" then
        assert (#keywords == 1)
        keywords = first
    end
    for _, kw in ipairs(keywords) do
        assert (type(kw)=="string")
    end
    -- The returned closure consumes and returns a matching keyword;
    -- it returns false, consuming nothing, when none matches.
    return function (lx)
        local found = lx :is_keyword (lx :peek(), unpack (keywords))
        if not found then return false end
        lx :next()
        return found
    end
end
+
+
+-------------------------------------------------------------------------------
+--
+-- Run a parser with a special lexer
+--
+-------------------------------------------------------------------------------
+--
+-- This doesn't return a real parser, just a function.
+-- First argument is the lexer class to be used with the parser,
+-- 2nd is the parser itself.
+-- The resulting parser returns whatever the argument parser does.
+--
+-------------------------------------------------------------------------------
function M.with_lexer(new_lexer, parser)

    -------------------------------------------------------------------
    -- Most gg functions take their parameters in a table, so it's
    -- better to silently accept when with_lexer{ } is called with
    -- its arguments in a list: rewrite with_lexer{lexer, parser}
    -- into with_lexer(lexer, parser).
    -------------------------------------------------------------------
    if not parser and #new_lexer==2 and type(new_lexer[1])=='table' then
        return M.with_lexer(unpack(new_lexer))
    end

    -------------------------------------------------------------------
    -- The wrapper swaps the lexer class (the stream's metatable)
    -- around the parser call, and restores the previous one even if
    -- the parser raises an error.
    -------------------------------------------------------------------
    return function (lx)
        local old_lexer = getmetatable(lx)
        lx :sync()
        setmetatable(lx, new_lexer)
        local ok, result_or_err = pcall(parser, lx)
        lx :sync()
        setmetatable(lx, old_lexer)
        if not ok then error(result_or_err) end
        return result_or_err
    end
end
+
+--------------------------------------------------------------------------------
+--
+-- Make sure a parser is used and returns successfully.
+--
+--------------------------------------------------------------------------------
-- Wrap [primary] so that an empty list result becomes a parse error;
-- the wrapper reuses [primary]'s name for error reporting.
function M.nonempty(primary)
    local p = M.make_parser('non-empty list', { primary = primary, name=primary.name })
    function p :parse (lx)
        local fli = lx:lineinfo_right()
        local content = self.primary (lx)
        local lli = lx:lineinfo_left()
        -- Prefer the lineinfo carried by the result, when present.
        local li = content.lineinfo or { }
        fli, lli = li.first or fli, li.last or lli
        if #content == 0 then
            M.parse_error (lx, "`%s' must not be empty.", self.name or "list")
        else
            return transform (content, self, fli, lli)
        end
    end
    return p
end
+
-- Metatable for "future" proxies: read-only objects standing in for
-- parsers that will only be defined later. Indexing a future with a
-- parser name yields a function which, when called, resolves the
-- parser in the target module and runs it.
local FUTURE_MT = { }
function FUTURE_MT:__tostring() return "<Proxy parser module>" end
function FUTURE_MT:__newindex(key, value) error "don't write in futures" end
function FUTURE_MT :__index (parser_name)
    return function(...)
        -- Resolve the optional submodule path lazily, at call time;
        -- rawget avoids re-triggering this __index metamethod.
        local p, m = rawget(self, '__path'), self.__module
        if p then for _, name in ipairs(p) do
            m=rawget(m, name)
            if not m then error ("Submodule '"..name.."' undefined") end
        end end
        local f = rawget(m, parser_name)
        if not f then error ("Parser '"..parser_name.."' undefined") end
        return f(...)
    end
end
+
-- Build a proxy for [module]; extra string arguments denote a
-- submodule path, resolved only when a proxied parser is called.
function M.future(module, ...)
    checks('table') -- NOTE(review): only the first argument is checked here
    local path = ... and {...}
    if path then for _, x in ipairs(path) do
        assert(type(x)=='string', "Bad future arg")
    end end
    local self = { __module = module,
                   __path = path }
    return setmetatable(self, FUTURE_MT)
end
+
+return M
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
require 'checks'

local M = { }

-- Lexer class prototype; instances are token streams. [alpha] and
-- [sym] hold the registered alphanumeric keywords and symbols
-- (populated elsewhere in this module).
local lexer = { alpha={ }, sym={ } }
lexer.__index=lexer
lexer.__type='lexer.stream'

M.lexer = lexer


-- Debug printer, disabled by default.
local debugf = function() end
-- local debugf=printf

----------------------------------------------------------------------
-- Some locale settings produce bad results, e.g. French locale
-- expects float numbers to use commas instead of periods.
-- TODO: change number parser into something locale-independent,
-- locales are nasty.
----------------------------------------------------------------------
os.setlocale('C')

-- Registry of the metatables created by [new_metatable], indexed by
-- class name; also exported as M.metatables.
local MT = { }

M.metatables=MT
+
+----------------------------------------------------------------------
+-- Create a new metatable, for a new class of objects.
+----------------------------------------------------------------------
-- Create a new metatable, for a new class of objects.
local function new_metatable(name)
    -- Each class 'foo' gets a self-indexing metatable whose __type is
    -- 'lexer.foo', registered in the module-level MT table.
    local mt = { }
    mt.__type  = 'lexer.'..name
    mt.__index = mt
    MT[name] = mt
end
+
+
+----------------------------------------------------------------------
+-- Position: represent a point in a source file.
+----------------------------------------------------------------------
+new_metatable 'position'
+
-- Serial-number generator shared by all positions.
local position_idx=1

-- Build a position object. [id] is a unique serial number, which
-- tells two position objects apart even when they denote the same
-- source location.
function M.new_position(line, column, offset, source)
    checks('number', 'number', 'number', 'string')
    local id = position_idx; position_idx = position_idx+1
    return setmetatable({line=line, column=column, offset=offset,
                         source=source, id=id}, MT.position)
end
+
-- Terse debug rendering: <[C|]source|Lline|Ccolumn|Koffset>;
-- the optional leading "C|" flags attached comments.
function MT.position :__tostring()
    return string.format("<%s%s|L%d|C%d|K%d>",
                         self.comments and "C|" or "",
                         self.source, self.line, self.column, self.offset)
end
+
+
+
+----------------------------------------------------------------------
+-- Position factory: convert offsets into line/column/offset positions.
+----------------------------------------------------------------------
+new_metatable 'position_factory'
+
-- Build a factory converting byte offsets in [src] into full
-- line/column/offset positions labelled with [src_name].
function M.new_position_factory(src, src_name)
    -- assert(type(src)=='string')
    -- assert(type(src_name)=='string')
    -- [lines] maps a line number to the offset of its first character:
    -- line 1 starts at offset 1, then one entry per "\n" in [src].
    local lines = { 1 }
    for offset in src :gmatch '\n()' do table.insert(lines, offset) end
    local max = #src+1
    -- Sentinel entry past the end of the source.
    table.insert(lines, max+1) -- +1 includes Eof
    return setmetatable({ src_name=src_name, line2offset=lines, max=max },
                        MT.position_factory)
end
+
-- Convert a byte offset into a position object.
-- The line is found by binary search in the sorted [line2offset]
-- array; [last_left] caches the previously found line because
-- positions are usually requested in increasing offset order, so the
-- fast paths below (cached line, or the one right after it) hit most
-- of the time.
function MT.position_factory :get_position (offset)
    -- assert(type(offset)=='number')
    assert(offset<=self.max)
    local line2offset = self.line2offset
    local left = self.last_left or 1
    -- Cached line is already past the wanted offset: restart from line 1.
    if offset<line2offset[left] then left=1 end
    local right = left+1
    -- Fast paths: try the next line; otherwise widen to the full range.
    if line2offset[right]<=offset then right = right+1 end
    if line2offset[right]<=offset then right = #line2offset end
    -- Binary search. Invariant: line2offset[left] <= offset < line2offset[right].
    while true do
        -- print (" trying lines "..left.."/"..right..", offsets "..line2offset[left]..
        --        "/"..line2offset[right].." for offset "..offset)
        -- assert(line2offset[left]<=offset)
        -- assert(offset<line2offset[right])
        -- assert(left<right)
        if left+1==right then break end
        local middle = math.floor((left+right)/2)
        if line2offset[middle]<=offset then left=middle else right=middle end
    end
    -- assert(left+1==right)
    -- printf("found that offset %d is between %d and %d, hence on line %d",
    --        offset, line2offset[left], line2offset[right], left)
    local line = left
    local column = offset - line2offset[line] + 1
    self.last_left = left
    return M.new_position(line, column, offset, self.src_name)
end
+
+
+
+----------------------------------------------------------------------
+-- Lineinfo: represent a node's range in a source file;
+-- embed information about prefix and suffix comments.
+----------------------------------------------------------------------
+new_metatable 'lineinfo'
+
-- Pair two positions into a lineinfo range [first..last].
function M.new_lineinfo(first, last)
    checks('lexer.position', 'lexer.position')
    return setmetatable({first=first, last=last}, MT.lineinfo)
end
+
-- Debug rendering: ranges collapse to a single number when first==last
-- (e.g. <src|L5-7|C1-10|K12-57>); "C|" / "|C" flag comments attached
-- before / after the range.
function MT.lineinfo :__tostring()
    local fli, lli = self.first, self.last
    local line = fli.line; if line~=lli.line then line =line ..'-'..lli.line end
    local column = fli.column; if column~=lli.column then column=column..'-'..lli.column end
    local offset = fli.offset; if offset~=lli.offset then offset=offset..'-'..lli.offset end
    return string.format("<%s%s|L%s|C%s|K%s%s>",
                         fli.comments and "C|" or "",
                         fli.source, line, column, offset,
                         lli.comments and "|C" or "")
end
+
+----------------------------------------------------------------------
+-- Token: atomic Lua language element, with a category, a content,
+-- and some lineinfo relating it to its original source.
+----------------------------------------------------------------------
+new_metatable 'token'
+
-- Build a token: [tag] is its category (e.g. "Eof" or "Comment" as
-- produced in this file), [content] is stored at array index 1, and
-- [lineinfo] locates the token in its original source.
function M.new_token(tag, content, lineinfo)
    --printf("TOKEN `%s{ %q, lineinfo = %s} boundaries %d, %d",
    --       tag, content, tostring(lineinfo), lineinfo.first.id, lineinfo.last.id)
    return setmetatable({tag=tag, lineinfo=lineinfo, content}, MT.token)
end
+
-- Short debug rendering: `Tag "content" (lineinfo omitted for brevity).
function MT.token :__tostring()
    --return string.format("`%s{ %q, %s }", self.tag, self[1], tostring(self.lineinfo))
    return string.format("`%s %q", self.tag, self[1])
end
+
+
+----------------------------------------------------------------------
+-- Comment: series of comment blocks with associated lineinfo.
+-- To be attached to the tokens just before and just after them.
+----------------------------------------------------------------------
+new_metatable 'comment'
+
-- Group a non-empty list of comment fragments into a comment block
-- whose lineinfo spans from the first fragment's start to the last
-- fragment's end.
function M.new_comment(lines)
    local first = lines[1].lineinfo.first
    local last = lines[#lines].lineinfo.last
    local lineinfo = M.new_lineinfo(first, last)
    return setmetatable({lineinfo=lineinfo, unpack(lines)}, MT.comment)
end
+
-- Concatenate the comment fragments' texts, re-inserting one newline
-- per source line skipped between two consecutive fragments, so the
-- result roughly preserves the original vertical layout.
function MT.comment :text()
    local last_line = self[1].lineinfo.last.line
    local acc = { }
    for _, line in ipairs(self) do
        -- Blank lines between the previous fragment's end and this
        -- fragment's start (0 or negative for the first fragment,
        -- which ("\n"):rep() renders as the empty string).
        local nreturns = line.lineinfo.first.line - last_line
        table.insert(acc, ("\n"):rep(nreturns))
        table.insert(acc, line[1])
        -- Fix: advance past the *current* fragment. The original never
        -- updated [last_line], so every gap was measured from the first
        -- fragment's end, inserting spurious newlines from the third
        -- fragment onward.
        last_line = line.lineinfo.last.line
    end
    return table.concat(acc)
end
+
-- Build a single comment fragment: text at array index 1, and the "="
-- count of a long comment at index 2 (nil for short "--" comments).
function M.new_comment_line(text, lineinfo, nequals)
    checks('string', 'lexer.lineinfo', '?number')
    return { lineinfo = lineinfo, text, nequals }
end
+
+
+
----------------------------------------------------------------------
-- Patterns used by [lexer :extract] to decompose the raw string into
-- correctly tagged tokens. The trailing "()" position captures return
-- the offset just past the match, used to advance the read cursor.
-- In long brackets, the "%1" back-reference forces the closing
-- bracket's "=" count to match the opening one's.
----------------------------------------------------------------------
lexer.patterns = {
    spaces = "^[ \r\n\t]*()",
    short_comment = "^%-%-([^\n]*)\n?()",
    --final_short_comment = "^%-%-([^\n]*)()$",
    long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()",
    long_string = "^%[(=*)%[\n?(.-)%]%1%]()",
    number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" },
    number_mantissa_hex = { "^%x+%.?%x*()", "^%x*%.%x+()" }, --Lua5.1 and Lua5.2
    number_exponant = "^[eE][%+%-]?%d+()",
    number_exponant_hex = "^[pP][%+%-]?%d+()", --Lua5.2
    number_hex = "^0[xX]()",
    word = "^([%a_][%w_]*)()"
}
+
+----------------------------------------------------------------------
+-- unescape a whole string, applying [unesc_digits] and
+-- [unesc_letter] as many times as required.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Unescape a whole string, expanding "\z", "\ddd", "\xHH" and
-- single-letter escape sequences. An escape is only live when
-- preceded by an odd number of backslashes; paired backslashes
-- escape each other and are resolved by the final letter pass.
----------------------------------------------------------------------
local function unescape_string (s)

    -- Decimal escapes "\ddd": up to three digits, value must fit a byte.
    local function expand_decimal (backslashes, digits)
        if #backslashes % 2 == 0 then
            -- Backslashes pair off; leave everything to the letter pass.
            return backslashes..digits
        end
        -- Drop the odd backslash which escapes the digit sequence.
        backslashes = backslashes :sub (1, -2)
        local k, j, i = digits :reverse() :byte(1, 3)
        local z = string.byte "0"
        local code = (k or z) + 10*(j or z) + 100*(i or z) - 111*z
        if code > 255 then
            error ("Illegal escape sequence '\\"..digits..
                   "' in string: ASCII codes must be in [0..255]")
        end
        local c = string.char (code)
        if c == '\\' then c = '\\\\' end -- parsed by the letter pass (test: "\092b" --> "\\b")
        return backslashes..c
    end

    -- Hexadecimal escapes "\xHH" (Lua 5.2).
    local function expand_hex (backslashes, digits)
        if #backslashes % 2 == 0 then
            return backslashes..'x'..digits
        end
        backslashes = backslashes :sub (1, -2)
        local c = string.char (tonumber (digits, 16))
        if c == '\\' then c = '\\\\' end -- parsed by the letter pass (test: "\x5cb" --> "\\b")
        return backslashes..c
    end

    -- "\z" swallows the following whitespace (Lua 5.2).
    local function expand_z (backslashes, more)
        if #backslashes % 2 == 0 then
            return backslashes..more
        end
        return backslashes :sub (1, -2)
    end

    -- Single-letter escapes; unknown letters map to themselves.
    local letter_table = {
        a = "\a", b = "\b", f = "\f",
        n = "\n", r = "\r", t = "\t", v = "\v",
        ["\\"] = "\\", ["'"] = "'", ['"'] = '"', ["\n"] = "\n" }
    local function expand_letter (x)
        return letter_table[x] or x
    end

    -- Application order matters: \z first, then numeric forms, then
    -- the letter pass which also resolves remaining "\\" pairs.
    s = s :gsub ("(\\+)(z%s*)", expand_z)                       -- Lua 5.2
    s = s :gsub ("(\\+)([0-9][0-9]?[0-9]?)", expand_decimal)
    s = s :gsub ("(\\+)x([0-9a-fA-F][0-9a-fA-F])", expand_hex)  -- Lua 5.2
    s = s :gsub ("\\(%D)", expand_letter)
    return s
end
+
-- Ordered list of extraction method names tried by [lexer:extract].
-- Order matters: long comments/strings must be attempted before the
-- short variants which would match their opening characters.
lexer.extractors = {
    "extract_long_comment", "extract_short_comment",
    "extract_short_string", "extract_word", "extract_number",
    "extract_long_string", "extract_symbol" }
+
+
+
----------------------------------------------------------------------
-- Really extract the next token from the raw string, updating the
-- read cursor [self.i]. Comments are not returned as tokens: they
-- are accumulated and attached to the lineinfo of the surrounding
-- tokens. Returns an "Eof" token once the source is exhausted.
----------------------------------------------------------------------
function lexer :extract ()
    local attached_comments = { }
    local function gen_token(...)
        local token = M.new_token(...)
        if #attached_comments>0 then -- attach previous comments to token
            local comments = M.new_comment(attached_comments)
            token.lineinfo.first.comments = comments
            if self.lineinfo_last_extracted then
                self.lineinfo_last_extracted.comments = comments
            end
            attached_comments = { }
        end
        -- Doubly link consecutive tokens' boundary positions through
        -- their [facing] fields.
        -- NOTE(review): assumes [self.lineinfo_last_extracted] was
        -- initialized by the stream constructor (not visible here).
        token.lineinfo.first.facing = self.lineinfo_last_extracted
        self.lineinfo_last_extracted.facing = assert(token.lineinfo.first)
        self.lineinfo_last_extracted = assert(token.lineinfo.last)
        return token
    end
    while true do -- loop until a non-comment token is found

        -- skip whitespaces
        self.i = self.src:match (self.patterns.spaces, self.i)
        if self.i>#self.src then
            local fli = self.posfact :get_position (#self.src+1)
            local lli = self.posfact :get_position (#self.src+1) -- ok?
            local tok = gen_token("Eof", "eof", M.new_lineinfo(fli, lli))
            tok.lineinfo.last.facing = lli
            return tok
        end
        local i_first = self.i -- offset of the position after whitespaces

        -- try every extractor until a token is found
        for _, extractor in ipairs(self.extractors) do
            local tag, content, xtra = self [extractor] (self)
            if tag then
                local fli = self.posfact :get_position (i_first)
                local lli = self.posfact :get_position (self.i-1)
                local lineinfo = M.new_lineinfo(fli, lli)
                if tag=='Comment' then
                    -- [xtra] is the long-comment "=" count; nil means short.
                    local prev_comment = attached_comments[#attached_comments]
                    if not xtra -- new comment is short
                    and prev_comment and not prev_comment[2] -- prev comment is short
                    and prev_comment.lineinfo.last.line+1==fli.line then -- adjacent lines
                        -- concat with previous comment
                        prev_comment[1] = prev_comment[1].."\n"..content -- TODO quadratic, BAD!
                        prev_comment.lineinfo.last = lli
                    else -- accumulate comment
                        local comment = M.new_comment_line(content, lineinfo, xtra)
                        table.insert(attached_comments, comment)
                    end
                    break -- back to skipping spaces
                else -- not a comment: real token, then
                    return gen_token(tag, content, lineinfo)
                end -- if token is a comment
            end -- if token found
        end -- for each extractor
    end -- while token is a comment
end -- :extract()
+
+
+
+
+----------------------------------------------------------------------
+-- Extract a short comment.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Try to read a short (single-line) comment at the current position.
-- On success, advance the cursor and return tag 'Comment', the text,
-- and nil (nil marks it as a short comment).
----------------------------------------------------------------------
function lexer :extract_short_comment()
    -- TODO: handle final_short_comment
    local content, after = self.src :match (self.patterns.short_comment, self.i)
    if not content then return nil end
    self.i = after
    return 'Comment', content, nil
end
+
+----------------------------------------------------------------------
+-- Extract a long comment.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Try to read a long --[==[ ... ]==] comment at the current position.
-- The third result is the number of '=' signs in the bracket level.
----------------------------------------------------------------------
function lexer :extract_long_comment()
    local equals, content, after =
        self.src :match (self.patterns.long_comment, self.i)
    if not after then return nil end
    self.i = after
    return "Comment", content, #equals
end
+
+----------------------------------------------------------------------
+-- Extract a '...' or "..." short string.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Extract a '...' or "..." short string.
----------------------------------------------------------------------
function lexer :extract_short_string()
    local k = self.src :sub (self.i,self.i) -- first char
    if k~=[[']] and k~=[["]] then return end -- no match
    local i = self.i + 1
    local j = i
    -- Scan for the matching closing quote, jumping over escape
    -- sequences; an unescaped CR/LF (or end-of-source) means the
    -- string is unterminated.
    while true do
        local x,y; x, j, y = self.src :match ("([\\\r\n"..k.."])()(.?)", j) -- next interesting char
        if x == '\\' then
            if y == 'z' then -- Lua 5.2 \z
                -- \z skips the following run of whitespace.
                j = self.src :match ("^%s*()", j+1)
            else
                j=j+1 -- escaped char
            end
        elseif x == k then break -- end of string
        else
            assert (not x or x=='\r' or x=='\n')
            return nil, 'Unterminated string'
        end
    end
    self.i = j

    -- j now points just past the closing quote: strip both quotes and
    -- decode the escape sequences.
    return 'String', unescape_string (self.src :sub (i,j-2))
end
+
+----------------------------------------------------------------------
+-- Extract Id or Keyword.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Try to read an identifier or a reserved word at the current
-- position. Words registered in `self.alpha` are keywords.
----------------------------------------------------------------------
function lexer :extract_word()
    local word, after = self.src :match (self.patterns.word, self.i)
    if not word then return nil end
    self.i = after
    if self.alpha [word] then return 'Keyword', word end
    return 'Id', word
end
+
+----------------------------------------------------------------------
+-- Extract Number.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Extract Number.
----------------------------------------------------------------------
function lexer :extract_number()
    -- Hexadecimal literals have their own mantissa/exponent patterns
    -- (Lua 5.2 floating hex); otherwise fall back to decimal ones.
    -- Each pattern returns the offset just past what it matched.
    local j = self.src:match(self.patterns.number_hex, self.i)
    if j then
        j = self.src:match (self.patterns.number_mantissa_hex[1], j) or
            self.src:match (self.patterns.number_mantissa_hex[2], j)
        if j then
            j = self.src:match (self.patterns.number_exponant_hex, j) or j
        end
    else
        j = self.src:match (self.patterns.number_mantissa[1], self.i) or
            self.src:match (self.patterns.number_mantissa[2], self.i)
        if j then
            j = self.src:match (self.patterns.number_exponant, j) or j
        end
    end
    if not j then return end
    -- Number found, interpret with tonumber() and return it
    local str = self.src:sub (self.i, j-1)
    -- :TODO: tonumber on Lua5.2 floating hex may or may not work on Lua5.1
    local n = tonumber (str)
    if not n then error(str.." is not a valid number according to tonumber()") end
    self.i = j
    return 'Number', n
end
+
+----------------------------------------------------------------------
+-- Extract long string.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Try to read a [==[ ... ]==] long string at the current position.
----------------------------------------------------------------------
function lexer :extract_long_string()
    local _, content, after = self.src :match (self.patterns.long_string, self.i)
    if not after then return nil end
    self.i = after
    return 'String', content
end
+
+----------------------------------------------------------------------
+-- Extract symbol.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Extract a symbol token. Single characters with no registered
-- multi-char symbols are returned directly; otherwise the longest
-- registered symbol matching at the current position wins.
-- FIX: the original iterated the symbol list with pairs() and took
-- the FIRST match (registration order), so e.g. a 2-char symbol
-- registered before a 3-char one would shadow it. Use maximal munch.
----------------------------------------------------------------------
function lexer :extract_symbol()
    local k = self.src:sub (self.i,self.i)
    local symk = self.sym [k] -- multi-char symbols starting with `k`
    if not symk then
        self.i = self.i + 1
        return 'Keyword', k
    end
    -- Pick the longest symbol that matches at the current position.
    local best
    for _, sym in ipairs (symk) do
        if (not best or #sym > #best) and
           sym == self.src:sub (self.i, self.i + #sym - 1) then
            best = sym
        end
    end
    if best then
        self.i = self.i + #best
        return 'Keyword', best
    end
    -- No multi-char symbol matched: fall back to the single char.
    self.i = self.i+1
    return 'Keyword', k
end
+
+----------------------------------------------------------------------
+-- Add a keyword to the list of keywords recognized by the lexer.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Add a keyword to the list of keywords recognized by the lexer.
-- `w` is a string or a list of strings. Alphanumeric keywords go
-- into `self.alpha`; multi-char punctuation symbols are indexed by
-- their first character in `self.sym`; single-char punctuation needs
-- no registration (extract_symbol handles it as a fallback).
----------------------------------------------------------------------
function lexer :add (w, ...)
    assert(not ..., "lexer :add() takes only one arg, although possibly a table")
    if type (w) == "table" then
        for _, x in ipairs (w) do self :add (x) end
    else
        if w:match (self.patterns.word .. "$") then self.alpha [w] = true
        elseif w:match "^%p%p+$" then
            local k = w:sub(1,1)
            local list = self.sym [k]
            if not list then list = { }; self.sym [k] = list end
            table.insert (list, w)
        elseif w:match "^%p$" then return
        else error "Invalid keyword" end
    end
end
+
+----------------------------------------------------------------------
+-- Return the [n]th next token, without consuming it.
+-- [n] defaults to 1. If it goes pass the end of the stream, an EOF
+-- token is returned.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Return the [n]th next token, without consuming it.
-- [n] defaults to 1. If it goes past the end of the stream, an EOF
-- token is returned.
----------------------------------------------------------------------
function lexer :peek (n)
    n = n or 1
    -- Lazily extract tokens until the n-th one is buffered.
    while #self.peeked < n do
        table.insert (self.peeked, self :extract())
    end
    return self.peeked [n]
end
+
+----------------------------------------------------------------------
+-- Return the [n]th next token, removing it as well as the 0..n-1
+-- previous tokens. [n] defaults to 1. If it goes pass the end of the
+-- stream, an EOF token is returned.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Return the [n]th next token, removing it as well as the 0..n-1
-- previous tokens. [n] defaults to 1. If it goes past the end of the
-- stream, an EOF token is returned.
----------------------------------------------------------------------
function lexer :next (n)
    n = n or 1
    self :peek (n) -- make sure enough tokens are buffered
    local tok
    for _ = 1, n do
        tok = table.remove (self.peeked, 1)
        -- TODO: is this used anywhere? I think not. a.lineinfo.last may be nil.
        --self.lastline = a.lineinfo.last.line
    end
    self.lineinfo_last_consumed = tok.lineinfo.last
    return tok
end
+
+----------------------------------------------------------------------
+-- Returns an object which saves the stream's current state.
+----------------------------------------------------------------------
+-- FIXME there are more fields than that to save
-- Snapshot the stream state: cursor position plus a shallow copy of
-- the lookahead buffer. (FIXME above still applies: more fields exist.)
function lexer :save ()
    local peeked_copy = { unpack (self.peeked) }
    return { self.i; peeked_copy }
end
+
+----------------------------------------------------------------------
+-- Restore the stream's state, as saved by method [save].
+----------------------------------------------------------------------
+-- FIXME there are more fields than that to restore
-- Restore a state produced by :save(): cursor offset and lookahead.
function lexer :restore (s)
    self.i = s[1]
    self.peeked = s[2]
end
+
+----------------------------------------------------------------------
+-- Resynchronize: cancel any token in self.peeked, by emptying the
+-- list and resetting the indexes
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Resynchronize: cancel any token in self.peeked, by emptying the
-- list and resetting the indexes
----------------------------------------------------------------------
function lexer :sync()
    local p1 = self.peeked[1]
    if p1 then
        local li_first = p1.lineinfo.first
        -- Rewind to before any comment attached to the first peeked
        -- token, so the comment will be re-extracted next time.
        if li_first.comments then li_first=li_first.comments.lineinfo.first end
        self.i = li_first.offset
        self.column_offset = self.i - li_first.column
        self.peeked = { }
        self.attached_comments = p1.lineinfo.first.comments or { }
    end
end
+
+----------------------------------------------------------------------
+-- Take the source and offset of an old lexer.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Take over the source and cursor of an old lexer: flush both
-- lookahead buffers, then steal the old stream's state fields.
----------------------------------------------------------------------
function lexer :takeover(old)
    self :sync()
    old :sync()
    self.i = old.i
    self.src = old.src
    self.attached_comments = old.attached_comments
    self.posfact = old.posfact
    return self
end
+
+----------------------------------------------------------------------
+-- Return the current position in the sources. This position is between
+-- two tokens, and can be within a space / comment area, and therefore
+-- have a non-null width. :lineinfo_left() returns the beginning of the
+-- separation area, :lineinfo_right() returns the end of that area.
+--
+-- ____ last consummed token ____ first unconsummed token
+-- / /
+-- XXXXX <spaces and comments> YYYYY
+-- \____ \____
+-- :lineinfo_left() :lineinfo_right()
+----------------------------------------------------------------------
-- Right boundary of the current inter-token area: the start of the
-- first token not yet consumed (see diagram above).
function lexer :lineinfo_right()
    local next_token = self :peek(1)
    return next_token.lineinfo.first
end
+
-- Left boundary of the current inter-token area: the end of the last
-- consumed token (see diagram above).
function lexer :lineinfo_left()
    return self.lineinfo_last_consumed
end
+
+----------------------------------------------------------------------
+-- Create a new lexstream.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Create a new lexstream, either from a source string or by taking
-- over an existing stream.
-- @param src_or_stream source string, or an existing stream table
-- @param name          display name of the source (defaults to "?")
----------------------------------------------------------------------
function lexer :newstream (src_or_stream, name)
    name = name or "?"
    if type(src_or_stream)=='table' then -- it's a stream
        return setmetatable ({ }, self) :takeover (src_or_stream)
    elseif type(src_or_stream)=='string' then -- it's a source string
        local src = src_or_stream
        local pos1 = M.new_position(1, 1, 1, name)
        local stream = {
            src_name = name; -- Name of the file
            src = src; -- The source, as a single string
            peeked = { }; -- Already peeked, but not discarded yet, tokens
            i = 1; -- Character offset in src
            attached_comments = { },-- comments accumulator
            lineinfo_last_extracted = pos1,
            lineinfo_last_consumed = pos1,
            posfact = M.new_position_factory (src_or_stream, name)
        }
        setmetatable (stream, self)

        -- Skip initial sharp-bang for Unix scripts
        -- FIXME: redundant with mlp.chunk()
        if src and src :match "^#!" then
            local endofline = src :find "\n"
            -- FIX: when the whole source is the sharp-bang line (no
            -- newline), skip past the end (#src+1); the original used
            -- #src, which left the line's last character to be lexed.
            stream.i = endofline and (endofline + 1) or #src + 1
        end
        return stream
    else
        assert(false, ":newstream() takes a source string or a stream, not a "..
            type(src_or_stream))
    end
end
+
+----------------------------------------------------------------------
+-- If there's no ... args, return the token a (whose truth value is
+-- true) if it's a `Keyword{ }, or nil. If there are ... args, they
+-- have to be strings. if the token a is a keyword, and it's content
+-- is one of the ... args, then returns it (it's truth value is
+-- true). If no a keyword or not in ..., return nil.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- If there's no ... args, return the token a's content (truthy) when
-- it is a `Keyword{ }, or false. With ... args (strings), return the
-- matching word when a is a keyword whose content is one of them,
-- false otherwise.
----------------------------------------------------------------------
function lexer :is_keyword (a, ...)
    if not a or a.tag ~= "Keyword" then return false end
    local nwords = select ('#', ...)
    if nwords == 0 then return a[1] end
    for i = 1, nwords do
        local w = select (i, ...)
        if w == a[1] then return w end
    end
    return false
end
+
+----------------------------------------------------------------------
+-- Cause an error if the next token isn't a keyword whose content
+-- is listed among ... args (which have to be strings).
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Consume the next token; raise an error unless it is a keyword whose
-- content is listed among ... args (which have to be strings).
----------------------------------------------------------------------
function lexer :check (...)
    local expected = { ... }
    local tok = self :next()
    local function complain ()
        error ("Got " .. tostring (tok) ..
               ", expected one of these keywords : '" ..
               table.concat (expected, "', '") .. "'")
    end
    if not tok or tok.tag ~= "Keyword" then complain () end
    if #expected == 0 then return tok[1] end
    for _, w in ipairs (expected) do
        if w == tok[1] then return w end
    end
    complain ()
end
+
+----------------------------------------------------------------------
+--
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Create an independent copy of this lexer: the keyword set and the
-- symbol lists are deep-copied, so the clone can be extended without
-- altering its parent.
----------------------------------------------------------------------
function lexer :clone()
    local copy = { alpha = { }, sym = { } }
    for word in pairs (self.alpha) do copy.alpha[word] = true end
    for initial, symbols in pairs (self.sym) do
        copy.sym[initial] = { unpack (symbols) }
    end
    setmetatable (copy, self)
    copy.__index = copy
    return copy
end
+
+----------------------------------------------------------------------
+-- Cancel everything left in a lexer, all subsequent attempts at
+-- `:peek()` or `:next()` will return `Eof`.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Cancel everything left in a lexer, all subsequent attempts at
-- `:peek()` or `:next()` will return `Eof`.
----------------------------------------------------------------------
function lexer :kill()
    self.i = #self.src+1
    self.peeked = { }
    self.attached_comments = { }
    local eof_pos = self.posfact :get_position (#self.src+1)
    -- FIX: :extract() reads `lineinfo_last_extracted`, not
    -- `lineinfo_last`; set the field it actually uses, and keep the
    -- old one for backward compatibility.
    self.lineinfo_last = eof_pos
    self.lineinfo_last_extracted = eof_pos
end
+
+return M
--- /dev/null
+--------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+--------------------------------------------------------------------------------
+
local M = require "package" -- extend Lua's basic "package" module

-- Module-name prefix under which syntax extensions are looked up.
M.metalua_extension_prefix = 'metalua.extension.'

-- Initialize package.mpath from package.path: each ".lua" suffix in a
-- template entry becomes ".mlua". Can be overridden with $LUA_MPATH.
M.mpath = M.mpath or os.getenv 'LUA_MPATH' or
    (M.path..";") :gsub("%.(lua[:;])", ".m%1") :sub(1, -2)

-- Optional bytecode-cache filename pattern ('?' is replaced with the
-- module name); see metalua_cache_loader below. Set via $LUA_MCACHE.
M.mcache = M.mcache or os.getenv 'LUA_MCACHE'
+
+----------------------------------------------------------------------
+-- resc(k) returns "%"..k if it's a special regular expression char,
+-- or just k if it's normal.
+----------------------------------------------------------------------
-- Set of Lua-pattern magic characters, mapped to their escaped form.
local regexp_magic = { }
local MAGIC_CHARS = "^$()%.[]*+-?"
for i = 1, #MAGIC_CHARS do
    local c = MAGIC_CHARS:sub(i, i)
    regexp_magic[c] = "%" .. c
end

-- Escape `k` with '%' when it is a pattern magic char, else return it.
local function resc(k) return regexp_magic[k] or k end
+
+----------------------------------------------------------------------
+-- Take a Lua module name, return the open file and its name,
+-- or <false> and an error message.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Take a Lua module name, return the open file and its name,
-- or <false> and an error message.
----------------------------------------------------------------------
function M.findfile(name, path_string)
    -- package.config is a newline-separated list of one-char entries:
    -- dir separator, path separator, substitution mark, exec dir mark,
    -- and ignore mark; capture the first five lines.
    local config_regexp = ("([^\n])\n"):rep(5):sub(1, -2)
    local dir_sep, path_sep, path_mark, execdir, igmark =
        M.config :match (config_regexp)
    name = name:gsub ('%.', dir_sep) -- "a.b.c" -> "a/b/c"
    local errors = { }
    local path_pattern = string.format('[^%s]+', resc(path_sep))
    -- Try each template in the path, substituting the module name.
    for path in path_string:gmatch (path_pattern) do
        --printf('path = %s, rpath_mark=%s, name=%s', path, resc(path_mark), name)
        local filename = path:gsub (resc (path_mark), name)
        --printf('filename = %s', filename)
        local file = io.open (filename, 'r')
        if file then return file, filename end
        table.insert(errors, string.format("\tno lua file %q", filename))
    end
    return false, '\n'..table.concat(errors, "\n")..'\n'
end
+
+----------------------------------------------------------------------
+-- Before compiling a metalua source module, try to find and load
+-- a more recent bytecode dump. Requires lfs
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Before compiling a metalua source module, try to find and load
-- a more recent bytecode dump. Requires lfs.
-- @param name         dot-separated module name
-- @param src_filename path of the source file found by findfile()
-- @param src          the source code, as a string
-- @return a function loading the compiled module
----------------------------------------------------------------------
local function metalua_cache_loader(name, src_filename, src)
    local mlc = require 'metalua.compiler'.new()
    local lfs = require 'lfs'
    local dir_sep = M.config:sub(1,1)
    local dst_filename = M.mcache :gsub ('%?', (name:gsub('%.', dir_sep)))
    -- Compare modification dates; a missing file counts as date 0.
    local src_a = lfs.attributes(src_filename)
    local src_date = src_a and src_a.modification or 0
    local dst_a = lfs.attributes(dst_filename)
    local dst_date = dst_a and dst_a.modification or 0
    local delta = dst_date - src_date
    local bytecode, file, msg
    if delta <= 0 then
        -- Cache missing or stale: recompile and write the bytecode
        -- back, creating intermediate directories as needed.
        -- (FIX: removed leftover debug print "NEED TO RECOMPILE".)
        bytecode = mlc :src_to_bytecode (src, name)
        for x in dst_filename :gmatch('()'..dir_sep) do
            lfs.mkdir(dst_filename:sub(1,x))
        end
        file, msg = io.open(dst_filename, 'wb')
        if not file then error(msg) end
        file :write (bytecode)
        file :close()
    else
        -- Cache is up-to-date: load the precompiled bytecode.
        file, msg = io.open(dst_filename, 'rb')
        if not file then error(msg) end
        bytecode = file :read '*a'
        file :close()
    end
    return mlc :bytecode_to_function (bytecode)
end
+
+----------------------------------------------------------------------
+-- Load a metalua source file.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Load a metalua source file: locate it on package.mpath, then either
-- go through the bytecode cache (when M.mcache is set and lfs is
-- available) or compile it directly.
----------------------------------------------------------------------
function M.metalua_loader (name)
    local file, filename_or_msg = M.findfile (name, M.mpath)
    if not file then return filename_or_msg end
    local luastring = file :read '*a'
    file :close()
    local use_cache = M.mcache and pcall(require, 'lfs')
    if use_cache then
        return metalua_cache_loader(name, filename_or_msg, luastring)
    end
    return require 'metalua.compiler'.new() :src_to_function (luastring, name)
end
+
+
+----------------------------------------------------------------------
+-- Placed after lua/luac loader, so precompiled files have
+-- higher precedence.
+----------------------------------------------------------------------
+table.insert(M.loaders, M.metalua_loader)
+
+----------------------------------------------------------------------
+-- Load an extension.
+----------------------------------------------------------------------
----------------------------------------------------------------------
-- Load an extension.
-- NOTE: deliberately a global function (old-style module interface).
-- `mlp.extensions` memoizes loaded extensions, so each extension is
-- applied at most once per parser instance; on a repeat call nothing
-- happens and nil is returned.
----------------------------------------------------------------------
function extension (name, mlp)
    local complete_name = M.metalua_extension_prefix..name
    local extend_func = require (complete_name)
    if not mlp.extensions[complete_name] then
        local ast = extend_func(mlp)
        mlp.extensions[complete_name] = extend_func
        return ast
    end
end
+
+return M
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+----------------------------------------------------------------------
+
+----------------------------------------------------------------------
+----------------------------------------------------------------------
+--
+-- Lua objects pretty-printer
+--
+----------------------------------------------------------------------
+----------------------------------------------------------------------
+
local M = { }

-- Default pretty-printing configuration; callers may pass a table
-- with the same keys as the second argument of M.tostring()/M.print().
M.DEFAULT_CFG = {
    hide_hash = false; -- Print the non-array part of tables?
    metalua_tag = true; -- Use Metalua's backtick syntax sugar?
    fix_indent = nil; -- If a number, number of indentation spaces;
                      -- If false, indent to the previous brace.
    line_max = nil; -- If a number, tries to avoid making lines with
                    -- more than this number of chars.
    initial_indent = 0; -- If a number, starts at this level of indentation
    keywords = { }; -- Set of keywords which must not use Lua's field
                    -- shortcuts {["foo"]=...} -> {foo=...}
}
+
-- An identifier is valid when it is a string shaped like a Lua name
-- and does not collide with one of the configured keywords.
local function valid_id(cfg, x)
    return type(x) == "string"
        and x:match "^[a-zA-Z_][a-zA-Z0-9_]*$" ~= nil
        and not (cfg.keywords and cfg.keywords[x])
end
+
-- Weak-keyed cache, so cached entries don't keep objects alive.
local __tostring_cache = setmetatable({ }, {__mode='k'})

-- Retrieve the string produced by `__tostring` metamethod if present,
-- return `false` otherwise. Cached in `__tostring_cache`.
local function __tostring(x)
    local the_string = __tostring_cache[x]
    if the_string~=nil then return the_string end
    local mt = getmetatable(x)
    if mt then
        local __tostring = mt.__tostring
        if __tostring then
            the_string = __tostring(x)
            __tostring_cache[x] = the_string
            return the_string
        end
    end
    -- Negative results are cached as `false` as well.
    if x~=nil then __tostring_cache[x] = false end -- nil is an illegal key
    return false
end
+
local xlen -- mutually recursive with `xlen_type` (assigned below)

-- Weak-keyed cache of computed printed lengths.
local xlen_cache = setmetatable({ }, {__mode='k'})

-- Helpers for the `xlen` function: printed length per Lua type.
local xlen_type = {
    ["nil"] = function ( ) return 3 end; -- #"nil"
    number = function (x) return #tostring(x) end;
    boolean = function (x) return x and 4 or 5 end; -- #"true" / #"false"
    string = function (x) return #string.format("%q",x) end;
}
+
-- Estimate how many chars table `adt` takes when printed on a single
-- line. `nested` is the set of tables currently being traversed,
-- used to cut circular references.
function xlen_type.table (adt, cfg, nested)
    local custom_string = __tostring(adt)
    if custom_string then return #custom_string end

    -- Circular referenced objects are printed with the plain
    -- `tostring` function in nested positions.
    if nested [adt] then return #tostring(adt) end
    nested [adt] = true

    local has_tag = cfg.metalua_tag and valid_id(cfg, adt.tag)
    local alen = #adt
    local has_arr = alen>0
    local has_hash = false
    local x = 0

    if not cfg.hide_hash then
        -- first pass: count hash-part
        for k, v in pairs(adt) do
            if k=="tag" and has_tag then
                -- this is the tag -> do nothing!
            elseif type(k)=="number" and k<=alen and math.fmod(k,1)==0 and k>0 then
                -- array-part pair -> do nothing!
            else
                has_hash = true
                if valid_id(cfg, k) then x=x+#k
                else x = x + xlen (k, cfg, nested) + 2 end -- count surrounding brackets
                x = x + xlen (v, cfg, nested) + 5 -- count " = " and ", "
            end
        end
    end

    -- FIX: `cfg` was missing from this recursive call; xlen() then
    -- received `nested` as its cfg, saw no `line_max`, and returned 0
    -- for every array element, under-estimating all array lengths.
    for i = 1, alen do x = x + xlen (adt[i], cfg, nested) + 2 end -- count ", "

    nested[adt] = false -- No more nested calls

    if not (has_tag or has_arr or has_hash) then return 3 end
    if has_tag then x=x+#adt.tag+1 end
    if not (has_arr or has_hash) then return x end
    if not has_hash and alen==1 and type(adt[1])~="table" then
        return x-2 -- substract extraneous ", "
    end
    return x+2 -- count "{ " and " }", substract extraneous ", "
end
+
+
+-- Compute the number of chars it would require to display the table
+-- on a single line. Helps to decide whether some carriage returns are
+-- required. Since the size of each sub-table is required many times,
+-- it's cached in [xlen_cache].
-- Compute the number of chars it would require to display the value
-- on a single line; helps decide whether carriage returns are needed.
-- Results for tables are cached in `xlen_cache`.
xlen = function (x, cfg, nested)
    -- Length is only needed when a line-width limit is configured.
    if not cfg.line_max then return 0 end
    nested = nested or { }
    if x == nil then return #"nil" end
    local cached = xlen_cache[x]
    if cached then return cached end
    local handler = xlen_type[type(x)]
    if not handler then return #tostring(x) end
    local len = handler (x, cfg, nested)
    xlen_cache[x] = len
    return len
end
+
-- Insert a line break before printing something `len` chars wide, if
-- a width limit is configured, the line would overflow it, and
-- breaking actually moves the cursor left.
local function consider_newline(p, len)
    local max = p.cfg.line_max
    if not max then return end
    if p.current_offset + len <= max then return end
    if p.indent < p.current_offset then
        p:acc "\n"
        p:acc ((" "):rep(p.indent))
        p.current_offset = p.indent
    end
end
+
local acc_value -- forward declaration, mutually recursive with acc_type

-- Scalar accumulators: how each non-table Lua type is printed.
local acc_type = {
    ["nil"] = function(p) p:acc("nil") end;
    number = function(p, adt) p:acc (tostring (adt)) end;
    -- %q escapes quotes and backslashes; literal newlines in the %q
    -- output are rewritten as the two-char sequence \n.
    string = function(p, adt) p:acc ((string.format ("%q", adt):gsub("\\\n", "\\n"))) end;
    boolean = function(p, adt) p:acc (adt and "true" or "false") end }
+
+-- Indentation:
+-- * if `cfg.fix_indent` is set to a number:
+-- * add this number of space for each level of depth
+-- * return to the line as soon as it flushes things further left
+-- * if not, tabulate to one space after the opening brace.
+-- * as a result, it never saves right-space to return before first element
+
-- Accumulate the printed representation of table `adt` into printer
-- state `p`: optional backtick tag, then hash-part pairs, then
-- array-part items, with braces elided in the `Tag value shorthand.
function acc_type.table(p, adt)
    -- Circular reference in a nested position: print the raw address.
    if p.nested[adt] then p:acc(tostring(adt)); return end
    p.nested[adt] = true

    local has_tag = p.cfg.metalua_tag and valid_id(p.cfg, adt.tag)
    local alen = #adt
    local has_arr = alen>0
    local has_hash = false

    local previous_indent = p.indent

    if has_tag then p:acc("`"); p:acc(adt.tag) end

    -- Push one indentation level (fixed width, or align to the brace).
    local function indent(p)
        if not p.cfg.fix_indent then p.indent = p.current_offset
        else p.indent = p.indent + p.cfg.fix_indent end
    end

    -- First pass: handle hash-part
    if not p.cfg.hide_hash then
        for k, v in pairs(adt) do

            if has_tag and k=='tag' then -- pass the 'tag' field
            elseif type(k)=="number" and k<=alen and k>0 and math.fmod(k,1)==0 then
                -- pass array-part keys (consecutive ints less than `#adt`)
            else -- hash-part keys
                if has_hash then p:acc ", " else -- 1st hash-part pair ever found
                    p:acc "{ "; indent(p)
                end

                -- Determine whether a newline is required
                local is_id, expected_len=valid_id(p.cfg, k)
                if is_id then expected_len=#k+xlen(v, p.cfg, p.nested)+#" = , "
                else expected_len = xlen(k, p.cfg, p.nested)+xlen(v, p.cfg, p.nested)+#"[] = , " end
                consider_newline(p, expected_len)

                -- Print the key
                if is_id then p:acc(k); p:acc " = " else
                    p:acc "["; acc_value (p, k); p:acc "] = "
                end

                acc_value (p, v) -- Print the value
                has_hash = true
            end
        end
    end

    -- Now we know whether there's a hash-part, an array-part, and a tag.
    -- Tag and hash-part are already printed if they're present.
    if not has_tag and not has_hash and not has_arr then p:acc "{ }";
    elseif has_tag and not has_hash and not has_arr then -- nothing, tag already in acc
    else
        assert (has_hash or has_arr) -- special case { } already handled
        local no_brace = false
        if has_hash and has_arr then p:acc ", "
        elseif has_tag and not has_hash and alen==1 and type(adt[1])~="table" then
            -- No brace required; don't print "{", remember not to print "}"
            p:acc (" "); acc_value (p, adt[1]) -- indent= indent+(cfg.fix_indent or 0))
            no_brace = true
        elseif not has_hash then
            -- Braces required, but not opened by hash-part handler yet
            p:acc "{ "; indent(p)
        end

        -- 2nd pass: array-part
        if not no_brace and has_arr then
            local expected_len = xlen(adt[1], p.cfg, p.nested)
            consider_newline(p, expected_len)
            acc_value(p, adt[1]) -- indent+(cfg.fix_indent or 0)
            for i=2, alen do
                p:acc ", ";
                consider_newline(p, xlen(adt[i], p.cfg, p.nested))
                acc_value (p, adt[i]) --indent+(cfg.fix_indent or 0)
            end
        end
        if not no_brace then p:acc " }" end
    end
    p.nested[adt] = false -- No more nested calls
    p.indent = previous_indent
end
+
+
-- Dispatch printing of `v`: a custom __tostring wins, then the
-- per-type accumulator, then plain tostring() as a fallback.
function acc_value(p, v)
    local custom_string = __tostring(v)
    if custom_string then
        p :acc (custom_string)
        return
    end
    local handler = acc_type[type(v)]
    if handler then handler(p, v) else p :acc (tostring(v)) end
end
+
+
-- FIXME: new_indent seems to be always nil — confirm and remove the dead detection code.
+-- FIXME: accumulator function should be configurable,
+-- so that print() doesn't need to bufferize the whole string
+-- before starting to print.
-- Render any Lua value as a string, using the pretty-printer state
-- machine above. `cfg` defaults to M.DEFAULT_CFG.
function M.tostring(t, cfg)
    cfg = cfg or M.DEFAULT_CFG or { }
    -- Printer state: chunks accumulate in `buffer`; `current_offset`
    -- tracks the column, `nested` the tables being traversed.
    local printer = {
        cfg = cfg,
        indent = 0,
        current_offset = cfg.initial_indent or 0,
        buffer = { },
        nested = { },
    }
    function printer:acc(str)
        self.buffer[#self.buffer + 1] = str
        self.current_offset = self.current_offset + #str
    end
    acc_value(printer, t)
    return table.concat(printer.buffer)
end
+
-- Pretty-print a value to stdout; same arguments as M.tostring().
function M.print(...)
    return print (M.tostring (...))
end
-- Like string.format(), but table arguments are rendered with
-- M.tostring() and nil arguments become the string "nil".
function M.sprintf(fmt, ...)
    -- FIX: the original iterated `pairs{...}`, but pairs() never
    -- visits nil values, so the `t=='nil'` branch was dead and nil
    -- arguments (including trailing ones dropped by {...}) reached
    -- string.format() unconverted. Use select('#',...) to walk every
    -- argument position, nils included.
    local n = select('#', ...)
    local args = {...}
    for i = 1, n do
        local t = type(args[i])
        if t=='table' then args[i] = M.tostring(args[i])
        elseif t=='nil' then args[i] = 'nil' end
    end
    return string.format(fmt, unpack(args, 1, n))
end
+
-- printf-style printing; arguments are as for M.sprintf().
function M.printf(...)
    print (M.sprintf (...))
end
+
+return M
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
-- Keep these global:
PRINT_AST = true -- NOTE(review): not referenced in this file's visible code — confirm use elsewhere
LINE_WIDTH = 60  -- max line width when pretty-printing REPL results
PROMPT = "M> "   -- primary prompt
PROMPT2 = ">> "  -- continuation prompt for multi-line input

local pp=require 'metalua.pprint'
local M = { }

-- Shared compiler instance; deliberately global (see "Keep these global").
mlc = require 'metalua.compiler'.new()
+
local readline

do -- set readline() to a line reader, either editline or a default
    -- Try 'editline' first, then 'readline', then fall back to plain
    -- io.read. NOTE(review): the value returned by require 'editline'
    -- is discarded and the global `editline` is used instead — this
    -- only works if the module also sets a global; confirm.
    local status, _ = pcall(require, 'editline')
    if status then
        local rl_handle = editline.init 'metalua'
        readline = |p| rl_handle:read(p) -- metalua short-lambda syntax
    else
        local status, rl = pcall(require, 'readline')
        if status then
            rl.set_options{histfile='~/.metalua_history', keeplines=100, completion=false }
            readline = rl.readline
        else -- neither editline nor readline available
            function readline (p)
                io.write (p)
                io.flush ()
                return io.read '*l'
            end
        end
    end
end
+
-- True-ish when the lexstream has been fully consumed, or when the
-- compiler error message complains about the `Eof token (i.e. the
-- input may just be an unfinished multi-line chunk).
local function reached_eof(lx, msg)
    local at_eof = lx :peek().tag == 'Eof'
    return at_eof or msg :find "token `Eof"
end
+
+
-- Run the interactive Read-Eval-Print loop: read (possibly
-- multi-line) chunks from stdin, compile them with the global `mlc`
-- compiler, execute, and pretty-print the results. Only returns via
-- os.exit() on end-of-input.
function M.run()
    pp.printf ("Metalua, interactive REPLoop.\n"..
               "(c) 2006-2013 <metalua@gmail.com>")
    local lines = { }
    while true do
        local src, lx, ast, f, results, success
        -- Accumulate input lines until a non-empty line is entered.
        repeat
            local line = readline(next(lines) and PROMPT2 or PROMPT)
            if not line then print(); os.exit(0) end -- line==nil iff eof on stdin
            if not next(lines) then
                -- Lua-interpreter-style shortcut: "=expr" -> "return expr"
                line = line:gsub('^%s*=', 'return ')
            end
            table.insert(lines, line)
            src = table.concat (lines, "\n")
        until #line>0
        lx = mlc :src_to_lexstream(src)
        success, ast = pcall(mlc.lexstream_to_ast, mlc, lx)
        if success then
            success, f = pcall(mlc.ast_to_function, mlc, ast, '=stdin')
            if success then
                results = { xpcall(f, debug.traceback) }
                success = table.remove (results, 1)
                if success then
                    -- Success!
                    for _, x in ipairs(results) do
                        pp.print(x, {line_max=LINE_WIDTH, metalua_tag=true})
                    end
                    lines = { }
                else
                    print "Evaluation error:"
                    print (results[1])
                    lines = { }
                end
            else
                print "Can't compile into bytecode:"
                print (f)
                lines = { }
            end
        else
            -- If lx has been read entirely, try to read
            -- another line before failing: the input may simply be an
            -- unfinished multi-line chunk, so keep `lines` intact.
            if not reached_eof(lx, ast) then
                print "Can't compile source into AST:"
                print (ast)
                lines = { }
            end
        end
    end
end
+
+return M
\ No newline at end of file
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
local walk = require 'metalua.treequery.walk'

local M = { }
-- support for old-style modules: also expose the table as the
-- global `treequery`
treequery = M
+
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+--
+-- multimap helper mmap: associate a key to a set of values
+--
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+
-- Add `x` to the set of values associated with `node`; returns false
-- when `node` is nil (and nothing on success, like the original).
local function mmap_add (mmap, node, x)
    if node==nil then return false end
    local set = mmap[node]
    if not set then
        set = { }
        mmap[node] = set
    end
    set[x] = true
end
+
+-- currently unused, I throw the whole set away
-- Remove `x` from the set associated with `node`; drop the whole set
-- when it becomes empty. Returns true on success, false when the
-- entry did not exist.
-- FIX: the original tested `next(set)` BEFORE removing `x`; since
-- `set[x]` is known to be true at that point, `next(set)` was always
-- truthy, so the cleanup branch was unreachable and removing the last
-- element left an empty set in the map.
local function mmap_remove (mmap, node, x)
    local set = mmap[node]
    if not set or not set[x] then return false end
    set[x] = nil
    if next(set) == nil then mmap[node] = nil end
    return true
end
+
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+--
+-- TreeQuery object.
+--
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+
+local ACTIVE_SCOPE = setmetatable({ }, {__mode="k"})
+
+-- treequery metatable
+local Q = { }; Q.__index = Q
+
+--- treequery constructor.
+-- The resulting object will allow to filter and operate on the AST.
+-- @param root the AST to visit
+-- @return a treequery visitor instance
+function M.treequery(root)
+ return setmetatable({
+ root = root,
+ unsatisfied = 0, -- number of positional predicates currently unmet
+ predicates = { }, -- positional predicates registered via add_pos_filter
+ until_up = { }, -- multimap: node -> predicates expiring before going up it
+ from_up = { }, -- multimap: node -> predicates activating after going up it
+ up_f = false, -- user callback run when going up a selected node
+ down_f = false, -- user callback run when going down a selected node
+ filters = { }, -- plain predicates; all must pass for a node to be selected
+ }, Q)
+end
+
+-- helper to share the implementations of positional filters
+-- (after / not_after / under / not_under).
+-- @param position 'under', 'after' or 'under_or_after'
+-- @param inverted true for the "not_" variants
+-- @param inclusive whether the node triggering the predicate is itself selected
+-- @param f predicate function, or a tag name string (then converted with
+--        M.has_tag; extra tag names taken from `...`)
+-- @return self, to allow method chaining
+local function add_pos_filter(self, position, inverted, inclusive, f, ...)
+ if type(f)=='string' then f = M.has_tag(f, ...) end
+ -- non-inverted filters start unsatisfied; inverted ones start satisfied
+ if not inverted then self.unsatisfied += 1 end
+ local x = {
+ pred = f,
+ position = position,
+ satisfied = false,
+ inverted = inverted or false,
+ inclusive = inclusive or false }
+ table.insert(self.predicates, x)
+ return self
+end
+
+--- Set the handler invoked by the walker on unknown (unrecognized) nodes.
+-- Defaults to a lambda returning nil, i.e. silently ignore them.
+-- @param f handler function, or nil/false for the silent default
+-- @return self, to allow method chaining
+function Q :if_unknown(f)
+ self.unknown_handler = f or (||nil)
+ return self
+end
+
+-- TODO: offer an API for inclusive pos_filters
+
+-- In all four selectors below, f may be a predicate function or a tag
+-- name string (plus extra tag names in `...`), as in :filter().
+
+--- select nodes which are after one which satisfies predicate f
+Q.after = |self, f, ...| add_pos_filter(self, 'after', false, false, f, ...)
+--- select nodes which are not after one which satisfies predicate f
+Q.not_after = |self, f, ...| add_pos_filter(self, 'after', true, false, f, ...)
+--- select nodes which are under one which satisfies predicate f
+Q.under = |self, f, ...| add_pos_filter(self, 'under', false, false, f, ...)
+--- select nodes which are not under one which satisfies predicate f
+Q.not_under = |self, f, ...| add_pos_filter(self, 'under', true, false, f, ...)
+
+--- select nodes which satisfy predicate f
+-- @param f predicate function, or a tag name string (converted with has_tag)
+-- @return self, to allow method chaining
+function Q :filter(f, ...)
+ if type(f)=='string' then f = M.has_tag(f, ...) end
+ table.insert(self.filters, f);
+ return self
+end
+
+--- select nodes which do NOT satisfy predicate f
+-- @param f predicate function, or a tag name string (converted with has_tag)
+-- @return self, to allow method chaining
+function Q :filter_not(f, ...)
+ if type(f)=='string' then f = M.has_tag(f, ...) end
+ table.insert(self.filters, |...| not f(...))
+ return self
+end
+
+-- private helper: apply filters and execute up/down callbacks when applicable.
+-- Builds a walker config whose down/up callbacks maintain the positional
+-- predicate bookkeeping (self.unsatisfied plus the until_up/from_up multimaps)
+-- and run the user's down_f/up_f on nodes passing every filter.
+function Q :execute()
+ local cfg = { }
+ -- TODO: optimize away not_under & not_after by pruning the tree
+ function cfg.down(...)
+ --printf ("[down]\t%s\t%s", self.unsatisfied, table.tostring((...)))
+ ACTIVE_SCOPE[...] = cfg.scope
+ local satisfied = self.unsatisfied==0
+ for _, x in ipairs(self.predicates) do
+ if not x.satisfied and x.pred(...) then
+ x.satisfied = true
+ local node, parent = ...
+ local inc = x.inverted and 1 or -1
+ if x.position=='under' then
+ -- satisfied from after we get down this node...
+ self.unsatisfied += inc
+ -- ...until before we get up this node
+ mmap_add(self.until_up, node, x)
+ elseif x.position=='after' then
+ -- satisfied from after we get up this node...
+ mmap_add(self.from_up, node, x)
+ -- ...until before we get up this node's parent
+ mmap_add(self.until_up, parent, x)
+ elseif x.position=='under_or_after' then
+ -- satisfied from after we get down this node...
+ -- FIX: was `self.satisfied += inc`; no such field is ever
+ -- initialized, so this branch crashed with an arithmetic-on-nil
+ -- error as soon as an 'under_or_after' predicate fired.
+ self.unsatisfied += inc
+ -- ...until before we get up this node's parent...
+ mmap_add(self.until_up, parent, x)
+ else
+ error "position not understood"
+ end -- position
+ if x.inclusive then satisfied = self.unsatisfied==0 end
+ end -- predicate passed
+ end -- for predicates
+
+ if satisfied then
+ for _, f in ipairs(self.filters) do
+ if not f(...) then satisfied=false; break end
+ end
+ if satisfied and self.down_f then self.down_f(...) end
+ end
+ end
+
+ function cfg.up(...)
+ --printf ("[up]\t%s", table.tostring((...)))
+
+ -- Remove predicates which are due before we go up this node
+ local preds = self.until_up[...]
+ if preds then
+ for x, _ in pairs(preds) do
+ local inc = x.inverted and -1 or 1
+ self.unsatisfied += inc
+ x.satisfied = false
+ end
+ self.until_up[...] = nil
+ end
+
+ -- Execute the up callback
+ -- TODO: cache the filter passing result from the down callback
+ -- TODO: skip if there's no callback
+ local satisfied = self.unsatisfied==0
+ if satisfied then
+ for _, f in ipairs(self.filters) do
+ -- FIX: filters were invoked as f(self, ...) here, but as f(...)
+ -- in cfg.down; predicates such as has_tag() take the node first,
+ -- so passing the query object made every up-side filter fail.
+ if not f(...) then satisfied=false; break end
+ end
+ if satisfied and self.up_f then self.up_f(...) end
+ end
+
+ -- Set predicate which are due after we go up this node
+ local preds = self.from_up[...]
+ if preds then
+ for p, _ in pairs(preds) do
+ local inc = p.inverted and 1 or -1
+ self.unsatisfied += inc
+ end
+ self.from_up[...] = nil
+ end
+ ACTIVE_SCOPE[...] = nil
+ end
+
+ -- Binder nodes are leaves: go down and immediately back up.
+ function cfg.binder(id_node, ...)
+ --printf(" >>> Binder called on %s, %s", table.tostring(id_node),
+ -- table.tostring{...}:sub(2,-2))
+ cfg.down(id_node, ...)
+ cfg.up(id_node, ...)
+ --printf("down/up on binder done")
+ end
+
+ cfg.unknown = self.unknown_handler
+
+ --function cfg.occurrence (binder, occ)
+ -- if binder then OCC2BIND[occ] = binder[1] end
+ --printf(" >>> %s is an occurrence of %s", occ[1], table.tostring(binder and binder[2]))
+ --end
+
+ --function cfg.binder(...) cfg.down(...); cfg.up(...) end
+ return walk.guess(cfg, self.root)
+end
+
+--- Execute a function on each selected node
+-- @down: function executed when we go down a node, i.e. before its children
+-- have been examined.
+-- @up: function executed when we go up a node, i.e. after its children
+-- have been examined.
+-- At least one of the two callbacks must be provided.
+-- @return whatever :execute() (i.e. walk.guess) returns
+function Q :foreach(down, up)
+ if not up and not down then
+ error "iterator missing"
+ end
+ self.up_f = up
+ self.down_f = down
+ return self :execute()
+end
+
+--- Return the list of nodes selected by a given treequery.
+-- Nodes are accumulated on the way down, i.e. in depth-first prefix order.
+function Q :list()
+ local acc = { }
+ self :foreach(|x| table.insert(acc, x))
+ return acc
+end
+
+--- Return the first matching element
+-- TODO: dirty hack, to implement properly with a 'break' return.
+-- Also, it won't behave correctly if a predicate causes an error,
+-- or if coroutines are involved.
+-- Implementation: abort the traversal by raising an error from the
+-- callback, catch it with pcall, and return the captured arguments
+-- (node followed by its ancestors).
+function Q :first()
+ local result = { }
+ local function f(...) result = {...}; error() end
+ pcall(|| self :foreach(f))
+ return unpack(result)
+end
+
+--- Pretty printer for queries; constant tag, queries carry no printable state.
+function Q :__tostring() return "<treequery>" end
+
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+--
+-- Predicates.
+--
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+
+--- Return a predicate which is true if the tested node's tag is among the
+-- one listed as arguments
+-- @param ... a sequence of tag names
+-- @return a predicate taking a node and returning a truthy value when its
+--         tag matches (single-tag case returns a boolean; multi-tag case
+--         returns the set entry, which is truthy)
+function M.has_tag(...)
+ local args = {...}
+ if #args==1 then
+ -- common single-tag case: direct comparison, no set allocation
+ local tag = ...
+ return (|node| node.tag==tag)
+ --return function(self, node) printf("node %s has_tag %s?", table.tostring(node), tag); return node.tag==tag end
+ else
+ -- general case: O(1) membership test against a set of tag names
+ local tags = { }
+ for _, tag in ipairs(args) do tags[tag]=true end
+ return function(node)
+ local node_tag = node.tag
+ return node_tag and tags[node_tag]
+ end
+ end
+end
+
+--- Predicate to test whether a node represents an expression.
+M.is_expr = M.has_tag('Nil', 'Dots', 'True', 'False', 'Number','String',
+ 'Function', 'Table', 'Op', 'Paren', 'Call', 'Invoke',
+ 'Id', 'Index')
+
+-- helper for is_stat: set of tags which always denote statements
+local STAT_TAGS = { Do=1, Set=1, While=1, Repeat=1, If=1, Fornum=1,
+ Forin=1, Local=1, Localrec=1, Return=1, Break=1 }
+
+--- Predicate to test whether a node represents a statement.
+-- It is context-aware, i.e. it recognizes `Call and `Invoke nodes
+-- used in a statement context as such.
+-- @param node the AST node to test
+-- @param parent the node's parent, or nil when node is the root
+-- @return true iff node is a statement
+function M.is_stat(node, parent)
+ local tag = node.tag
+ if not tag then return false
+ elseif STAT_TAGS[tag] then return true
+ -- `Call / `Invoke are statements only when sitting directly in a block
+ -- (blocks are tag-less list nodes). FIX: guard against a nil parent,
+ -- which the original indexed unconditionally, crashing on root nodes.
+ elseif tag=='Call' or tag=='Invoke' then return parent~=nil and parent.tag==nil
+ else return false end
+end
+
+--- Predicate to test whether a node represents a statements block.
+-- Blocks are the only AST nodes carrying no tag.
+function M.is_block(node) return node.tag==nil end
+
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+--
+-- Variables and scopes.
+--
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+
+-- Tags of nodes whose first child is a list of binder `Id nodes
+local BINDER_PARENT_TAG = {
+ Local=true, Localrec=true, Forin=true, Function=true }
+
+--- Test whether a node is a binder. This is local predicate, although it
+-- might need to inspect the parent node.
+-- @param node the candidate node (must be an `Id to qualify)
+-- @param parent the candidate's parent node; nil means "not a binder"
+-- @return true iff node creates a local variable
+function M.is_binder(node, parent)
+ --printf('is_binder(%s, %s)', table.tostring(node), table.tostring(parent))
+ if node.tag ~= 'Id' or not parent then return false end
+ -- `Fornum binds exactly one variable: its first child
+ if parent.tag=='Fornum' then return parent[1]==node end
+ if not BINDER_PARENT_TAG[parent.tag] then return false end
+ -- other binder parents hold a list of `Id binders as first child
+ for _, binder in ipairs(parent[1]) do
+ if binder==node then return true end
+ end
+ return false
+end
+
+--- Retrieve the binder associated to an occurrence within root.
+-- @param occurrence an Id node representing an occurrence in `root`.
+-- @param root the tree in which `node` and its binder occur.
+-- @return the binder node, and its ancestors up to root if found.
+-- @return nil if node is global (or not an occurrence) in `root`.
+function M.binder(occurrence, root)
+ local cfg, result = { }, { }
+ -- FIX: the walker invokes cfg.occurrence(binder, id_node, ...); the
+ -- previous callback took a single parameter, so the *binder* context was
+ -- compared against `occurrence` and the lookup could never succeed.
+ -- `binder or { }` also keeps the final unpack() safe for global names.
+ function cfg.occurrence(binder, id)
+ if id == occurrence then result = binder or { } end
+ -- TODO: break the walker
+ end
+ walk.guess(cfg, root)
+ return unpack(result)
+end
+
+--- Predicate to filter occurrences of a given binder.
+-- Warning: it relies on internal scope book-keeping,
+-- and for this reason, it only works as query method argument.
+-- It won't work outside of a query.
+-- @param binder the binder whose occurrences must be kept by predicate
+-- @return a predicate
+
+-- function M.is_occurrence_of(binder)
+-- return function(node, ...)
+-- if node.tag ~= 'Id' then return nil end
+-- if M.is_binder(node, ...) then return nil end
+-- local scope = ACTIVE_SCOPE[node]
+-- if not scope then return nil end
+-- local result = scope :get (node[1]) or { }
+-- if result[1] ~= binder then return nil end
+-- return unpack(result)
+-- end
+-- end
+
+--- Predicate to filter occurrences of a given binder (see commented-out
+-- variant above for the original, scope-based implementation).
+-- NOTE(review): the extra arguments `...` (ancestors) are not forwarded to
+-- get_binder, so its internal is_binder check runs without a parent —
+-- confirm binder nodes are meant to count as occurrences of themselves.
+function M.is_occurrence_of(binder)
+ return function(node, ...)
+ local b = M.get_binder(node)
+ return b and b==binder
+ end
+end
+
+--- Retrieve the binder of an `Id occurrence, using the scope bookkeeping
+-- maintained during a query traversal (ACTIVE_SCOPE).
+-- Only meaningful while a query is executing; outside of a traversal the
+-- node has no active scope (NOTE(review): scope is then nil and the :get
+-- call would error — confirm this is only ever called from predicates).
+-- @return the binder node and its ancestors, or nothing for globals/binders
+function M.get_binder(occurrence, ...)
+ if occurrence.tag ~= 'Id' then return nil end
+ if M.is_binder(occurrence, ...) then return nil end
+ local scope = ACTIVE_SCOPE[occurrence]
+ local binder_hierarchy = scope :get(occurrence[1])
+ return unpack (binder_hierarchy or { })
+end
+
+--- Transform a predicate on a node into a predicate on this node's
+-- parent. For instance if p tests whether a node has property P,
+-- then parent(p) tests whether this node's parent has property P.
+-- The ancestor level is precised with n, with 1 being the node itself,
+-- 2 its parent, 3 its grand-parent etc.
+-- @param[optional] n the parent to examine, default=2
+-- @param pred the predicate to transform (or a tag name string)
+-- @return a predicate
+function M.parent(n, pred, ...)
+ -- n is optional: when omitted, the first argument is the predicate
+ if type(n)~='number' then n, pred = 2, n end
+ if type(pred)=='string' then pred = M.has_tag(pred, ...) end
+ return function(self, ...)
+ -- select(n, ...) picks the n-th ancestor from the (node, parent, ...) list
+ return select(n, ...) and pred(self, select(n, ...))
+ end
+end
+
+--- Transform a predicate on a node into a predicate on this node's
+-- n-th child.
+-- @param n the child's index number
+-- @param pred the predicate to transform
+-- @return a predicate (false when the n-th child doesn't exist)
+function M.child(n, pred)
+ return function(node, ...)
+ local child = node[n]
+ -- the child's ancestry is (node, ...), i.e. the tested node and above
+ return child and pred(child, node, ...)
+ end
+end
+
+--- Predicate to test the position of a node in its parent.
+-- The predicate succeeds if the node is the n-th child of its parent,
+-- and a <= n <= b.
+-- nth(a) is equivalent to nth(a, a).
+-- Negative indices are admitted, and count from the last child,
+-- as done for instance by string.sub().
+--
+-- TODO: This is wrong, this tests the table relationship rather than the
+-- AST node relationship.
+-- Must build a getindex helper, based on pattern matching, then build
+-- the predicate around it.
+--
+-- NOTE(review): the returned predicate takes (self, node, parent), unlike
+-- has_tag predicates which take (node, ...) — confirm the intended
+-- calling convention before using it as a :filter() argument.
+--
+-- @param a lower bound
+-- @param b upper bound (defaults to a)
+-- @return a predicate
+function M.is_nth(a, b)
+ b = b or a
+ return function(self, node, parent)
+ if not parent then return false end
+ local nchildren = #parent
+ -- normalize negative/overflowing bounds against the child count
+ local a = a<=0 and nchildren+a+1 or a
+ if a>nchildren then return false end
+ local b = b<=0 and nchildren+b+1 or b>nchildren and nchildren or b
+ -- succeed iff node sits at one of the positions a..b
+ for i=a,b do if parent[i]==node then return true end end
+ return false
+ end
+end
+
+
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+--
+-- Comments parsing.
+--
+-- -----------------------------------------------------------------------------
+-- -----------------------------------------------------------------------------
+
+-- Build an accessor returning the comment text attached to a node's
+-- lineinfo, on the given side ('first' = before the node, 'last' = after).
+-- Multiple comment records are joined with newlines; returns nil when the
+-- node carries no lineinfo or no comments on that side.
+local comment_extractor = |which_side| function (node)
+ local x = node.lineinfo
+ x = x and x[which_side]
+ x = x and x.comments
+ if not x then return nil end
+ local lines = { }
+ for _, record in ipairs(x) do
+ table.insert(lines, record[1])
+ end
+ return table.concat(lines, '\n')
+end
+
+--- Comments appearing immediately before a node
+M.comment_prefix = comment_extractor 'first'
+--- Comments appearing immediately after a node
+M.comment_suffix = comment_extractor 'last'
+
+
+--- Shortcut for the query constructor: calling the module itself,
+-- M(root), is equivalent to M.treequery(root).
+function M :__call(...) return self.treequery(...) end
+setmetatable(M, M)
+
+return M
--- /dev/null
+-------------------------------------------------------------------------------
+-- Copyright (c) 2006-2013 Fabien Fleutot and others.
+--
+-- All rights reserved.
+--
+-- This program and the accompanying materials are made available
+-- under the terms of the Eclipse Public License v1.0 which
+-- accompanies this distribution, and is available at
+-- http://www.eclipse.org/legal/epl-v10.html
+--
+-- This program and the accompanying materials are also made available
+-- under the terms of the MIT public license which accompanies this
+-- distribution, and is available at http://www.lua.org/license.html
+--
+-- Contributors:
+-- Fabien Fleutot - API and implementation
+--
+-------------------------------------------------------------------------------
+
+-- Low level AST traversal library.
+-- This library is a helper for the higher-level treequery library.
+-- It walks through every node of an AST, depth-first, and executes
+-- some callbacks contained in its cfg config table:
+--
+-- * cfg.down(...) is called when it walks down a node, and receive as
+-- parameters the node just entered, followed by its parent, grand-parent
+-- etc. until the root node.
+--
+-- * cfg.up(...) is called when it walks back up a node, and receive as
+-- parameters the node just entered, followed by its parent, grand-parent
+-- etc. until the root node.
+--
+-- * cfg.occurrence(binder, id_node, ...) is called when it visits an `Id{ }
+-- node which isn't a local variable creator. binder is a reference to its
+-- binder with its context. The binder is the `Id{ } node which created
+-- this local variable. By "binder and its context", we mean a list starting
+-- with the `Id{ }, and followed by every ancestor of the binder node, up until
+-- the common root node.
+-- binder is nil if the variable is global.
+-- id_node is followed by its ancestor, up until the root node.
+--
+-- cfg.scope is maintained during the traversal, associating a
+-- variable name to the binder which creates it in the context of the
+-- node currently visited.
+--
+-- walk.traverse.xxx functions are in charge of the recursive descent into
+-- children nodes. They're private helpers.
+--
+-- corresponding walk.xxx functions also take care of calling cfg callbacks.
+
+-{ extension ("match", ...) }
+
+local pp = require 'metalua.pprint'
+
+local M = { traverse = { }; tags = { }; debug = false }
+
+-- Invert a table: values become keys and vice-versa. Used below to turn
+-- lists of tag names into sets supporting O(1) membership tests.
+local function table_transpose(t)
+ local tt = { }; for a, b in pairs(t) do tt[b]=a end; return tt
+end
+
+--------------------------------------------------------------------------------
+-- Standard tags: can be used to guess the type of an AST, or to check
+-- that the type of an AST is respected.
+--------------------------------------------------------------------------------
+M.tags.stat = table_transpose{
+ 'Do', 'Set', 'While', 'Repeat', 'Local', 'Localrec', 'Return',
+ 'Fornum', 'Forin', 'If', 'Break', 'Goto', 'Label',
+ 'Call', 'Invoke' }
+M.tags.expr = table_transpose{
+ 'Paren', 'Call', 'Invoke', 'Index', 'Op', 'Function', 'Stat',
+ 'Table', 'Nil', 'Dots', 'True', 'False', 'Number', 'String', 'Id' }
+
+--------------------------------------------------------------------------------
+-- These [M.traverse.xxx()] functions are in charge of actually going through
+-- ASTs. At each node, they make sure to call the appropriate walker.
+--------------------------------------------------------------------------------
+-- Recursive descent into a statement node: dispatch on the node's tag
+-- and walk every child with the appropriate walker (block, statement,
+-- expression, expression list or binder list), opening/closing variable
+-- scopes where the statement introduces one.
+-- @param cfg walker configuration (callbacks + scope)
+-- @param x the statement node
+-- @param ... x's ancestors, innermost first
+function M.traverse.stat (cfg, x, ...)
+ if M.debug then pp.printf("traverse stat %s", x) end
+ local ancestors = {...}
+ -- Local shortcuts: each walks one child of x, with x prepended to ancestry
+ local B = |y| M.block (cfg, y, x, unpack(ancestors)) -- Block
+ local S = |y| M.stat (cfg, y, x, unpack(ancestors)) -- Statement
+ local E = |y| M.expr (cfg, y, x, unpack(ancestors)) -- Expression
+ local EL = |y| M.expr_list (cfg, y, x, unpack(ancestors)) -- Expression List
+ local IL = |y| M.binder_list (cfg, y, x, unpack(ancestors)) -- Id binders List
+ local OS = || cfg.scope :save() -- Open scope
+ local CS = || cfg.scope :restore() -- Close scope
+
+ match x with
+ | {...} if x.tag == nil -> for _, y in ipairs(x) do M.stat(cfg, y, ...) end
+ -- no tag --> node not inserted in the history ancestors
+ | `Do{...} -> OS(x); for _, y in ipairs(x) do S(y) end; CS(x)
+ | `Set{ lhs, rhs } -> EL(lhs); EL(rhs)
+ | `While{ cond, body } -> E(cond); OS(); B(body); CS()
+ | `Repeat{ body, cond } -> OS(body); B(body); E(cond); CS(body)
+ | `Local{ lhs } -> IL(lhs)
+ | `Local{ lhs, rhs } -> EL(rhs); IL(lhs)
+ | `Localrec{ lhs, rhs } -> IL(lhs); EL(rhs)
+ | `Fornum{ i, a, b, body } -> E(a); E(b); OS(); IL{i}; B(body); CS()
+ | `Fornum{ i, a, b, c, body } -> E(a); E(b); E(c); OS(); IL{i}; B(body); CS()
+ | `Forin{ i, rhs, body } -> EL(rhs); OS(); IL(i); B(body); CS()
+ | `If{...} ->
+ -- alternating (condition, then-block) pairs...
+ for i=1, #x-1, 2 do
+ E(x[i]); OS(); B(x[i+1]); CS()
+ end
+ -- ...plus an optional trailing else-block
+ if #x%2 == 1 then
+ OS(); B(x[#x]); CS()
+ end
+ | `Call{...}|`Invoke{...}|`Return{...} -> EL(x)
+ | `Break | `Goto{ _ } | `Label{ _ } -> -- nothing
+ | { tag=tag, ...} if M.tags.stat[tag]->
+ M.malformed (cfg, x, unpack (ancestors))
+ | _ ->
+ M.unknown (cfg, x, unpack (ancestors))
+ end
+end
+
+-- Recursive descent into an expression node; same dispatch structure as
+-- traverse.stat. `Id nodes reaching this point are variable occurrences
+-- (binders are routed through M.binder_list instead).
+-- @param cfg walker configuration (callbacks + scope)
+-- @param x the expression node
+-- @param ... x's ancestors, innermost first
+function M.traverse.expr (cfg, x, ...)
+ if M.debug then pp.printf("traverse expr %s", x) end
+ local ancestors = {...}
+ -- Local shortcuts: each walks one child of x, with x prepended to ancestry
+ local B = |y| M.block (cfg, y, x, unpack(ancestors)) -- Block
+ local S = |y| M.stat (cfg, y, x, unpack(ancestors)) -- Statement
+ local E = |y| M.expr (cfg, y, x, unpack(ancestors)) -- Expression
+ local EL = |y| M.expr_list (cfg, y, x, unpack(ancestors)) -- Expression List
+ local IL = |y| M.binder_list (cfg, y, x, unpack(ancestors)) -- Id binders list
+ local OS = || cfg.scope :save() -- Open scope
+ local CS = || cfg.scope :restore() -- Close scope
+
+ match x with
+ | `Paren{ e } -> E(e)
+ | `Call{...} | `Invoke{...} -> EL(x)
+ | `Index{ a, b } -> E(a); E(b)
+ | `Op{ opid, ... } -> E(x[2]); if #x==3 then E(x[3]) end
+ | `Function{ params, body } -> OS(body); IL(params); B(body); CS(body)
+ | `Stat{ b, e } -> OS(b); B(b); E(e); CS(b)
+ | `Id{ name } -> M.occurrence(cfg, x, unpack(ancestors))
+ | `Table{ ... } ->
+ for i = 1, #x do match x[i] with
+ | `Pair{ k, v } -> E(k); E(v)
+ | v -> E(v)
+ end end
+ | `Nil|`Dots|`True|`False|`Number{_}|`String{_} -> -- terminal node
+ | { tag=tag, ...} if M.tags.expr[tag]-> M.malformed (cfg, x, unpack (ancestors))
+ | _ -> M.unknown (cfg, x, unpack (ancestors))
+ end
+end
+
+-- Recursive descent into a block: a tag-less list of statements.
+-- A tagged node here is a structural error, reported via M.malformed.
+function M.traverse.block (cfg, x, ...)
+ assert(type(x)=='table', "traverse.block() expects a table")
+ if x.tag then M.malformed(cfg, x, ...)
+ else for _, y in ipairs(x) do M.stat(cfg, y, x, ...) end
+ end
+end
+
+-- Recursive descent into a list of expressions. The list itself is a
+-- plain container, not an AST node:
+-- x doesn't appear in the ancestors
+function M.traverse.expr_list (cfg, x, ...)
+ assert(type(x)=='table', "traverse.expr_list() expects a table")
+ for _, y in ipairs(x) do M.expr(cfg, y, ...) end
+end
+
+-- Report a structurally invalid node: delegate to cfg.malformed or
+-- cfg.error when provided, otherwise raise a Lua error.
+function M.malformed(cfg, x, ...)
+ local f = cfg.malformed or cfg.error
+ if f then f(x, ...) else
+ error ("Malformed node of tag "..(x.tag or '(nil)'))
+ end
+end
+
+-- Report a node whose tag is not recognized: delegate to cfg.unknown or
+-- cfg.error when provided, otherwise raise a Lua error.
+function M.unknown(cfg, x, ...)
+ local f = cfg.unknown or cfg.error
+ if f then f(x, ...) else
+ error ("Unknown node tag "..(x.tag or '(nil)'))
+ end
+end
+
+-- Signal a variable occurrence (a non-binder `Id). The callback receives
+-- the binder context registered in the current scope (nil for globals),
+-- then the occurrence node and its ancestors.
+function M.occurrence(cfg, x, ...)
+ if cfg.occurrence then cfg.occurrence(cfg.scope :get(x[1]), x, ...) end
+end
+
+-- TODO: Is it useful to call each error handling function?
+-- Walk a list of binder `Id nodes: register each variable in the current
+-- scope (bound to the binder node plus its ancestry) and notify cfg.binder.
+-- A trailing `Dots is accepted (vararg function parameter lists); anything
+-- else is routed to the first available error handler.
+function M.binder_list (cfg, id_list, ...)
+ local f = cfg.binder
+ local ferror = cfg.error or cfg.malformed or cfg.unknown
+ for i, id_node in ipairs(id_list) do
+ if id_node.tag == 'Id' then
+ -- record "name -> binder + context" in the active scope
+ cfg.scope :set (id_node[1], { id_node, ... })
+ if f then f(id_node, ...) end
+ elseif i==#id_list and id_node.tag=='Dots' then
+ -- Do nothing, those are valid `Dots
+ elseif ferror then
+ -- Traverse error handling function
+ ferror(id_node, ...)
+ else
+ error("Invalid binders list")
+ end
+ end
+end
+
+----------------------------------------------------------------------
+-- Generic walker generator.
+-- * if `cfg' has an entry matching the tree name, use this entry
+-- * if not, try to use the entry whose name matched the ast kind
+-- * if an entry is a table, look for 'up' and 'down' entries
+-- * if it is a function, consider it as a `down' traverser.
+----------------------------------------------------------------------
+-- @param traverse the name (string) of the M.traverse.* descent function
+-- @return a walker(cfg, node, ...) which lazily creates cfg.scope, calls
+--         cfg.down, recurses unless down returned 'break' (pruning), then
+--         calls cfg.up
+local walker_builder = function(traverse)
+ assert(traverse)
+ return function (cfg, ...)
+ if not cfg.scope then cfg.scope = M.newscope() end
+ local down, up = cfg.down, cfg.up
+ local broken = down and down(...)
+ -- 'break' from the down callback skips the children, not the up callback
+ if broken ~= 'break' then M.traverse[traverse] (cfg, ...) end
+ if up then up(...) end
+ end
+end
+
+----------------------------------------------------------------------
+-- Declare [M.stat], [M.expr], [M.block] and [M.expr_list]
+----------------------------------------------------------------------
+-- NOTE(review): walker_builder only takes one parameter; the second
+-- argument below is ignored (harmless, but confirm it isn't a leftover).
+for _, w in ipairs{ "stat", "expr", "block" } do --, "malformed", "unknown" } do
+ M[w] = walker_builder (w, M.traverse[w])
+end
+
+-- Don't call up/down callbacks on expr lists
+M.expr_list = M.traverse.expr_list
+
+
+----------------------------------------------------------------------
+-- Try to guess the type of the AST then choose the right walker:
+-- a recognized expression tag, then a statement tag, then a tag-less
+-- block; anything else is an error.
+----------------------------------------------------------------------
+function M.guess (cfg, x, ...)
+ assert(type(x)=='table', "arg #2 in a walker must be an AST")
+ if M.tags.expr[x.tag] then return M.expr(cfg, x, ...) end
+ if M.tags.stat[x.tag] then return M.stat(cfg, x, ...) end
+ if not x.tag then return M.block(cfg, x, ...) end
+ error ("Can't guess the AST type from tag "..(x.tag or '<none>'))
+end
+
+-- Scope objects: a stack of name->binder-context maps, mirroring lexical
+-- variable visibility during the traversal.
+local S = { }; S.__index = S
+
+--- Create a fresh scope with a single, empty top-level frame.
+function M.newscope()
+ local instance = { current = { } }
+ instance.stack = { instance.current }
+ setmetatable (instance, S)
+ return instance
+end
+
+--- Open a nested scope: push a shallow copy of the current frame, so
+-- bindings added afterwards disappear on the matching :restore().
+-- NOTE(review): extra arguments are forwarded to self:add(), which is not
+-- defined in this file — confirm it exists before passing arguments.
+function S :save(...)
+ local current_copy = { }
+ for a, b in pairs(self.current) do current_copy[a]=b end
+ table.insert (self.stack, current_copy)
+ if ... then return self :add(...) end
+end
+
+--- Close the innermost scope, discarding bindings added since :save().
+function S :restore() self.current = table.remove (self.stack) end
+--- Look up the binder context registered for a variable name, or nil.
+function S :get (var_name) return self.current[var_name] end
+--- Register (or shadow) a binding in the innermost scope.
+function S :set (key, val) self.current[key] = val end
+
+return M
+++ /dev/null
--- This utility bootstraps the metalua compiler:
--- * The compiler itself is written partly in lua, partly in metalua.
--- * This program uses the lua parts of the compiler to compile the metalua parts.
---
--- Usage: bootstrap output=<resulting file> inputdir=<source directory> <src_1> ... <src_n>
---
-
-
-cfg = { inputs = { } }
-for _, a in ipairs(arg) do
- local var, val = a :match "^(.-)=(.*)"
- if var then cfg[var] = val else table.insert (cfg.inputs, a) end
-end
-
--- metalua.mlc doesn't exist yet; this preload manager loads a mockup which is just
--- sufficient to compile the real mlc.mlua
-package.preload['metalua.mlc'] = function()
-
- print "Loading fake metalua.mlc module for compiler bootstrapping"
-
- mlc = { }
- mlc.metabugs = false
-
- function mlc.function_of_ast (ast)
- local proto = bytecode.metalua_compile (ast)
- local dump = bytecode.dump_string (proto)
- local func = string.undump(dump)
- return func
- end
-
- function mlc.ast_of_luastring (src)
- local lx = mlp.lexer:newstream (src)
- local ast = mlp.chunk (lx)
- return ast
- end
-
- function mlc.function_of_luastring (src)
- local ast = mlc.ast_of_luastring (src)
- local func = mlc.function_of_ast(ast)
- return func
- end
-
- function mlc.function_of_luafile (name)
- local f = io.open(name, 'r')
- local src = f:read '*a'
- f:close()
- return mlc.function_of_luastring (src, "@"..name)
- end
-
- -- don't let require() fork a separate process for *.mlua compilations.
- package.metalua_nopopen = true
-end
-
-require 'verbose_require'
-require 'metalua.base'
-require 'metalua.bytecode'
-require 'metalua.mlp'
-require 'metalua.package2'
-
-local function compile_file (src_filename)
- print("Compiling "..src_filename.."... ")
- local src_file = io.open (src_filename, 'r')
- local src = src_file:read '*a'; src_file:close()
- local ast = mlc.ast_of_luastring (src)
- local proto = bytecode.metalua_compile (ast, '@'..src_filename)
- local dump = bytecode.dump_string (proto)
- local dst_filename = cfg.output or error "no output file name specified"
- local dst_file = io.open (dst_filename, 'wb')
- dst_file:write(dump)
- dst_file:close()
- print("...Wrote "..dst_filename)
-end
-
-if cfg.inputdir then
- local sep = package.config:sub(1,1)
- if not cfg.inputdir :match (sep..'$') then cfg.inputdir = cfg.inputdir..sep end
-else
- cfg.inputdir=""
-end
-
-for _, x in ipairs (cfg.inputs) do compile_file (cfg.inputdir..x) end
-
+++ /dev/null
--- Compile all files called *.mluam in a directory and its sub-directories,
--- into their *.luac counterpart.
---
--- This script is windows-only, Unices have half-decent shell script languages
--- which let you do the same with a find and an xargs.
-
-cfg = { }
-for _, a in ipairs(arg) do
- local var, val = a :match "^(.-)=(.*)"
- if var then cfg[var] = val end
-end
-
-if not cfg.command or not cfg.directory then
- error ("Usage: "..arg[0].." command=<metalua command> directory=<library root>")
-end
-
--- List all files, recursively, from newest to oldest
-local f = io.popen ("dir /S /b /o-D " .. cfg.directory)
-
-local file_seen = { }
-for src in f:lines() do
- file_seen[src] = true
- local base = src:match "^(.+)%.mlua$"
- if base then
- local target = base..".luac"
- if file_seen[target] then
- -- the target file has been listed before the source ==> it's newer
- print ("("..target.." up-to-date)")
- else
- local cmd = cfg.command.." "..src.." -o "..target
- print (cmd)
- os.execute (cmd)
- end
- end
-end
-
-
+++ /dev/null
-include ../config
-
-all: $(LIBRARIES) install metalua
-
-$(PLATFORM): all
-
-LUA_RUN = ../$(LUA_VM_DIR)/$(RUN)
-LUA_COMPILE = ../$(LUA_VM_DIR)/$(COMPILE)
-
-LIBRARIES = \
- bytecode.luac \
- mlp.luac \
- mlc.luac
-
-# Library which compiles an AST into a bytecode string.
-BYTECODE_LUA = \
- lopcodes.lua \
- lcode.lua \
- ldump.lua \
- compile.lua
-
-# Library which compiles source strings into AST
-MLP_LUA = \
- lexer.lua \
- gg.lua \
- mlp_lexer.lua \
- mlp_misc.lua \
- mlp_table.lua \
- mlp_meta.lua \
- mlp_expr.lua \
- mlp_stat.lua \
- mlp_ext.lua
-
-metalua.luac: mlc.luac
-
-bytecode.luac: $(BYTECODE_LUA)
- $(LUA_COMPILE) -o $@ $^
-
-mlp.luac: $(MLP_LUA)
- $(LUA_COMPILE) -o $@ $^
-
-# Plain lua files compilation
-%.luac: %.mlua bootstrap.lua mlp.luac bytecode.luac
- $(LUA_RUN) bootstrap.lua $<
-
-# FIXME what's this?! some old stuff from when metalua files hadn't their own
-# extensions?
-# Metalua files compilation through the bootstrap compiler
-%.luac: %.lua
- $(LUA_COMPILE) -o $@ bootstrap $<
-
-# Compiler/interpreter
-metalua: metalua.luac install-lib
- $(LUA_RUN) metalua.luac --verbose --sharpbang '#!$(TARGET_BIN_PATH)/lua' --output metalua --file metalua.mlua
-
-install-lib: $(LIBRARIES)
- mkdir -p $(TARGET_LUA_PATH)/metalua
- cp $(LIBRARIES) $(TARGET_LUA_PATH)/metalua/
-
-install: install-lib metalua
- mkdir -p $(TARGET_BIN_PATH)
- cp metalua $(TARGET_BIN_PATH)/
-
-.PHONY: all install
-
-clean:
- -rm *.luac metalua
+++ /dev/null
-----------------------------------------------------------------------
---
--- WARNING! You're entering a hackish area, proceed at your own risks!
---
--- This code partly results from the borrowing, then ruthless abuse, of
--- Yueliang's implementation of Lua 5.0 compiler. I claim
--- responsibility for all of the ugly, dirty stuff that you might spot
--- in it.
---
--- Eventually, this code will be rewritten, either in Lua or more
--- probably in C. Meanwhile, if you're interested into digging
--- metalua's sources, this is not the best part to invest your time
--- on.
---
--- End of warning.
---
-----------------------------------------------------------------------
-
-----------------------------------------------------------------------
--- Metalua.
---
--- Summary: Compile ASTs to Lua 5.1 VM function prototype.
--- Largely based on:
---
--- * Yueliang (http://luaforge.net/projects/yueliang),
--- yueliang-0.1.2/orig-5.0.2/lparser.lua
---
--- * Lua 5.1 sources (http://www.lua.org), src/lparser.c
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006-2008, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
-
-module ("bytecode", package.seeall)
-
-local debugf = function() end
---local debugf=printf
-
-local stat = { }
-local expr = { }
-
-MAX_INT = 2147483645 -- INT_MAX-2 for 32-bit systems (llimits.h)
-MAXVARS = 200 -- (llimits.h)
-MAXUPVALUES = 32 -- (llimits.h)
-MAXPARAMS = 100 -- (llimits.h)
-LUA_MAXPARSERLEVEL = 200 -- (llimits.h)
-
--- from lobject.h
-VARARG_HASARG = 1
-VARARG_ISVARARG = 2
-VARARG_NEEDSARG = 4
-
-local function hasmultret (k)
- return k=="VCALL" or k=="VVARARG"
-end
-
------------------------------------------------------------------------
--- Some ASTs take expression lists as children; it should be
--- acceptible to give an expression instead, and to automatically
--- interpret it as a single element list. That's what does this
--- function, adding a surrounding list iff needed.
---
--- WARNING: "Do" is the tag for chunks, which are essentially lists.
--- Therefore, we don't listify stuffs with a "Do" tag.
------------------------------------------------------------------------
-local function ensure_list (ast)
- return ast.tag and ast.tag ~= "Do" and {ast} or ast end
-
------------------------------------------------------------------------
--- Get a localvar structure { varname, startpc, endpc } from a
--- (zero-based) index of active variable. The catch is: don't get
--- confused between local index and active index.
---
--- locvars[x] contains { varname, startpc, endpc };
--- actvar[i] contains the index of the variable in locvars
------------------------------------------------------------------------
-local function getlocvar (fs, i)
- return fs.f.locvars[fs.actvar[i]]
-end
-
-local function removevars (fs, tolevel)
- while fs.nactvar > tolevel do
- fs.nactvar = fs.nactvar - 1
- -- There may be dummy locvars due to expr.Stat
- -- FIXME: strange that they didn't disappear?!
- local locvar = getlocvar (fs, fs.nactvar)
- --printf("[REMOVEVARS] removing var #%i = %s", fs.nactvar,
- -- locvar and tostringv(locvar) or "<nil>")
- if locvar then locvar.endpc = fs.pc end
- end
-end
-
------------------------------------------------------------------------
--- [f] has a list of all its local variables, active and inactive.
--- Some local vars can correspond to the same register, if they exist
--- in different scopes.
--- [fs.nlocvars] is the total number of local variables, not to be
--- confused with [fs.nactvar] the numebr of variables active at the
--- current PC.
--- At this stage, the existence of the variable is not yet aknowledged,
--- since [fs.nactvar] and [fs.freereg] aren't updated.
------------------------------------------------------------------------
-local function registerlocalvar (fs, varname)
- debugf("[locvar: %s = reg %i]", varname, fs.nlocvars)
- local f = fs.f
- f.locvars[fs.nlocvars] = { } -- LocVar
- f.locvars[fs.nlocvars].varname = varname
- local nlocvars = fs.nlocvars
- fs.nlocvars = fs.nlocvars + 1
- return nlocvars
-end
-
------------------------------------------------------------------------
--- update the active vars counter in [fs] by adding [nvars] of them,
--- and sets those variables' [startpc] to the current [fs.pc].
--- These variables were allready created, but not yet counted, by
--- new_localvar.
------------------------------------------------------------------------
-local function adjustlocalvars (fs, nvars)
- --debugf("adjustlocalvars, nvars=%i, previous fs.nactvar=%i,"..
- -- " #locvars=%i, #actvar=%i",
- -- nvars, fs.nactvar, #fs.f.locvars, #fs.actvar)
-
- fs.nactvar = fs.nactvar + nvars
- for i = nvars, 1, -1 do
- --printf ("adjusting actvar #%i", fs.nactvar - i)
- getlocvar (fs, fs.nactvar - i).startpc = fs.pc
- end
-end
-
-------------------------------------------------------------------------
--- check whether, in an assignment to a local variable, the local variable
--- is needed in a previous assignment (to a table). If so, save original
--- local value in a safe place and use this safe copy in the previous
--- assignment.
-------------------------------------------------------------------------
-local function check_conflict (fs, lh, v)
- local extra = fs.freereg -- eventual position to save local variable
- local conflict = false
- while lh do
- if lh.v.k == "VINDEXED" then
- if lh.v.info == v.info then -- conflict?
- conflict = true
- lh.v.info = extra -- previous assignment will use safe copy
- end
- if lh.v.aux == v.info then -- conflict?
- conflict = true
- lh.v.aux = extra -- previous assignment will use safe copy
- end
- end
- lh = lh.prev
- end
- if conflict then
- luaK:codeABC (fs, "OP_MOVE", fs.freereg, v.info, 0) -- make copy
- luaK:reserveregs (fs, 1)
- end
-end
-
------------------------------------------------------------------------
--- Create an expdesc. To be updated when expdesc is lua-ified.
------------------------------------------------------------------------
-local function init_exp (e, k, i)
- e.f, e.t, e.k, e.info = luaK.NO_JUMP, luaK.NO_JUMP, k, i end
-
------------------------------------------------------------------------
--- Reserve the string in tthe constant pool, and return an expdesc
--- referring to it.
------------------------------------------------------------------------
-local function codestring (fs, e, str)
- --printf( "codestring(%s)", disp.ast(str))
- init_exp (e, "VK", luaK:stringK (fs, str))
-end
-
------------------------------------------------------------------------
--- search for a local variable named [name] in the function being
--- built by [fs]. Doesn't try to visit upvalues.
------------------------------------------------------------------------
-local function searchvar (fs, name)
- for i = fs.nactvar - 1, 0, -1 do
- -- Because of expr.Stat, there can be some actvars which don't
- -- correspond to any locvar. Hence the checking for locvar's
- -- nonnilness before getting the varname.
- local locvar = getlocvar(fs, i)
- if locvar and name == locvar.varname then
- --printf("Found local var: %s; i = %i", tostringv(locvar), i)
- return i
- end
- end
- return -1 -- not found
-end
-
------------------------------------------------------------------------
--- create and return a new proto [f]
------------------------------------------------------------------------
-local function newproto ()
- local f = {}
- f.k = {}
- f.sizek = 0
- f.p = {}
- f.sizep = 0
- f.code = {}
- f.sizecode = 0
- f.sizelineinfo = 0
- f.sizeupvalues = 0
- f.nups = 0
- f.upvalues = {}
- f.numparams = 0
- f.is_vararg = 0
- f.maxstacksize = 0
- f.lineinfo = {}
- f.sizelocvars = 0
- f.locvars = {}
- f.lineDefined = 0
- f.source = nil
- return f
-end
-
-------------------------------------------------------------------------
--- create and return a function state [new_fs] as a sub-funcstate of [fs].
-------------------------------------------------------------------------
-local function open_func (old_fs)
- local new_fs = { }
- new_fs.upvalues = { }
- new_fs.actvar = { }
- local f = newproto ()
- new_fs.f = f
- new_fs.prev = old_fs -- linked list of funcstates
- new_fs.pc = 0
- new_fs.lasttarget = -1
- new_fs.jpc = luaK.NO_JUMP
- new_fs.freereg = 0
- new_fs.nk = 0
- new_fs.h = {} -- constant table; was luaH_new call
- new_fs.np = 0
- new_fs.nlocvars = 0
- new_fs.nactvar = 0
- new_fs.bl = nil
- new_fs.nestlevel = old_fs and old_fs.nestlevel or 0
- f.maxstacksize = 2 -- registers 0/1 are always valid
- new_fs.lastline = 0
- new_fs.forward_gotos = { }
- new_fs.labels = { }
- return new_fs
-end
-
-------------------------------------------------------------------------
--- Finish to set up [f] according to final state of [fs]
-------------------------------------------------------------------------
-local function close_func (fs)
- local f = fs.f
- --printf("[CLOSE_FUNC] remove any remaining var")
- removevars (fs, 0)
- luaK:ret (fs, 0, 0)
- f.sizecode = fs.pc
- f.sizelineinfo = fs.pc
- f.sizek = fs.nk
- f.sizep = fs.np
- f.sizelocvars = fs.nlocvars
- f.sizeupvalues = f.nups
- assert (fs.bl == nil)
- if next(fs.forward_gotos) then
- local x = table.tostring(fs.forward_gotos)
- error ("Unresolved goto: "..x)
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function pushclosure(fs, func, v)
- local f = fs.f
- f.p [fs.np] = func.f
- fs.np = fs.np + 1
- init_exp (v, "VRELOCABLE", luaK:codeABx (fs, "OP_CLOSURE", 0, fs.np - 1))
- for i = 0, func.f.nups - 1 do
- local o = (func.upvalues[i].k == "VLOCAL") and "OP_MOVE" or "OP_GETUPVAL"
- luaK:codeABC (fs, o, 0, func.upvalues[i].info, 0)
- end
-end
-
-------------------------------------------------------------------------
--- FIXME: is there a need for f=fs.f? if yes, why not always using it?
-------------------------------------------------------------------------
-function indexupvalue(fs, name, v)
- local f = fs.f
- for i = 0, f.nups - 1 do
- if fs.upvalues[i].k == v.k and fs.upvalues[i].info == v.info then
- assert(fs.f.upvalues[i] == name)
- return i
- end
- end
- -- new one
- f.upvalues[f.nups] = name
- assert (v.k == "VLOCAL" or v.k == "VUPVAL")
- fs.upvalues[f.nups] = { k = v.k; info = v.info }
- local nups = f.nups
- f.nups = f.nups + 1
- return nups
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function markupval(fs, level)
- local bl = fs.bl
- while bl and bl.nactvar > level do bl = bl.previous end
- if bl then bl.upval = true end
-end
-
-
---for debug only
---[[
-local function bldepth(fs)
- local i, x= 1, fs.bl
- while x do i=i+1; x=x.previous end
- return i
-end
---]]
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function enterblock (fs, bl, isbreakable)
- bl.breaklist = luaK.NO_JUMP
- bl.isbreakable = isbreakable
- bl.nactvar = fs.nactvar
- bl.upval = false
- bl.previous = fs.bl
- fs.bl = bl
- assert (fs.freereg == fs.nactvar)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function leaveblock (fs)
- local bl = fs.bl
- fs.bl = bl.previous
- --printf("[LEAVEBLOCK] Removing vars...")
- removevars (fs, bl.nactvar)
- --printf("[LEAVEBLOCK] ...Vars removed")
- if bl.upval then
- luaK:codeABC (fs, "OP_CLOSE", bl.nactvar, 0, 0)
- end
- -- a block either controls scope or breaks (never both)
- assert (not bl.isbreakable or not bl.upval)
- assert (bl.nactvar == fs.nactvar)
- fs.freereg = fs.nactvar -- free registers
- luaK:patchtohere (fs, bl.breaklist)
-end
-
-
-------------------------------------------------------------------------
--- read a list of expressions from a list of ast [astlist]
--- starts at the [offset]th element of the list (defaults to 1)
-------------------------------------------------------------------------
-local function explist(fs, astlist, v, offset)
- offset = offset or 1
- if #astlist < offset then error "I don't handle empty expr lists yet" end
- --printf("[EXPLIST] about to precompile 1st element %s", disp.ast(astlist[offset]))
- expr.expr (fs, astlist[offset], v)
- --printf("[EXPLIST] precompiled first element v=%s", tostringv(v))
- for i = offset+1, #astlist do
- luaK:exp2nextreg (fs, v)
- --printf("[EXPLIST] flushed v=%s", tostringv(v))
- expr.expr (fs, astlist[i], v)
- --printf("[EXPLIST] precompiled element v=%s", tostringv(v))
- end
- return #astlist - offset + 1
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function funcargs (fs, ast, v, idx_from)
- local args = { } -- expdesc
- local nparams
- if #ast < idx_from then args.k = "VVOID" else
- explist(fs, ast, args, idx_from)
- luaK:setmultret(fs, args)
- end
- assert(v.k == "VNONRELOC")
- local base = v.info -- base register for call
- if hasmultret(args.k) then nparams = luaK.LUA_MULTRET else -- open call
- if args.k ~= "VVOID" then
- luaK:exp2nextreg(fs, args) end -- close last argument
- nparams = fs.freereg - (base + 1)
- end
- init_exp(v, "VCALL", luaK:codeABC(fs, "OP_CALL", base, nparams + 1, 2))
- if ast.lineinfo then
- luaK:fixline(fs, ast.lineinfo.first[1])
- else
- luaK:fixline(fs, ast.line)
- end
- fs.freereg = base + 1 -- call remove function and arguments and leaves
- -- (unless changed) one result
-end
-
-------------------------------------------------------------------------
--- calculates log value for encoding the hash portion's size
-------------------------------------------------------------------------
-local function log2(x)
- -- math result is always one more than lua0_log2()
- local mn, ex = math.frexp(x)
- return ex - 1
-end
-
-------------------------------------------------------------------------
--- converts an integer to a "floating point byte", represented as
--- (mmmmmxxx), where the real value is (xxx) * 2^(mmmmm)
-------------------------------------------------------------------------
-
--- local function int2fb(x)
--- local m = 0 -- mantissa
--- while x >= 8 do x = math.floor((x + 1) / 2); m = m + 1 end
--- return m * 8 + x
--- end
-
-local function int2fb(x)
- local e = 0
- while x >= 16 do
- x = math.floor ( (x+1) / 2)
- e = e+1
- end
- if x<8 then return x
- else return (e+1) * 8 + x - 8 end
-end
-
-
-------------------------------------------------------------------------
--- FIXME: to be unified with singlevar
-------------------------------------------------------------------------
-local function singlevaraux(fs, n, var, base)
---[[
-print("\n\nsinglevaraux: fs, n, var, base")
-printv(fs)
-printv(n)
-printv(var)
-printv(base)
-print("\n")
---]]
- if fs == nil then -- no more levels?
- init_exp(var, "VGLOBAL", luaP.NO_REG) -- default is global variable
- return "VGLOBAL"
- else
- local v = searchvar(fs, n) -- look up at current level
- if v >= 0 then
- init_exp(var, "VLOCAL", v)
- if not base then
- markupval(fs, v) -- local will be used as an upval
- end
- else -- not found at current level; try upper one
- if singlevaraux(fs.prev, n, var, false) == "VGLOBAL" then
- return "VGLOBAL" end
- var.info = indexupvalue (fs, n, var)
- var.k = "VUPVAL"
- return "VUPVAL"
- end
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function singlevar(fs, varname, var)
- if singlevaraux(fs, varname, var, true) == "VGLOBAL" then
- var.info = luaK:stringK (fs, varname) end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function new_localvar (fs, name, n)
- assert (type (name) == "string")
- if fs.nactvar + n > MAXVARS then error ("too many local vars") end
- fs.actvar[fs.nactvar + n] = registerlocalvar (fs, name)
- --printf("[NEW_LOCVAR] %i = %s", fs.nactvar+n, name)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function parlist (fs, ast_params)
- local dots = (#ast_params > 0 and ast_params[#ast_params].tag == "Dots")
- local nparams = dots and #ast_params - 1 or #ast_params
- for i = 1, nparams do
- assert (ast_params[i].tag == "Id", "Function parameters must be Ids")
- new_localvar (fs, ast_params[i][1], i-1)
- end
- -- from [code_param]:
- --checklimit (fs, fs.nactvar, self.MAXPARAMS, "parameters")
- fs.f.numparams = fs.nactvar
- fs.f.is_vararg = dots and VARARG_ISVARARG or 0
- adjustlocalvars (fs, nparams)
- fs.f.numparams = fs.nactvar --FIXME vararg must be taken in account
- luaK:reserveregs (fs, fs.nactvar) -- reserve register for parameters
-end
-
-------------------------------------------------------------------------
--- if there's more variables than expressions in an assignment,
--- some assignations to nil are made for extraneous vars.
--- Also handles multiret functions
-------------------------------------------------------------------------
-local function adjust_assign (fs, nvars, nexps, e)
- local extra = nvars - nexps
- if hasmultret (e.k) then
- extra = extra+1 -- includes call itself
- if extra <= 0 then extra = 0 end
- luaK:setreturns(fs, e, extra) -- call provides the difference
- if extra > 1 then luaK:reserveregs(fs, extra-1) end
- else
- if e.k ~= "VVOID" then
- luaK:exp2nextreg(fs, e) end -- close last expression
- if extra > 0 then
- local reg = fs.freereg
- luaK:reserveregs(fs, extra)
- luaK:_nil(fs, reg, extra)
- end
- end
-end
-
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function enterlevel (fs)
- fs.nestlevel = fs.nestlevel + 1
- assert (fs.nestlevel <= LUA_MAXPARSERLEVEL, "too many syntax levels")
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function leavelevel (fs)
- fs.nestlevel = fs.nestlevel - 1
-end
-
-------------------------------------------------------------------------
--- Parse conditions in if/then/else, while, repeat
-------------------------------------------------------------------------
-local function cond (fs, ast)
- local v = { }
- expr.expr(fs, ast, v) -- read condition
- if v.k == "VNIL" then v.k = "VFALSE" end -- 'falses' are all equal here
- luaK:goiftrue (fs, v)
- return v.f
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function chunk (fs, ast)
- enterlevel (fs)
- assert (not ast.tag)
- for i=1, #ast do
- stat.stat (fs, ast[i]);
- fs.freereg = fs.nactvar
- end
- leavelevel (fs)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function block (fs, ast)
- local bl = {}
- enterblock (fs, bl, false)
- for i=1, #ast do
- stat.stat (fs, ast[i])
- fs.freereg = fs.nactvar
- end
- assert (bl.breaklist == luaK.NO_JUMP)
- leaveblock (fs)
-end
-
-------------------------------------------------------------------------
--- Forin / Fornum body parser
--- [fs]
--- [body]
--- [base]
--- [nvars]
--- [isnum]
-------------------------------------------------------------------------
-local function forbody (fs, ast_body, base, nvars, isnum)
- local bl = {} -- BlockCnt
- adjustlocalvars (fs, 3) -- control variables
- local prep =
- isnum and luaK:codeAsBx (fs, "OP_FORPREP", base, luaK.NO_JUMP)
- or luaK:jump (fs)
- enterblock (fs, bl, false) -- loop block
- adjustlocalvars (fs, nvars) -- scope for declared variables
- luaK:reserveregs (fs, nvars)
- block (fs, ast_body)
- leaveblock (fs)
- --luaK:patchtohere (fs, prep-1)
- luaK:patchtohere (fs, prep)
- local endfor =
- isnum and luaK:codeAsBx (fs, "OP_FORLOOP", base, luaK.NO_JUMP)
- or luaK:codeABC (fs, "OP_TFORLOOP", base, 0, nvars)
- luaK:fixline (fs, ast_body.line) -- pretend that 'OP_FOR' starts the loop
- luaK:patchlist (fs, isnum and endfor or luaK:jump(fs), prep + 1)
-end
-
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function recfield (fs, ast, cc)
- local reg = fs.freereg
- local key, val = {}, {} -- expdesc
- --FIXME: expr + exp2val = index -->
- -- check reduncancy between exp2val and exp2rk
- cc.nh = cc.nh + 1
- expr.expr(fs, ast[1], key);
- luaK:exp2val (fs, key)
- local keyreg = luaK:exp2RK (fs, key)
- expr.expr(fs, ast[2], val)
- local valreg = luaK:exp2RK (fs, val)
- luaK:codeABC(fs, "OP_SETTABLE", cc.t.info, keyreg, valreg)
- fs.freereg = reg -- free registers
-end
-
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function listfield(fs, ast, cc)
- expr.expr(fs, ast, cc.v)
- assert (cc.na <= luaP.MAXARG_Bx) -- FIXME check <= or <
- cc.na = cc.na + 1
- cc.tostore = cc.tostore + 1
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-local function closelistfield(fs, cc)
- if cc.v.k == "VVOID" then return end -- there is no list item
- luaK:exp2nextreg(fs, cc.v)
- cc.v.k = "VVOID"
- if cc.tostore == luaP.LFIELDS_PER_FLUSH then
- luaK:setlist (fs, cc.t.info, cc.na, cc.tostore)
- cc.tostore = 0
- end
-end
-
-------------------------------------------------------------------------
--- The last field might be a call to a multireturn function. In that
--- case, we must unfold all of its results into the list.
-------------------------------------------------------------------------
-local function lastlistfield(fs, cc)
- if cc.tostore == 0 then return end
- if hasmultret (cc.v.k) then
- luaK:setmultret(fs, cc.v)
- luaK:setlist (fs, cc.t.info, cc.na, luaK.LUA_MULTRET)
- cc.na = cc.na - 1
- else
- if cc.v.k ~= "VVOID" then luaK:exp2nextreg(fs, cc.v) end
- luaK:setlist (fs, cc.t.info, cc.na, cc.tostore)
- end
-end
-------------------------------------------------------------------------
-------------------------------------------------------------------------
---
--- Statement parsers table
---
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-
-function stat.stat (fs, ast)
- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end
- -- debugf (" - Statement %s", disp.ast (ast) )
-
- if not ast.tag then chunk (fs, ast) else
-
- local parser = stat [ast.tag]
- if not parser then
- error ("A statement cannot have tag `"..ast.tag) end
- parser (fs, ast)
- end
- --debugf (" - /Statement `%s", ast.tag or "<nil>")
- debugf (" - /Statement `%s", ast.tag)
-end
-
-------------------------------------------------------------------------
-
-stat.Do = block
-
-------------------------------------------------------------------------
-
-function stat.Break (fs, ast)
- -- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1]
- local bl, upval = fs.bl, false
- while bl and not bl.isbreakable do
- if bl.upval then upval = true end
- bl = bl.previous
- end
- assert (bl, "no loop to break")
- if upval then luaK:codeABC(fs, "OP_CLOSE", bl.nactvar, 0, 0) end
- bl.breaklist = luaK:concat(fs, bl.breaklist, luaK:jump(fs))
-end
-
-------------------------------------------------------------------------
-
-function stat.Return (fs, ast)
- local e = {} -- expdesc
- local first -- registers with returned values
- local nret = #ast
-
- if nret == 0 then first = 0
- else
- --printf("[RETURN] compiling explist")
- explist (fs, ast, e)
- --printf("[RETURN] explist e=%s", tostringv(e))
- if hasmultret (e.k) then
- luaK:setmultret(fs, e)
- if e.k == "VCALL" and nret == 1 then
- luaP:SET_OPCODE(luaK:getcode(fs, e), "OP_TAILCALL")
- assert(luaP:GETARG_A(luaK:getcode(fs, e)) == fs.nactvar)
- end
- first = fs.nactvar
- nret = luaK.LUA_MULTRET -- return all values
- elseif nret == 1 then
- --printf("[RETURN] 1 val: e=%s", tostringv(e))
- first = luaK:exp2anyreg(fs, e)
- --printf("[RETURN] 1 val in reg %i", first)
- else
- --printf("* Return multiple vals in nextreg %i", fs.freereg)
- luaK:exp2nextreg(fs, e) -- values must go to the 'stack'
- first = fs.nactvar -- return all 'active' values
- assert(nret == fs.freereg - first)
- end
- end
- luaK:ret(fs, first, nret)
-end
-------------------------------------------------------------------------
-
-function stat.Local (fs, ast)
- local names, values = ast[1], ast[2] or { }
- for i = 1, #names do new_localvar (fs, names[i][1], i-1) end
- local e = { }
- if #values == 0 then e.k = "VVOID" else explist (fs, values, e) end
- adjust_assign (fs, #names, #values, e)
- adjustlocalvars (fs, #names)
-end
-
-------------------------------------------------------------------------
-
-function stat.Localrec (fs, ast)
- assert(#ast[1]==1 and #ast[2]==1, "Multiple letrecs not implemented yet")
- local ast_var, ast_val, e_var, e_val = ast[1][1], ast[2][1], { }, { }
- new_localvar (fs, ast_var[1], 0)
- init_exp (e_var, "VLOCAL", fs.freereg)
- luaK:reserveregs (fs, 1)
- adjustlocalvars (fs, 1)
- expr.expr (fs, ast_val, e_val)
- luaK:storevar (fs, e_var, e_val)
- getlocvar (fs, fs.nactvar-1).startpc = fs.pc
-end
-
-------------------------------------------------------------------------
-
-function stat.If (fs, ast)
- local astlen = #ast
- -- Degenerate case #1: no statement
- if astlen==0 then return block(fs, { }) end
- -- Degenerate case #2: only an else statement
- if astlen==1 then return block(fs, ast[1]) end
-
- local function test_then_block (fs, test, body)
- local condexit = cond (fs, test);
- block (fs, body)
- return condexit
- end
-
- local escapelist = luaK.NO_JUMP
-
- local flist = test_then_block (fs, ast[1], ast[2]) -- 'then' statement
- for i = 3, #ast - 1, 2 do -- 'elseif' statement
- escapelist = luaK:concat( fs, escapelist, luaK:jump(fs))
- luaK:patchtohere (fs, flist)
- flist = test_then_block (fs, ast[i], ast[i+1])
- end
- if #ast % 2 == 1 then -- 'else' statement
- escapelist = luaK:concat(fs, escapelist, luaK:jump(fs))
- luaK:patchtohere(fs, flist)
- block (fs, ast[#ast])
- else
- escapelist = luaK:concat(fs, escapelist, flist)
- end
- luaK:patchtohere(fs, escapelist)
-end
-
-------------------------------------------------------------------------
-
-function stat.Forin (fs, ast)
- local vars, vals, body = ast[1], ast[2], ast[3]
- -- imitating forstat:
- local bl = { }
- enterblock (fs, bl, true)
- -- imitating forlist:
- local e, base = { }, fs.freereg
- new_localvar (fs, "(for generator)", 0)
- new_localvar (fs, "(for state)", 1)
- new_localvar (fs, "(for control)", 2)
- for i = 1, #vars do new_localvar (fs, vars[i][1], i+2) end
- explist (fs, vals, e)
- adjust_assign (fs, 3, #vals, e)
- luaK:checkstack (fs, 3)
- forbody (fs, body, base, #vars, false)
- -- back to forstat:
- leaveblock (fs)
-end
-
-------------------------------------------------------------------------
-
-function stat.Fornum (fs, ast)
-
- local function exp1 (ast_e)
- local e = { }
- expr.expr (fs, ast_e, e)
- luaK:exp2nextreg (fs, e)
- end
- -- imitating forstat:
- local bl = { }
- enterblock (fs, bl, true)
- -- imitating fornum:
- local base = fs.freereg
- new_localvar (fs, "(for index)", 0)
- new_localvar (fs, "(for limit)", 1)
- new_localvar (fs, "(for step)", 2)
- new_localvar (fs, ast[1][1], 3)
- exp1 (ast[2]) -- initial value
- exp1 (ast[3]) -- limit
- if #ast == 5 then exp1 (ast[4]) else -- default step = 1
- luaK:codeABx(fs, "OP_LOADK", fs.freereg, luaK:numberK(fs, 1))
- luaK:reserveregs(fs, 1)
- end
- forbody (fs, ast[#ast], base, 1, true)
- -- back to forstat:
- leaveblock (fs)
-end
-
-------------------------------------------------------------------------
-function stat.Repeat (fs, ast)
- local repeat_init = luaK:getlabel (fs)
- local bl1, bl2 = { }, { }
- enterblock (fs, bl1, true)
- enterblock (fs, bl2, false)
- chunk (fs, ast[1])
- local condexit = cond (fs, ast[2])
- if not bl2.upval then
- leaveblock (fs)
- luaK:patchlist (fs, condexit, repeat_init)
- else
- stat.Break (fs)
- luaK:patchtohere (fs, condexit)
- leaveblock (fs)
- luaK:patchlist (fs, luaK:jump (fs), repeat_init)
- end
- leaveblock (fs)
-end
-
-------------------------------------------------------------------------
-
-function stat.While (fs, ast)
- local whileinit = luaK:getlabel (fs)
- local condexit = cond (fs, ast[1])
- local bl = { }
- enterblock (fs, bl, true)
- block (fs, ast[2])
- luaK:patchlist (fs, luaK:jump (fs), whileinit)
- leaveblock (fs)
- luaK:patchtohere (fs, condexit);
-end
-
-------------------------------------------------------------------------
-
--- FIXME: it's cumbersome to write this in this semi-recursive way.
-function stat.Set (fs, ast)
- local ast_lhs, ast_vals, e = ast[1], ast[2], { }
-
- --print "\n\nSet ast_lhs ast_vals:"
- --print(disp.ast(ast_lhs))
- --print(disp.ast(ast_vals))
-
- local function let_aux (lhs, nvars)
- local legal = { VLOCAL=1, VUPVAL=1, VGLOBAL=1, VINDEXED=1 }
- --printv(lhs)
- if not legal [lhs.v.k] then
- error ("Bad lhs expr: "..table.tostring(ast_lhs))
- end
- if nvars < #ast_lhs then -- this is not the last lhs
- local nv = { v = { }, prev = lhs }
- expr.expr (fs, ast_lhs [nvars+1], nv.v)
- if nv.v.k == "VLOCAL" then check_conflict (fs, lhs, nv.v) end
- let_aux (nv, nvars+1)
- else -- this IS the last lhs
- explist (fs, ast_vals, e)
- if #ast_vals < nvars then
- adjust_assign (fs, nvars, #ast_vals, e)
- elseif #ast_vals > nvars then
- adjust_assign (fs, nvars, #ast_vals, e)
- fs.freereg = fs.freereg - #ast_vals + nvars
- else -- #ast_vals == nvars (and we're at last lhs)
- luaK:setoneret (fs, e) -- close last expression
- luaK:storevar (fs, lhs.v, e)
- return -- avoid default
- end
- end
- init_exp (e, "VNONRELOC", fs.freereg - 1) -- default assignment
- luaK:storevar (fs, lhs.v, e)
- end
-
- local lhs = { v = { }, prev = nil }
- expr.expr (fs, ast_lhs[1], lhs.v)
- let_aux( lhs, 1)
-end
-
-------------------------------------------------------------------------
-
-function stat.Call (fs, ast)
- local v = { }
- expr.Call (fs, ast, v)
- luaP:SETARG_C (luaK:getcode(fs, v), 1)
-end
-
-------------------------------------------------------------------------
-
-function stat.Invoke (fs, ast)
- local v = { }
- expr.Invoke (fs, ast, v)
- --FIXME: didn't check that, just copied from stat.Call
- luaP:SETARG_C (luaK:getcode(fs, v), 1)
-end
-
-
-local function patch_goto (fs, src, dst)
-
-end
-
-
-------------------------------------------------------------------------
--- Goto/Label data:
--- fs.labels :: string => { nactvar :: int; pc :: int }
--- fs.forward_gotos :: string => list(int)
---
--- fs.labels goes from label ids to the number of active variables at
--- the label's PC, and that PC
---
--- fs.forward_gotos goes from label ids to the list of the PC where
--- some goto wants to jump to this label. Since gotos are actually made
--- up of two instructions OP_CLOSE and OP_JMP, it's the first instruction's
--- PC that's stored in fs.forward_gotos
---
--- Note that backward gotos aren't stored: since their destination is knowns
--- when they're compiled, their target is directly set.
-------------------------------------------------------------------------
-
-------------------------------------------------------------------------
--- Set a Label to jump to with Goto
-------------------------------------------------------------------------
-function stat.Label (fs, ast)
- local label_id = ast[1]
- if type(label_id)=='table' then label_id=label_id[1] end
- -- printf("Label %s at PC %i", label_id, fs.pc)
- -------------------------------------------------------------------
- -- Register the label, so that future gotos can use it.
- -------------------------------------------------------------------
- if fs.labels [label_id] then error "Duplicate label in function"
- else fs.labels [label_id] = { pc = fs.pc; nactvar = fs.nactvar } end
- local gotos = fs.forward_gotos [label_id]
- if gotos then
- ----------------------------------------------------------------
- -- Patch forward gotos which were targetting this label.
- ----------------------------------------------------------------
- for _, goto_pc in ipairs(gotos) do
- local close_instr = fs.f.code[goto_pc]
- local jmp_instr = fs.f.code[goto_pc+1]
- local goto_nactvar = luaP:GETARG_A (close_instr)
- if fs.nactvar < goto_nactvar then
- luaP:SETARG_A (close_instr, fs.nactvar) end
- luaP:SETARG_sBx (jmp_instr, fs.pc - goto_pc - 2)
- end
- ----------------------------------------------------------------
- -- Gotos are patched, they can be forgotten about (when the
- -- function will be finished, it will be checked that all gotos
- -- have been patched, by checking that forward_goto is empty).
- ----------------------------------------------------------------
- fs.forward_gotos[label_id] = nil
- end
-end
-
-------------------------------------------------------------------------
--- jumps to a label set with stat.Label.
--- Argument must be a String or an Id
--- FIXME/optim: get rid of useless OP_CLOSE when nactvar doesn't change.
--- Thsi must be done both here for backward gotos, and in
--- stat.Label for forward gotos.
-------------------------------------------------------------------------
-function stat.Goto (fs, ast)
- local label_id = ast[1]
- if type(label_id)=='table' then label_id=label_id[1] end
- -- printf("Goto %s at PC %i", label_id, fs.pc)
- local label = fs.labels[label_id]
- if label then
- ----------------------------------------------------------------
- -- Backward goto: the label already exists, so I can get its
- -- nactvar and address directly. nactvar is used to close
- -- upvalues if we get out of scoping blocks by jumping.
- ----------------------------------------------------------------
- if fs.nactvar > label.nactvar then
- luaK:codeABC (fs, "OP_CLOSE", label.nactvar, 0, 0) end
- local offset = label.pc - fs.pc - 1
- luaK:codeAsBx (fs, "OP_JMP", 0, offset)
- else
- ----------------------------------------------------------------
- -- Forward goto: will be patched when the matching label is
- -- found, forward_gotos[label_id] keeps the PC of the CLOSE
- -- instruction just before the JMP. [stat.Label] will use it to
- -- patch the OP_CLOSE and the OP_JMP.
- ----------------------------------------------------------------
- if not fs.forward_gotos[label_id] then
- fs.forward_gotos[label_id] = { } end
- table.insert (fs.forward_gotos[label_id], fs.pc)
- luaK:codeABC (fs, "OP_CLOSE", fs.nactvar, 0, 0)
- luaK:codeAsBx (fs, "OP_JMP", 0, luaK.NO_JUMP)
- end
-end
-
-------------------------------------------------------------------------
-------------------------------------------------------------------------
---
--- Expression parsers table
---
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-
-function expr.expr (fs, ast, v)
- if type(ast) ~= "table" then
- error ("Expr AST expected, got "..table.tostring(ast)) end
-
- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end
-
- --debugf (" - Expression %s", tostringv (ast))
- local parser = expr[ast.tag]
- if parser then parser (fs, ast, v)
- elseif not ast.tag then
- error ("No tag in expression "..table.tostring(ast, 'nohash', 80))
- else
- error ("No parser for node `"..ast.tag) end
- debugf (" - /`%s", ast.tag)
-end
-
-------------------------------------------------------------------------
-
-function expr.Nil (fs, ast, v) init_exp (v, "VNIL", 0) end
-function expr.True (fs, ast, v) init_exp (v, "VTRUE", 0) end
-function expr.False (fs, ast, v) init_exp (v, "VFALSE", 0) end
-function expr.String (fs, ast, v) codestring (fs, v, ast[1]) end
-function expr.Number (fs, ast, v)
- init_exp (v, "VKNUM", 0)
- v.nval = ast[1]
-end
-
-function expr.Paren (fs, ast, v)
- expr.expr (fs, ast[1], v)
- luaK:setoneret (fs, v)
-end
-
-function expr.Dots (fs, ast, v)
- assert (fs.f.is_vararg ~= 0, "No vararg in this function")
- -- NEEDSARG flag is set if and only if the function is a vararg,
- -- but no vararg has been used yet in its code.
- if fs.f.is_vararg < VARARG_NEEDSARG then
- fs.f.is_varag = fs.f.is_vararg - VARARG_NEEDSARG end
- init_exp (v, "VVARARG", luaK:codeABC (fs, "OP_VARARG", 0, 1, 0))
-end
-
-------------------------------------------------------------------------
-
-function expr.Table (fs, ast, v)
- local pc = luaK:codeABC(fs, "OP_NEWTABLE", 0, 0, 0)
- local cc = { v = { } , na = 0, nh = 0, tostore = 0, t = v } -- ConsControl
- init_exp (v, "VRELOCABLE", pc)
- init_exp (cc.v, "VVOID", 0) -- no value (yet)
- luaK:exp2nextreg (fs, v) -- fix it at stack top (for gc)
- for i = 1, #ast do
- assert(cc.v.k == "VVOID" or cc.tostore > 0)
- closelistfield(fs, cc);
- (ast[i].tag == "Pair" and recfield or listfield) (fs, ast[i], cc)
- end
- lastlistfield(fs, cc)
-
- -- Configure [OP_NEWTABLE] dimensions
- luaP:SETARG_B(fs.f.code[pc], int2fb(cc.na)) -- set initial array size
- luaP:SETARG_C(fs.f.code[pc], int2fb(cc.nh)) -- set initial table size
- --printv(fs.f.code[pc])
-end
-
-
-------------------------------------------------------------------------
-
-function expr.Function (fs, ast, v)
- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end
-
- local new_fs = open_func(fs)
- if ast.lineinfo then
- new_fs.f.lineDefined, new_fs.f.lastLineDefined =
- ast.lineinfo.first[1], ast.lineinfo.last[1]
- end
- parlist (new_fs, ast[1])
- chunk (new_fs, ast[2])
- close_func (new_fs)
- pushclosure(fs, new_fs, v)
-end
-
-------------------------------------------------------------------------
-
-function expr.Op (fs, ast, v)
- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end
- local op = ast[1]
-
- if #ast == 2 then
- expr.expr (fs, ast[2], v)
- luaK:prefix (fs, op, v)
- elseif #ast == 3 then
- local v2 = { }
- expr.expr (fs, ast[2], v)
- luaK:infix (fs, op, v)
- expr.expr (fs, ast[3], v2)
- luaK:posfix (fs, op, v, v2)
- else
- error "Wrong arg number"
- end
-end
-
-------------------------------------------------------------------------
-
-function expr.Call (fs, ast, v)
- expr.expr (fs, ast[1], v)
- luaK:exp2nextreg (fs, v)
- funcargs(fs, ast, v, 2)
- --debugf("after expr.Call: %s, %s", v.k, luaP.opnames[luaK:getcode(fs, v).OP])
-end
-
-------------------------------------------------------------------------
--- `Invoke{ table key args }
-function expr.Invoke (fs, ast, v)
- expr.expr (fs, ast[1], v)
- luaK:dischargevars (fs, v)
- local key = { }
- codestring (fs, key, ast[2][1])
- luaK:_self (fs, v, key)
- funcargs (fs, ast, v, 3)
-end
-
-------------------------------------------------------------------------
-
-function expr.Index (fs, ast, v)
- if #ast ~= 2 then
- print"\n\nBAD INDEX AST:"
- table.print(ast)
- error "generalized indexes not implemented" end
-
- if ast.lineinfo then fs.lastline = ast.lineinfo.last[1] end
-
- --assert(fs.lastline ~= 0, ast.tag)
-
- expr.expr (fs, ast[1], v)
- luaK:exp2anyreg (fs, v)
-
- local k = { }
- expr.expr (fs, ast[2], k)
- luaK:exp2val (fs, k)
- luaK:indexed (fs, v, k)
-end
-
-------------------------------------------------------------------------
-
-function expr.Id (fs, ast, v)
- assert (ast.tag == "Id")
- singlevar (fs, ast[1], v)
-end
-
-------------------------------------------------------------------------
-
-function expr.Stat (fs, ast, v)
- --printf(" * Stat: %i actvars, first freereg is %i", fs.nactvar, fs.freereg)
- --printf(" actvars: %s", table.tostring(fs.actvar))
-
- -- Protect temporary stack values by pretending they are local
- -- variables. Local vars are in registers 0 ... fs.nactvar-1,
- -- and temporary unnamed variables in fs.nactvar ... fs.freereg-1
- local save_nactvar = fs.nactvar
-
- -- Eventually, the result should go on top of stack *after all
- -- `Stat{ } related computation and string usage is over. The index
- -- of this destination register is kept here:
- local dest_reg = fs.freereg
-
- -- There might be variables in actvar whose register is > nactvar,
- -- and therefore will not be protected by the "nactvar := freereg"
- -- trick. Indeed, `Local only increases nactvar after the variable
- -- content has been computed. Therefore, in
- -- "local foo = -{`Stat{...}}", variable foo will be messed up by
- -- the compilation of `Stat.
- -- FIX: save the active variables at indices >= nactvar in
- -- save_actvar, and restore them after `Stat has been computer.
- --
- -- I use a while rather than for loops and length operators because
- -- fs.actvar is a 0-based array...
- local save_actvar = { } do
- local i = fs.nactvar
- while true do
- local v = fs.actvar[i]
- if not v then break end
- --printf("save hald-baked actvar %s at index %i", table.tostring(v), i)
- save_actvar[i] = v
- i=i+1
- end
- end
-
- fs.nactvar = fs.freereg -- Now temp unnamed registers are protected
- enterblock (fs, { }, false)
- chunk (fs, ast[1])
- expr.expr (fs, ast[2], v)
- luaK:exp2nextreg (fs, v)
- leaveblock (fs)
- luaK:exp2reg (fs, v, dest_reg)
-
- -- Reserve the newly allocated stack level
- -- Puzzled note: here was written "fs.freereg = fs.freereg+1".
- -- I'm pretty sure it should rather be dest_reg+1, but maybe
- -- both are equivalent?
- fs.freereg = dest_reg+1
-
- -- Restore nactvar, so that intermediate stacked value stop
- -- being protected.
- --printf(" nactvar back from %i to %i", fs.nactvar, save_nactvar)
- fs.nactvar = save_nactvar
-
- -- restore messed-up unregistered local vars
- for i, j in pairs(save_actvar) do
- --printf(" Restoring actvar %i", i)
- fs.actvar[i] = j
- end
- --printf(" * End of Stat")
-end
-
-
-
-------------------------------------------------------------------------
--- Main function: ast --> proto
-------------------------------------------------------------------------
-function metalua_compile (ast, source)
- local fs = open_func (nil)
- fs.f.is_vararg = VARARG_ISVARARG
- chunk (fs, ast)
- close_func (fs)
- assert (fs.prev == nil)
- assert (fs.f.nups == 0)
- assert (fs.nestlevel == 0)
- if source then fs.f.source = source end
- return fs.f
-end
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua.
---
--- Summary: parser generator. Collection of higher order functors,
--- which allow to build and combine parsers. Relies on a lexer
--- that supports the same API as the one exposed in mll.lua.
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006-2008, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
-
---------------------------------------------------------------------------------
---
--- Exported API:
---
--- Parser generators:
--- * [gg.sequence()]
--- * [gg.multisequence()]
--- * [gg.expr()]
--- * [gg.list()]
--- * [gg.onkeyword()]
--- * [gg.optkeyword()]
---
--- Other functions:
--- * [gg.parse_error()]
--- * [gg.make_parser()]
--- * [gg.is_parser()]
---
---------------------------------------------------------------------------------
-
-module("gg", package.seeall)
-
--------------------------------------------------------------------------------
--- parser metatable, which maps __call to method parse, and adds some
--- error tracing boilerplate.
--------------------------------------------------------------------------------
-local parser_metatable = { }
-function parser_metatable.__call (parser, lx, ...)
- --printf ("Call parser %q of type %q", parser.name or "?", parser.kind)
- if mlc.metabugs then
- return parser:parse (lx, ...)
- --local x = parser:parse (lx, ...)
- --printf ("Result of parser %q: %s",
- -- parser.name or "?",
- -- _G.table.tostring(x, "nohash", 80))
- --return x
- else
- local li = lx:lineinfo_right() or { "?", "?", "?", "?" }
- local status, ast = pcall (parser.parse, parser, lx, ...)
- if status then return ast else
- -- Try to replace the gg.lua location, in the error msg, with
- -- the place where the current parser started handling the
- -- lexstream.
- -- Since the error is rethrown, these places are stacked.
- error (string.format ("%s\n - (l.%s, c.%s, k.%s) in parser %s",
- ast :strmatch "gg.lua:%d+: (.*)" or ast,
- li[1], li[2], li[3], parser.name or parser.kind))
- end
- end
-end
-
--------------------------------------------------------------------------------
--- Turn a table into a parser, mainly by setting the metatable.
--------------------------------------------------------------------------------
-function make_parser(kind, p)
- p.kind = kind
- if not p.transformers then p.transformers = { } end
- function p.transformers:add (x)
- table.insert (self, x)
- end
- setmetatable (p, parser_metatable)
- return p
-end
-
--------------------------------------------------------------------------------
--- Return true iff [x] is a parser.
--- If it's a gg-generated parser, return the name of its kind.
--------------------------------------------------------------------------------
-function is_parser (x)
- return type(x)=="function" or getmetatable(x)==parser_metatable and x.kind
-end
-
--------------------------------------------------------------------------------
--- Parse a sequence, without applying builder nor transformers
--------------------------------------------------------------------------------
-local function raw_parse_sequence (lx, p)
- local r = { }
- for i=1, #p do
- e=p[i]
- if type(e) == "string" then
- if not lx:is_keyword (lx:next(), e) then
- parse_error (lx, "A keyword was expected, probably `%s'.", e) end
- elseif is_parser (e) then
- table.insert (r, e (lx))
- else
- gg.parse_error (lx,"Sequence `%s': element #%i is neither a string "..
- "nor a parser: %s",
- p.name, i, table.tostring(e))
- end
- end
- return r
-end
-
--------------------------------------------------------------------------------
--- Parse a multisequence, without applying multisequence transformers.
--- The sequences are completely parsed.
--------------------------------------------------------------------------------
-local function raw_parse_multisequence (lx, sequence_table, default)
- local seq_parser = sequence_table[lx:is_keyword(lx:peek())]
- if seq_parser then return seq_parser (lx)
- elseif default then return default (lx)
- else return false end
-end
-
--------------------------------------------------------------------------------
--- Applies all transformers listed in parser on ast.
--------------------------------------------------------------------------------
-local function transform (ast, parser, fli, lli)
- if parser.transformers then
- for _, t in ipairs (parser.transformers) do ast = t(ast) or ast end
- end
- if type(ast) == 'table'then
- local ali = ast.lineinfo
- if not ali or ali.first~=fli or ali.last~=lli then
- ast.lineinfo = { first = fli, last = lli }
- end
- end
- return ast
-end
-
--------------------------------------------------------------------------------
--- Generate a tracable parsing error (not implemented yet)
--------------------------------------------------------------------------------
-function parse_error(lx, fmt, ...)
- local li = lx:lineinfo_left() or {-1,-1,-1, "<unknown file>"}
- local msg = string.format("line %i, char %i: "..fmt, li[1], li[2], ...)
- local src = lx.src
- if li[3]>0 and src then
- local i, j = li[3], li[3]
- while src:sub(i,i) ~= '\n' and i>=0 do i=i-1 end
- while src:sub(j,j) ~= '\n' and j<=#src do j=j+1 end
- local srcline = src:sub (i+1, j-1)
- local idx = string.rep (" ", li[2]).."^"
- msg = string.format("%s\n>>> %s\n>>> %s", msg, srcline, idx)
- end
- error(msg)
-end
-
--------------------------------------------------------------------------------
---
--- Sequence parser generator
---
--------------------------------------------------------------------------------
--- Input fields:
---
--- * [builder]: how to build an AST out of sequence parts. let [x] be the list
--- of subparser results (keywords are simply omitted). [builder] can be:
--- - [nil], in which case the result of parsing is simply [x]
--- - a string, which is then put as a tag on [x]
--- - a function, which takes [x] as a parameter and returns an AST.
---
--- * [name]: the name of the parser. Used for debug messages
---
--- * [transformers]: a list of AST->AST functions, applied in order on ASTs
--- returned by the parser.
---
--- * Table-part entries corresponds to keywords (strings) and subparsers
--- (function and callable objects).
---
--- After creation, the following fields are added:
--- * [parse] the parsing function lexer->AST
--- * [kind] == "sequence"
--- * [name] is set, if it wasn't in the input.
---
--------------------------------------------------------------------------------
-function sequence (p)
- make_parser ("sequence", p)
-
- -------------------------------------------------------------------
- -- Parsing method
- -------------------------------------------------------------------
- function p:parse (lx)
- -- Raw parsing:
- local fli = lx:lineinfo_right()
- local seq = raw_parse_sequence (lx, self)
- local lli = lx:lineinfo_left()
-
- -- Builder application:
- local builder, tb = self.builder, type (self.builder)
- if tb == "string" then seq.tag = builder
- elseif tb == "function" or builder and builder.__call then seq = builder(seq)
- elseif builder == nil then -- nothing
- else error ("Invalid builder of type "..tb.." in sequence") end
- seq = transform (seq, self, fli, lli)
- assert (not seq or seq.lineinfo)
- return seq
- end
-
- -------------------------------------------------------------------
- -- Construction
- -------------------------------------------------------------------
- -- Try to build a proper name
- if p.name then
- -- don't touch existing name
- elseif type(p[1])=="string" then -- find name based on 1st keyword
- if #p==1 then p.name=p[1]
- elseif type(p[#p])=="string" then
- p.name = p[1] .. " ... " .. p[#p]
- else p.name = p[1] .. " ..." end
- else -- can't find a decent name
- p.name = "<anonymous>"
- end
-
- return p
-end --</sequence>
-
-
--------------------------------------------------------------------------------
---
--- Multiple, keyword-driven, sequence parser generator
---
--------------------------------------------------------------------------------
--- in [p], useful fields are:
---
--- * [transformers]: as usual
---
--- * [name]: as usual
---
--- * Table-part entries must be sequence parsers, or tables which can
--- be turned into a sequence parser by [gg.sequence]. These
--- sequences must start with a keyword, and this initial keyword
--- must be different for each sequence. The table-part entries will
--- be removed after [gg.multisequence] returns.
---
--- * [default]: the parser to run if the next keyword in the lexer is
--- none of the registered initial keywords. If there's no default
--- parser and no suitable initial keyword, the multisequence parser
--- simply returns [false].
---
--- After creation, the following fields are added:
---
--- * [parse] the parsing function lexer->AST
---
--- * [sequences] the table of sequences, indexed by initial keywords.
---
--- * [add] method takes a sequence parser or a config table for
--- [gg.sequence], and adds/replaces the corresponding sequence
--- parser. If the keyword was already used, the former sequence is
--- removed and a warning is issued.
---
--- * [get] method returns a sequence by its initial keyword
---
--- * [kind] == "multisequence"
---
--------------------------------------------------------------------------------
-function multisequence (p)
- make_parser ("multisequence", p)
-
- -------------------------------------------------------------------
- -- Add a sequence (might be just a config table for [gg.sequence])
- -------------------------------------------------------------------
- function p:add (s)
- -- compile if necessary:
- local keyword = type(s)=='table' and s[1]
- if type(s)=='table' and not is_parser(s) then sequence(s) end
- if is_parser(s)~='sequence' or type(keyword)~='string' then
- if self.default then -- two defaults
- error ("In a multisequence parser, all but one sequences "..
- "must start with a keyword")
- else self.default = s end -- first default
- elseif self.sequences[keyword] then -- duplicate keyword
- eprintf (" *** Warning: keyword %q overloaded in multisequence ***",
- keyword)
- self.sequences[keyword] = s
- else -- newly caught keyword
- self.sequences[keyword] = s
- end
- end -- </multisequence.add>
-
- -------------------------------------------------------------------
- -- Get the sequence starting with this keyword. [kw :: string]
- -------------------------------------------------------------------
- function p:get (kw) return self.sequences [kw] end
-
- -------------------------------------------------------------------
- -- Remove the sequence starting with keyword [kw :: string]
- -------------------------------------------------------------------
- function p:del (kw)
- if not self.sequences[kw] then
- eprintf("*** Warning: trying to delete sequence starting "..
- "with %q from a multisequence having no such "..
- "entry ***", kw) end
- local removed = self.sequences[kw]
- self.sequences[kw] = nil
- return removed
- end
-
- -------------------------------------------------------------------
- -- Parsing method
- -------------------------------------------------------------------
- function p:parse (lx)
- local fli = lx:lineinfo_right()
- local x = raw_parse_multisequence (lx, self.sequences, self.default)
- local lli = lx:lineinfo_left()
- return transform (x, self, fli, lli)
- end
-
- -------------------------------------------------------------------
- -- Construction
- -------------------------------------------------------------------
- -- Register the sequences passed to the constructor. They're going
- -- from the array part of the parser to the hash part of field
- -- [sequences]
- p.sequences = { }
- for i=1, #p do p:add (p[i]); p[i] = nil end
-
- -- FIXME: why is this commented out?
- --if p.default and not is_parser(p.default) then sequence(p.default) end
- return p
-end --</multisequence>
-
-
--------------------------------------------------------------------------------
---
--- Expression parser generator
---
--------------------------------------------------------------------------------
---
--- Expression configuration relies on three tables: [prefix], [infix]
--- and [suffix]. Moreover, the primary parser can be replaced by a
--- table: in this case the [primary] table will be passed to
--- [gg.multisequence] to create a parser.
---
--- Each of these tables is a modified multisequence parser: the
--- differences with respect to regular multisequence config tables are:
---
--- * the builder takes specific parameters:
--- - for [prefix], it takes the result of the prefix sequence parser,
--- and the prefixed expression
--- - for [infix], it takes the left-hand-side expression, the results
--- of the infix sequence parser, and the right-hand-side expression.
--- - for [suffix], it takes the suffixed expression, and theresult
--- of the suffix sequence parser.
---
--- * the default field is a list, with parameters:
--- - [parser] the raw parsing function
--- - [transformers], as usual
--- - [prec], the operator's precedence
--- - [assoc] for [infix] table, the operator's associativity, which
--- can be "left", "right" or "flat" (default to left)
---
--- In [p], useful fields are:
--- * [transformers]: as usual
--- * [name]: as usual
--- * [primary]: the atomic expression parser, or a multisequence config
--- table (mandatory)
--- * [prefix]: prefix operators config table, see above.
--- * [infix]: infix operators config table, see above.
--- * [suffix]: suffix operators config table, see above.
---
--- After creation, these fields are added:
--- * [kind] == "expr"
--- * [parse] as usual
--- * each table is turned into a multisequence, and therefore has an
--- [add] method
---
--------------------------------------------------------------------------------
-function expr (p)
- make_parser ("expr", p)
-
- -------------------------------------------------------------------
- -- parser method.
- -- In addition to the lexer, it takes an optional precedence:
- -- it won't read expressions whose precedence is lower or equal
- -- to [prec].
- -------------------------------------------------------------------
- function p:parse (lx, prec)
- prec = prec or 0
-
- ------------------------------------------------------
- -- Extract the right parser and the corresponding
- -- options table, for (pre|in|suff)fix operators.
- -- Options include prec, assoc, transformers.
- ------------------------------------------------------
- local function get_parser_info (tab)
- local p2 = tab:get (lx:is_keyword (lx:peek()))
- if p2 then -- keyword-based sequence found
- local function parser(lx) return raw_parse_sequence(lx, p2) end
- return parser, p2
- else -- Got to use the default parser
- local d = tab.default
- if d then return d.parse or d.parser, d
- else return false, false end
- end
- end
-
- ------------------------------------------------------
- -- Look for a prefix sequence. Multiple prefixes are
- -- handled through the recursive [p.parse] call.
- -- Notice the double-transform: one for the primary
- -- expr, and one for the one with the prefix op.
- ------------------------------------------------------
- local function handle_prefix ()
- local fli = lx:lineinfo_right()
- local p2_func, p2 = get_parser_info (self.prefix)
- local op = p2_func and p2_func (lx)
- if op then -- Keyword-based sequence found
- local ili = lx:lineinfo_right() -- Intermediate LineInfo
- local e = p2.builder (op, self:parse (lx, p2.prec))
- local lli = lx:lineinfo_left()
- return transform (transform (e, p2, ili, lli), self, fli, lli)
- else -- No prefix found, get a primary expression
- local e = self.primary(lx)
- local lli = lx:lineinfo_left()
- return transform (e, self, fli, lli)
- end
- end --</expr.parse.handle_prefix>
-
- ------------------------------------------------------
- -- Look for an infix sequence+right-hand-side operand.
- -- Return the whole binary expression result,
- -- or false if no operator was found.
- ------------------------------------------------------
- local function handle_infix (e)
- local p2_func, p2 = get_parser_info (self.infix)
- if not p2 then return false end
-
- -----------------------------------------
- -- Handle flattening operators: gather all operands
- -- of the series in [list]; when a different operator
- -- is found, stop, build from [list], [transform] and
- -- return.
- -----------------------------------------
- if (not p2.prec or p2.prec>prec) and p2.assoc=="flat" then
- local fli = lx:lineinfo_right()
- local pflat, list = p2, { e }
- repeat
- local op = p2_func(lx)
- if not op then break end
- table.insert (list, self:parse (lx, p2.prec))
- local _ -- We only care about checking that p2==pflat
- _, p2 = get_parser_info (self.infix)
- until p2 ~= pflat
- local e2 = pflat.builder (list)
- local lli = lx:lineinfo_left()
- return transform (transform (e2, pflat, fli, lli), self, fli, lli)
-
- -----------------------------------------
- -- Handle regular infix operators: [e] the LHS is known,
- -- just gather the operator and [e2] the RHS.
- -- Result goes in [e3].
- -----------------------------------------
- elseif p2.prec and p2.prec>prec or
- p2.prec==prec and p2.assoc=="right" then
- local fli = e.lineinfo.first -- lx:lineinfo_right()
- local op = p2_func(lx)
- if not op then return false end
- local e2 = self:parse (lx, p2.prec)
- local e3 = p2.builder (e, op, e2)
- local lli = lx:lineinfo_left()
- return transform (transform (e3, p2, fli, lli), self, fli, lli)
-
- -----------------------------------------
- -- Check for non-associative operators, and complain if applicable.
- -----------------------------------------
- elseif p2.assoc=="none" and p2.prec==prec then
- parse_error (lx, "non-associative operator!")
-
- -----------------------------------------
- -- No infix operator suitable at that precedence
- -----------------------------------------
- else return false end
-
- end --</expr.parse.handle_infix>
-
- ------------------------------------------------------
- -- Look for a suffix sequence.
- -- Return the result of suffix operator on [e],
- -- or false if no operator was found.
- ------------------------------------------------------
- local function handle_suffix (e)
- -- FIXME bad fli, must take e.lineinfo.first
- local p2_func, p2 = get_parser_info (self.suffix)
- if not p2 then return false end
- if not p2.prec or p2.prec>=prec then
- --local fli = lx:lineinfo_right()
- local fli = e.lineinfo.first
- local op = p2_func(lx)
- if not op then return false end
- local lli = lx:lineinfo_left()
- e = p2.builder (e, op)
- e = transform (transform (e, p2, fli, lli), self, fli, lli)
- return e
- end
- return false
- end --</expr.parse.handle_suffix>
-
- ------------------------------------------------------
- -- Parser body: read suffix and (infix+operand)
- -- extensions as long as we're able to fetch more at
- -- this precedence level.
- ------------------------------------------------------
- local e = handle_prefix()
- repeat
- local x = handle_suffix (e); e = x or e
- local y = handle_infix (e); e = y or e
- until not (x or y)
-
- -- No transform: it already happened in operators handling
- return e
- end --</expr.parse>
-
- -------------------------------------------------------------------
- -- Construction
- -------------------------------------------------------------------
- if not p.primary then p.primary=p[1]; p[1]=nil end
- for _, t in ipairs{ "primary", "prefix", "infix", "suffix" } do
- if not p[t] then p[t] = { } end
- if not is_parser(p[t]) then multisequence(p[t]) end
- end
- function p:add(...) return self.primary:add(...) end
- return p
-end --</expr>
-
-
--------------------------------------------------------------------------------
---
--- List parser generator
---
--------------------------------------------------------------------------------
--- In [p], the following fields can be provided in input:
---
--- * [builder]: takes list of subparser results, returns AST
--- * [transformers]: as usual
--- * [name]: as usual
---
--- * [terminators]: list of strings representing the keywords which
--- might mark the end of the list. When non-empty, the list is
--- allowed to be empty. A string is treated as a single-element
--- table, whose element is that string, e.g. ["do"] is the same as
--- [{"do"}].
---
--- * [separators]: list of strings representing the keywords which can
--- separate elements of the list. When non-empty, one of these
--- keyword has to be found between each element. Lack of a separator
--- indicates the end of the list. A string is treated as a
--- single-element table, whose element is that string, e.g. ["do"]
--- is the same as [{"do"}]. If [terminators] is empty/nil, then
--- [separators] has to be non-empty.
---
--- After creation, the following fields are added:
--- * [parse] the parsing function lexer->AST
--- * [kind] == "list"
---
--------------------------------------------------------------------------------
-function list (p)
- make_parser ("list", p)
-
- -------------------------------------------------------------------
- -- Parsing method
- -------------------------------------------------------------------
- function p:parse (lx)
-
- ------------------------------------------------------
- -- Used to quickly check whether there's a terminator
- -- or a separator immediately ahead
- ------------------------------------------------------
- local function peek_is_in (keywords)
- return keywords and lx:is_keyword(lx:peek(), unpack(keywords)) end
-
- local x = { }
- local fli = lx:lineinfo_right()
-
- -- if there's a terminator to start with, don't bother trying
- if not peek_is_in (self.terminators) then
- repeat table.insert (x, self.primary (lx)) -- read one element
- until
- -- First reason to stop: There's a separator list specified,
- -- and next token isn't one. Otherwise, consume it with [lx:next()]
- self.separators and not(peek_is_in (self.separators) and lx:next()) or
- -- Other reason to stop: terminator token ahead
- peek_is_in (self.terminators) or
- -- Last reason: end of file reached
- lx:peek().tag=="Eof"
- end
-
- local lli = lx:lineinfo_left()
-
- -- Apply the builder. It can be a string, or a callable value,
- -- or simply nothing.
- local b = self.builder
- if b then
- if type(b)=="string" then x.tag = b -- b is a string, use it as a tag
- elseif type(b)=="function" then x=b(x)
- else
- local bmt = getmetatable(b)
- if bmt and bmt.__call then x=b(x) end
- end
- end
- return transform (x, self, fli, lli)
- end --</list.parse>
-
- -------------------------------------------------------------------
- -- Construction
- -------------------------------------------------------------------
- if not p.primary then p.primary = p[1]; p[1] = nil end
- if type(p.terminators) == "string" then p.terminators = { p.terminators }
- elseif p.terminators and #p.terminators == 0 then p.terminators = nil end
- if type(p.separators) == "string" then p.separators = { p.separators }
- elseif p.separators and #p.separators == 0 then p.separators = nil end
-
- return p
-end --</list>
-
-
--------------------------------------------------------------------------------
---
--- Keyword-conditionned parser generator
---
--------------------------------------------------------------------------------
---
--- Only apply a parser if a given keyword is found. The result of
--- [gg.onkeyword] parser is the result of the subparser (modulo
--- [transformers] applications).
---
--- lineinfo: the keyword is *not* included in the boundaries of the
--- resulting lineinfo. A review of all usages of gg.onkeyword() in the
--- implementation of metalua has shown that it was the appropriate choice
--- in every case.
---
--- Input fields:
---
--- * [name]: as usual
---
--- * [transformers]: as usual
---
--- * [peek]: if non-nil, the conditionning keyword is left in the lexeme
--- stream instead of being consumed.
---
--- * [primary]: the subparser.
---
--- * [keywords]: list of strings representing triggering keywords.
---
--- * Table-part entries can contain strings, and/or exactly one parser.
--- Strings are put in [keywords], and the parser is put in [primary].
---
--- After the call, the following fields will be set:
---
--- * [parse] the parsing method
--- * [kind] == "onkeyword"
--- * [primary]
--- * [keywords]
---
--------------------------------------------------------------------------------
-function onkeyword (p)
- make_parser ("onkeyword", p)
-
- -------------------------------------------------------------------
- -- Parsing method
- -------------------------------------------------------------------
- function p:parse(lx)
- if lx:is_keyword (lx:peek(), unpack(self.keywords)) then
- --local fli = lx:lineinfo_right()
- if not self.peek then lx:next() end
- local content = self.primary (lx)
- --local lli = lx:lineinfo_left()
- local fli, lli = content.lineinfo.first, content.lineinfo.last
- return transform (content, p, fli, lli)
- else return false end
- end
-
- -------------------------------------------------------------------
- -- Construction
- -------------------------------------------------------------------
- if not p.keywords then p.keywords = { } end
- for _, x in ipairs(p) do
- if type(x)=="string" then table.insert (p.keywords, x)
- else assert (not p.primary and is_parser (x)); p.primary = x end
- end
- if not next (p.keywords) then
- eprintf("Warning, no keyword to trigger gg.onkeyword") end
- assert (p.primary, 'no primary parser in gg.onkeyword')
- return p
-end --</onkeyword>
-
-
--------------------------------------------------------------------------------
---
--- Optional keyword consummer pseudo-parser generator
---
--------------------------------------------------------------------------------
---
--- This doesn't return a real parser, just a function. That function parses
--- one of the keywords passed as parameters, and returns it. It returns
--- [false] if no matching keyword is found.
---
--- Notice that tokens returned by lexer already carry lineinfo, therefore
--- there's no need to add them, as done usually through transform() calls.
--------------------------------------------------------------------------------
-function optkeyword (...)
- local args = {...}
- if type (args[1]) == "table" then
- assert (#args == 1)
- args = args[1]
- end
- for _, v in ipairs(args) do assert (type(v)=="string") end
- return function (lx)
- local x = lx:is_keyword (lx:peek(), unpack (args))
- if x then lx:next(); return x
- else return false end
- end
-end
-
-
--------------------------------------------------------------------------------
---
--- Run a parser with a special lexer
---
--------------------------------------------------------------------------------
---
--- This doesn't return a real parser, just a function.
--- First argument is the lexer class to be used with the parser,
--- 2nd is the parser itself.
--- The resulting parser returns whatever the argument parser does.
---
--------------------------------------------------------------------------------
-function with_lexer(new_lexer, parser)
-
- -------------------------------------------------------------------
- -- Most gg functions take their parameters in a table, so it's
- -- better to silently accept when with_lexer{ } is called with
- -- its arguments in a list:
- -------------------------------------------------------------------
- if not parser and #new_lexer==2 and type(new_lexer[1])=='table' then
- return with_lexer(unpack(new_lexer))
- end
-
- -------------------------------------------------------------------
- -- Save the current lexer, switch it for the new one, run the parser,
- -- restore the previous lexer, even if the parser caused an error.
- -------------------------------------------------------------------
- return function (lx)
- local old_lexer = getmetatable(lx)
- lx:sync()
- setmetatable(lx, new_lexer)
- local status, result = pcall(parser, lx)
- lx:sync()
- setmetatable(lx, old_lexer)
- if status then return result else error(result) end
- end
-end
+++ /dev/null
-----------------------------------------------------------------------
---
--- WARNING! You're entering a hackish area, proceed at your own risks!
---
--- This code results from the borrowing, then ruthless abuse, of
--- Yueliang's implementation of Lua 5.0 compiler. I claim
--- responsibility for all of the ugly, dirty stuff that you might spot
--- in it.
---
--- Eventually, this code will be rewritten, either in Lua or more
--- probably in C. Meanwhile, if you're interested into digging
--- metalua's sources, this is not the best part to invest your time
--- on.
---
--- End of warning.
---
-----------------------------------------------------------------------
-
---[[--------------------------------------------------------------------
-
- $Id$
-
- lcode.lua
- Lua 5 code generator in Lua
- This file is part of Yueliang.
-
- Copyright (c) 2005 Kein-Hong Man <khman@users.sf.net>
- The COPYRIGHT file describes the conditions
- under which this software may be distributed.
-
- See the ChangeLog for more information.
-
-------------------------------------------------------------------------
-
- [FF] Slightly modified, mainly to produce Lua 5.1 bytecode.
-
-----------------------------------------------------------------------]]
-
---[[--------------------------------------------------------------------
--- Notes:
--- * one function manipulate a pointer argument with a simple data type
--- (can't be emulated by a table, ambiguous), now returns that value:
--- luaK:concat(fs, l1, l2)
--- * some function parameters changed to boolean, additional code
--- translates boolean back to 1/0 for instruction fields
--- * Added:
--- luaK:ttisnumber(o) (from lobject.h)
--- luaK:nvalue(o) (from lobject.h)
--- luaK:setnilvalue(o) (from lobject.h)
--- luaK:setsvalue(o) (from lobject.h)
--- luaK:setnvalue(o) (from lobject.h)
--- luaK:sethvalue(o) (from lobject.h)
-----------------------------------------------------------------------]]
-
-module("bytecode", package.seeall)
-
-local function debugf() end
-
-luaK = {}
-
-luaK.MAXSTACK = 250 -- (llimits.h, used in lcode.lua)
-luaK.LUA_MULTRET = -1 -- (lua.h)
-
-------------------------------------------------------------------------
--- Marks the end of a patch list. It is an invalid value both as an absolute
--- address, and as a list link (would link an element to itself).
-------------------------------------------------------------------------
-luaK.NO_JUMP = -1
-
---FF 5.1
-function luaK:isnumeral(e)
- return e.k=="VKNUM" and e.t==self.NO_JUMP and e.t==self.NO_JUMP
-end
-
-------------------------------------------------------------------------
--- emulation of TObject macros (these are from lobject.h)
--- * TObject is a table since lcode passes references around
--- * tt member field removed, using Lua's type() instead
-------------------------------------------------------------------------
-function luaK:ttisnumber(o)
- if o then return type(o.value) == "number" else return false end
-end
-function luaK:nvalue(o) return o.value end
-function luaK:setnilvalue(o) o.value = nil end
-function luaK:setsvalue(o, s) o.value = s end
-luaK.setnvalue = luaK.setsvalue
-luaK.sethvalue = luaK.setsvalue
-
-------------------------------------------------------------------------
--- returns the instruction object for given e (expdesc)
-------------------------------------------------------------------------
-function luaK:getcode(fs, e)
- return fs.f.code[e.info]
-end
-
-------------------------------------------------------------------------
--- codes an instruction with a signed Bx (sBx) field
-------------------------------------------------------------------------
-function luaK:codeAsBx(fs, o, A, sBx)
- return self:codeABx(fs, o, A, sBx + luaP.MAXARG_sBx)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:hasjumps(e)
- return e.t ~= e.f
-end
-
-------------------------------------------------------------------------
--- FF updated 5.1
-------------------------------------------------------------------------
-function luaK:_nil(fs, from, n)
- if fs.pc > fs.lasttarget then -- no jumps to current position?
- if fs.pc == 0 then return end --function start, positions are already clean
- local previous = fs.f.code[fs.pc - 1]
- if luaP:GET_OPCODE(previous) == "OP_LOADNIL" then
- local pfrom = luaP:GETARG_A(previous)
- local pto = luaP:GETARG_B(previous)
- if pfrom <= from and from <= pto + 1 then -- can connect both?
- if from + n - 1 > pto then
- luaP:SETARG_B(previous, from + n - 1)
- end
- return
- end
- end
- end
- self:codeABC(fs, "OP_LOADNIL", from, from + n - 1, 0) -- else no optimization
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:jump(fs)
- local jpc = fs.jpc -- save list of jumps to here
- fs.jpc = self.NO_JUMP
- local j = self:codeAsBx(fs, "OP_JMP", 0, self.NO_JUMP)
- return self:concat(fs, j, jpc) -- keep them on hold
-end
-
---FF 5.1
-function luaK:ret (fs, first, nret)
- luaK:codeABC (fs, "OP_RETURN", first, nret+1, 0)
-end
-
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:condjump(fs, op, A, B, C)
- self:codeABC(fs, op, A, B, C)
- return self:jump(fs)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:fixjump(fs, pc, dest)
- local jmp = fs.f.code[pc]
- local offset = dest - (pc + 1)
- assert(dest ~= self.NO_JUMP)
- if math.abs(offset) > luaP.MAXARG_sBx then
- luaX:syntaxerror(fs.ls, "control structure too long")
- end
- luaP:SETARG_sBx(jmp, offset)
-end
-
-------------------------------------------------------------------------
--- returns current 'pc' and marks it as a jump target (to avoid wrong
--- optimizations with consecutive instructions not in the same basic block).
-------------------------------------------------------------------------
-function luaK:getlabel(fs)
- fs.lasttarget = fs.pc
- return fs.pc
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:getjump(fs, pc)
- local offset = luaP:GETARG_sBx(fs.f.code[pc])
- if offset == self.NO_JUMP then -- point to itself represents end of list
- return self.NO_JUMP -- end of list
- else
- return (pc + 1) + offset -- turn offset into absolute position
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:getjumpcontrol(fs, pc)
- local pi = fs.f.code[pc]
- local ppi = fs.f.code[pc - 1]
- if pc >= 1 and luaP:testOpMode(luaP:GET_OPCODE(ppi), "OpModeT") then
- return ppi
- else
- return pi
- end
-end
-
-------------------------------------------------------------------------
--- check whether list has any jump that do not produce a value
--- (or produce an inverted value)
-------------------------------------------------------------------------
---FF updated 5.1
-function luaK:need_value(fs, list, cond)
- while list ~= self.NO_JUMP do
- local i = self:getjumpcontrol(fs, list)
- if luaP:GET_OPCODE(i) ~= "OP_TESTSET" or
- luaP:GETARG_A(i) ~= luaP.NO_REG or
- luaP:GETARG_C(i) ~= cond then
- return true
- end
- list = self:getjump(fs, list)
- end
- return false -- not found
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
---FF updated 5.1
-function luaK:patchtestreg(fs, node, reg)
- assert(reg) -- pour assurer, vu que j'ai ajoute un parametre p/r a 5.0
- local i = self:getjumpcontrol(fs, node)
- if luaP:GET_OPCODE(i) ~= "OP_TESTSET" then
- return false end -- cannot patch other instructions
- if reg ~= luaP.NO_REG and reg ~= luaP:GETARG_B(i) then
- luaP:SETARG_A(i, reg)
- else
- -- no register to put value or register already has the value
- luaP:SET_OPCODE(i, "OP_TEST")
- luaP:SETARG_A(i, luaP:GETARG_B(i))
- luaP:SETARG_B(i, 0)
- luaP:SETARG_C(i, luaP:GETARG_C(i))
- end
- return true
-end
-
---FF added 5.1
-function luaK:removevalues (fs, list)
- while list ~= self.NO_JUMP do
- self:patchtestreg (fs, list, luaP.NO_REG)
- list = self:getjump (fs, list)
- end
-end
-
-------------------------------------------------------------------------
--- FF updated 5.1
-------------------------------------------------------------------------
-function luaK:patchlistaux(fs, list, vtarget, reg, dtarget)
- while list ~= self.NO_JUMP do
- local _next = self:getjump(fs, list)
- if self:patchtestreg (fs, list, reg) then
- self:fixjump(fs, list, vtarget)
- else
- self:fixjump (fs, list, dtarget)
- end
- list = _next
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:dischargejpc(fs)
- self:patchlistaux(fs, fs.jpc, fs.pc, luaP.NO_REG, fs.pc)
- fs.jpc = self.NO_JUMP
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:patchlist(fs, list, target)
- if target == fs.pc then
- self:patchtohere(fs, list)
- else
- assert(target < fs.pc)
- self:patchlistaux(fs, list, target, luaP.NO_REG, target)
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:patchtohere(fs, list)
- self:getlabel(fs)
- fs.jpc = self:concat(fs, fs.jpc, list)
-end
-
-------------------------------------------------------------------------
--- * l1 was a pointer, now l1 is returned and callee assigns the value
-------------------------------------------------------------------------
-function luaK:concat(fs, l1, l2)
- if l2 == self.NO_JUMP then return l1 -- unchanged
- elseif l1 == self.NO_JUMP then
- return l2 -- changed
- else
- local list = l1
- local _next = self:getjump(fs, list)
- while _next ~= self.NO_JUMP do -- find last element
- list = _next
- _next = self:getjump(fs, list)
- end
- self:fixjump(fs, list, l2)
- end
- return l1 -- unchanged
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:checkstack(fs, n)
- local newstack = fs.freereg + n
- if newstack > fs.f.maxstacksize then
- if newstack >= luaK.MAXSTACK then
- luaX:syntaxerror(fs.ls, "function or expression too complex")
- end
- fs.f.maxstacksize = newstack
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:reserveregs(fs, n)
- self:checkstack(fs, n)
- fs.freereg = fs.freereg + n
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:freereg(fs, reg)
- if not luaP:ISK (reg) and reg >= fs.nactvar then
- fs.freereg = fs.freereg - 1
- assert(reg == fs.freereg,
- string.format("reg=%i, fs.freereg=%i", reg, fs.freereg))
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:freeexp(fs, e)
- if e.k == "VNONRELOC" then
- self:freereg(fs, e.info)
- end
-end
-
-------------------------------------------------------------------------
--- k is a constant, v is... what?
--- fs.h is a hash value --> index in f.k
-------------------------------------------------------------------------
--- * luaH_get, luaH_set deleted; direct table access used instead
--- * luaO_rawequalObj deleted in first assert
--- * setobj2n deleted in assignment of v to f.k table
-------------------------------------------------------------------------
---FF radically updated, not completely understood
-function luaK:addk(fs, k, v)
- local idx = fs.h[k.value]
- local f = fs.f
--- local oldsize = f.sizek
- if self:ttisnumber (idx) then
- --TODO this assert currently FAILS
- --assert(fs.f.k[self:nvalue(idx)] == v)
- return self:nvalue(idx)
- else -- constant not found; create a new entry
- do
- local t = type (v.value)
- assert(t=="nil" or t=="string" or t=="number" or t=="boolean")
- end
- --debugf("[const: k[%i] = %s ]", fs.nk, tostringv(v.value))
- fs.f.k[fs.nk] = v
- fs.h[k.value] = { }
- self:setnvalue(fs.h[k.value], fs.nk)
- local nk = fs.nk
- fs.nk = fs.nk+1
- return nk
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:stringK(fs, s)
- assert (type(s)=="string")
- local o = {} -- TObject
- self:setsvalue(o, s)
- return self:addk(fs, o, o)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:numberK(fs, r)
- assert (type(r)=="number")
- local o = {} -- TObject
- self:setnvalue(o, r)
- return self:addk(fs, o, o)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:boolK(fs, r)
- assert (type(r)=="boolean")
- local o = {} -- TObject
- self:setnvalue(o, r)
- return self:addk(fs, o, o)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:nilK(fs)
- local k, v = {}, {} -- TObject
- self:setnilvalue(v)
- self:sethvalue(k, fs.h) -- cannot use nil as key; instead use table itself
- return self:addk(fs, k, v)
-end
-
-
---FF 5.1
-function luaK:setreturns (fs, e, nresults)
- if e.k == "VCALL" then -- expression is an open function call?
- luaP:SETARG_C(self:getcode(fs, e), nresults + 1)
- elseif e.k == "VVARARG" then
- luaP:SETARG_B (self:getcode (fs, e), nresults + 1)
- luaP:SETARG_A (self:getcode (fs, e), fs.freereg)
- self:reserveregs (fs, 1)
- end
-end
-
---FF 5.1
-function luaK:setmultret (fs, e)
- self:setreturns (fs, e, self.LUA_MULTRET)
-end
-
---FF 5.1
-function luaK:setoneret (fs, e)
- if e.k == "VCALL" then -- expression is an open function call?
- e.k = "VNONRELOC"
- e.info = luaP:GETARG_A(self:getcode(fs, e))
- elseif e.k == "VVARARG" then
- luaP:SETARG_B (self:getcode (fs, e), 2)
- e.k = "VRELOCABLE"
- end
-end
-
-
-------------------------------------------------------------------------
---FF deprecated in 5.1
-------------------------------------------------------------------------
-function luaK:setcallreturns(fs, e, nresults)
- assert (false, "setcallreturns deprecated")
- --print "SCR:"
- --printv(e)
- --printv(self:getcode(fs, e))
- if e.k == "VCALL" then -- expression is an open function call?
- luaP:SETARG_C(self:getcode(fs, e), nresults + 1)
- if nresults == 1 then -- 'regular' expression?
- e.k = "VNONRELOC"
- e.info = luaP:GETARG_A(self:getcode(fs, e))
- end
- elseif e.k == "VVARARG" then
- --printf("Handle vararg return on expr %s, whose code is %s",
- -- tostringv(e), tostringv(self:getcode(fs, e)))
- if nresults == 1 then
- luaP:SETARG_B (self:getcode (fs, e), 2)
- e.k = "VRELOCABLE"
---FIXME: why no SETARG_A???
- else
- luaP:SETARG_B (self:getcode (fs, e), nresults + 1)
- luaP:SETARG_A (self:getcode (fs, e), fs.freereg)
- self:reserveregs (fs, 1)
- --printf("Now code is %s", tostringv(self:getcode(fs, e)))
- end
- end
-end
-
-------------------------------------------------------------------------
--- Ajoute le code pour effectuer l'extraction de la locvar/upval/globvar
--- /idx, sachant
-------------------------------------------------------------------------
-function luaK:dischargevars(fs, e)
---printf("\ndischargevars\n")
- local k = e.k
- if k == "VLOCAL" then
- e.k = "VNONRELOC"
- elseif k == "VUPVAL" then
- e.info = self:codeABC(fs, "OP_GETUPVAL", 0, e.info, 0)
- e.k = "VRELOCABLE"
- elseif k == "VGLOBAL" then
- e.info = self:codeABx(fs, "OP_GETGLOBAL", 0, e.info)
- e.k = "VRELOCABLE"
- elseif k == "VINDEXED" then
- self:freereg(fs, e.aux)
- self:freereg(fs, e.info)
- e.info = self:codeABC(fs, "OP_GETTABLE", 0, e.info, e.aux)
- e.k = "VRELOCABLE"
- elseif k == "VCALL" or k == "VVARARG" then
- self:setoneret(fs, e)
- else
- -- there is one value available (somewhere)
- end
---printf("\n/dischargevars\n")
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:code_label(fs, A, b, jump)
- self:getlabel(fs) -- those instructions may be jump targets
- return self:codeABC(fs, "OP_LOADBOOL", A, b, jump)
-end
-
-------------------------------------------------------------------------
--- FF updated 5.1
-------------------------------------------------------------------------
-function luaK:discharge2reg(fs, e, reg)
- self:dischargevars(fs, e)
- local k = e.k
- if k == "VNIL" then
- self:_nil(fs, reg, 1)
- elseif k == "VFALSE" or k == "VTRUE" then
- self:codeABC(fs, "OP_LOADBOOL", reg, (e.k == "VTRUE") and 1 or 0, 0)
- elseif k == "VKNUM" then
- self:codeABx (fs, "OP_LOADK", reg, self:numberK(fs, e.nval))
- elseif k == "VK" then
- self:codeABx(fs, "OP_LOADK", reg, e.info)
- elseif k == "VRELOCABLE" then
- local pc = self:getcode(fs, e)
- luaP:SETARG_A(pc, reg)
- elseif k == "VNONRELOC" then
- if reg ~= e.info then
- self:codeABC(fs, "OP_MOVE", reg, e.info, 0)
- end
- else
- assert(e.k == "VVOID" or e.k == "VJMP")
- return -- nothing to do...
- end
- e.info = reg
- e.k = "VNONRELOC"
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:discharge2anyreg(fs, e)
- if e.k ~= "VNONRELOC" then
- self:reserveregs(fs, 1)
- self:discharge2reg(fs, e, fs.freereg - 1)
- end
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:exp2reg(fs, e, reg)
- self:discharge2reg(fs, e, reg)
- if e.k == "VJMP" then
- e.t = self:concat(fs, e.t, e.info) -- put this jump in 't' list
- end
- if self:hasjumps(e) then
- local final -- position after whole expression
- local p_f = self.NO_JUMP -- position of an eventual LOAD false
- local p_t = self.NO_JUMP -- position of an eventual LOAD true
- if self:need_value(fs, e.t, 1) or self:need_value(fs, e.f, 0) then
- local fj = self.NO_JUMP -- first jump (over LOAD ops.)
- if e.k ~= "VJMP" then fj = self:jump(fs) end
- p_f = self:code_label(fs, reg, 0, 1)
- p_t = self:code_label(fs, reg, 1, 0)
- self:patchtohere(fs, fj)
- end
- final = self:getlabel(fs)
- self:patchlistaux(fs, e.f, final, reg, p_f)
- self:patchlistaux(fs, e.t, final, reg, p_t)
- end
- e.f, e.t = self.NO_JUMP, self.NO_JUMP
- e.info = reg
- e.k = "VNONRELOC"
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:exp2nextreg(fs, e)
- self:dischargevars(fs, e)
- --[FF] Allready in place (added for expr.Stat)
- if e.k == "VNONRELOC" and e.info == fs.freereg then
- --printf("Expression already in next reg %i: %s", fs.freereg, tostringv(e))
- return end
- self:freeexp(fs, e)
- self:reserveregs(fs, 1)
- self:exp2reg(fs, e, fs.freereg - 1)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:exp2anyreg(fs, e)
- --printf("exp2anyregs(e=%s)", tostringv(e))
- self:dischargevars(fs, e)
- if e.k == "VNONRELOC" then
- if not self:hasjumps(e) then -- exp is already in a register
- return e.info
- end
- if e.info >= fs.nactvar then -- reg. is not a local?
- self:exp2reg(fs, e, e.info) -- put value on it
- return e.info
- end
- end
- self:exp2nextreg(fs, e) -- default
- return e.info
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:exp2val(fs, e)
- if self:hasjumps(e) then
- self:exp2anyreg(fs, e)
- else
- self:dischargevars(fs, e)
- end
-end
-
-------------------------------------------------------------------------
--- FF updated 5.1
-------------------------------------------------------------------------
-function luaK:exp2RK(fs, e)
- self:exp2val(fs, e)
- local k = e.k
- if k=="VNIL" or k=="VTRUE" or k=="VFALSE" or k=="VKNUM" then
- if fs.nk <= luaP.MAXINDEXRK then
- if k=="VNIL" then e.info = self:nilK(fs)
- elseif k=="VKNUM" then e.info = self:numberK (fs, e.nval)
- else e.info = self:boolK(fs, e.k=="VTRUE") end
- e.k = "VK"
- return luaP:RKASK(e.info)
- end
- elseif k == "VK" then
- if e.info <= luaP.MAXINDEXRK then -- constant fit in argC?
- return luaP:RKASK (e.info)
- end
- end
- -- not a constant in the right range: put it in a register
- return self:exp2anyreg(fs, e)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:storevar(fs, var, exp)
- --print("STOREVAR")
- --printf("var=%s", tostringv(var))
- --printf("exp=%s", tostringv(exp))
-
- local k = var.k
- if k == "VLOCAL" then
- self:freeexp(fs, exp)
- self:exp2reg(fs, exp, var.info)
- return
- elseif k == "VUPVAL" then
- local e = self:exp2anyreg(fs, exp)
- self:codeABC(fs, "OP_SETUPVAL", e, var.info, 0)
- elseif k == "VGLOBAL" then
- --printf("store global, exp=%s", tostringv(exp))
- local e = self:exp2anyreg(fs, exp)
- self:codeABx(fs, "OP_SETGLOBAL", e, var.info)
- elseif k == "VINDEXED" then
- local e = self:exp2RK(fs, exp)
- self:codeABC(fs, "OP_SETTABLE", var.info, var.aux, e)
- else
- assert(0) -- invalid var kind to store
- end
- self:freeexp(fs, exp)
- --print("/STOREVAR")
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:_self(fs, e, key)
- self:exp2anyreg(fs, e)
- self:freeexp(fs, e)
- local func = fs.freereg
- self:reserveregs(fs, 2)
- self:codeABC(fs, "OP_SELF", func, e.info, self:exp2RK(fs, key))
- self:freeexp(fs, key)
- e.info = func
- e.k = "VNONRELOC"
-end
-
-------------------------------------------------------------------------
--- FF updated 5.1
-------------------------------------------------------------------------
-function luaK:invertjump(fs, e)
- --printf("invertjump on jump instruction #%i", e.info)
- --printv(self:getcode(fs, e))
- local pc = self:getjumpcontrol(fs, e.info)
- assert(luaP:testOpMode(luaP:GET_OPCODE(pc), "OpModeT") and
- luaP:GET_OPCODE(pc) ~= "OP_TESTSET" and
- luaP:GET_OPCODE(pc) ~= "OP_TEST")
- --printf("Before invert:")
- --printv(pc)
- luaP:SETARG_A(pc, (luaP:GETARG_A(pc) == 0) and 1 or 0)
- --printf("After invert:")
- --printv(pc)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:jumponcond(fs, e, cond)
- if e.k == "VRELOCABLE" then
- local ie = self:getcode(fs, e)
- if luaP:GET_OPCODE(ie) == "OP_NOT" then
- fs.pc = fs.pc - 1 -- remove previous OP_NOT
- return self:condjump(fs, "OP_TEST", luaP:GETARG_B(ie), 0,
- cond and 0 or 1)
- end
- -- else go through
- end
- self:discharge2anyreg(fs, e)
- self:freeexp(fs, e)
- return self:condjump(fs, "OP_TESTSET", luaP.NO_REG, e.info, cond and 1 or 0)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:goiftrue(fs, e)
- local pc -- pc of last jump
- self:dischargevars(fs, e)
- local k = e.k
- if k == "VK" or k == "VTRUE" or k == "VKNUM" then
- pc = self.NO_JUMP -- always true; do nothing
- elseif k == "VFALSE" then
- pc = self:jump(fs) -- always jump
- elseif k == "VJMP" then
- self:invertjump(fs, e)
- pc = e.info
- else
- pc = self:jumponcond(fs, e, false)
- end
- e.f = self:concat(fs, e.f, pc) -- insert last jump in 'f' list
- self:patchtohere(fs, e.t)
- e.t = self.NO_JUMP
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:goiffalse(fs, e)
- local pc -- pc of last jump
- self:dischargevars(fs, e)
- local k = e.k
- if k == "VNIL" or k == "VFALSE"then
- pc = self.NO_JUMP -- always false; do nothing
- elseif k == "VTRUE" then
- pc = self:jump(fs) -- always jump
- elseif k == "VJMP" then
- pc = e.info
- else
- pc = self:jumponcond(fs, e, true)
- end
- e.t = self:concat(fs, e.t, pc) -- insert last jump in 't' list
- self:patchtohere(fs, e.f)
- e.f = self.NO_JUMP
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:codenot(fs, e)
- self:dischargevars(fs, e)
- local k = e.k
- if k == "VNIL" or k == "VFALSE" then
- e.k = "VTRUE"
- elseif k == "VK" or k == "VKNUM" or k == "VTRUE" then
- e.k = "VFALSE"
- elseif k == "VJMP" then
- self:invertjump(fs, e)
- elseif k == "VRELOCABLE" or k == "VNONRELOC" then
- self:discharge2anyreg(fs, e)
- self:freeexp(fs, e)
- e.info = self:codeABC(fs, "OP_NOT", 0, e.info, 0)
- e.k = "VRELOCABLE"
- else
- assert(0) -- cannot happen
- end
- -- interchange true and false lists
- e.f, e.t = e.t, e.f
- self:removevalues(fs, e.f)
- self:removevalues(fs, e.t)
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-function luaK:indexed(fs, t, k)
- t.aux = self:exp2RK(fs, k)
- t.k = "VINDEXED"
-end
-
---FF 5.1
-function luaK:constfolding (op, e1, e2)
- if not self:isnumeral(e1) or not self:isnumeral(e2) then return false end
- local v1, v2, e, r = e1.nval, e2 and e2.nval, nil
- if op == "OP_ADD" then r = v1+v2
- elseif op == "OP_SUB" then r = v1-v2
- elseif op == "OP_MUL" then r = v1*v2
- elseif op == "OP_DIV" then if v2==0 then return false end r = v1/v2
- elseif op == "OP_MOD" then if v2==0 then return false end r = v1%v2
- elseif op == "OP_POW" then r = v1^v2
- elseif op == "OP_UNM" then r = -v1
- elseif op == "OP_LEN" then return false
- else assert (false, "Unknown numeric value") end
- e1.nval = r
- return true
-end
-
---FF 5.1
-function luaK:codearith (fs, op, e1, e2)
- if self:constfolding (op, e1, e2) then return else
- local o1 = self:exp2RK (fs, e1)
- local o2 = 0
- if op ~= "OP_UNM" and op ~= "OP_LEN" then
- o2 = self:exp2RK (fs, e2) end
- self:freeexp(fs, e2)
- self:freeexp(fs, e1)
- e1.info = self:codeABC (fs, op, 0, o1, o2)
- e1.k = "VRELOCABLE"
- end
-end
-
---FF 5.1
-function luaK:codecomp (fs, op, cond, e1, e2)
- assert (type (cond) == "boolean")
- local o1 = self:exp2RK (fs, e1)
- local o2 = self:exp2RK (fs, e2)
- self:freeexp (fs, e2)
- self:freeexp (fs, e1)
- if not cond and op ~= "OP_EQ" then
- local temp = o1; o1=o2; o2=temp cond = true end
- e1.info = self:condjump (fs, op, cond and 1 or 0, o1, o2)
- e1.k = "VJMP"
-end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
-- Apply a unary operator [op] ("unm", "not" or "len") to expression [e].
-- A dummy zero constant [e2] fills the unused second-operand slot of
-- codearith's binary interface.
function luaK:prefix (fs, op, e)
  local e2 = { t = self.NO_JUMP; f = self.NO_JUMP;
               k = "VKNUM"; nval = 0 }
  if op == "unm" then
    if e.k == "VK" then
      -- a bare constant must be loaded into a register before negation
      self:exp2anyreg (fs, e) end
    self:codearith (fs, "OP_UNM", e, e2)
  elseif op == "not" then
    self:codenot (fs, e)
  elseif op == "len" then
    self:exp2anyreg (fs, e) -- OP_LEN operand must live in a register
    self:codearith (fs, "OP_LEN", e, e2)
  else
    assert (false, "Unknown unary operator")
  end
end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:infix (fs, op, v)
  -- Prepare the left operand [v] of binary operator [op], before the
  -- right-hand side has been parsed.
  if op == "and" then
    self:goiftrue (fs, v)
  elseif op == "or" then
    self:goiffalse (fs, v)
  elseif op == "concat" then
    self:exp2nextreg (fs, v) -- concat operands must sit on the 'stack'
  elseif not self:isnumeral (v) then
    -- arithmetic/comparison: fix as register-or-constant; numeric
    -- constants are left alone so they may still be folded
    self:exp2RK (fs, v)
  end
end
-
-------------------------------------------------------------------------
---
--- grep "ORDER OPR" if you change these enums
-------------------------------------------------------------------------
-- Operator-name -> opcode lookup for arithmetic/unary operators.
luaK.arith_opc = { -- done as a table lookup instead of a calc
  add = "OP_ADD",
  sub = "OP_SUB",
  mul = "OP_MUL",
  mod = "OP_MOD",
  div = "OP_DIV",
  pow = "OP_POW",
  len = "OP_LEN",
  ["not"] = "OP_NOT"
}
-- Operator-name -> {opcode, jump condition} for comparisons.
luaK.test_opc = { -- was ops[] in the codebinop function
  eq = {opc="OP_EQ", cond=true},
  lt = {opc="OP_LT", cond=true},
  le = {opc="OP_LE", cond=true},

  -- Pseudo-ops, with no metatable equivalent: encoded by inverting the
  -- jump condition of the corresponding real opcode.
  ne = {opc="OP_EQ", cond=false},
  gt = {opc="OP_LT", cond=false},
  ge = {opc="OP_LE", cond=false}
}
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:posfix(fs, op, e1, e2)
  -- Finish a binary operation once the right operand has been parsed.
  -- For "and"/"or" the pending jump lists of both operands are merged
  -- and e2 becomes the result; "concat" chains into an existing
  -- OP_CONCAT when possible; other operators dispatch through the
  -- arith_opc / test_opc lookup tables.
  if op == "and" then
    assert(e1.t == self.NO_JUMP) -- list must be closed
    self:dischargevars(fs, e2)
    e2.f = self:concat(fs, e2.f, e1.f)
    for k,v in pairs(e2) do e1[k]=v end -- *e1 = *e2
  elseif op == "or" then
    assert(e1.f == self.NO_JUMP) -- list must be closed
    self:dischargevars(fs, e2)
    e2.t = self:concat(fs, e2.t, e1.t)
    for k,v in pairs(e2) do e1[k]=v end -- *e1 = *e2
  elseif op == "concat" then
    self:exp2val(fs, e2)
    -- fold "a..(b..c)" into one OP_CONCAT spanning all the operands
    if e2.k == "VRELOCABLE"
      and luaP:GET_OPCODE(self:getcode(fs, e2)) == "OP_CONCAT" then
      assert(e1.info == luaP:GETARG_B(self:getcode(fs, e2)) - 1)
      self:freeexp(fs, e1)
      luaP:SETARG_B(self:getcode(fs, e2), e1.info)
      e1.k = "VRELOCABLE"; e1.info = e2.info
    else
      self:exp2nextreg(fs, e2)
      self:codearith (fs, "OP_CONCAT", e1, e2)
    end
  else
    local opc = self.arith_opc[op]
    if opc then self:codearith (fs, opc, e1, e2) else
      opc = self.test_opc[op] or error ("Unknown operator "..op)
      self:codecomp (fs, opc.opc, opc.cond, e1, e2)
    end
  end
end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:fixline (fs, line)
  -- Overwrite the source line recorded for the most recently emitted
  -- instruction (at fs.pc - 1). A nil [line] is tolerated and stored
  -- as 0.
  fs.f.lineinfo[fs.pc - 1] = line or 0
end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:code(fs, i, line)
  -- Append instruction [i] to the function's code array, record a
  -- source line for it, and return the pc where it was stored.
  if not line then
    line = 0 -- tolerated; patched from fs.lastline below
    --print(debug.traceback "line == nil")
  end
  local f = fs.f

  do -- trace the emitted instruction (no-op unless debugf is enabled)
    local params = { }
    for _,x in ipairs{"A","B","Bx", "sBx", "C"} do
      if i[x] then table.insert (params, string.format ("%s=%i", x, i[x])) end
    end
    debugf ("[code:\t%s\t%s]", luaP.opnames[i.OP], table.concat (params, ", "))
  end

  self:dischargejpc(fs) -- 'pc' will change

  f.code[fs.pc] = i
  f.lineinfo[fs.pc] = line

  -- fall back on the last line seen by the lexer when none was supplied
  if line == 0 then
    f.lineinfo[fs.pc] = fs.lastline
    if fs.lastline == 0 then
      --print(debug.traceback())
    end
  end

  -- last-resort placeholder so no instruction keeps line 0;
  -- 42 is an arbitrary marker value
  if f.lineinfo[fs.pc] == 0 then
    f.lineinfo[fs.pc] = 42
  end

  local pc = fs.pc
  fs.pc = fs.pc + 1
  return pc
end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:codeABC(fs, o, a, b, c)
  -- Emit an iABC-format instruction at the current source line.
  assert(luaP:getOpMode(o) == "iABC", o.." is not an ABC operation")
  local instr = luaP:CREATE_ABC(o, a, b, c)
  return self:code(fs, instr, fs.lastline)
end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:codeABx(fs, o, a, bc)
  -- Emit an iABx/iAsBx-format instruction at the current source line.
  local mode = luaP:getOpMode(o)
  assert(mode == "iABx" or mode == "iAsBx")
  local instr = luaP:CREATE_ABx(o, a, bc)
  return self:code(fs, instr, fs.lastline)
end
-
-------------------------------------------------------------------------
---
-------------------------------------------------------------------------
function luaK:setlist (fs, base, nelems, tostore)
  -- Emit OP_SETLIST to flush [tostore] stacked values into the table in
  -- register [base]. [c] is the 1-based batch number; when it exceeds
  -- MAXARG_C it is emitted as a raw extra code word after the
  -- instruction, mirroring the C implementation's encoding.
  local c = math.floor ((nelems-1) / luaP.LFIELDS_PER_FLUSH + 1)
  local b = tostore == self.LUA_MULTRET and 0 or tostore
  assert (tostore ~= 0)
  if c <= luaP.MAXARG_C then self:codeABC (fs, "OP_SETLIST", base, b, c)
  else
    self:codeABC (fs, "OP_SETLIST", base, b, 0)
    self:code (fs, c, fs.lastline)--FIXME
  end
  fs.freereg = base + 1 -- pop the stored values; keep only the table
end
+++ /dev/null
-----------------------------------------------------------------------
---
--- WARNING! You're entering a hackish area, proceed at your own risks!
---
--- This code results from the borrowing, then ruthless abuse, of
--- Yueliang's implementation of Lua 5.0 compiler. I claim
--- responsibility for all of the ugly, dirty stuff that you might spot
--- in it.
---
--- Eventually, this code will be rewritten, either in Lua or more
--- probably in C. Meanwhile, if you're interested into digging
--- metalua's sources, this is not the best part to invest your time
--- on.
---
--- End of warning.
---
-----------------------------------------------------------------------
-
---[[--------------------------------------------------------------------
-
- $Id$
-
- ldump.lua
- Save bytecodes in Lua
- This file is part of Yueliang.
-
- Copyright (c) 2005 Kein-Hong Man <khman@users.sf.net>
- The COPYRIGHT file describes the conditions
- under which this software may be distributed.
-
-------------------------------------------------------------------------
-
- [FF] Slightly modified, mainly to produce Lua 5.1 bytecode.
-
-----------------------------------------------------------------------]]
-
---[[--------------------------------------------------------------------
--- Notes:
--- * LUA_NUMBER (double), byte order (little endian) and some other
--- header values hard-coded; see other notes below...
--- * One significant difference is that instructions are still in table
--- form (with OP/A/B/C/Bx fields) and luaP:Instruction() is needed to
--- convert them into 4-char strings
--- * Deleted:
--- luaU:DumpVector: folded into DumpLines, DumpCode
--- * Added:
--- luaU:endianness() (from lundump.c)
--- luaU:make_setS: create a chunk writer that writes to a string
--- luaU:make_setF: create a chunk writer that writes to a file
--- (lua.h contains a typedef for a Chunkwriter pointer, and
--- a Lua-based implementation exists, writer() in lstrlib.c)
--- luaU:from_double(x): encode double value for writing
--- luaU:from_int(x): encode integer value for writing
--- (error checking is limited for these conversion functions)
--- (double conversion does not support denormals or NaNs)
--- luaU:ttype(o) (from lobject.h)
-----------------------------------------------------------------------]]
-
module("bytecode", package.seeall)

-- Binary-chunk format description, deduced from the running VM by
-- inspecting the 12-byte header produced by string.dump().
format = { }
format.header = string.dump(function()end):sub(1, 12)
-- Header bytes 7..12: endianness flag, sizeof(int), sizeof(size_t),
-- sizeof(Instruction), sizeof(lua_Number), integral-numbers flag.
format.little_endian, format.int_size,
format.size_t_size, format.instr_size,
format.number_size, format.integral = format.header:byte(7, 12)
format.little_endian = format.little_endian~=0
format.integral = format.integral ~=0

-- The dumper below only knows how to serialize little-endian chunks
-- whose lua_Number is either integral or an 8-byte IEEE754 double.
assert(format.integral or format.number_size==8, "Number format not supported by dumper")
assert(format.little_endian, "Big endian architectures not supported by dumper")
-
--requires luaP
luaU = {}

-- Constant-pool type tags (values from lua.h)
luaU.LUA_TNIL = 0
luaU.LUA_TBOOLEAN = 1
luaU.LUA_TNUMBER = 3 -- (all in lua.h)
luaU.LUA_TSTRING = 4
luaU.LUA_TNONE = -1

-- definitions for headers of binary files
-- (now superseded by the format.header captured above)
--luaU.LUA_SIGNATURE = "\27Lua" -- binary files start with "<esc>Lua"
--luaU.VERSION = 81 -- 0x50; last format change was in 5.0
--luaU.FORMAT_VERSION = 0 -- 0 is official version. yeah I know I'm a liar.

-- a multiple of PI for testing native format
-- multiplying by 1E7 gives non-trivial integer values
--luaU.TEST_NUMBER = 3.14159265358979323846E7
-
---[[--------------------------------------------------------------------
--- Additional functions to handle chunk writing
--- * to use make_setS and make_setF, see test_ldump.lua elsewhere
-----------------------------------------------------------------------]]
-
-------------------------------------------------------------------------
--- works like the lobject.h version except that TObject used in these
--- scripts only has a 'value' field, no 'tt' field (native types used)
-------------------------------------------------------------------------
function luaU:ttype(o)
  -- Map the native type of o.value to the corresponding LUA_T* dump tag
  -- (TObject here only has a 'value' field, no 'tt' field).
  local native = type(o.value)
  if native == "number"  then return self.LUA_TNUMBER  end
  if native == "string"  then return self.LUA_TSTRING  end
  if native == "nil"     then return self.LUA_TNIL     end
  if native == "boolean" then return self.LUA_TBOOLEAN end
  return self.LUA_TNONE -- other types never appear in a constant pool
end
-
-------------------------------------------------------------------------
--- create a chunk writer that writes to a string
--- * returns the writer function and a table containing the string
--- * to get the final result, look in buff.data
-------------------------------------------------------------------------
function luaU:make_setS()
  -- Build a chunk writer that accumulates into a string buffer.
  -- Returns (writer, buff); the dumped bytes end up in buff.data.
  -- The writer ignores the final nil chunk sent at end-of-dump.
  local buff = { data = "" }
  local function writer (s, b) -- chunk writer
    if s then b.data = b.data .. s end
  end
  return writer, buff
end
-
-------------------------------------------------------------------------
--- create a chunk writer that writes to a file
--- * returns the writer function and a table containing the file handle
--- * if a nil is passed, then writer should close the open file
-------------------------------------------------------------------------
function luaU:make_setF(filename)
  -- Build a chunk writer streaming to [filename]; calling the writer
  -- with a nil chunk closes the file. Returns nil when the file cannot
  -- be opened, otherwise (writer, buff) with the handle in buff.h.
  local buff = { h = io.open(filename, "wb") }
  if not buff.h then return nil end
  local function writer (s, b) -- chunk writer
    if not b.h then return end
    if not s then
      b.h:close()
      return
    end
    b.h:write(s)
  end
  return writer, buff
end
-
------------------------------------------------------------------------
--- converts a IEEE754 double number to an 8-byte little-endian string
--- * luaU:from_double() and luaU:from_int() are from ChunkBake project
--- * supports +/- Infinity, but not denormals or NaNs
------------------------------------------------------------------------
function luaU:from_double(x)
  -- Encode [x] as an 8-byte little-endian IEEE754 double.
  -- Handles zero and +/- infinity; denormals and NaNs are not supported.
  local function grab_byte(v)
    -- split off the low byte of v; returns (remaining value, byte-as-char)
    return math.floor(v / 256),
           string.char(math.mod(math.floor(v), 256))
  end
  local sign = 0
  if x < 0 then sign = 1; x = -x end
  local mantissa, exponent = math.frexp(x)
  if x == 0 then -- zero
    mantissa, exponent = 0, 0
  elseif x == 1/0 then -- infinity: all-ones exponent, zero mantissa
    mantissa, exponent = 0, 2047
  else
    -- 52 stored mantissa bits (implicit leading 1); rebias the exponent
    mantissa = (mantissa * 2 - 1) * math.ldexp(0.5, 53)
    exponent = exponent + 1022
  end
  local v, byte = "" -- convert to bytes
  x = mantissa
  for i = 1,6 do
    x, byte = grab_byte(x); v = v..byte -- 47:0
  end
  x, byte = grab_byte(exponent * 16 + x); v = v..byte -- 55:48
  x, byte = grab_byte(sign * 128 + x); v = v..byte -- 63:56
  return v
end
-
------------------------------------------------------------------------
-- converts a number to a little-endian integer string of [size] bytes
--- * input value assumed to not overflow, can be signed/unsigned
------------------------------------------------------------------------
function luaU:from_int(x, size)
  -- Encode integer [x] as a little-endian string of [size] bytes.
  -- Negative values are written in two's-complement form; the caller is
  -- responsible for [x] fitting in [size] bytes.
  local v = ""
  x = math.floor(x)
  if x >= 0 then
    for i = 1, size do
      v = v..string.char(math.mod(x, 256)); x = math.floor(x / 256)
    end
  else -- x < 0: two's complement = invert the bytes of -x, then add 1 (carry)
    x = -x
    local carry = 1
    for i = 1, size do
      local c = 255 - math.mod(x, 256) + carry
      if c == 256 then c = 0; carry = 1 else carry = 0 end
      v = v..string.char(c); x = math.floor(x / 256)
    end
  end
  return v
end
-
---[[--------------------------------------------------------------------
--- Functions to make a binary chunk
--- * many functions have the size parameter removed, since output is
--- in the form of a string and some sizes are implicit or hard-coded
--- * luaU:DumpVector has been deleted (used in DumpCode & DumpLines)
-----------------------------------------------------------------------]]
-
-------------------------------------------------------------------------
--- dump a block of literal bytes
-------------------------------------------------------------------------
function luaU:DumpLiteral(s, D)
  -- Write a literal byte sequence verbatim, with no length prefix.
  self:DumpBlock(s, D)
end
-
---[[--------------------------------------------------------------------
--- struct DumpState:
--- L -- lua_State (not used in this script)
--- write -- lua_Chunkwriter (chunk writer function)
--- data -- void* (chunk writer context or data already written)
-----------------------------------------------------------------------]]
-
-------------------------------------------------------------------------
--- dumps a block of bytes
--- * lua_unlock(D.L), lua_lock(D.L) deleted
-------------------------------------------------------------------------
function luaU:DumpBlock(b, D)
  -- Forward raw bytes to the dump state's writer callback.
  D.write(b, D.data)
end
-
-------------------------------------------------------------------------
--- dumps a single byte
-------------------------------------------------------------------------
function luaU:DumpByte(y, D)
  -- Write a single byte value (0..255).
  local byte = string.char(y)
  self:DumpBlock(byte, D)
end
-
-------------------------------------------------------------------------
--- dumps a 32-bit signed integer (for int)
-------------------------------------------------------------------------
function luaU:DumpInt(x, D)
  -- Write a signed integer, sized per the host's sizeof(int).
  self:DumpBlock(self:from_int(x, format.int_size), D)
end
-
-------------------------------------------------------------------------
--- dumps a 32-bit unsigned integer (for size_t)
-------------------------------------------------------------------------
function luaU:DumpSize(x, D)
  -- Write an unsigned size, sized per the host's sizeof(size_t).
  self:DumpBlock(self:from_int(x, format.size_t_size), D)
end
-
-------------------------------------------------------------------------
--- dumps a LUA_NUMBER (hard-coded as a double)
-------------------------------------------------------------------------
function luaU:DumpNumber(x, D)
  -- Write a lua_Number: a raw integer on integral builds, an IEEE754
  -- double otherwise (the format asserts above rule everything else out).
  if format.integral then
    self:DumpBlock(self:from_int(x, format.number_size), D)
  else
    self:DumpBlock(self:from_double(x), D)
  end
end
-
-------------------------------------------------------------------------
--- dumps a Lua string
-------------------------------------------------------------------------
function luaU:DumpString(s, D)
  -- Strings are dumped as a size (counting a trailing NUL) followed by
  -- the bytes themselves; nil is encoded as size 0 with no payload.
  if s == nil then
    self:DumpSize(0, D)
    return
  end
  local z = s .. "\0" -- include trailing '\0'
  self:DumpSize(#z, D)
  self:DumpBlock(z, D)
end
-
-------------------------------------------------------------------------
--- dumps instruction block from function prototype
-------------------------------------------------------------------------
function luaU:DumpCode(f, D)
  -- Instruction count, then each table-form instruction serialized to
  -- its 4-byte encoding via luaP:Instruction().
  local count = f.sizecode
  self:DumpInt(count, D)
  for pc = 0, count - 1 do
    self:DumpBlock(luaP:Instruction(f.code[pc]), D)
  end
end
-
-------------------------------------------------------------------------
--- dumps local variable names from function prototype
-------------------------------------------------------------------------
function luaU:DumpLocals(f, D)
  -- Write local-variable debug info: count, then (name, startpc, endpc)
  -- for each variable.
  local n = f.sizelocvars
  self:DumpInt(n, D)
  for i = 0, n - 1 do
    -- Dirty temporary fix:
    -- `Stat{ } keeps properly count of the number of local vars,
    -- but fails to keep score of their debug info (names).
    -- It therefore might happen that #f.localvars < f.sizelocvars, or
    -- that a variable's startpc and endpc fields are left unset.
    -- FIXME: This might not be needed anymore, check the bug report
    -- by J. Belmonte.
    local var = f.locvars[i]
    if not var then break end
    -- printf("[DUMPLOCALS] dumping local var #%i = %s", i, table.tostring(var))
    self:DumpString(var.varname, D)
    self:DumpInt(var.startpc or 0, D)
    self:DumpInt(var.endpc or 0, D)
  end
end
-
-------------------------------------------------------------------------
--- dumps line information from function prototype
-------------------------------------------------------------------------
function luaU:DumpLines(f, D)
  -- Per-instruction source line numbers, prefixed by their count.
  local count = f.sizelineinfo
  self:DumpInt(count, D)
  for pc = 0, count - 1 do
    self:DumpInt(f.lineinfo[pc], D)
  end
end
-
-------------------------------------------------------------------------
--- dump upvalue names from function prototype
-------------------------------------------------------------------------
function luaU:DumpUpvalues(f, D)
  -- Upvalue-name count, then each name as a dumped string.
  local count = f.sizeupvalues
  self:DumpInt(count, D)
  for idx = 0, count - 1 do
    self:DumpString(f.upvalues[idx], D)
  end
end
-
-------------------------------------------------------------------------
--- dump constant pool from function prototype
--- * nvalue(o) and tsvalue(o) macros removed
-------------------------------------------------------------------------
function luaU:DumpConstants(f, D)
  -- Write the constant pool: count, then for each constant a type tag
  -- byte followed by its payload (nothing for nil, a byte for booleans,
  -- a number or a string otherwise).
  local n = f.sizek
  self:DumpInt(n, D)
  for i = 0, n - 1 do
    local o = f.k[i] -- TObject
    local tt = self:ttype(o)
    assert (tt >= 0) -- LUA_TNONE must never reach the dumper
    self:DumpByte(tt, D)
    if tt == self.LUA_TNUMBER then
      self:DumpNumber(o.value, D)
    elseif tt == self.LUA_TSTRING then
      self:DumpString(o.value, D)
    elseif tt == self.LUA_TBOOLEAN then
      self:DumpByte (o.value and 1 or 0, D)
    elseif tt == self.LUA_TNIL then
      -- nil carries no payload
    else
      assert(false) -- cannot happen
    end
  end
end
-
-
function luaU:DumpProtos (f, D)
  -- Nested prototypes: count, then each child function dumped
  -- recursively (the parent's source name suppresses repetition).
  local count = f.sizep
  assert (count)
  self:DumpInt(count, D)
  for idx = 0, count - 1 do
    self:DumpFunction(f.p[idx], f.source, D)
  end
end
-
-- Write the debug section: line info, then locals, then upvalue names.
function luaU:DumpDebug(f, D)
  self:DumpLines(f, D)
  self:DumpLocals(f, D)
  self:DumpUpvalues(f, D)
end
-
-
-------------------------------------------------------------------------
--- dump child function prototypes from function prototype
---FF completely reworked for 5.1 format
-------------------------------------------------------------------------
function luaU:DumpFunction(f, p, D)
  -- Write one function prototype in Lua 5.1 chunk order.
  -- [p] is the parent's source name: when f.source matches it, the
  -- source string is dumped as nil to avoid repeating it.
  -- print "Dumping function:"
  -- table.print(f, 60)

  local source = f.source
  if source == p then source = nil end
  self:DumpString(source, D)
  self:DumpInt(f.lineDefined, D)
  self:DumpInt(f.lastLineDefined or 42, D) -- 42: arbitrary placeholder when unset
  self:DumpByte(f.nups, D)
  self:DumpByte(f.numparams, D)
  self:DumpByte(f.is_vararg, D)
  self:DumpByte(f.maxstacksize, D)
  self:DumpCode(f, D)
  self:DumpConstants(f, D)
  self:DumpProtos( f, D)
  self:DumpDebug(f, D)
end
-
-------------------------------------------------------------------------
--- dump Lua header section (some sizes hard-coded)
---FF: updated for version 5.1
-------------------------------------------------------------------------
function luaU:DumpHeader(D)
  -- Write the 12-byte header captured from the running VM
  -- (format.header), so the chunk matches the host's word sizes,
  -- endianness and number format.
  self:DumpLiteral(format.header, D)
end
-
-------------------------------------------------------------------------
--- dump function as precompiled chunk
--- * w, data are created from make_setS, make_setF
---FF: suppressed extraneous [L] param
-------------------------------------------------------------------------
function luaU:dump (Main, w, data)
  -- Write prototype [Main] as a complete precompiled chunk through
  -- chunk writer [w] (from make_setS/make_setF; [data] is its state).
  local D = {} -- DumpState
  D.write = w
  D.data = data
  self:DumpHeader(D)
  self:DumpFunction(Main, nil, D)
  -- added: for a chunk writer writing to a file, this final call with
  -- nil data is to indicate to the writer to close the file
  D.write(nil, D.data)
end
-
-------------------------------------------------------------------------
--- find byte order (from lundump.c)
--- * hard-coded to little-endian
-------------------------------------------------------------------------
function luaU:endianness()
  -- Hard-coded little-endian (1), consistent with the
  -- format.little_endian assert at the top of this module.
  return 1
end
-
-- FIXME: ugly concat-base generation in [make_setS], bufferize properly!
-- Return prototype [proto] compiled into a binary chunk string.
function dump_string (proto)
  local writer, buff = luaU:make_setS()
  luaU:dump (proto, writer, buff)
  return buff.data
end
-
-- FIXME: [make_setS] sucks, perform synchronous file writing
-- Now unused
-- Dump prototype [proto] as a precompiled chunk into [filename].
-- Raises a descriptive error when the destination cannot be opened,
-- instead of crashing on a nil file handle.
function dump_file (proto, filename)
  local writer, buff = luaU:make_setS()
  luaU:dump (proto, writer, buff)
  local file = assert (io.open (filename, "wb"),
                       "cannot open "..filename.." for writing")
  file:write (buff.data)
  file:close ()
  -- optionally make shebang'ed chunks executable
  if UNIX_SHARPBANG then os.execute ("chmod a+x "..filename) end
end
\ No newline at end of file
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $
---
--- Summary: generic Lua-style lexer definition. You need this plus
--- some keyword additions to create the complete Lua lexer,
--- as is done in mlp_lexer.lua.
---
--- TODO:
---
--- * Make it easy to define new flavors of strings. Replacing the
--- lexer.patterns.long_string regexp by an extensible list, with
--- customizable token tag, would probably be enough. Maybe add:
--- + an index of capture for the regexp, that would specify
--- which capture holds the content of the string-like token
--- + a token tag
--- + or a string->string transformer function.
---
-- * There are some _G.table prefixes that were added to prevent a
--   namespace clash which has since disappeared; remove them.
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
-
module ("lexer", package.seeall)

require 'metalua.runtime'


-- Base lexer "class": [alpha] holds registered keyword words; [sym]
-- maps a leading punctuation char to the list of compound symbols
-- starting with that char.
lexer = { alpha={ }, sym={ } }
lexer.__index=lexer

-- Debug printer, disabled by default.
local debugf = function() end
--local debugf=printf
-
-----------------------------------------------------------------------
--- Patterns used by [lexer:extract] to decompose the raw string into
--- correctly tagged tokens.
-----------------------------------------------------------------------
-- Each pattern ends with a position capture "()", so a successful
-- match also yields the index of the first unread character.
lexer.patterns = {
  spaces = "^[ \r\n\t]*()",
  short_comment = "^%-%-([^\n]*)()\n",
  final_short_comment = "^%-%-([^\n]*)()$",
  long_comment = "^%-%-%[(=*)%[\n?(.-)%]%1%]()",
  long_string = "^%[(=*)%[\n?(.-)%]%1%]()",
  number_mantissa = { "^%d+%.?%d*()", "^%d*%.%d+()" },
  number_exponant = "^[eE][%+%-]?%d+()",
  number_hex = "^0[xX]%x+()",
  word = "^([%a_][%w_]*)()"
}
-
-----------------------------------------------------------------------
--- unescape a whole string, applying [unesc_digits] and
--- [unesc_letter] as many times as required.
-----------------------------------------------------------------------
-- Expand backslash escapes in the raw content of a short string:
-- numeric escapes (\ddd) first, then single-letter ones (\n, \t, ...).
local function unescape_string (s)

  -- Turn the digits of an escape sequence into the corresponding
  -- character, e.g. [unesc_digits("123") == string.char(123)].
  local function unesc_digits (backslashes, digits)
    if #backslashes%2==0 then
      -- Even number of backslashes, they escape each other, not the digits.
      -- Return them so that unesc_letter() can treat them
      return backslashes..digits
    else
      -- Remove the odd backslash, which escapes the number sequence.
      -- The rest will be returned and parsed by unesc_letter()
      backslashes = backslashes :sub (1,-2)
    end
    -- decode up to three decimal digits (least-significant first after
    -- the reverse) into a char code
    local k, j, i = digits:reverse():byte(1, 3)
    local z = _G.string.byte "0"
    local code = (k or z) + 10*(j or z) + 100*(i or z) - 111*z
    if code > 255 then
      error ("Illegal escape sequence '\\"..digits..
             "' in string: ASCII codes must be in [0..255]")
    end
    return backslashes .. string.char (code)
  end

  -- Take a letter [x], and returns the character represented by the
  -- sequence ['\\'..x], e.g. [unesc_letter "n" == "\n"].
  local function unesc_letter(x)
    local t = {
      a = "\a", b = "\b", f = "\f",
      n = "\n", r = "\r", t = "\t", v = "\v",
      ["\\"] = "\\", ["'"] = "'", ['"'] = '"', ["\n"] = "\n" }
    return t[x] or error([[Unknown escape sequence '\]]..x..[[']])
  end

  return s
    :gsub ("(\\+)([0-9][0-9]?[0-9]?)", unesc_digits)
    :gsub ("\\(%D)",unesc_letter)
end
-
-- Extraction methods, tried in order by [lexer:extract]. The
-- whitespace/comment skipper must stay at index 1 (extract() relies
-- on that to locate the token's real starting offset).
lexer.extractors = {
  "skip_whitespaces_and_comments",
  "extract_short_string", "extract_word", "extract_number",
  "extract_long_string", "extract_symbol" }

-- Metatable shared by all tokens (pretty-printing hook kept disabled).
lexer.token_metatable = {
-- __tostring = function(a)
-- return string.format ("`%s{'%s'}",a.tag, a[1])
-- end
}

-- Metatable shared by all lineinfo records.
lexer.lineinfo_metatable = { }
-
-----------------------------------------------------------------------
-- Really extract the next token from the raw string
--- (and update the index).
--- loc: offset of the position just after spaces and comments
--- previous_i: offset in src before extraction began
-----------------------------------------------------------------------
-- Extract the next token from the raw string and advance self.i.
-- loc: offset of the position just after spaces and comments
-- previous_i: offset in src before extraction began
function lexer:extract ()
  local previous_i = self.i
  local loc = self.i
  local eof, token

  -- Put line info, comments and metatable around the tag and content
  -- provided by extractors, thus returning a complete lexer token.
  -- first_line: line # at the beginning of token
  -- first_column_offset: char # of the last '\n' before beginning of token
  -- i: scans from beginning of prefix spaces/comments to end of token.
  local function build_token (tag, content)
    assert (tag and content)
    local i, first_line, first_column_offset, previous_line_length =
      previous_i, self.line, self.column_offset, nil

    -- update self.line and first_line. i := indexes of '\n' chars
    while true do
      i = self.src :find ("\n", i+1, true)
      if not i or i>self.i then break end -- no more '\n' until end of token
      previous_line_length = i - self.column_offset
      if loc and i <= loc then -- '\n' before beginning of token
        first_column_offset = i
        first_line = first_line+1
      end
      self.line = self.line+1
      self.column_offset = i
    end

    -- lineinfo entries: [1]=line, [2]=column, [3]=char, [4]=filename
    local fli = { first_line, loc-first_column_offset, loc, self.src_name }
    local lli = { self.line, self.i-self.column_offset-1, self.i-1, self.src_name }
    --Pluto barfs when the metatable is set:(
    setmetatable(fli, lexer.lineinfo_metatable)
    setmetatable(lli, lexer.lineinfo_metatable)
    local a = { tag = tag, lineinfo = { first=fli, last=lli }, content }
    -- a column of -1 means the token ended exactly on a '\n':
    -- back up to the end of the previous line
    if lli[2]==-1 then lli[1], lli[2] = lli[1]-1, previous_line_length-1 end
    -- attach comments collected by the whitespace skipper to this token
    -- (and to the previous token's trailing lineinfo)
    if #self.attached_comments > 0 then
      a.lineinfo.comments = self.attached_comments
      fli.comments = self.attached_comments
      if self.lineinfo_last then
        self.lineinfo_last.comments = self.attached_comments
      end
    end
    self.attached_comments = { }
    return setmetatable (a, self.token_metatable)
  end --</function build_token>

  for ext_idx, extractor in ipairs(self.extractors) do
    -- printf("method = %s", method)
    local tag, content = self [extractor] (self)
    -- [loc] is placed just after the leading whitespaces and comments;
    -- for this to work, the whitespace extractor *must be* at index 1.
    if ext_idx==1 then loc = self.i end

    if tag then
      --printf("`%s{ %q }\t%i", tag, content, loc);
      return build_token (tag, content)
    end
  end

  error "None of the lexer extractors returned anything!"
end
-
-----------------------------------------------------------------------
--- skip whites and comments
--- FIXME: doesn't take into account:
--- - unterminated long comments
--- - short comments at last line without a final \n
-----------------------------------------------------------------------
-- Skip spaces and comments, stashing each comment as
-- {content, start, stop, kind} in self.attached_comments.
-- Returns "Eof","eof" when the end of source is reached; returns
-- nothing otherwise (so the next extractor gets a chance).
function lexer:skip_whitespaces_and_comments()
  local table_insert = _G.table.insert
  repeat -- loop as long as a space or comment chunk is found
    local _, j
    local again = false
    local last_comment_content = nil
    -- skip spaces
    self.i = self.src:match (self.patterns.spaces, self.i)
    -- skip a long comment if any
    _, last_comment_content, j =
      self.src :match (self.patterns.long_comment, self.i)
    if j then
      table_insert(self.attached_comments,
                   {last_comment_content, self.i, j, "long"})
      self.i=j; again=true
    end
    -- skip a short comment if any
    last_comment_content, j = self.src:match (self.patterns.short_comment, self.i)
    if j then
      table_insert(self.attached_comments,
                   {last_comment_content, self.i, j, "short"})
      self.i=j; again=true
    end
    if self.i>#self.src then return "Eof", "eof" end
  until not again

  -- a final '--...' comment with no trailing newline also ends the stream
  if self.src:match (self.patterns.final_short_comment, self.i) then
    return "Eof", "eof" end
  --assert (not self.src:match(self.patterns.short_comment, self.i))
  --assert (not self.src:match(self.patterns.long_comment, self.i))
  -- --assert (not self.src:match(self.patterns.spaces, self.i))
  return
end
-
-----------------------------------------------------------------------
--- extract a '...' or "..." short string
-----------------------------------------------------------------------
-- Extract a '...' or "..." short string; returns tag "String" and the
-- unescaped content, or nothing when self.i is not at a quote.
function lexer:extract_short_string()
  -- [k] is the first unread char, [self.i] points to [k] in [self.src]
  -- (the previous version also bound a dead local [j] here, immediately
  -- shadowed below; it has been removed)
  local k = self.src :sub (self.i,self.i)
  if k~="'" and k~='"' then return end
  local i = self.i + 1 -- index of the first char inside the string
  local j = i          -- scan position
  while true do
    -- k = opening char: either simple-quote or double-quote
    -- i = index of beginning-of-string
    -- x = next "interesting" character
    -- j = position after interesting char
    -- y = char just after x
    local x, y
    x, j, y = self.src :match ("([\\\r\n"..k.."])()(.?)", j)
    if x == '\\' then j=j+1 -- don't parse escaped char
    elseif x == k then break -- unescaped end of string
    else -- eof or '\r' or '\n' reached before end of string
      assert (not x or x=="\r" or x=="\n")
      error "Unterminated string"
    end
  end
  self.i = j

  return "String", unescape_string (self.src:sub (i,j-2))
end
-
-----------------------------------------------------------------------
---
-----------------------------------------------------------------------
function lexer:extract_word()
  -- Identifier or registered keyword, per patterns.word.
  local word, after = self.src:match (self.patterns.word, self.i)
  if not word then return end
  self.i = after
  if self.alpha [word] then
    return "Keyword", word
  end
  return "Id", word
end
-
-----------------------------------------------------------------------
---
-----------------------------------------------------------------------
function lexer:extract_number()
  -- Number: try hexadecimal first, then a decimal mantissa with an
  -- optional exponent part.
  local j = self.src:match(self.patterns.number_hex, self.i)
  if not j then
    j = self.src:match (self.patterns.number_mantissa[1], self.i) or
        self.src:match (self.patterns.number_mantissa[2], self.i)
    if j then
      j = self.src:match (self.patterns.number_exponant, j) or j;
    end
  end
  if not j then return end
  -- Number found, interpret with tonumber() and return it
  local n = tonumber (self.src:sub (self.i, j-1))
  self.i = j
  return "Number", n
end
-
-----------------------------------------------------------------------
---
-----------------------------------------------------------------------
function lexer:extract_long_string()
  -- [[ ... ]] long string, with optional '=' padding in the brackets.
  local _, content, after = self.src:match (self.patterns.long_string, self.i)
  if not after then return end
  self.i = after
  return "String", content
end
-
-----------------------------------------------------------------------
---
-----------------------------------------------------------------------
-- Extract a punctuation symbol: the longest registered compound symbol
-- starting at self.i, or the single character itself.
function lexer:extract_symbol()
  local k = self.src:sub (self.i,self.i)
  local symk = self.sym [k]
  if symk then
    -- Pick the LONGEST matching compound symbol, not the first one the
    -- (unordered) traversal happens to find: otherwise "..." could be
    -- tokenized as ".." followed by "." when ".." was registered first.
    local best
    for _, sym in pairs (symk) do
      if (not best or #sym > #best)
         and sym == self.src:sub (self.i, self.i + #sym - 1) then
        best = sym
      end
    end
    if best then
      self.i = self.i + #best
      return "Keyword", best
    end
  end
  -- single char symbol
  self.i = self.i + 1
  return "Keyword", k
end
-
-----------------------------------------------------------------------
--- Add a keyword to the list of keywords recognized by the lexer.
-----------------------------------------------------------------------
-- Register keyword(s) [w], a string or a list of strings: alphanumeric
-- words go into self.alpha, multi-char symbols into self.sym indexed by
-- their first character; single punctuation chars need no registration.
function lexer:add (w, ...)
  assert(not ..., "lexer:add() takes only one arg, although possibly a table")
  if type (w) == "table" then
    for _, x in ipairs (w) do self:add (x) end
  else
    if w:match (self.patterns.word .. "$") then self.alpha [w] = true
    elseif w:match "^%p%p+$" then
      local k = w:sub(1,1)
      local list = self.sym [k]
      if not list then list = { }; self.sym [k] = list end
      _G.table.insert (list, w)
    elseif w:match "^%p$" then return -- single-char symbols always lex as themselves
    else error "Invalid keyword" end
  end
end
-
-----------------------------------------------------------------------
--- Return the [n]th next token, without consumming it.
--- [n] defaults to 1. If it goes pass the end of the stream, an EOF
--- token is returned.
-----------------------------------------------------------------------
-function lexer:peek (n)
- if not n then n=1 end
- if n > #self.peeked then
- for i = #self.peeked+1, n do
- self.peeked [i] = self:extract()
- end
- end
- return self.peeked [n]
-end
-
-----------------------------------------------------------------------
--- Return the [n]th next token, removing it as well as the 0..n-1
--- previous tokens. [n] defaults to 1. If it goes pass the end of the
--- stream, an EOF token is returned.
-----------------------------------------------------------------------
-function lexer:next (n)
- n = n or 1
- self:peek (n)
- local a
- for i=1,n do
- a = _G.table.remove (self.peeked, 1)
- if a then
- --debugf ("lexer:next() ==> %s %s",
- -- table.tostring(a), tostring(a))
- end
- self.lastline = a.lineinfo.last[1]
- end
- self.lineinfo_last = a.lineinfo.last
- return a or eof_token
-end
-
-----------------------------------------------------------------------
--- Returns an object which saves the stream's current state.
-----------------------------------------------------------------------
--- FIXME there are more fields than that to save
-function lexer:save () return { self.i; _G.table.cat(self.peeked) } end
-
-----------------------------------------------------------------------
--- Restore the stream's state, as saved by method [save].
-----------------------------------------------------------------------
--- FIXME there are more fields than that to restore
-function lexer:restore (s) self.i=s[1]; self.peeked=s[2] end
-
-----------------------------------------------------------------------
--- Resynchronize: cancel any token in self.peeked, by emptying the
--- list and resetting the indexes
-----------------------------------------------------------------------
-function lexer:sync()
- local p1 = self.peeked[1]
- if p1 then
- li = p1.lineinfo.first
- self.line, self.i = li[1], li[3]
- self.column_offset = self.i - li[2]
- self.peeked = { }
- self.attached_comments = p1.lineinfo.first.comments or { }
- end
-end
-
-----------------------------------------------------------------------
--- Take the source and offset of an old lexer.
-----------------------------------------------------------------------
-function lexer:takeover(old)
- self:sync()
- self.line, self.column_offset, self.i, self.src, self.attached_comments =
- old.line, old.column_offset, old.i, old.src, old.attached_comments
- return self
-end
-
--- function lexer:lineinfo()
--- if self.peeked[1] then return self.peeked[1].lineinfo.first
--- else return { self.line, self.i-self.column_offset, self.i } end
--- end
-
-
-----------------------------------------------------------------------
--- Return the current position in the sources. This position is between
--- two tokens, and can be within a space / comment area, and therefore
--- have a non-null width. :lineinfo_left() returns the beginning of the
--- separation area, :lineinfo_right() returns the end of that area.
---
--- ____ last consummed token ____ first unconsummed token
--- / /
--- XXXXX <spaces and comments> YYYYY
--- \____ \____
--- :lineinfo_left() :lineinfo_right()
-----------------------------------------------------------------------
-function lexer:lineinfo_right()
- return self:peek(1).lineinfo.first
-end
-
-function lexer:lineinfo_left()
- return self.lineinfo_last
-end
-
-----------------------------------------------------------------------
--- Create a new lexstream.
-----------------------------------------------------------------------
-function lexer:newstream (src_or_stream, name)
- name = name or "?"
- if type(src_or_stream)=='table' then -- it's a stream
- return setmetatable ({ }, self) :takeover (src_or_stream)
- elseif type(src_or_stream)=='string' then -- it's a source string
- local src = src_or_stream
- local stream = {
- src_name = name; -- Name of the file
- src = src; -- The source, as a single string
- peeked = { }; -- Already peeked, but not discarded yet, tokens
- i = 1; -- Character offset in src
- line = 1; -- Current line number
- column_offset = 0; -- distance from beginning of file to last '\n'
- attached_comments = { },-- comments accumulator
- lineinfo_last = { 1, 1, 1, name }
- }
- setmetatable (stream, self)
-
- -- skip initial sharp-bang for unix scripts
- -- FIXME: redundant with mlp.chunk()
- if src and src :match "^#" then stream.i = src :find "\n" + 1 end
- return stream
- else
- assert(false, ":newstream() takes a source string or a stream, not a "..
- type(src_or_stream))
- end
-end
-
-----------------------------------------------------------------------
--- if there's no ... args, return the token a (whose truth value is
--- true) if it's a `Keyword{ }, or nil. If there are ... args, they
--- have to be strings. if the token a is a keyword, and it's content
--- is one of the ... args, then returns it (it's truth value is
--- true). If no a keyword or not in ..., return nil.
-----------------------------------------------------------------------
-function lexer:is_keyword (a, ...)
- if not a or a.tag ~= "Keyword" then return false end
- local words = {...}
- if #words == 0 then return a[1] end
- for _, w in ipairs (words) do
- if w == a[1] then return w end
- end
- return false
-end
-
-----------------------------------------------------------------------
--- Cause an error if the next token isn't a keyword whose content
--- is listed among ... args (which have to be strings).
-----------------------------------------------------------------------
-function lexer:check (...)
- local words = {...}
- local a = self:next()
- local function err ()
- error ("Got " .. tostring (a) ..
- ", expected one of these keywords : '" ..
- _G.table.concat (words,"', '") .. "'") end
-
- if not a or a.tag ~= "Keyword" then err () end
- if #words == 0 then return a[1] end
- for _, w in ipairs (words) do
- if w == a[1] then return w end
- end
- err ()
-end
-
-----------------------------------------------------------------------
---
-----------------------------------------------------------------------
-function lexer:clone()
- local clone = {
- alpha = table.deep_copy(self.alpha),
- sym = table.deep_copy(self.sym) }
- setmetatable(clone, self)
- clone.__index = clone
- return clone
-end
+++ /dev/null
-----------------------------------------------------------------------
---
--- WARNING! You're entering a hackish area, proceed at your own risks!
---
--- This code results from the borrowing, then ruthless abuse, of
--- Yueliang's implementation of Lua 5.0 compiler. I claim
--- responsibility for all of the ugly, dirty stuff that you might spot
--- in it.
---
--- Eventually, this code will be rewritten, either in Lua or more
--- probably in C. Meanwhile, if you're interested into digging
--- metalua's sources, this is not the best part to invest your time
--- on.
---
--- End of warning.
---
-----------------------------------------------------------------------
-
---[[--------------------------------------------------------------------
-
- $Id$
-
- lopcodes.lua
- Lua 5 virtual machine opcodes in Lua
- This file is part of Yueliang.
-
- Copyright (c) 2005 Kein-Hong Man <khman@users.sf.net>
- The COPYRIGHT file describes the conditions
- under which this software may be distributed.
-
- See the ChangeLog for more information.
-
-------------------------------------------------------------------------
-
- [FF] Slightly modified, mainly to produce Lua 5.1 bytecode.
-
-----------------------------------------------------------------------]]
-
---[[--------------------------------------------------------------------
--- Notes:
--- * an Instruction is a table with OP, A, B, C, Bx elements; this
--- should allow instruction handling to work with doubles and ints
--- * Added:
--- luaP:Instruction(i): convert field elements to a 4-char string
--- luaP:DecodeInst(x): convert 4-char string into field elements
--- * WARNING luaP:Instruction outputs instructions encoded in little-
--- endian form and field size and positions are hard-coded
-----------------------------------------------------------------------]]
-
-module("bytecode", package.seeall)
-
-local function debugf() end
-
-luaP = { }
-
---[[
-===========================================================================
- We assume that instructions are unsigned numbers.
- All instructions have an opcode in the first 6 bits.
- Instructions can have the following fields:
- 'A' : 8 bits
- 'B' : 9 bits
- 'C' : 9 bits
- 'Bx' : 18 bits ('B' and 'C' together)
- 'sBx' : signed Bx
-
- A signed argument is represented in excess K; that is, the number
- value is the unsigned value minus K. K is exactly the maximum value
- for that argument (so that -max is represented by 0, and +max is
- represented by 2*max), which is half the maximum for the corresponding
- unsigned argument.
-===========================================================================
---]]
-
-luaP.OpMode = {"iABC", "iABx", "iAsBx"} -- basic instruction format
-
-------------------------------------------------------------------------
--- size and position of opcode arguments.
--- * WARNING size and position is hard-coded elsewhere in this script
-------------------------------------------------------------------------
-luaP.SIZE_C = 9
-luaP.SIZE_B = 9
-luaP.SIZE_Bx = luaP.SIZE_C + luaP.SIZE_B
-luaP.SIZE_A = 8
-
-luaP.SIZE_OP = 6
-
-luaP.POS_C = luaP.SIZE_OP
-luaP.POS_B = luaP.POS_C + luaP.SIZE_C
-luaP.POS_Bx = luaP.POS_C
-luaP.POS_A = luaP.POS_B + luaP.SIZE_B
-
---FF from 5.1
-luaP.BITRK = 2^(luaP.SIZE_B - 1)
-function luaP:ISK(x) return x >= self.BITRK end
-luaP.MAXINDEXRK = luaP.BITRK - 1
-function luaP:RKASK(x)
- if x < self.BITRK then return x+self.BITRK else return x end
-end
-
-
-
-------------------------------------------------------------------------
--- limits for opcode arguments.
--- we use (signed) int to manipulate most arguments,
--- so they must fit in BITS_INT-1 bits (-1 for sign)
-------------------------------------------------------------------------
--- removed "#if SIZE_Bx < BITS_INT-1" test, assume this script is
--- running on a Lua VM with double or int as LUA_NUMBER
-
-luaP.MAXARG_Bx = math.ldexp(1, luaP.SIZE_Bx) - 1
-luaP.MAXARG_sBx = math.floor(luaP.MAXARG_Bx / 2) -- 'sBx' is signed
-
-luaP.MAXARG_A = math.ldexp(1, luaP.SIZE_A) - 1
-luaP.MAXARG_B = math.ldexp(1, luaP.SIZE_B) - 1
-luaP.MAXARG_C = math.ldexp(1, luaP.SIZE_C) - 1
-
--- creates a mask with 'n' 1 bits at position 'p'
--- MASK1(n,p) deleted
--- creates a mask with 'n' 0 bits at position 'p'
--- MASK0(n,p) deleted
-
---[[--------------------------------------------------------------------
- Visual representation for reference:
-
- 31 | | | 0 bit position
- +-----+-----+-----+----------+
- | B | C | A | Opcode | iABC format
- +-----+-----+-----+----------+
- - 9 - 9 - 8 - 6 - field sizes
- +-----+-----+-----+----------+
- | [s]Bx | A | Opcode | iABx | iAsBx format
- +-----+-----+-----+----------+
-----------------------------------------------------------------------]]
-
-------------------------------------------------------------------------
--- the following macros help to manipulate instructions
--- * changed to a table object representation, very clean compared to
--- the [nightmare] alternatives of using a number or a string
-------------------------------------------------------------------------
-
--- these accept or return opcodes in the form of string names
-function luaP:GET_OPCODE(i) return self.ROpCode[i.OP] end
-function luaP:SET_OPCODE(i, o) i.OP = self.OpCode[o] end
-
-function luaP:GETARG_A(i) return i.A end
-function luaP:SETARG_A(i, u) i.A = u end
-
-function luaP:GETARG_B(i) return i.B end
-function luaP:SETARG_B(i, b) i.B = b end
-
-function luaP:GETARG_C(i) return i.C end
-function luaP:SETARG_C(i, b) i.C = b end
-
-function luaP:GETARG_Bx(i) return i.Bx end
-function luaP:SETARG_Bx(i, b) i.Bx = b end
-
-function luaP:GETARG_sBx(i) return i.Bx - self.MAXARG_sBx end
-function luaP:SETARG_sBx(i, b) i.Bx = b + self.MAXARG_sBx end
-
-function luaP:CREATE_ABC(o,a,b,c)
- return {OP = self.OpCode[o], A = a, B = b, C = c}
-end
-
-function luaP:CREATE_ABx(o,a,bc)
- return {OP = self.OpCode[o], A = a, Bx = bc}
-end
-
-------------------------------------------------------------------------
--- Bit shuffling stuffs
-------------------------------------------------------------------------
-
-if false and pcall (require, 'bit') then
- ------------------------------------------------------------------------
- -- Return a 4-char string little-endian encoded form of an instruction
- ------------------------------------------------------------------------
- function luaP:Instruction(i)
- --FIXME
- end
-else
- ------------------------------------------------------------------------
- -- Version without bit manipulation library.
- ------------------------------------------------------------------------
- local p2 = {1,2,4,8,16,32,64,128,256, 512, 1024, 2048, 4096}
- -- keeps [n] bits from [x]
- local function keep (x, n) return x % p2[n+1] end
- -- shifts bits of [x] [n] places to the right
- local function srb (x,n) return math.floor (x / p2[n+1]) end
- -- shifts bits of [x] [n] places to the left
- local function slb (x,n) return x * p2[n+1] end
-
- ------------------------------------------------------------------------
- -- Return a 4-char string little-endian encoded form of an instruction
- ------------------------------------------------------------------------
- function luaP:Instruction(i)
- -- printf("Instr->string: %s %s", self.opnames[i.OP], table.tostring(i))
- local c0, c1, c2, c3
- -- change to OP/A/B/C format if needed
- if i.Bx then i.C = keep (i.Bx, 9); i.B = srb (i.Bx, 9) end
- -- c0 = 6B from opcode + 2LSB from A (flushed to MSB)
- c0 = i.OP + slb (keep (i.A, 2), 6)
- -- c1 = 6MSB from A + 2LSB from C (flushed to MSB)
- c1 = srb (i.A, 2) + slb (keep (i.C, 2), 6)
- -- c2 = 7MSB from C + 1LSB from B (flushed to MSB)
- c2 = srb (i.C, 2) + slb (keep (i.B, 1), 7)
- -- c3 = 8MSB from B
- c3 = srb (i.B, 1)
- --printf ("Instruction: %s %s", self.opnames[i.OP], tostringv (i))
- --printf ("Bin encoding: %x %x %x %x", c0, c1, c2, c3)
- return string.char(c0, c1, c2, c3)
- end
-end
-------------------------------------------------------------------------
--- decodes a 4-char little-endian string into an instruction struct
-------------------------------------------------------------------------
-function luaP:DecodeInst(x)
- error "Not implemented"
-end
-
-------------------------------------------------------------------------
--- invalid register that fits in 8 bits
-------------------------------------------------------------------------
-luaP.NO_REG = luaP.MAXARG_A
-
-------------------------------------------------------------------------
--- R(x) - register
--- Kst(x) - constant (in constant table)
--- RK(x) == if x < MAXSTACK then R(x) else Kst(x-MAXSTACK)
-------------------------------------------------------------------------
-
-------------------------------------------------------------------------
--- grep "ORDER OP" if you change these enums
-------------------------------------------------------------------------
-
---[[--------------------------------------------------------------------
-Lua virtual machine opcodes (enum OpCode):
-------------------------------------------------------------------------
-name args description
-------------------------------------------------------------------------
-OP_MOVE A B R(A) := R(B)
-OP_LOADK A Bx R(A) := Kst(Bx)
-OP_LOADBOOL A B C R(A) := (Bool)B; if (C) PC++
-OP_LOADNIL A B R(A) := ... := R(B) := nil
-OP_GETUPVAL A B R(A) := UpValue[B]
-OP_GETGLOBAL A Bx R(A) := Gbl[Kst(Bx)]
-OP_GETTABLE A B C R(A) := R(B)[RK(C)]
-OP_SETGLOBAL A Bx Gbl[Kst(Bx)] := R(A)
-OP_SETUPVAL A B UpValue[B] := R(A)
-OP_SETTABLE A B C R(A)[RK(B)] := RK(C)
-OP_NEWTABLE A B C R(A) := {} (size = B,C)
-OP_SELF A B C R(A+1) := R(B); R(A) := R(B)[RK(C)]
-OP_ADD A B C R(A) := RK(B) + RK(C)
-OP_SUB A B C R(A) := RK(B) - RK(C)
-OP_MUL A B C R(A) := RK(B) * RK(C)
-OP_DIV A B C R(A) := RK(B) / RK(C)
-OP_POW A B C R(A) := RK(B) ^ RK(C)
-OP_UNM A B R(A) := -R(B)
-OP_NOT A B R(A) := not R(B)
-OP_CONCAT A B C R(A) := R(B).. ... ..R(C)
-OP_JMP sBx PC += sBx
-OP_EQ A B C if ((RK(B) == RK(C)) ~= A) then pc++
-OP_LT A B C if ((RK(B) < RK(C)) ~= A) then pc++
-OP_LE A B C if ((RK(B) <= RK(C)) ~= A) then pc++
-OP_TEST A B C if (R(B) <=> C) then R(A) := R(B) else pc++
-OP_CALL A B C R(A), ... ,R(A+C-2) := R(A)(R(A+1), ... ,R(A+B-1))
-OP_TAILCALL A B C return R(A)(R(A+1), ... ,R(A+B-1))
-OP_RETURN A B return R(A), ... ,R(A+B-2) (see note)
-OP_FORLOOP A sBx R(A)+=R(A+2); if R(A) <?= R(A+1) then PC+= sBx
-OP_TFORLOOP A C R(A+2), ... ,R(A+2+C) := R(A)(R(A+1), R(A+2));
- if R(A+2) ~= nil then pc++
-OP_TFORPREP A sBx if type(R(A)) == table then R(A+1):=R(A), R(A):=next;
- PC += sBx
-OP_SETLIST A Bx R(A)[Bx-Bx%FPF+i] := R(A+i), 1 <= i <= Bx%FPF+1
-OP_SETLISTO A Bx (see note)
-OP_CLOSE A close all variables in the stack up to (>=) R(A)
-OP_CLOSURE A Bx R(A) := closure(KPROTO[Bx], R(A), ... ,R(A+n))
-----------------------------------------------------------------------]]
-
-luaP.opnames = {} -- opcode names
-luaP.OpCode = {} -- lookup name -> number
-luaP.ROpCode = {} -- lookup number -> name
-
-local i = 0
-for v in string.gfind([[
-MOVE -- 0
-LOADK
-LOADBOOL
-LOADNIL
-GETUPVAL
-GETGLOBAL -- 5
-GETTABLE
-SETGLOBAL
-SETUPVAL
-SETTABLE
-NEWTABLE -- 10
-SELF
-ADD
-SUB
-MUL
-DIV -- 15
-MOD
-POW
-UNM
-NOT
-LEN -- 20
-CONCAT
-JMP
-EQ
-LT
-LE -- 25
-TEST
-TESTSET
-CALL
-TAILCALL
-RETURN -- 30
-FORLOOP
-FORPREP
-TFORLOOP
-SETLIST
-CLOSE -- 35
-CLOSURE
-VARARG
-]], "[%a]+") do
- local n = "OP_"..v
- luaP.opnames[i] = v
- luaP.OpCode[n] = i
- luaP.ROpCode[i] = n
- i = i + 1
-end
-luaP.NUM_OPCODES = i
-
---[[
-===========================================================================
- Notes:
- (1) In OP_CALL, if (B == 0) then B = top. C is the number of returns - 1,
- and can be 0: OP_CALL then sets 'top' to last_result+1, so
- next open instruction (OP_CALL, OP_RETURN, OP_SETLIST) may use 'top'.
-
- (2) In OP_RETURN, if (B == 0) then return up to 'top'
-
- (3) For comparisons, B specifies what conditions the test should accept.
-
- (4) All 'skips' (pc++) assume that next instruction is a jump
-
- (5) OP_SETLISTO is used when the last item in a table constructor is a
- function, so the number of elements set is up to top of stack
-===========================================================================
---]]
-
-------------------------------------------------------------------------
--- masks for instruction properties
-------------------------------------------------------------------------
--- was enum OpModeMask:
-luaP.OpModeBreg = 2 -- B is a register
-luaP.OpModeBrk = 3 -- B is a register/constant
-luaP.OpModeCrk = 4 -- C is a register/constant
-luaP.OpModesetA = 5 -- instruction set register A
-luaP.OpModeK = 6 -- Bx is a constant
-luaP.OpModeT = 1 -- operator is a test
-
-------------------------------------------------------------------------
--- get opcode mode, e.g. "iABC"
-------------------------------------------------------------------------
-function luaP:getOpMode(m)
- --printv(m)
- --printv(self.OpCode[m])
- --printv(self.opmodes [self.OpCode[m]+1])
- return self.OpMode[tonumber(string.sub(self.opmodes[self.OpCode[m] + 1], 7, 7))]
-end
-
-------------------------------------------------------------------------
--- test an instruction property flag
--- * b is a string, e.g. "OpModeBreg"
-------------------------------------------------------------------------
-function luaP:testOpMode(m, b)
- return (string.sub(self.opmodes[self.OpCode[m] + 1], self[b], self[b]) == "1")
-end
-
--- number of list items to accumulate before a SETLIST instruction
--- (must be a power of 2)
--- * used in lparser, lvm, ldebug, ltests
-luaP.LFIELDS_PER_FLUSH = 50 --FF updated to match 5.1
-
--- luaP_opnames[] is set above, as the luaP.opnames table
--- opmode(t,b,bk,ck,sa,k,m) deleted
-
---[[--------------------------------------------------------------------
- Legend for luaP:opmodes:
- 1 T -> T (is a test?)
- 2 B -> B is a register
- 3 b -> B is an RK register/constant combination
- 4 C -> C is an RK register/constant combination
- 5 A -> register A is set by the opcode
- 6 K -> Bx is a constant
- 7 m -> 1 if iABC layout,
- 2 if iABx layout,
- 3 if iAsBx layout
-----------------------------------------------------------------------]]
-
-luaP.opmodes = {
--- TBbCAKm opcode
- "0100101", -- OP_MOVE 0
- "0000112", -- OP_LOADK
- "0000101", -- OP_LOADBOOL
- "0100101", -- OP_LOADNIL
- "0000101", -- OP_GETUPVAL
- "0000112", -- OP_GETGLOBAL 5
- "0101101", -- OP_GETTABLE
- "0000012", -- OP_SETGLOBAL
- "0000001", -- OP_SETUPVAL
- "0011001", -- OP_SETTABLE
- "0000101", -- OP_NEWTABLE 10
- "0101101", -- OP_SELF
- "0011101", -- OP_ADD
- "0011101", -- OP_SUB
- "0011101", -- OP_MUL
- "0011101", -- OP_DIV 15
- "0011101", -- OP_MOD
- "0011101", -- OP_POW
- "0100101", -- OP_UNM
- "0100101", -- OP_NOT
- "0100101", -- OP_LEN 20
- "0101101", -- OP_CONCAT
- "0000003", -- OP_JMP
- "1011001", -- OP_EQ
- "1011001", -- OP_LT
- "1011001", -- OP_LE 25
- "1000101", -- OP_TEST
- "1100101", -- OP_TESTSET
- "0000001", -- OP_CALL
- "0000001", -- OP_TAILCALL
- "0000001", -- OP_RETURN 30
- "0000003", -- OP_FORLOOP
- "0000103", -- OP_FORPREP
- "1000101", -- OP_TFORLOOP
- "0000001", -- OP_SETLIST
- "0000001", -- OP_CLOSE 35
- "0000102", -- OP_CLOSURE
- "0000101" -- OP_VARARG
-}
+++ /dev/null
---*-lua-*- Set as a metalua file because it requires some metalua libs
-
---require 'verbose_require'
-
-require 'metalua.compiler'
-require 'metalua.clopts'
-require 'metalua.mlc_xcall'
-
-AST_COMPILE_ERROR_NUMBER = -1
-RUNTIME_ERROR_NUMBER = -3
-BYTECODE_SYNTHESE_ERROR_NUMBER = -100
-
--{ extension 'match' }
-
-local chunks = { }
-local runargs = { }
-
-local acc_chunk = |kind| |arg| table.insert (chunks, { tag=kind, arg })
-
-parser = clopts {
- -- Chunk loading
- { short = 'f', long = 'file', type = 'string', action = acc_chunk 'File',
- usage = 'load a file to compile and/or run'
- },
- { short = 'l', long = 'library', type = 'string', action = acc_chunk 'Library',
- usage = 'load a libary from the standard paths'
- },
- { short = 'e', long = 'literal', type = 'string', action = acc_chunk 'Literal',
- usage = 'load a literal piece of source code'
- },
- -- What to do with chunks
- { short = 'o', long = 'output', type = 'string',
- usage = 'set the target name of the next compiled file'
- },
- { short = 'x', long = 'run', type = 'boolean',
- usage = 'execute the compiled file instead of saving it (unless -o is also used)'
- },
- { short = 'i', long = 'interactive', type = 'boolean',
- usage = 'run an interactive loop after having run other files'
- },
- -- Advanced stuff
- { short = 'v', long = 'verbose', type = 'boolean',
- usage = 'verbose mode'
- },
- { short = 'a', long = 'print-ast', type = 'boolean',
- usage = 'print the AST resulting from file compilation'
- },
- { short = 'A', long = 'print-ast-lineinfo', type = 'boolean',
- usage = 'print the AST resulting from file compilation, including lineinfo data'
- },
- { short = 'S', long = 'print-src', type = 'boolean',
- usage = 'print the AST resulting from file compilation, as re-gerenerated sources'
- },
- { short = 'b', long = 'metabugs', type = 'boolean',
- usage = 'show syntax errors as compile-time execution errors'
- },
- { short = 's', long = 'sharpbang', type = 'string',
- usage = 'set a first line to add to compiled file, typically "#!/bin/env mlr"'
- },
- { long = 'no-runtime', type = 'boolean',
- usage = "prevent the automatic requirement of metalua runtime"
- },
- { long = '', short = 'p', type = '*',
- action= function (newargs) runargs=table.icat(runargs, newargs) end,
- usage = "pass all remaining arguments to the program"
- },
-usage=[[
-
-Compile and/or execute metalua programs. Parameters passed to the
-compiler should be prefixed with an option flag, hinting what must be
-done with them: take tham as file names to compile, as library names
-to load, as parameters passed to the running program... When option
-flags are absent, metalua tries to adopt a "Do What I Mean" approach:
-
-- if no code (no library, no literal expression and no file) is
- specified, the first flag-less parameter is taken as a file name to
- load.
-
-- if no code and no parameter is passed, an interactive loop is
- started.
-
-- if a target file is specified with --output, the program is not
- executed by default, unless a --run flag forces it to. Conversely,
- if no --output target is specified, the code is run unless ++run
- forbids it.
-]]}
-
-local function main (...)
-
- local cfg = parser(...)
-
- -------------------------------------------------------------------
- -- Print messages if in verbose mode
- -------------------------------------------------------------------
- local function verb_print (fmt, ...)
- if cfg.verbose then
- return printf ("[ "..fmt.." ]", ...)
- end
- end
-
- if cfg.verbose then
- verb_print("raw options: %s", table.tostring(cfg))
- end
-
- -------------------------------------------------------------------
- -- If there's no chunk but there are params, interpret the first
- -- param as a file name.
- if #chunks==0 and cfg.params then
- local the_file = table.remove(cfg.params, 1)
- verb_print("Param %q considered as a source file", the_file)
- chunks = { `File{ the_file } }
- end
-
- -------------------------------------------------------------------
- -- If nothing to do, run REPL loop
- if #chunks==0 and cfg.interactive==nil then
- verb_print "Nothing to compile nor run, force interactive loop"
- cfg.interactive=true
- end
-
-
- -------------------------------------------------------------------
- -- Run if asked to, or if no --output has been given
- -- if cfg.run==false it's been *forced* to false, don't override.
- if cfg.run==nil and not cfg.output then
- verb_print("No output file specified; I'll run the program")
- cfg.run = true
- end
-
- local code = { }
-
- -------------------------------------------------------------------
- -- Get ASTs from sources
- mlc.metabugs = cfg.metabugs
- local last_file
- for x in values(chunks) do
- verb_print("Compiling %s", table.tostring(x))
- local st, ast
- match x with
- | `Library{ l } -> st, ast = true, `Call{ `Id 'require', `String{ l } }
- | `Literal{ e } -> st, ast = mlc_xcall.client_literal (e)
- | `File{ f } ->
- st, ast = mlc_xcall.client_file (f)
- -- Isolate each file in a separate fenv
- if st then
- ast = +{ function (...) -{ast} end (...) }
- ast.source = '@'..f -- TODO [EVE]
- code.source = '@'..f -- TODO [EVE]
- last_file = ast
- end
- end
- if not st then
- printf ("Cannot compile %s:\n%s", table.tostring(x), ast or "no msg")
- os.exit (AST_COMPILE_ERROR_NUMBER)
- end
- ast.origin = x
- table.insert(code, ast)
- end
- -- The last file returns the whole chunk's result
- if last_file then
- local c = table.shallow_copy(last_file)
- last_file <- `Return{ source = c.source, c }
- end
-
- -------------------------------------------------------------------
- -- AST printing
- if cfg['print-ast'] or cfg['print-ast-lineinfo'] then
- verb_print "Resulting AST:"
- for x in ivalues(code) do
- printf("--- AST From %s: ---", table.tostring(x.source, 'nohash'))
- if x.origin and x.origin.tag=='File' then x=x[1][1][2][1] end
- if cfg['print-ast-lineinfo'] then table.print(x, 80, "indent1")
- else table.print(x, 80, 'nohash') end
- end
- end
-
- -------------------------------------------------------------------
- -- Source printing
- if cfg['print-src'] then
- verb_print "Resulting sources:"
- require 'metalua.ast_to_string'
- for x in ivalues(code) do
- printf("--- Source From %s: ---", table.tostring(x.source, 'nohash'))
- if x.origin and x.origin.tag=='File' then x=x[1][1][2][1] end
- print (ast_to_string (x))
- end
- end
-
- -- FIXME: canonize/check AST
-
- -------------------------------------------------------------------
- -- Insert runtime loader
- if cfg['no-runtime'] then
- verb_print "Prevent insertion of command \"require 'metalua.runtime'\""
- else
- table.insert(code, 1, +{require'metalua.runtime'})
- end
-
- local bytecode = mlc.luacstring_of_ast (code)
- code = nil
-
- -------------------------------------------------------------------
- -- Insert #!... command
- if cfg.sharpbang then
- local shbang = cfg.sharpbang
- verb_print ("Adding sharp-bang directive %q", shbang)
- if not shbang :strmatch'^#!' then shbang = '#!' .. shbang end
- if not shbang :strmatch'\n$' then shbang = shbang .. '\n' end
- bytecode = shbang .. bytecode
- end
-
- -------------------------------------------------------------------
- -- Save to file
- if cfg.output then
- -- FIXME: handle '-'
- verb_print ("Saving to file %q", cfg.output)
- local file, err_msg = io.open(cfg.output, 'wb')
- if not file then error("can't open output file: "..err_msg) end
- file:write(bytecode)
- file:close()
- if cfg.sharpbang and os.getenv "OS" ~= "Windows_NT" then
- pcall(os.execute, 'chmod a+x "'..cfg.output..'"')
- end
- end
-
- -------------------------------------------------------------------
- -- Run compiled code
- if cfg.run then
- verb_print "Running"
- local f = mlc.function_of_luacstring (bytecode)
- bytecode = nil
- -- FIXME: isolate execution in a ring
- -- FIXME: check for failures
-
- runargs = table.icat(cfg.params or { }, runargs)
- local function print_traceback (errmsg)
- return errmsg .. '\n' .. debug.traceback ('',2) .. '\n'
- end
- local st, msg = xpcall(|| f(unpack (runargs)), print_traceback)
- if not st then
- io.stderr:write(msg)
- os.exit(RUNTIME_ERROR_NUMBER)
- end
- end
-
- -------------------------------------------------------------------
- -- Run REPL loop
- if cfg.interactive then
- verb_print "Starting REPL loop"
- require 'metalua.metaloop'
- metaloop.run()
- end
-
- verb_print "Done"
-
-end
-
-main(...)
+++ /dev/null
---*-lua-*-----------------------------------------------------------------------
--- This module is written in a more hackish way than necessary, just
--- because I can. Its core feature is to dynamically generate a
--- function that converts from a source format to a destination
--- format; these formats are the various ways to represent a piece of
--- program, from the source file to the executable function. Legal
--- formats are:
---
--- * luafile: the name of a file containing sources.
--- * luastring: these sources as a single string.
--- * lexstream: a stream of lexemes.
--- * ast: an abstract syntax tree.
--- * proto: a (Yueliang) struture containing a high level
--- representation of bytecode. Largely based on the
--- Proto structure in Lua's VM.
--- * luacstring: a string dump of the function, as taken by
--- loadstring() and produced by string.dump().
--- * function: an executable lua function in RAM.
---
---------------------------------------------------------------------------------
-
-require 'metalua.bytecode'
-require 'metalua.mlp'
-
-mlc = { }
-setmetatable(mlc, mlc)
-mlc.metabugs = false
-
---------------------------------------------------------------------------------
--- Order of the transformations. if 'a' is on the left of 'b', then a 'a' can
--- be transformed into a 'b' (but not the other way around). Since the table
--- is transposed, the test is 'if mlc.order.a > mlc.order.b then error(...) end'
---------------------------------------------------------------------------------
-mlc.order = table.transpose{
- 'luafile', 'luastring', 'lexstream', 'ast', 'proto',
- 'luacstring', 'function' }
-
---------------------------------------------------------------------------------
--- The macro 'POINT(point_name, expected_type)' creates an entry point in the
--- 'mlc.convert' function. When we convert a 'a' into a 'b', FIXME
---------------------------------------------------------------------------------
--{ block:
- jump_to_point = `If{ }
- function point_builder(args)
- local name, point_type, code = unpack(args)
- table.insert(jump_to_point, +{src_fmt == -{name}}) -- if source format is 'name'
- table.insert(jump_to_point, { `Goto{name} }) -- then jump to label 'name'
- return {
- ---------------------------------------------------
- -- Stop if this is the destination format
- ---------------------------------------------------
- +{stat: if dst_fmt == -{name} then return x end },
- ---------------------------------------------------
- -- Start here if the source format matches
- ---------------------------------------------------
- `Label{ name },
- -- +{print(" *** point "..-{name})}, -- debug trace
- ---------------------------------------------------
- -- Check that the type matches
- ---------------------------------------------------
- +{stat: assert (-{point_type} == type(x), "Invalid source type") },
- -- perform transformation operations to the next type
- }
- end
- mlp.lexer:add 'POINT'
- mlp.stat:add{ 'POINT', mlp.string, ',', mlp.string, builder = point_builder }
-} -- end of meta-block
-
-function mlc.convert (x, src_fmt, dst_fmt, name)
- -- printf(" *** Convert a %s into a %s", src_fmt, dst_fmt)
-
- -{ jump_to_point }
-
- error "Can't perform this conversion (bad src name)"
-
- POINT 'luafile', 'string' -- x is the src file's name
-
- if not name then name = '@'..x end
- local f, msg = io.open(x, "rb")
- if not f then error("While trying to open file '"..x.."': "..msg) end
- x = f:read'*a'
- f:close()
-
- POINT 'luastring', 'string' -- x is the source
-
- x = mlp.lexer:newstream(x, name)
-
- POINT 'lexstream', 'table' -- x is the lexeme stream
-
- local status -- status = compilation success
- local lx=x
- if mlc.metabugs
- -- If metabugs is true, errors should be attributed to a parser bug.
- then status, x = true, mlp.chunk (lx)
- -- If metabugs is false, errors should be attributed to an invalid entry.
- else status, x = pcall (mlp.chunk, lx) end
- -- FIXME: this test seems wrong ??? Or is it the message?
- if status and lx:peek().tag ~= "Eof"
- then status, x = false, "Premature Eof"
- elseif status and lx:peek().tag == "End"
- then status, x = false, "Unexpected 'end' keyword" end
- if not status and x then
- -- x = error msg; get rid of ???
- x = x:strmatch "[^:]+:[0-9]+: (.*)" or x
- local li = lx:lineinfo_left()
- error (string.format (
- "Parsing error in %s line %s, column %i, char %s: \n%s",
- name or "<nofilename>", li[1], li[2], li[3], x), 2)
- return nil
- end
-
- if x then x.source = name end -- TODO [EVE] store debug info in the special part of ast
-
- POINT 'ast', 'table' -- x is the AST
- x = bytecode.metalua_compile(x, name or x.source)
- POINT 'proto', 'table'
- x = bytecode.dump_string (x)
- POINT 'luacstring', 'string' -- normally x is a bytecode dump
- x = string.undump(x, name)
- POINT 'function', 'function'
- error "Can't perform this conversion (bad dst name)"
-end
-
---------------------------------------------------------------------------------
--- Dynamically compose a conversion function from a function name
--- xxx_of_yyy() or yyy_to_xxx().
---------------------------------------------------------------------------------
-function mlc.__index(_, name)
- local dst, src = name:strmatch '^([a-z]+)_of_([a-z]+)$'
- if not dst then src, dst = name:strmatch '^([a-z]+)_to_([a-z]+)$' end
- if not (src and dst) then return nil end -- not a converter
- local osrc, odst = mlc.order[src], mlc.order[dst] -- check existence of formats
- if not osrc then error ("unknown source format "..src) end
- if not odst then error ("unknown destination format "..src) end
- if osrc > odst then error "Can't convert in this direction" end
- return |x, name| mlc.convert(x, src, dst, name)
-end
-
---------------------------------------------------------------------------------
--- This case isn't handled by the __index method, as it goes "in the wrong direction"
---------------------------------------------------------------------------------
-mlc.function_to_luacstring = string.dump
-mlc.luacstring_of_function = string.dump
-
---------------------------------------------------------------------------------
--- These are drop-in replacement for loadfile() and loadstring(). The
--- C functions will call them instead of the original versions if
--- they're referenced in the registry.
---------------------------------------------------------------------------------
-
-lua_loadstring = loadstring
-local lua_loadstring = loadstring
-lua_loadfile = loadfile
-local lua_loadfile = loadfile
-
-function loadstring(str, name)
- if type(str) ~= 'string' then error 'string expected' end
- if str:match '^\027LuaQ' then return lua_loadstring(str) end
- local n = str:match '^#![^\n]*\n()'
- if n then str=str:sub(n, -1) end
- -- FIXME: handle erroneous returns (return nil + error msg)
- local success, f = pcall (mlc.function_of_luastring, str, name)
- if success then return f else return nil, f end
-end
-
-function loadfile(filename)
- local f, err_msg = io.open(filename, 'rb')
- if not f then return nil, err_msg end
- local success, src = pcall( f.read, f, '*a')
- pcall(f.close, f)
- if success then return loadstring (src, '@'..filename)
- else return nil, src end
-end
-
-function load(f, name)
- while true do
- local x = f()
- if not x then break end
- assert(type(x)=='string', "function passed to load() must return strings")
- table.insert(acc, x)
- end
- return loadstring(table.concat(x))
-end
-
-function dostring(src)
- local f, msg = loadstring(src)
- if not f then error(msg) end
- return f()
-end
-
-function dofile(name)
- local f, msg = loadfile(name)
- if not f then error(msg) end
- return f()
-end
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mlp_expr.lua,v 1.7 2006/11/15 09:07:50 fab13n Exp $
---
--- Summary: metalua parser, expression parser. This is part of the
--- definition of module [mlp].
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
--- History:
--- $Log: mlp_expr.lua,v $
--- Revision 1.7 2006/11/15 09:07:50 fab13n
--- debugged meta operators.
--- Added command line options handling.
---
--- Revision 1.6 2006/11/10 02:11:17 fab13n
--- compiler faithfulness to 5.1 improved
--- gg.expr extended
--- mlp.expr refactored
---
--- Revision 1.5 2006/11/09 09:39:57 fab13n
--- some cleanup
---
--- Revision 1.4 2006/11/07 21:29:02 fab13n
--- improved quasi-quoting
---
--- Revision 1.3 2006/11/07 04:38:00 fab13n
--- first bootstrapping version.
---
--- Revision 1.2 2006/11/05 15:08:34 fab13n
--- updated code generation, to be compliant with 5.1
---
-----------------------------------------------------------------------
-
---------------------------------------------------------------------------------
---
--- Exported API:
--- * [mlp.expr()]
--- * [mlp.expr_list()]
--- * [mlp.func_val()]
---
---------------------------------------------------------------------------------
-
---require "gg"
---require "mlp_misc"
---require "mlp_table"
---require "mlp_meta"
-
---------------------------------------------------------------------------------
--- These function wrappers (eta-expansions actually) are just here to break
--- some circular dependencies between mlp_xxx.lua files.
---------------------------------------------------------------------------------
-local function _expr (lx) return mlp.expr (lx) end
-local function _table_content (lx) return mlp.table_content (lx) end
-local function block (lx) return mlp.block (lx) end
-local function stat (lx) return mlp.stat (lx) end
-
-module ("mlp", package.seeall)
-
---------------------------------------------------------------------------------
--- Non-empty expression list. Actually, this isn't used here, but that's
--- handy to give to users.
---------------------------------------------------------------------------------
-expr_list = gg.list{ _expr, separators = "," }
-
---------------------------------------------------------------------------------
--- Helpers for function applications / method applications
---------------------------------------------------------------------------------
-func_args_content = gg.list {
- name = "function arguments",
- _expr, separators = ",", terminators = ")" }
-
--- Used to parse methods
-method_args = gg.multisequence{
- name = "function argument(s)",
- { "{", table_content, "}" },
- { "(", func_args_content, ")", builder = fget(1) },
- { "+{", quote_content, "}" },
- function(lx) local r = opt_string(lx); return r and {r} or { } end }
-
---------------------------------------------------------------------------------
--- [func_val] parses a function, from opening parameters parenthese to
--- "end" keyword included. Used for anonymous functions as well as
--- function declaration statements (both local and global).
---
--- It's wrapped in a [_func_val] eta expansion, so that when expr
--- parser uses the latter, they will notice updates of [func_val]
--- definitions.
---------------------------------------------------------------------------------
-func_params_content = gg.list{ name="function parameters",
- gg.multisequence{ { "...", builder = "Dots" }, id },
- separators = ",", terminators = {")", "|"} }
-
-local _func_params_content = function (lx) return func_params_content(lx) end
-
-func_val = gg.sequence { name="function body",
- "(", func_params_content, ")", block, "end", builder = "Function" }
-
-local _func_val = function (lx) return func_val(lx) end
-
---------------------------------------------------------------------------------
--- Default parser for primary expressions
---------------------------------------------------------------------------------
-function id_or_literal (lx)
- local a = lx:next()
- if a.tag~="Id" and a.tag~="String" and a.tag~="Number" then
- local msg
- if a.tag=='Eof' then
- msg = "End of file reached when an expression was expected"
- elseif a.tag=='Keyword' then
- msg = "An expression was expected, and `"..a[1]..
- "' can't start an expression"
- else
- msg = "Unexpected expr token " .. _G.table.tostring (a, 'nohash')
- end
- gg.parse_error (lx, msg)
- end
- return a
-end
-
-
---------------------------------------------------------------------------------
--- Builder generator for operators. Wouldn't be worth it if "|x|" notation
--- were allowed, but then lua 5.1 wouldn't compile it
---------------------------------------------------------------------------------
-
--- opf1 = |op| |_,a| `Op{ op, a }
-local function opf1 (op) return
- function (_,a) return { tag="Op", op, a } end end
-
--- opf2 = |op| |a,_,b| `Op{ op, a, b }
-local function opf2 (op) return
- function (a,_,b) return { tag="Op", op, a, b } end end
-
--- opf2r = |op| |a,_,b| `Op{ op, b, a } -- (args reversed)
-local function opf2r (op) return
- function (a,_,b) return { tag="Op", op, b, a } end end
-
-local function op_ne(a, _, b)
- -- The first version guarantees to return the same code as Lua,
- -- but it relies on the non-standard 'ne' operator, which has been
- -- suppressed from the official AST grammar (although still supported
- -- in practice by the compiler).
- -- return { tag="Op", "ne", a, b }
- return { tag="Op", "not", { tag="Op", "eq", a, b, lineinfo= {
- first = a.lineinfo.first, last = b.lineinfo.last } } }
-end
-
-
---------------------------------------------------------------------------------
---
--- complete expression
---
---------------------------------------------------------------------------------
-
--- FIXME: set line number. In [expr] transformers probably
-
-expr = gg.expr { name = "expression",
-
- primary = gg.multisequence{ name="expr primary",
- { "(", _expr, ")", builder = "Paren" },
- { "function", _func_val, builder = fget(1) },
- { "-{", splice_content, "}", builder = fget(1) },
- { "+{", quote_content, "}", builder = fget(1) },
- { "nil", builder = "Nil" },
- { "true", builder = "True" },
- { "false", builder = "False" },
- { "...", builder = "Dots" },
- table,
- id_or_literal },
-
- infix = { name="expr infix op",
- { "+", prec = 60, builder = opf2 "add" },
- { "-", prec = 60, builder = opf2 "sub" },
- { "*", prec = 70, builder = opf2 "mul" },
- { "/", prec = 70, builder = opf2 "div" },
- { "%", prec = 70, builder = opf2 "mod" },
- { "^", prec = 90, builder = opf2 "pow", assoc = "right" },
- { "..", prec = 40, builder = opf2 "concat", assoc = "right" },
- { "==", prec = 30, builder = opf2 "eq" },
- { "~=", prec = 30, builder = op_ne },
- { "<", prec = 30, builder = opf2 "lt" },
- { "<=", prec = 30, builder = opf2 "le" },
- { ">", prec = 30, builder = opf2r "lt" },
- { ">=", prec = 30, builder = opf2r "le" },
- { "and",prec = 20, builder = opf2 "and" },
- { "or", prec = 10, builder = opf2 "or" } },
-
- prefix = { name="expr prefix op",
- { "not", prec = 80, builder = opf1 "not" },
- { "#", prec = 80, builder = opf1 "len" },
- { "-", prec = 80, builder = opf1 "unm" } },
-
- suffix = { name="expr suffix op",
- { "[", _expr, "]", builder = function (tab, idx)
- return {tag="Index", tab, idx[1]} end},
- { ".", id, builder = function (tab, field)
- return {tag="Index", tab, id2string(field[1])} end },
- { "(", func_args_content, ")", builder = function(f, args)
- return {tag="Call", f, unpack(args[1])} end },
- { "{", _table_content, "}", builder = function (f, arg)
- return {tag="Call", f, arg[1]} end},
- { ":", id, method_args, builder = function (obj, post)
- return {tag="Invoke", obj, id2string(post[1]), unpack(post[2])} end},
- { "+{", quote_content, "}", builder = function (f, arg)
- return {tag="Call", f, arg[1] } end },
- default = { name="opt_string_arg", parse = mlp.opt_string, builder = function(f, arg)
- return {tag="Call", f, arg } end } } }
+++ /dev/null
---------------------------------------------------------------------------------
---
--- Non-Lua syntax extensions
---
---------------------------------------------------------------------------------
-
-module ("mlp", package.seeall)
-
---------------------------------------------------------------------------------
--- Algebraic Datatypes
---------------------------------------------------------------------------------
-local function adt (lx)
- local tagval = id (lx) [1]
- local tagkey = {tag="Pair", {tag="String", "tag"}, {tag="String", tagval} }
- if lx:peek().tag == "String" or lx:peek().tag == "Number" then
- return { tag="Table", tagkey, lx:next() }
- elseif lx:is_keyword (lx:peek(), "{") then
- local x = table (lx)
- _G.table.insert (x, 1, tagkey)
- return x
- else return { tag="Table", tagkey } end
-end
-
-expr:add{ "`", adt, builder = fget(1) }
-
---------------------------------------------------------------------------------
--- Anonymous lambda
---------------------------------------------------------------------------------
-local lambda_expr = gg.sequence{
- "|", func_params_content, "|", expr,
- builder= function (x)
- local li = x[2].lineinfo
- return { tag="Function", x[1],
- { {tag="Return", x[2], lineinfo=li }, lineinfo=li } }
- end }
-
--- In an earlier version, lambda_expr took an expr_list rather than an expr
--- after the 2nd bar. However, it happened to be much more of a burden than an
--- help, so finally I disabled it. If you want to return several results,
--- use the long syntax.
---------------------------------------------------------------------------------
--- local lambda_expr = gg.sequence{
--- "|", func_params_content, "|", expr_list,
--- builder= function (x)
--- return {tag="Function", x[1], { {tag="Return", unpack(x[2]) } } } end }
-
-expr:add (lambda_expr)
-
---------------------------------------------------------------------------------
--- Allows to write "a `f` b" instead of "f(a, b)". Taken from Haskell.
--- This is not part of Lua 5.1 syntax, so it's added to the expression
--- afterwards, so that it's easier to disable.
---------------------------------------------------------------------------------
-local function expr_in_backquotes (lx) return expr(lx, 35) end
-
-expr.infix:add{ name = "infix function",
- "`", expr_in_backquotes, "`", prec = 35, assoc="left",
- builder = function(a, op, b) return {tag="Call", op[1], a, b} end }
-
-
---------------------------------------------------------------------------------
--- table.override assignment
---------------------------------------------------------------------------------
-
-mlp.lexer:add "<-"
-stat.assignments["<-"] = function (a, b)
- assert( #a==1 and #b==1, "No multi-args for '<-'")
- return { tag="Call", { tag="Index", { tag="Id", "table" },
- { tag="String", "override" } },
- a[1], b[1]}
-end
-
---------------------------------------------------------------------------------
--- C-style op+assignments
---------------------------------------------------------------------------------
-local function op_assign(kw, op)
- local function rhs(a, b)
- return { tag="Op", op, a, b }
- end
- local function f(a,b)
- return { tag="Set", a, _G.table.imap(rhs, a, b) }
- end
- mlp.lexer:add (kw)
- mlp.stat.assignments[kw] = f
-end
-
-_G.table.iforeach (op_assign,
- {"+=", "-=", "*=", "/="},
- {"add", "sub", "mul", "div"})
\ No newline at end of file
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mll.lua,v 1.3 2006/11/15 09:07:50 fab13n Exp $
---
--- Summary: Source file lexer. ~~Currently only works on strings.
--- Some API refactoring is needed.
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006-2007, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
-
-module ("mlp", package.seeall)
-
-require "lexer"
-
-local mlp_lexer = lexer.lexer:clone()
-
-local keywords = {
- "and", "break", "do", "else", "elseif",
- "end", "false", "for", "function", "if",
- "in", "local", "nil", "not", "or", "repeat",
- "return", "then", "true", "until", "while",
- "...", "..", "==", ">=", "<=", "~=",
- "+{", "-{" }
-
-for w in values(keywords) do mlp_lexer:add(w) end
-
-_M.lexer = mlp_lexer
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mlp_meta.lua,v 1.4 2006/11/15 09:07:50 fab13n Exp $
---
--- Summary: Meta-operations: AST quasi-quoting and splicing
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
-
-
---------------------------------------------------------------------------------
---
--- Exported API:
--- * [mlp.splice_content()]
--- * [mlp.quote_content()]
---
---------------------------------------------------------------------------------
-
-module ("mlp", package.seeall)
-
---------------------------------------------------------------------------------
--- External splicing: compile an AST into a chunk, load and evaluate
--- that chunk, and replace the chunk by its result (which must also be
--- an AST).
---------------------------------------------------------------------------------
-
-function splice (ast)
- local f = mlc.function_of_ast(ast, '=splice')
- local result=f()
- return result
-end
-
---------------------------------------------------------------------------------
--- Going from an AST to an AST representing that AST
--- the only key being lifted in this version is ["tag"]
---------------------------------------------------------------------------------
-function quote (t)
- --print("QUOTING:", _G.table.tostring(t, 60))
- local cases = { }
- function cases.table (t)
- local mt = { tag = "Table" }
- --_G.table.insert (mt, { tag = "Pair", quote "quote", { tag = "True" } })
- if t.tag == "Splice" then
- assert (#t==1, "Invalid splice")
- local sp = t[1]
- return sp
- elseif t.tag then
- _G.table.insert (mt, { tag = "Pair", quote "tag", quote (t.tag) })
- end
- for _, v in ipairs (t) do
- _G.table.insert (mt, quote(v))
- end
- return mt
- end
- function cases.number (t) return { tag = "Number", t, quote = true } end
- function cases.string (t) return { tag = "String", t, quote = true } end
- return cases [ type (t) ] (t)
-end
-
---------------------------------------------------------------------------------
--- when this variable is false, code inside [-{...}] is compiled and
--- evaluated immediately. When it's true (supposedly when we're
--- parsing data inside a quasiquote), [-{foo}] is replaced by
--- [`Splice{foo}], which will be unpacked by [quote()].
---------------------------------------------------------------------------------
-in_a_quote = false
-
---------------------------------------------------------------------------------
--- Parse the inside of a "-{ ... }"
---------------------------------------------------------------------------------
-function splice_content (lx)
- local parser_name = "expr"
- if lx:is_keyword (lx:peek(2), ":") then
- local a = lx:next()
- lx:next() -- skip ":"
- assert (a.tag=="Id", "Invalid splice parser name")
- parser_name = a[1]
- end
- local ast = mlp[parser_name](lx)
- if in_a_quote then
- --printf("SPLICE_IN_QUOTE:\n%s", _G.table.tostring(ast, "nohash", 60))
- return { tag="Splice", ast }
- else
- if parser_name == "expr" then ast = { { tag="Return", ast } }
- elseif parser_name == "stat" then ast = { ast }
- elseif parser_name ~= "block" then
- error ("splice content must be an expr, stat or block") end
- --printf("EXEC THIS SPLICE:\n%s", _G.table.tostring(ast, "nohash", 60))
- return splice (ast)
- end
-end
-
---------------------------------------------------------------------------------
--- Parse the inside of a "+{ ... }"
---------------------------------------------------------------------------------
-function quote_content (lx)
- local parser
- if lx:is_keyword (lx:peek(2), ":") then -- +{parser: content }
- parser = mlp[id(lx)[1]]
- lx:next()
- else -- +{ content }
- parser = mlp.expr
- end
-
- local prev_iq = in_a_quote
- in_a_quote = true
- --print("IN_A_QUOTE")
- local content = parser (lx)
- local q_content = quote (content)
- in_a_quote = prev_iq
- return q_content
-end
-
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mlp_misc.lua,v 1.6 2006/11/15 09:07:50 fab13n Exp $
---
--- Summary: metalua parser, miscellaneous utility functions.
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
--- History:
--- $Log: mlp_misc.lua,v $
--- Revision 1.6 2006/11/15 09:07:50 fab13n
--- debugged meta operators.
--- Added command line options handling.
---
--- Revision 1.5 2006/11/10 02:11:17 fab13n
--- compiler faithfulness to 5.1 improved
--- gg.expr extended
--- mlp.expr refactored
---
--- Revision 1.4 2006/11/09 09:39:57 fab13n
--- some cleanup
---
--- Revision 1.3 2006/11/07 04:38:00 fab13n
--- first bootstrapping version.
---
--- Revision 1.2 2006/11/05 15:08:34 fab13n
--- updated code generation, to be compliant with 5.1
---
-----------------------------------------------------------------------
-
---------------------------------------------------------------------------------
---
--- Exported API:
--- * [mlp.fget()]
--- * [mlp.id()]
--- * [mlp.opt_id()]
--- * [mlp.id_list()]
--- * [mlp.gensym()]
--- * [mlp.string()]
--- * [mlp.opt_string()]
--- * [mlp.id2string()]
---
---------------------------------------------------------------------------------
-
---require "gg"
---require "mll"
-
-module ("mlp", package.seeall)
-
---------------------------------------------------------------------------------
--- returns a function that takes the [n]th element of a table.
--- if [tag] is provided, then this element is expected to be a
--- table, and this table receives a "tag" field whose value is
--- set to [tag].
---
--- The primary purpose of this is to generate builders for
--- grammar generators. It has little purpose in metalua, as lambda has
--- a lightweight syntax.
---------------------------------------------------------------------------------
-
-function fget (n, tag)
- assert (type (n) == "number")
- if tag then
- assert (type (tag) == "string")
- return function (x)
- assert (type (x[n]) == "table")
- return {tag=tag, unpack(x[n])} end
- else
- return function (x) return x[n] end
- end
-end
-
-
---------------------------------------------------------------------------------
--- Try to read an identifier (possibly as a splice), or return [false] if no
--- id is found.
---------------------------------------------------------------------------------
-function opt_id (lx)
- local a = lx:peek();
- if lx:is_keyword (a, "-{") then
- local v = gg.sequence{ "-{", splice_content, "}" } (lx) [1]
- if v.tag ~= "Id" and v.tag ~= "Splice" then
- gg.parse_error(lx,"Bad id splice")
- end
- return v
- elseif a.tag == "Id" then return lx:next()
- else return false end
-end
-
---------------------------------------------------------------------------------
--- Mandatory reading of an id: causes an error if it can't read one.
---------------------------------------------------------------------------------
-function id (lx)
- return opt_id (lx) or gg.parse_error(lx,"Identifier expected")
-end
-
---------------------------------------------------------------------------------
--- Common helper function
---------------------------------------------------------------------------------
-id_list = gg.list { primary = mlp.id, separators = "," }
-
---------------------------------------------------------------------------------
--- Symbol generator: [gensym()] returns a guaranteed-to-be-unique identifier.
--- The main purpose is to avoid variable capture in macros.
---
--- If a string is passed as an argument, this string will be part of the
--- id name (helpful for macro debugging)
---------------------------------------------------------------------------------
-local gensymidx = 0
-
-function gensym (arg)
- gensymidx = gensymidx + 1
- return { tag="Id", _G.string.format(".%i.%s", gensymidx, arg or "")}
-end
-
---------------------------------------------------------------------------------
--- Converts an identifier into a string. Hopefully one day it'll handle
--- splices gracefully, but that proves quite tricky.
---------------------------------------------------------------------------------
-function id2string (id)
- --print("id2string:", disp.ast(id))
- if id.tag == "Id" then id.tag = "String"; return id
- elseif id.tag == "Splice" then
- assert (in_a_quote, "can't do id2string on an outermost splice")
- error ("id2string on splice not implemented")
- -- Evaluating id[1] will produce `Id{ xxx },
- -- and we want it to produce `String{ xxx }
- -- Morally, this is what I want:
- -- return `String{ `Index{ `Splice{ id[1] }, `Number 1 } }
- -- That is, without sugar:
- return {tag="String", {tag="Index", {tag="Splice", id[1] },
- {tag="Number", 1 } } }
- else error ("Identifier expected: ".._G.table.tostring(id, 'nohash')) end
-end
-
---------------------------------------------------------------------------------
--- Read a string, possibly spliced, or return an error if it can't
---------------------------------------------------------------------------------
-function string (lx)
- local a = lx:peek()
- if lx:is_keyword (a, "-{") then
- local v = gg.sequence{ "-{", splice_content, "}" } (lx) [1]
- if v.tag ~= "" and v.tag ~= "Splice" then
- gg.parse_error(lx,"Bad string splice")
- end
- return v
- elseif a.tag == "String" then return lx:next()
- else error "String expected" end
-end
-
---------------------------------------------------------------------------------
--- Try to read a string, or return false if it can't. No splice allowed.
---------------------------------------------------------------------------------
-function opt_string (lx)
- return lx:peek().tag == "String" and lx:next()
-end
-
---------------------------------------------------------------------------------
--- Chunk reader: block + Eof
---------------------------------------------------------------------------------
-function skip_initial_sharp_comment (lx)
- -- Dirty hack: I'm happily fondling lexer's private parts
- -- FIXME: redundant with lexer:newstream()
- lx :sync()
- local i = lx.src:match ("^#.-\n()", lx.i)
- if i then lx.i, lx.column_offset, lx.line = i, i, lx.line+1 end
-end
-
-local function _chunk (lx)
- if lx:peek().tag == 'Eof' then return { } -- handle empty files
- else
- skip_initial_sharp_comment (lx)
- local chunk = block (lx)
- if lx:peek().tag ~= "Eof" then error "End-of-file expected" end
- return chunk
- end
-end
-
--- chunk is wrapped in a sequence so that it has a "transformer" field.
-chunk = gg.sequence { _chunk, builder = unpack }
\ No newline at end of file
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mlp_stat.lua,v 1.7 2006/11/15 09:07:50 fab13n Exp $
---
--- Summary: metalua parser, statement/block parser. This is part of
--- the definition of module [mlp].
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
---
-----------------------------------------------------------------------
-
---------------------------------------------------------------------------------
---
--- Exports API:
--- * [mlp.stat()]
--- * [mlp.block()]
--- * [mlp.for_header()]
---
---------------------------------------------------------------------------------
-
---------------------------------------------------------------------------------
--- eta-expansions to break circular dependency
---------------------------------------------------------------------------------
-local expr = function (lx) return mlp.expr (lx) end
-local func_val = function (lx) return mlp.func_val (lx) end
-local expr_list = function (lx) return mlp.expr_list(lx) end
-
-module ("mlp", package.seeall)
-
---------------------------------------------------------------------------------
--- List of all keywords that indicate the end of a statement block. Users are
--- likely to extend this list when designing extensions.
---------------------------------------------------------------------------------
-
-
-local block_terminators = { "else", "elseif", "end", "until", ")", "}", "]" }
-
--- FIXME: this must be handled from within GG!!!
-function block_terminators:add(x)
- if type (x) == "table" then for _, y in ipairs(x) do self:add (y) end
- else _G.table.insert (self, x) end
-end
-
---------------------------------------------------------------------------------
--- list of statements, possibly followed by semicolons
---------------------------------------------------------------------------------
-block = gg.list {
- name = "statements block",
- terminators = block_terminators,
- primary = function (lx)
- -- FIXME use gg.optkeyword()
- local x = stat (lx)
- if lx:is_keyword (lx:peek(), ";") then lx:next() end
- return x
- end }
-
---------------------------------------------------------------------------------
--- Helper function for "return <expr_list>" parsing.
--- Called when parsing return statements.
--- The specific test for initial ";" is because it's not a block terminator,
--- so without it, gg.list would choke on "return ;" statements.
--- We don't make a modified copy of block_terminators because this list
--- is sometimes modified at runtime, and the return parser would get out of
--- sync if it was relying on a copy.
---------------------------------------------------------------------------------
-local return_expr_list_parser = gg.multisequence{
- { ";" , builder = function() return { } end },
- default = gg.list {
- expr, separators = ",", terminators = block_terminators } }
-
---------------------------------------------------------------------------------
--- for header, between [for] and [do] (exclusive).
--- Return the `Forxxx{...} AST, without the body element (the last one).
---------------------------------------------------------------------------------
-function for_header (lx)
- local var = mlp.id (lx)
- if lx:is_keyword (lx:peek(), "=") then
- -- Fornum: only 1 variable
- lx:next() -- skip "="
- local e = expr_list (lx)
- assert (2 <= #e and #e <= 3, "2 or 3 values in a fornum")
- return { tag="Fornum", var, unpack (e) }
- else
- -- Forin: there might be several vars
- local a = lx:is_keyword (lx:next(), ",", "in")
- if a=="in" then var_list = { var, lineinfo = var.lineinfo } else
- -- several vars; first "," skipped, read other vars
- var_list = gg.list{
- primary = id, separators = ",", terminators = "in" } (lx)
- _G.table.insert (var_list, 1, var) -- put back the first variable
- lx:next() -- skip "in"
- end
- local e = expr_list (lx)
- return { tag="Forin", var_list, e }
- end
-end
-
---------------------------------------------------------------------------------
--- Function def parser helper: id ( . id ) *
---------------------------------------------------------------------------------
-local function fn_builder (list)
- local r = list[1]
- for i = 2, #list do r = { tag="Index", r, id2string(list[i]) } end
- return r
-end
-local func_name = gg.list{ id, separators = ".", builder = fn_builder }
-
---------------------------------------------------------------------------------
--- Function def parser helper: ( : id )?
---------------------------------------------------------------------------------
-local method_name = gg.onkeyword{ name = "method invocation", ":", id,
- transformers = { function(x) return x and id2string(x) end } }
-
---------------------------------------------------------------------------------
--- Function def builder
---------------------------------------------------------------------------------
-local function funcdef_builder(x)
- local name, method, func = x[1], x[2], x[3]
- if method then
- name = { tag="Index", name, method, lineinfo = {
- first = name.lineinfo.first,
- last = method.lineinfo.last } }
- _G.table.insert (func[1], 1, {tag="Id", "self"})
- end
- local r = { tag="Set", {name}, {func} }
- r[1].lineinfo = name.lineinfo
- r[2].lineinfo = func.lineinfo
- return r
-end
-
-
---------------------------------------------------------------------------------
--- if statement builder
---------------------------------------------------------------------------------
-local function if_builder (x)
- local cb_pairs, else_block, r = x[1], x[2], {tag="If"}
- for i=1,#cb_pairs do r[2*i-1]=cb_pairs[i][1]; r[2*i]=cb_pairs[i][2] end
- if else_block then r[#r+1] = else_block end
- return r
-end
-
---------------------------------------------------------------------------------
--- produce a list of (expr,block) pairs
---------------------------------------------------------------------------------
-local elseifs_parser = gg.list {
- gg.sequence { expr, "then", block },
- separators = "elseif",
- terminators = { "else", "end" } }
-
---------------------------------------------------------------------------------
--- assignments and calls: statements that don't start with a keyword
---------------------------------------------------------------------------------
-local function assign_or_call_stat_parser (lx)
- local e = expr_list (lx)
- local a = lx:is_keyword(lx:peek())
- local op = a and stat.assignments[a]
- if op then
- --FIXME: check that [e] is a LHS
- lx:next()
- local v = expr_list (lx)
- if type(op)=="string" then return { tag=op, e, v }
- else return op (e, v) end
- else
- assert (#e > 0)
- if #e > 1 then
- gg.parse_error (lx,
- "comma is not a valid statement separator; statement can be "..
- "separated by semicolons, or not separated at all") end
- if e[1].tag ~= "Call" and e[1].tag ~= "Invoke" then
- local typename
- if e[1].tag == 'Id' then
- typename = '("'..e[1][1]..'") is an identifier'
- elseif e[1].tag == 'Op' then
- typename = "is an arithmetic operation"
- else typename = "is of type '"..(e[1].tag or "<list>").."'" end
-
- gg.parse_error (lx, "This expression " .. typename ..
- "; a statement was expected, and only function and method call "..
- "expressions can be used as statements");
- end
- return e[1]
- end
-end
-
-local_stat_parser = gg.multisequence{
- -- local function <name> <func_val>
- { "function", id, func_val, builder =
- function(x)
- local vars = { x[1], lineinfo = x[1].lineinfo }
- local vals = { x[2], lineinfo = x[2].lineinfo }
- return { tag="Localrec", vars, vals }
- end },
- -- local <id_list> ( = <expr_list> )?
- default = gg.sequence{ id_list, gg.onkeyword{ "=", expr_list },
- builder = function(x) return {tag="Local", x[1], x[2] or { } } end } }
-
---------------------------------------------------------------------------------
--- statement
---------------------------------------------------------------------------------
-stat = gg.multisequence {
- name="statement",
- { "do", block, "end", builder =
- function (x) return { tag="Do", unpack (x[1]) } end },
- { "for", for_header, "do", block, "end", builder =
- function (x) x[1][#x[1]+1] = x[2]; return x[1] end },
- { "function", func_name, method_name, func_val, builder=funcdef_builder },
- { "while", expr, "do", block, "end", builder = "While" },
- { "repeat", block, "until", expr, builder = "Repeat" },
- { "local", local_stat_parser, builder = fget (1) },
- { "return", return_expr_list_parser, builder = fget (1, "Return") },
- { "break", builder = function() return { tag="Break" } end },
- { "-{", splice_content, "}", builder = fget(1) },
- { "if", elseifs_parser, gg.onkeyword{ "else", block }, "end",
- builder = if_builder },
- default = assign_or_call_stat_parser }
-
-stat.assignments = {
- ["="] = "Set" }
-
-function stat.assignments:add(k, v) self[k] = v end
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua: $Id: mlp_table.lua,v 1.5 2006/11/10 02:11:17 fab13n Exp $
---
--- Summary: metalua parser, table constructor parser. This is part
--- of the definition of module [mlp].
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
-----------------------------------------------------------------------
--- History:
--- $Log: mlp_table.lua,v $
--- Revision 1.5 2006/11/10 02:11:17 fab13n
--- compiler faithfulness to 5.1 improved
--- gg.expr extended
--- mlp.expr refactored
---
--- Revision 1.4 2006/11/09 09:39:57 fab13n
--- some cleanup
---
--- Revision 1.3 2006/11/07 04:38:00 fab13n
--- first bootstrapping version.
---
--- Revision 1.2 2006/11/05 15:08:34 fab13n
--- updated code generation, to be compliant with 5.1
---
-----------------------------------------------------------------------
-
---------------------------------------------------------------------------------
---
--- Exported API:
--- * [mlp.table_field()]
--- * [mlp.table_content()]
--- * [mlp.table()]
---
--- KNOWN BUG: doesn't handle final ";" or "," before final "}"
---
---------------------------------------------------------------------------------
-
---require "gg"
---require "mll"
---require "mlp_misc"
-
-module ("mlp", package.seeall)
-
---------------------------------------------------------------------------------
--- eta expansion to break circular dependencies:
---------------------------------------------------------------------------------
-local function _expr (lx) return expr(lx) end
-
---------------------------------------------------------------------------------
--- [[key] = value] table field definition
---------------------------------------------------------------------------------
-local bracket_field = gg.sequence{ "[", _expr, "]", "=", _expr, builder = "Pair" }
-
---------------------------------------------------------------------------------
--- [id = value] or [value] table field definition;
--- [[key]=val] are delegated to [bracket_field()]
---------------------------------------------------------------------------------
-function table_field (lx)
- if lx:is_keyword (lx:peek(), "[") then return bracket_field (lx) end
- local e = _expr (lx)
- if lx:is_keyword (lx:peek(), "=") then
- lx:next(); -- skip the "="
- local key = id2string(e)
- local val = _expr(lx)
- local r = { tag="Pair", key, val }
- r.lineinfo = { first = key.lineinfo.first, last = val.lineinfo.last }
- return r
- else return e end
-end
-
-local function _table_field(lx) return table_field(lx) end
-
---------------------------------------------------------------------------------
--- table constructor, without enclosing braces; returns a full table object
---------------------------------------------------------------------------------
-table_content = gg.list { _table_field,
- separators = { ",", ";" }, terminators = "}", builder = "Table" }
-
-local function _table_content(lx) return table_content(lx) end
-
---------------------------------------------------------------------------------
--- complete table constructor including [{...}]
---------------------------------------------------------------------------------
-table = gg.sequence{ "{", _table_content, "}", builder = fget(1) }
-
-
+++ /dev/null
-require 'metalua.compiler'
---
--- Encapsulates function mlc.luastring_to_ast in order to protect the call and
--- parse the error string when an error occurs.
---
--- @param src string containing Lua code to evaluate
--- @return AST of table type, as returned by mlc.luastring_to_ast. Contains an
--- error when AST generation fails
---
-function getast(src)
- local status, result = pcall(mlc.luastring_to_ast, src)
- if status then return result else
- local line, column, offset = result:match '%(l.(%d+), c.(%d+), k.(%d+)%)'
- local filename = result :match '^([^:]+)'
- local msg = result :match 'line %d+, char %d+: (.-)\n'
- local li = {line, column, offset, filename}
- return {tag='Error', lineinfo={first=li, last=li}, msg}
- end
-end
+++ /dev/null
--{ extension 'match' }
-
-local M = { }
-M.__index = M
-
-ast_to_string = |x| M.run(x)
-
---------------------------------------------------------------------------------
--- Instantiate a new AST->source synthesizer
---------------------------------------------------------------------------------
-function M.new ()
- local self = {
- _acc = { }, -- Accumulates pieces of source as strings
- current_indent = 0, -- Current level of line indentation
- indent_step = " " -- Indentation symbol, normally spaces or '\t'
- }
- return setmetatable (self, M)
-end
-
---------------------------------------------------------------------------------
--- Run a synthesizer on the `ast' arg and return the source as a string.
--- Can also be used as a static method `M.run (ast)'; in this case,
--- a temporary synthesizer is instantiated on the fly.
---------------------------------------------------------------------------------
-function M:run (ast)
- if not ast then
- self, ast = M.new(), self
- end
- self._acc = { }
- self:node (ast)
- return table.concat (self._acc)
-end
-
---------------------------------------------------------------------------------
--- Accumulate a piece of source file in the synthesizer.
---------------------------------------------------------------------------------
-function M:acc (x)
- if x then table.insert (self._acc, x) end
-end
-
---------------------------------------------------------------------------------
--- Accumulate an indented newline.
--- Jumps an extra line if indentation is 0, so that
--- toplevel definitions are separated by an extra empty line.
---------------------------------------------------------------------------------
-function M:nl ()
- if self.current_indent == 0 then self:acc "\n" end
- self:acc ("\n" .. self.indent_step:rep (self.current_indent))
-end
-
---------------------------------------------------------------------------------
--- Increase indentation and accumulate a new line.
---------------------------------------------------------------------------------
-function M:nlindent ()
- self.current_indent = self.current_indent + 1
- self:nl ()
-end
-
---------------------------------------------------------------------------------
--- Decrease indentation and accumulate a new line.
---------------------------------------------------------------------------------
-function M:nldedent ()
- self.current_indent = self.current_indent - 1
- self:acc ("\n" .. self.indent_step:rep (self.current_indent))
-end
-
---------------------------------------------------------------------------------
--- Keywords, which are illegal as identifiers.
---------------------------------------------------------------------------------
-local keywords = table.transpose {
- "and", "break", "do", "else", "elseif",
- "end", "false", "for", "function", "if",
- "in", "local", "nil", "not", "or",
- "repeat", "return", "then", "true", "until",
- "while" }
-
---------------------------------------------------------------------------------
--- Return true iff string `id' is a legal identifier name.
---------------------------------------------------------------------------------
-local function is_ident (id)
- return id:strmatch "^[%a_][%w_]*$" and not keywords[id]
-end
-
---------------------------------------------------------------------------------
--- Return true iff ast represents a legal function name for
--- syntax sugar ``function foo.bar.gnat() ... end'':
--- a series of nested string indexes, with an identifier as
--- the innermost node.
---------------------------------------------------------------------------------
-local function is_idx_stack (ast)
- match ast with
- | `Id{ _ } -> return true
- | `Index{ left, `String{ _ } } -> return is_idx_stack (left)
- | _ -> return false
- end
-end
-
---------------------------------------------------------------------------------
--- Operator precedences, in increasing order.
--- This is not directly used, it's used to generate op_prec below.
---------------------------------------------------------------------------------
-local op_preprec = {
- { "or", "and" },
- { "lt", "le", "eq", "ne" },
- { "concat" },
- { "add", "sub" },
- { "mul", "div", "mod" },
- { "unary", "not", "len" },
- { "pow" },
- { "index" } }
-
---------------------------------------------------------------------------------
--- operator --> precedence table, generated from op_preprec.
---------------------------------------------------------------------------------
-local op_prec = { }
-
-for prec, ops in ipairs (op_preprec) do
- for op in ivalues (ops) do
- op_prec[op] = prec
- end
-end
-
---------------------------------------------------------------------------------
--- operator --> source representation.
---------------------------------------------------------------------------------
-local op_symbol = {
- add = " + ", sub = " - ", mul = " * ",
- div = " / ", mod = " % ", pow = " ^ ",
- concat = " .. ", eq = " == ", ne = " ~= ",
- lt = " < ", le = " <= ", ["and"] = " and ",
- ["or"] = " or ", ["not"] = "not ", len = "# " }
-
---------------------------------------------------------------------------------
--- Accumulate the source representation of AST `node' in
--- the synthesizer. Most of the work is done by delegating to
--- the method having the name of the AST tag.
--- If something can't be converted to normal sources, it's
--- instead dumped as a `-{ ... }' splice in the source accumulator.
---------------------------------------------------------------------------------
-function M:node (node)
- assert (self~=M and self._acc)
- if not node.tag then -- tagless block.
- self:list (node, self.nl)
- else
- local f = M[node.tag]
- if type (f) == "function" then -- Delegate to tag method.
- f (self, node, unpack (node))
- elseif type (f) == "string" then -- tag string.
- self:acc (f)
- else -- No appropriate method, fall back to splice dumping.
- -- This cannot happen in a plain Lua AST.
- self:acc " -{ "
- self:acc (table.tostring (node, "nohash"), 80)
- self:acc " }"
- end
- end
-end
-
---------------------------------------------------------------------------------
--- Convert every node in the AST list `list' passed as 1st arg.
--- `sep' is an optional separator to be accumulated between each list element,
--- it can be a string or a synth method.
--- `start' is an optional number (default == 1), indicating which is the
--- first element of list to be converted, so that we can skip the beginning
--- of a list.
---------------------------------------------------------------------------------
-function M:list (list, sep, start)
- for i = start or 1, # list do
- self:node (list[i])
- if list[i + 1] then
- if not sep then
- elseif type (sep) == "function" then sep (self)
- elseif type (sep) == "string" then self:acc (sep)
- else error "Invalid list separator" end
- end
- end
-end
-
---------------------------------------------------------------------------------
---
--- Tag methods.
--- ------------
---
--- Specific AST node dumping methods, associated to their node kinds
--- by their name, which is the corresponding AST tag.
--- synth:node() is in charge of delegating a node's treatment to the
--- appropriate tag method.
---
--- Such tag methods are called with the AST node as 1st arg.
--- As a convenience, the n node's children are passed as args #2 ... n+1.
---
--- There are several things that could be refactored into common subroutines
--- here: statement blocks dumping, function dumping...
--- However, given their small size and linear execution
--- (they basically perform series of :acc(), :node(), :list(),
--- :nl(), :nlindent() and :nldedent() calls), it seems more readable
--- to avoid multiplication of such tiny functions.
---
--- To make sense out of these, you need to know metalua's AST syntax, as
--- found in the reference manual or in metalua/doc/ast.txt.
---
---------------------------------------------------------------------------------
-
-function M:Do (node)
- self:acc "do"
- self:nlindent ()
- self:list (node, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function M:Set (node)
- match node with
- | `Set{ { `Index{ lhs, `String{ method } } },
- { `Function{ { `Id "self", ... } == params, body } } }
- if is_idx_stack (lhs) and is_ident (method) ->
- -- ``function foo:bar(...) ... end'' --
- self:acc "function "
- self:node (lhs)
- self:acc ":"
- self:acc (method)
- self:acc " ("
- self:list (params, ", ", 2)
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-
- | `Set{ { lhs }, { `Function{ params, body } } } if is_idx_stack (lhs) ->
- -- ``function foo(...) ... end'' --
- self:acc "function "
- self:node (lhs)
- self:acc " ("
- self:list (params, ", ")
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-
- | `Set{ { `Id{ lhs1name } == lhs1, ... } == lhs, rhs }
- if not is_ident (lhs1name) ->
- -- ``foo, ... = ...'' when foo is *not* a valid identifier.
- -- In that case, the spliced 1st variable must get parentheses,
- -- to be distinguished from a statement splice.
- -- This cannot happen in a plain Lua AST.
- self:acc "("
- self:node (lhs1)
- self:acc ")"
- if lhs[2] then -- more than one lhs variable
- self:acc ", "
- self:list (lhs, ", ", 2)
- end
- self:acc " = "
- self:list (rhs, ", ")
-
- | `Set{ lhs, rhs } ->
- -- ``... = ...'', no syntax sugar --
- self:list (lhs, ", ")
- self:acc " = "
- self:list (rhs, ", ")
- end
-end
-
-function M:While (node, cond, body)
- self:acc "while "
- self:node (cond)
- self:acc " do"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function M:Repeat (node, body, cond)
- self:acc "repeat"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "until "
- self:node (cond)
-end
-
-function M:If (node)
- for i = 1, #node-1, 2 do
- -- for each ``if/then'' and ``elseif/then'' pair --
- local cond, body = node[i], node[i+1]
- self:acc (i==1 and "if " or "elseif ")
- self:node (cond)
- self:acc " then"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- end
- -- odd number of children --> last one is an `else' clause --
- if #node%2 == 1 then
- self:acc "else"
- self:nlindent ()
- self:list (node[#node], self.nl)
- self:nldedent ()
- end
- self:acc "end"
-end
-
-function M:Fornum (node, var, first, last)
- local body = node[#node]
- self:acc "for "
- self:node (var)
- self:acc " = "
- self:node (first)
- self:acc ", "
- self:node (last)
- if #node==5 then -- 5 children --> child #4 is a step increment.
- self:acc ", "
- self:node (node[4])
- end
- self:acc " do"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function M:Forin (node, vars, generators, body)
- self:acc "for "
- self:list (vars, ", ")
- self:acc " in "
- self:list (generators, ", ")
- self:acc " do"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function M:Local (node, lhs, rhs)
- if next (lhs) then
- self:acc "local "
- self:list (lhs, ", ")
- if rhs[1] then
- self:acc " = "
- self:list (rhs, ", ")
- end
- else -- Can't create a local statement with 0 variables in plain Lua
- self:acc (table.tostring (node, 'nohash', 80))
- end
-end
-
-function M:Localrec (node, lhs, rhs)
- match node with
- | `Localrec{ { `Id{name} }, { `Function{ params, body } } }
- if is_ident (name) ->
- -- ``local function name() ... end'' --
- self:acc "local function "
- self:acc (name)
- self:acc " ("
- self:list (params, ", ")
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-
- | _ ->
- -- Other localrec are unprintable ==> splice them --
- -- This cannot happen in a plain Lua AST. --
- self:acc "-{ "
- self:acc (table.tostring (node, 'nohash', 80))
- self:acc " }"
- end
-end
-
-function M:Call (node, f)
- -- single string or table literal arg ==> no need for parentheses. --
- local parens
- match node with
- | `Call{ _, `String{_} }
- | `Call{ _, `Table{...}} -> parens = false
- | _ -> parens = true
- end
- self:node (f)
- self:acc (parens and " (" or " ")
- self:list (node, ", ", 2) -- skip `f'.
- self:acc (parens and ")")
-end
-
-function M:Invoke (node, f, method)
- -- single string or table literal arg ==> no need for parentheses. --
- local parens
- match node with
- | `Invoke{ _, _, `String{_} }
- | `Invoke{ _, _, `Table{...}} -> parens = false
- | _ -> parens = true
- end
- self:node (f)
- self:acc ":"
- self:acc (method[1])
- self:acc (parens and " (" or " ")
- self:list (node, ", ", 3) -- Skip args #1 and #2, object and method name.
- self:acc (parens and ")")
-end
-
-function M:Return (node)
- self:acc "return "
- self:list (node, ", ")
-end
-
-M.Break = "break"
-M.Nil = "nil"
-M.False = "false"
-M.True = "true"
-M.Dots = "..."
-
-function M:Number (node, n)
- self:acc (tostring (n))
-end
-
-function M:String (node, str)
--- format "%q" prints '\n' in an impractical way IMO,
- -- so this is fixed with the :gsub( ) call.
- self:acc (string.format ("%q", str):gsub ("\\\n", "\\n"))
-end
-
-function M:Function (node, params, body)
- self:acc "function ("
- self:list (params, ", ")
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function M:Table (node)
- if not node[1] then self:acc "{ }" else
- self:acc "{"
- if #node > 1 then self:nlindent () else self:acc " " end
- for i, elem in ipairs (node) do
- match elem with
- | `Pair{ `String{ key }, value } if is_ident (key) ->
- -- ``key = value''. --
- self:acc (key)
- self:acc " = "
- self:node (value)
-
- | `Pair{ key, value } ->
- -- ``[key] = value''. --
- self:acc "["
- self:node (key)
- self:acc "] = "
- self:node (value)
-
- | _ ->
- -- ``value''. --
- self:node (elem)
- end
- if node [i+1] then
- self:acc ","
- self:nl ()
- end
- end
- if #node > 1 then self:nldedent () else self:acc " " end
- self:acc "}"
- end
-end
-
-function M:Op (node, op, a, b)
- -- Transform ``not (a == b)'' into ``a ~= b''. --
- match node with
- | `Op{ "not", `Op{ "eq", _a, _b } }
- | `Op{ "not", `Paren{ `Op{ "eq", _a, _b } } } ->
- op, a, b = "ne", _a, _b
- | _ ->
- end
-
- if b then -- binary operator.
- local left_paren, right_paren
- match a with
- | `Op{ op_a, ...} if op_prec[op] >= op_prec[op_a] -> left_paren = true
- | _ -> left_paren = false
- end
-
- match b with -- FIXME: might not work with right assoc operators ^ and ..
- | `Op{ op_b, ...} if op_prec[op] >= op_prec[op_b] -> right_paren = true
- | _ -> right_paren = false
- end
-
- self:acc (left_paren and "(")
- self:node (a)
- self:acc (left_paren and ")")
-
- self:acc (op_symbol [op])
-
- self:acc (right_paren and "(")
- self:node (b)
- self:acc (right_paren and ")")
-
- else -- unary operator.
- local paren
- match a with
- | `Op{ op_a, ... } if op_prec[op] >= op_prec[op_a] -> paren = true
- | _ -> paren = false
- end
- self:acc (op_symbol[op])
- self:acc (paren and "(")
- self:node (a)
- self:acc (paren and ")")
- end
-end
-
-function M:Paren (node, content)
- self:acc "("
- self:node (content)
- self:acc ")"
-end
-
-function M:Index (node, table, key)
- local paren_table
- -- Check precedence, see if parens are needed around the table --
- match table with
- | `Op{ op, ... } if op_prec[op] < op_prec.index -> paren_table = true
- | _ -> paren_table = false
- end
-
- self:acc (paren_table and "(")
- self:node (table)
- self:acc (paren_table and ")")
-
- match key with
- | `String{ field } if is_ident (field) ->
- -- ``table.key''. --
- self:acc "."
- self:acc (field)
- | _ ->
- -- ``table [key]''. --
- self:acc "["
- self:node (key)
- self:acc "]"
- end
-end
-
-function M:Id (node, name)
- if is_ident (name) then
- self:acc (name)
- else -- Unprintable identifier, fall back to splice representation.
- -- This cannot happen in a plain Lua AST.
- self:acc "-{`Id "
- self:String (node, name)
- self:acc "}"
- end
-end
-
+++ /dev/null
-----------------------------------------------------------------------
-----------------------------------------------------------------------
---
--- Base library extension
---
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-
-if not metalua then rawset(getfenv(), 'metalua', { }) end
-metalua.version = "v-0.5"
-
-if not rawpairs then
- rawpairs, rawipairs, rawtype = pairs, ipairs, type
-end
-
-function pairs(x)
- assert(type(x)=='table', 'pairs() expects a table')
- local mt = getmetatable(x)
- if mt then
- local mtp = mt.__pairs
- if mtp then return mtp(x) end
- end
- return rawpairs(x)
-end
-
-function ipairs(x)
- assert(type(x)=='table', 'ipairs() expects a table')
- local mt = getmetatable(x)
- if mt then
- local mti = mt.__ipairs
- if mti then return mti(x) end
- end
- return rawipairs(x)
-end
-
---[[
-function type(x)
- local mt = getmetatable(x)
- if mt then
- local mtt = mt.__type
- if mtt then return mtt end
- end
- return rawtype(x)
-end
-]]
-
-function min (a, ...)
- for n in values{...} do if n<a then a=n end end
- return a
-end
-
-function max (a, ...)
- for n in values{...} do if n>a then a=n end end
- return a
-end
-
-function o (...)
- local args = {...}
- local function g (...)
- local result = {...}
- for i=#args, 1, -1 do result = {args[i](unpack(result))} end
- return unpack (result)
- end
- return g
-end
-
-function id (...) return ... end
-function const (k) return function () return k end end
-
-function printf(...) return print(string.format(...)) end
-function eprintf(...)
- io.stderr:write(string.format(...).."\n")
-end
-
-function ivalues (x)
- assert(type(x)=='table', 'ivalues() expects a table')
- local i = 1
- local function iterator ()
- local r = x[i]; i=i+1; return r
- end
- return iterator
-end
-
-
-function values (x)
- assert(type(x)=='table', 'values() expects a table')
- local function iterator (state)
- local it
- state.content, it = next(state.list, state.content)
- return it
- end
- return iterator, { list = x }
-end
-
-function keys (x)
- assert(type(x)=='table', 'keys() expects a table')
- local function iterator (state)
- local it = next(state.list, state.content)
- state.content = it
- return it
- end
- return iterator, { list = x }
-end
-
+++ /dev/null
---------------------------------------------------------------------------------
--- Command Line OPTionS handler
--- ============================
---
--- This lib generates parsers for command-line options. It encourages
--- the following of some common idioms: I'm pissed off by Unix tools
--- which sometimes will let you concatenate single-letter options,
--- sometimes won't, will prefix long name options with simple dashes
--- instead of doubles, etc.
---
---------------------------------------------------------------------------------
-
--- TODO:
--- * add a generic way to unparse options ('grab everything')
--- * doc
--- * when a short options that takes a param isn't the last element of a series
--- of shorts, take the remaining of the sequence as that param, e.g. -Ifoo
--- * let unset strings/numbers with +
--- * add a ++ long counterpart to +
---
-
--{ extension 'match' }
-
-function clopts(cfg)
- local short, long, param_func = { }, { }
- local legal_types = table.transpose{
- 'boolean','string','number','string*','number*','nil', '*' }
-
- -----------------------------------------------------------------------------
- -- Fill short and long name indexes, and check its validity
- -----------------------------------------------------------------------------
- for x in ivalues(cfg) do
- local xtype = type(x)
- if xtype=='table' then
- if not x.type then x.type='nil' end
- if not legal_types[x.type] then error ("Invalid type name "..x.type) end
- if x.short then
- if short[x.short] then error ("multiple definitions for option "..x.short)
- else short[x.short] = x end
- end
- if x.long then
- if long[x.long] then error ("multiple definitions for option "..x.long)
- else long[x.long] = x end
- end
- elseif xtype=='function' then
- if param_func then error "multiple parameters handler in clopts"
- else param_func=x end
- end
- end
-
- -----------------------------------------------------------------------------
- -- Print a help message, summarizing how to use the command line
- -----------------------------------------------------------------------------
- local function print_usage(msg)
- if msg then print(msg,'\n') end
- print(cfg.usage or "Options:\n")
- for x in values(cfg) do
- if type(x) == 'table' then
- local opts = { }
- if x.type=='boolean' then
- if x.short then opts = { '-'..x.short..'/+'..x.short } end
- if x.long then table.insert (opts, '--'..x.long..'/++'..x.long) end
- else
- if x.short then opts = { '-'..x.short..' <'..x.type..'>' } end
- if x.long then table.insert (opts, '--'..x.long..' <'..x.type..'>' ) end
- end
- printf(" %s: %s", table.concat(opts,', '), x.usage or '<undocumented>')
- end
- end
- print''
- end
-
- -- Unless overridden, -h and --help display the help msg
- local default_help = { action = | | print_usage() or os.exit(0);
- long='help';short='h';type='nil'}
- if not short.h then short.h = default_help end
- if not long.help then long.help = default_help end
-
- -----------------------------------------------------------------------------
- -- Helper function for options parsing. Execute the attached action and/or
- -- register the config in cfg.
- --
- -- * cfg is the table which registers the options
- -- * dict the name->config entry hash table that describes options
- -- * flag is the prefix '-', '--' or '+'
- -- * opt is the option name
- -- * i the current index in the arguments list
- -- * args is the arguments list
- -----------------------------------------------------------------------------
- local function actionate(cfg, dict, flag, opt, i, args)
- local entry = dict[opt]
- if not entry then print_usage ("invalid option "..flag..opt); return false; end
- local etype, name = entry.type, entry.name or entry.long or entry.short
- match etype with
- | 'string' | 'number' | 'string*' | 'number*' ->
- if flag=='+' or flag=='++' then
- print_usage ("flag "..flag.." is reserved for boolean options, not for "..opt)
- return false
- end
- local arg = args[i+1]
- if not arg then
- print_usage ("missing parameter for option "..flag..opt)
- return false
- end
- if etype:strmatch '^number' then
- arg = tonumber(arg)
- if not arg then
- print_usage ("option "..flag..opt.." expects a number argument")
- end
- end
- if etype:strmatch '%*$' then
- if not cfg[name] then cfg[name]={ } end
- table.insert(cfg[name], arg)
- else cfg[name] = arg end
- if entry.action then entry.action(arg) end
- return i+2
- | 'boolean' ->
- local arg = flag=='-' or flag=='--'
- cfg[name] = arg
- if entry.action then entry.action(arg) end
- return i+1
- | 'nil' ->
- cfg[name] = true;
- if entry.action then entry.action() end
- return i+1
- | '*' ->
- local arg = table.isub(args, i+1, #args)
- cfg[name] = arg
- if entry.action then entry.action(arg) end
- return #args+1
- | _ -> assert( false, 'undetected bad type for clopts action')
- end
- end
-
- -----------------------------------------------------------------------------
- -- Parse a list of commands: the resulting function
- -----------------------------------------------------------------------------
- local function parse(...)
- local cfg = { }
- if not ... then return cfg end
- local args = type(...)=='table' and ... or {...}
- local i, i_max = 1, #args
- while i <= i_max do
- local arg, flag, opt, opts = args[i]
- --printf('beginning of loop: i=%i/%i, arg=%q', i, i_max, arg)
- if arg=='-' then
- i=actionate (cfg, short, '-', '', i, args)
- -{ `Goto 'continue' }
- end
-
- -----------------------------------------------------------------------
- -- double dash option
- -----------------------------------------------------------------------
- flag, opt = arg:strmatch "^(%-%-)(.*)"
- if opt then
- i=actionate (cfg, long, flag, opt, i, args)
- -{ `Goto 'continue' }
- end
-
- -----------------------------------------------------------------------
- -- double plus option
- -----------------------------------------------------------------------
- flag, opt = arg:strmatch "^(%+%+)(.*)"
- if opt then
- i=actionate (cfg, long, flag, opt, i, args)
- -{ `Goto 'continue' }
- end
-
- -----------------------------------------------------------------------
- -- single plus or single dash series of short options
- -----------------------------------------------------------------------
- flag, opts = arg:strmatch "^([+-])(.+)"
- if opts then
- local j_max, i2 = opts:len()
- for j = 1, j_max do
- opt = opts:sub(j,j)
- --printf ('parsing short opt %q', opt)
- i2 = actionate (cfg, short, flag, opt, i, args)
- if i2 ~= i+1 and j < j_max then
- error ('short option '..opt..' needs a param of type '..short[opt])
- end
- end
- i=i2
- -{ `Goto 'continue' }
- end
-
- -----------------------------------------------------------------------
- -- handler for non-option parameter
- -----------------------------------------------------------------------
- if param_func then param_func(args[i]) end
- if cfg.params then table.insert(cfg.params, args[i])
- else cfg.params = { args[i] } end
- i=i+1
-
- -{ `Label 'continue' }
- if not i then return false end
- end -- </while>
- return cfg
- end
-
- return parse
-end
-
-
+++ /dev/null
-require 'metalua.runtime'
-require 'metalua.mlc'
-require 'metalua.package2'
+++ /dev/null
--- TODO: support modules as macros?
--- does it make sense to store a constant AST as a macro?
-
--{ extension 'match' }
-
-dollar = rawget(getfenv(), 'dollar') or { }
-
-local function dollar_builder(call)
- match call with
- | `Call{ `Id{name}, ... } -> return dollar[name](select(2, unpack(call)))
- | `Id{name} ->
- local m = dollar[name]
- match type(m) with
- | 'function' -> return m()
- | 'table' -> return m
- | 'nil' -> error "No such macro registered"
- | t -> error ("Invalid macro type "..t)
- end
- | _ -> error "Invalid $macro, '$' should be followed by an identifier or function call"
- end
-end
-
-mlp.expr.prefix:add{ '$', prec = 100, builder = |_, x| dollar_builder(x) }
-mlp.stat:add{ '$', mlp.expr, builder = |x| dollar_builder(x[1]) }
+++ /dev/null
-require 'metalua.walk.id'
--{ extension 'log' }
-
---------------------------------------------------------------------------------
---
--- H params:
--- * H.alpha is the `Local{ } (or `Set{ }) statement which will
--- receive the alpha-conversions required to restore the free
--- variables of the transformed term. For instance,
--- H+{print(1)} will be transformed into +{.1.X.print(1)},
--- and alpha will contain +{local -{`Id '.1.X.print} = print }.
--- alpha is reused and augmented by successive calls to H().
---
--- * H.side contains 'inside', 'outside', 'both' or nil (equivalent to
--- 'both'). It indicates the kind of hygienization that's to be
--- performed.
---
--- * H.keep contain a set of free variable names which must not be
--- renamed.
---
--- * H.kind is the kind of walker that must be used ('expr', 'stat',
--- 'block'...) and defaults to 'guess'.
---
--- * H:set (field, val) sets a field in H and returns H, so that calls
--- can be chained, e.g.:
--- > H:set(keep, {'print'}):set('side', outside)+{print(x)}
---
--- * H:reset(field) sets a field to nil, and returns the value of that
--- field prior to nilification.
---------------------------------------------------------------------------------
-
-H = { } --setmetatable(H, H)
-H.__index=H
-H.template = { alpha = { } }
-
---------------------------------------------------------------------------------
---
---------------------------------------------------------------------------------
-function H:new(x)
- local instance = table.deep_copy(self.template)
- if x then instance <- x end
- setmetatable(instance, self)
- return instance
-end
-
---------------------------------------------------------------------------------
---
---------------------------------------------------------------------------------
-function H:__call (ast)
- assert (type(ast)=='table', "H expects an AST")
-
- local local_renames -- only set if inside hygienization's required
-
- -----------------------------------------------------------------------------
- -- kind of hygienization(s) to perform: h_inside and/or h_outside
- -----------------------------------------------------------------------------
- local h_inside, h_outside do
- local side = self.side or 'both'
- h_inside = side=='inside' or side=='both'
- h_outside = side=='outside' or side=='both'
- end
-
- -----------------------------------------------------------------------------
- -- Initialize self.keep:
- -- self.keep is a dictionary of free var names to be protected from capture
- -----------------------------------------------------------------------------
- do
- local k = self.keep
- -- If there's no self.keep, that's an empty dictionary
- if not k then k = { }; self.keep = k
- -- If it's a string, consider it as a single-element dictionary
- elseif type(k)=='string' then k = { [k] = true }; self.keep=k
- -- If there's a list-part in self.keep, transpose it:
- else for i, v in ipairs(k) do k[v], k[i] = true, nil end end
- end
-
- -----------------------------------------------------------------------------
- -- Config skeleton for the id walker
- -----------------------------------------------------------------------------
- local cfg = { expr = { }, stat = { }, id = { } }
-
- -----------------------------------------------------------------------------
- -- Outside hygienization: all free variables are renamed to fresh ones,
- -- and self.alpha is updated to contain the assignments required to keep
- -- the AST's semantics.
- -----------------------------------------------------------------------------
- if h_outside then
- local alpha = self.alpha
-
- -- free_vars is an old_name -> new_name dictionary computed from alpha:
- -- self.alpha is not an efficient representation for searching.
- if not alpha then alpha = { }; self.alpha = alpha end
- -- FIXME: alpha should only be overridden when there actually are some
- -- globals renamed.
- if #alpha==0 then alpha <- `Local{ { }, { } } end
- local new, old = unpack(alpha)
- local free_vars = { }
-
- assert (#new==#old, "Invalid alpha list")
- for i = 1, #new do
- assert (old[i].tag=='Id' and #old[i]==1, "Invalid lhs in alpha list")
- assert (new[i].tag=='Id' and #new[i]==1, "Invalid rhs in alpha list")
- free_vars[old[i][1]] = new[i][1]
- end
-
- -- Rename free variables that are not supposed to be captured.
- function cfg.id.free (id)
- local old_name = id[1]
- if self.keep[old_name] then return end
- local new_name = free_vars[old_name]
- if not new_name then
- new_name = mlp.gensym('X.'..old_name)[1]
- free_vars[old_name] = new_name
- table.insert(alpha[1], `Id{new_name})
- table.insert(alpha[2], `Id{old_name})
- end
- id[1] = new_name
- end
- end
-
- -----------------------------------------------------------------------------
- -- Inside hygienization: rename all local variables and their occurrences.
- -----------------------------------------------------------------------------
- if h_inside then
-
- ----------------------------------------------------------------
- -- Renamings can't be performed on-the-spot, as it would
- -- transiently break the link between binders and bound vars,
- -- thus preventing the algo to work. They're therefore stored
- -- in local_renames, and performed after the whole tree has been
- -- walked.
- ----------------------------------------------------------------
-
- local_renames = { } -- `Id{ old_name } -> new_name
- local bound_vars = { } -- binding statement -> old_name -> new_name
-
- ----------------------------------------------------------------
- -- Give a new name to newly created local vars, store it in
- -- bound_vars
- ----------------------------------------------------------------
- function cfg.binder (id, binder)
- if id.h_boundary then return end
- local old_name = id[1]
- local binder_table = bound_vars[binder]
- if not binder_table then
- binder_table = { }
- bound_vars[binder] = binder_table
- end
- local new_name = mlp.gensym('L.'..old_name)[1]
- binder_table[old_name] = new_name
- local_renames[id] = new_name
- end
-
- ----------------------------------------------------------------
- -- List a bound var for renaming. The new name has already been
- -- chosen and put in bound_vars by cfg.binder().
- ----------------------------------------------------------------
- function cfg.id.bound (id, binder)
- if id.h_boundary then return end
- local old_name = id[1]
- local new_name = bound_vars[binder][old_name]
- --.log(bound_vars[binder])
- assert(new_name, "no alpha conversion for a bound var?!")
- local_renames[id] = new_name
- end
- end
-
- -----------------------------------------------------------------------------
- -- Don't traverse subtrees marked by '!'
- -----------------------------------------------------------------------------
- local cut_boundaries = |x| x.h_boundary and 'break' or nil
- cfg.stat.down, cfg.expr.down = cut_boundaries, cut_boundaries
-
- -----------------------------------------------------------------------------
- -- The walker's config is ready, let's go.
- -- After that, ids are renamed in ast, free_vars and bound_vars are set.
- -----------------------------------------------------------------------------
- walk_id [self.kind or 'guess'] (cfg, ast)
-
- if h_inside then -- Apply local name changes
- for id, new_name in pairs(local_renames) do id[1] = new_name end
- end
-
- return ast
-end
-
---------------------------------------------------------------------------------
--- Return H to allow call chainings
---------------------------------------------------------------------------------
-function H:set(field, val)
- local t = type(field)
- if t=='string' then self[field]=val
- elseif t=='table' then self <- field
- else error("Can't set H, field arg can't be of type "..t) end
- return self
-end
-
---------------------------------------------------------------------------------
--- Return the value before reset
---------------------------------------------------------------------------------
-function H:reset(field)
- if type(field) ~= 'string' then error "Can only reset H string fields" end
- local r = H[field]
- H[field] = nil
- return r
-end
-
--- local function commit_locals_to_chunk(x)
--- local alpha = H:reset 'alpha'
--- --$log ('commit locals', x, alpha, 'nohash')
--- if not alpha or not alpha[1][1] then return end
--- if not x then return alpha end
--- table.insert(x, 1, alpha)
--- end
-
--- mlp.chunk.transformers:add (commit_locals_to_chunk)
+++ /dev/null
-require 'metalua.walk.id'
--{ extension 'log' }
-
-mlp.expr.prefix:add{ '!', prec = 5,
- builder = function(_,x)
- local v = mlp.gensym()
- return `Stat{ +{ block:
- local -{v} = -{x};
- (-{v}).h_boundary=true },
- v }
- end }
-
-mlp.stat:add{ '!', mlp.expr, builder = |x| +{stat: (-{x[1]}).h_boundary=true } }
-
--- * if there's no boundary in it, is there a need to rename vars?
--- ==> first pass to mark binders which contain boundaries,
--- then 2nd pass only touched those which have a splice
--- in them.
-
-return +{ require (-{ `String{ package.metalua_extension_prefix .. 'H-runtime' } }) }
-
-
+++ /dev/null
---------------------------------------------------------------------------------
---
--- Anaphoric macros.
---
--- This extension turns 'it' into a special variable, that's bound to
--- an often used value:
---
--- * in an 'if' statement, 'it' is bound, in a block, to the condition
--- that triggered the block's execution:
--- > if 1234 then y=it end; assert (y == 1234)
---
--- * in a while loop, it's bound to the test:
--- > while file:read "*a" do table.insert (lines, it) end
---
--- 'it' is bound to the most closely surrounding structure. If you wanted to
--- use its content at a deeper position in the AST, you would have to save it
--- in a temporary variable. But what you should really do in such a case is
--- avoid using anaphoric macros: they're fine for one-liners, but they
--- reduce readability for bigger functions.
---------------------------------------------------------------------------------
-
--- TODO: 'and' operator could, and maybe should, be anaphoric as well
--- TODO: anaphoric functions would be cool for recursive functions, but
--- recursive calls are always in an 'if' statement, so the pronoun
--- used for functions must not be the same as for 'if'.
-
-require 'freevars'
-
-local function anaphoric_if(ast)
- local it_found = false
- for i=2, #ast do
- if freevars.block(ast[i])['it'] then
- it_found = true
- break
- end
- end
- if it_found then
- local cond = ast[1]
- ast[1] = +{it}
- return +{stat: do local it = -{cond}; -{ast} end }
- end
-end
-
-local function anaphoric_while(ast)
- local it_found = false
- if freevars.block(ast[2])['it'] then
- local cond = ast[1]
- ast[1] = +{it}
- return +{stat: do local it = -{cond}; -{ast} end }
- end
-end
-
-mlp.stat:get'if'.transformers:add(anaphoric_if)
-mlp.stat:get'while'.transformers:add(anaphoric_while)
\ No newline at end of file
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua samples: $Id$
---
--- Summary: Lists by comprehension
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006-2007, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
---------------------------------------------------------------------------------
---
--- This extension implements list comprehensions, similar to Haskell and
--- Python syntax, to easily describe lists.
---
---------------------------------------------------------------------------------
-
--{ extension "match" }
-
-local function dots_builder (x) return `Dots{ x } end
-
-local function for_builder (x, h)
- match x with
- | `Comp{ _, acc } -> table.insert (acc, h[1]); return x
- | `Pair{ _, _ } -> error "No explicit key in a for list generator"
- | _ -> return `Comp{ x, {h[1]} }
- end
-end
-
-local function if_builder (x, p)
- match x with
- | `Comp{ _, acc } -> table.insert (acc, `If{ p[1] }); return x
- | `Pair{ _, _ } -> error "No explicit key in a list guard"
- | _ -> return `Comp{ x, p[1] }
- end
-end
-
-local function comp_builder(core, list, no_unpack)
- -- [ti] = temp var holding table.insert
- -- [v] = variable holding the table being built
- -- [r] = the core of the list being built
- local ti, v, r = mlp.gensym "table_insert", mlp.gensym "table"
-
- -----------------------------------------------------------------------------
- -- 1 - Build the loop's core: if it has suffix "...", every elements of the
- -- multi-return must be inserted, hence the extra [for] loop.
- -----------------------------------------------------------------------------
- match core with
- | `Dots{ x } ->
- local w = mlp.gensym()
- r = +{stat: for -{w} in values( -{x} ) do -{ `Call{ ti, v, w } } end }
- | `Pair{ k, w } ->
- r = `Set{ { `Index{ v, k } }, { w } }
- | _ -> r = `Call{ ti, v, core }
- end
-
- -----------------------------------------------------------------------------
- -- 2 - Stack the if and for control structures, from outside to inside.
- -- This is done in a destructive way for the elements of [list].
- -----------------------------------------------------------------------------
- for i = #list, 1, -1 do
- table.insert (list[i], {r})
- r = list[i]
- end
- if no_unpack then
- return `Stat{ { `Local{ {ti, v}, { +{table.insert}, `Table} }, r }, v }
- else
- return +{ function()
- local -{ti}, -{v} = table.insert, { }
- -{r}; return unpack(-{v})
- end () }
- end
-end
-
-local function table_content_builder (list)
- match list with
- | { `Comp{ y, acc } } -> return comp_builder( y, acc, "no unpack")
- | _ ->
- local tables = { `Table }
- local ctable = tables[1]
- local function flush() ctable=`Table; table.insert(tables, ctable) end
- for x in values(list) do
- match x with
- | `Comp{ y, acc } -> table.insert(ctable, comp_builder(y, acc)); flush()
- | `Dots{ y } -> table.insert(ctable, y); flush()
- | _ -> table.insert (ctable, x);
- end
- end
- match tables with
- | { x } | { x, { } } -> return x
- | _ ->
- if #tables[#tables]==0 then table.remove(tables) end --suppress empty table
- return `Call{ +{table.cat}, unpack(tables) }
- end
- end
-end
-
-mlp.table_field = gg.expr{ name="table cell",
- primary = mlp.table_field,
- suffix = { name="table cell suffix",
- { "...", builder = dots_builder },
- { "for", mlp.for_header, builder = for_builder },
- { "if", mlp.expr, builder = if_builder } } }
-
-mlp.table_content.builder = table_content_builder
-
---[[
-mlp.stat:add{ "for", gg.expr {
- primary = for_header,
- suffix = {
- { "for", mlp.for_header, builder = for_builder },
- { "if", mlp.expr, builder = if_builder } } },
- "do", mlp.block, "end", builder = for_stat_builder }
---]]
-
---------------------------------------------------------------------------------
--- Back-end for improved index operator.
---------------------------------------------------------------------------------
-local function index_builder(a, suffix)
- match suffix[1] with
- -- Single index, no range: keep the native semantics
- | { { e, false } } -> return `Index{ a, e }
- -- Either a range, or multiple indexes, or both
- | ranges ->
- local r = `Call{ +{table.isub}, a }
- local function acc (x,y) table.insert (r,x); table.insert (r,y) end
- for seq in ivalues (ranges) do
- match seq with
- | { e, false } -> acc(e,e)
- | { e, f } -> acc(e,f)
- end
- end
- return r
- end
-end
-
---------------------------------------------------------------------------------
--- Improved "[...]" index operator:
--- * support for multi-indexes ("foo[bar, gnat]")
--- * support for ranges ("foo[bar ... gnat]")
---------------------------------------------------------------------------------
-mlp.expr.suffix:del '['
-mlp.expr.suffix:add{ name="table index/range",
- "[", gg.list{
- gg.sequence { mlp.expr, gg.onkeyword{ "...", mlp.expr } } ,
- separators = { ",", ";" } },
- "]", builder = index_builder }
+++ /dev/null
-require "metalua.walk"
-
-----------------------------------------------------------------------
--- * [loop_tags] are the tags of statements which support continue.
--- * [loop_keywords] are the initial keywords which trigger the parsing
--- of these statements: they're indeed indexed by keyword in [mlp.stat].
-----------------------------------------------------------------------
-
-local loop_tags = table.transpose{ "Forin", "Fornum", "While", "Repeat" }
-local loop_keywords = { "for", "while", "repeat" }
-
-----------------------------------------------------------------------
--- This function takes the AST of a continue-enabled loop, parse
--- its body to find all instances of [`Continue]. If any of them
--- is found ([label~=nil]), they're transformed in [`Goto{...}], and
--- the corresponding label is added at the end of the loop's body.
---
--- Caveat: if a [continue] appears in the non-body part of a loop
--- (and therefore is relative to some enclosing loop), it isn't
--- handled, and therefore causes a compilation error. This could
--- only happen in a [`Stat{ }], however, since [`Function{ }]
--- cuts the search for [`Continue].
-----------------------------------------------------------------------
-local function loop_transformer (ast)
- local label
- local cfg = { stat = { }; expr = { } }
-
- function cfg.stat.down (x)
- if loop_tags[x.tag] then return 'break'
- elseif x.tag=='Continue' then
- if not label then label = mlp.gensym 'continue' end
- x <- `Goto{ label }
- end
- end
-
- function cfg.expr.down (x)
- return x.tag=='Function' and 'break'
- end
-
- local loop_body = ast.tag=="Repeat" and ast[1] or ast[#ast]
- walk.block (cfg, loop_body)
- if label then table.insert (loop_body, `Label{ label }) end
-end
-
-----------------------------------------------------------------------
--- Register the transformer for each kind of loop:
-----------------------------------------------------------------------
-for keyword in values (loop_keywords) do
- mlp.stat:get(keyword).transformers:add (loop_transformer)
-end
-
-mlp.lexer:add "continue"
-mlp.stat:add{ "continue", builder = ||`Continue }
+++ /dev/null
-mlp.expr:add{ "local", mlp.id, "=", mlp.expr, "in", mlp.expr,
- builder=|x| `Stat{ { `Local{ { x[1] }, { x[2] } } }, x[3] } }
\ No newline at end of file
+++ /dev/null
-require 'metalua.dollar'
-
--{ extension 'match' }
-
-function dollar.log(...)
- local args = {...}
- local ti = table.insert
- local code = { }
- local nohash = false
- local width = 80
-
- local i=1
- if args[i].tag=='String' then
- ti(code, +{print(" [LOG] "..-{args[1]})})
- i += 1
- end
-
- local xtra_args, names, vals = { }, { }, { }
- for i=i, #args do
- match args[i] with
- | +{ 'nohash' } -> nohash = true
- | `Number{ w } -> width = w
- --| `String{...} | `Number{...} -> ti (xtra_args, args[i])
- | `Id{n} -> ti (names, n); ti (vals, args[i])
- | x -> ti (names, table.tostring(x, 'nohash')); ti (vals, x)
- end
- end
-
- for i=1, #names do
- local msg = string.format(" [LOG] %s = ", names[i])
- local printer = `Call{ +{table.tostring},
- vals[i],
- `Number{ width },
- `Number{ #msg } }
- if nohash then ti(printer, +{'nohash'}) end
- ti (code, `Call{ +{printf}, +{"%s%s"}, `String{ msg }, printer })
- end
- return code
-end
+++ /dev/null
-----------------------------------------------------------------------
--- Metalua samples: $Id$
---
--- Summary: Structural pattern matching for metalua ADT.
---
-----------------------------------------------------------------------
---
--- Copyright (c) 2006-2008, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
---------------------------------------------------------------------------------
---
--- Glossary:
---
--- * term_seq: the tested stuff, a sequence of terms
--- * pattern_element: might match one term of a term seq. Represented
--- as expression ASTs.
--- * pattern_seq: might match a term_seq
--- * pattern_group: several pattern seqs, one of them might match
--- the term seq.
--- * case: pattern_group * guard option * block
--- * match_statement: tested term_seq * case list
---
--- Hence a complete match statement is a:
---
--- { list(expr), list{ list(list(expr)), expr or false, block } }
---
--- Implementation hints
--- ====================
---
--- The implementation is made as modular as possible, so that parts
--- can be reused in other extensions. The privileged way to share
--- contextual information across functions is through the 'cfg' table
--- argument. Its fields include:
---
--- * code: code generated from pattern. A pattern_(element|seq|group)
--- is compiled as a sequence of instructions which will jump to
--- label [cfg.on_failure] if the tested term doesn't match.
---
--- * on_failure: name of the label where the code will jump if the
--- pattern doesn't match
---
--- * locals: names of local variables used by the pattern. This
--- includes bound variables, and temporary variables used to
--- destructure tables. Names are stored as keys of the table,
--- values are meaningless.
---
--- * after_success: label where the code must jump after a pattern
--- succeeded in capturing a term, and the guard succeeded if there is
--- any, and the conditional block has run.
---
--- * ntmp: number of temporary variables used to destructure tables
--- in the current case.
---
--- Code generation is performed by acc_xxx() functions, which accumulate
--- code in cfg.code:
---
--- * acc_test(test, cfg) will generate a jump to cfg.on_failure
--- *when the test returns TRUE*
---
--- * acc_stat accumulates a statement
---
--- * acc_assign accumulates an assignment statement, and makes sure that
--- the LHS variable is registered as local in cfg.locals.
---
-----------------------------------------------------------------------
-
--- TODO: hygiene wrt type()
--- TODO: cfg.ntmp isn't reset as often as it could. I'm not even sure
--- the corresponding locals are declared.
-
-module ('spmatch', package.seeall)
-
-----------------------------------------------------------------------
--- This would have been best done through library 'metalua.walk',
--- but walk depends on match, so we have to break the dependency.
--- It replaces all instances of `...' in `ast' with `term', unless
--- it appears in a function.
-----------------------------------------------------------------------
-function replace_dots (ast, term)
- local function rec (x)
- if type(x) == 'table' then
- if x.tag=='Dots' then
- if term=='ambiguous' then
- error ("You can't use `...' on the right of a match case when it appears "..
- "more than once on the left")
- else
- x <- term
- end
- elseif x.tag=='Function' then return
- else for y in ivalues (x) do rec (y) end end
- end
- end
- return rec (ast)
-end
-
-tmpvar_base = mlp.gensym 'submatch.' [1]
-function next_tmpvar(cfg)
- assert (cfg.ntmp, "No cfg.ntmp imbrication level in the match compiler")
- cfg.ntmp = cfg.ntmp+1
- return `Id{ tmpvar_base .. cfg.ntmp }
-end
-
--- Code accumulators
-acc_stat = |x,cfg| table.insert (cfg.code, x)
-acc_test = |x,cfg| acc_stat(+{stat: if -{x} then -{`Goto{cfg.on_failure}} end}, cfg)
--- lhs :: `Id{ string }
--- rhs :: expr
-function acc_assign (lhs, rhs, cfg)
- assert(lhs.tag=='Id')
- cfg.locals[lhs[1]] = true
- acc_stat (`Set{ {lhs}, {rhs} }, cfg)
-end
-
-literal_tags = table.transpose{ 'String', 'Number', 'True', 'False', 'Nil' }
-
--- pattern :: `Id{ string }
--- term :: expr
-function id_pattern_element_builder (pattern, term, cfg)
- assert (pattern.tag == "Id")
- if pattern[1] == "_" then
- -- "_" is used as a dummy var ==> no assignment, no == checking
- cfg.locals._ = true
- elseif cfg.locals[pattern[1]] then
- -- This var is already bound ==> test for equality
- acc_test (+{ -{term} ~= -{pattern} }, cfg)
- else
- -- Free var ==> bind it, and remember it for later linearity checking
- acc_assign (pattern, term, cfg)
- cfg.locals[pattern[1]] = true
- end
-end
-
--- Concatenate code in [cfg.code], that will jump to label
--- [cfg.on_failure] if [pattern] doesn't match [term]. [pattern]
--- should be an identifier, or at least cheap to compute and
--- side-effects free.
---
--- pattern :: pattern_element
--- term :: expr
-function pattern_element_builder (pattern, term, cfg)
- if literal_tags[pattern.tag] then
- acc_test (+{ -{term} ~= -{pattern} }, cfg)
- elseif "Id" == pattern.tag then
- id_pattern_element_builder (pattern, term, cfg)
- elseif "Op" == pattern.tag and "div" == pattern[1] then
- regexp_pattern_element_builder (pattern, term, cfg)
- elseif "Op" == pattern.tag and "eq" == pattern[1] then
- eq_pattern_element_builder (pattern, term, cfg)
- elseif "Table" == pattern.tag then
- table_pattern_element_builder (pattern, term, cfg)
- else
- error ("Invalid pattern: "..table.tostring(pattern, "nohash"))
- end
-end
-
-function eq_pattern_element_builder (pattern, term, cfg)
- local _, pat1, pat2 = unpack (pattern)
- local ntmp_save = cfg.ntmp
- pattern_element_builder (pat1, term, cfg)
- cfg.ntmp = ntmp_save
- pattern_element_builder (pat2, term, cfg)
-end
-
--- pattern :: `Op{ 'div', string, list{`Id string} or `Id{ string }}
--- term :: expr
-function regexp_pattern_element_builder (pattern, term, cfg)
- local op, regexp, sub_pattern = unpack(pattern)
-
- -- Sanity checks --
- assert (op=='div', "Don't know what to do with that op in a pattern")
- assert (regexp.tag=="String",
- "Left hand side operand for '/' in a pattern must be "..
- "a literal string representing a regular expression")
- if sub_pattern.tag=="Table" then
- for x in ivalues(sub_pattern) do
- assert (x.tag=="Id" or x.tag=='Dots',
- "Right hand side operand for '/' in a pattern must be "..
- "a list of identifiers")
- end
- else
- assert (sub_pattern.tag=="Id",
- "Right hand side operand for '/' in a pattern must be "..
- "an identifier or a list of identifiers")
- end
-
- -- Regexp patterns can only match strings
- acc_test (+{ type(-{term}) ~= 'string' }, cfg)
- -- put all captures in a list
- local capt_list = +{ { string.strmatch(-{term}, -{regexp}) } }
- -- save them in a var_n for recursive decomposition
- local v2 = next_tmpvar(cfg)
- acc_stat (+{stat: local -{v2} = -{capt_list} }, cfg)
- -- was capture successful?
- acc_test (+{ not next (-{v2}) }, cfg)
- pattern_element_builder (sub_pattern, v2, cfg)
-end
-
--- pattern :: pattern and `Table{ }
--- term :: expr
-function table_pattern_element_builder (pattern, term, cfg)
- local seen_dots, len = false, 0
- acc_test (+{ type( -{term} ) ~= "table" }, cfg)
- for i = 1, #pattern do
- local key, sub_pattern
- if pattern[i].tag=="Pair" then -- Explicit key/value pair
- key, sub_pattern = unpack (pattern[i])
- assert (literal_tags[key.tag], "Invalid key")
- else -- Implicit key
- len, key, sub_pattern = len+1, `Number{ len+1 }, pattern[i]
- end
-
- -- '...' can only appear in final position
- -- Could be fixed actually...
- assert (not seen_dots, "Wrongly placed `...' ")
-
- if sub_pattern.tag == "Id" then
- -- Optimization: save a useless [ v(n+1)=v(n).key ]
- id_pattern_element_builder (sub_pattern, `Index{ term, key }, cfg)
- if sub_pattern[1] ~= "_" then
- acc_test (+{ -{sub_pattern} == nil }, cfg)
- end
- elseif sub_pattern.tag == "Dots" then
- -- Remember where the capture is, and thatt arity checking shouldn't occur
- seen_dots = true
- else
- -- Business as usual:
- local v2 = next_tmpvar(cfg)
- acc_assign (v2, `Index{ term, key }, cfg)
- pattern_element_builder (sub_pattern, v2, cfg)
- -- TODO: restore ntmp?
- end
- end
- if seen_dots then -- remember how to retrieve `...'
- -- FIXME: check, but there might be cases where the variable -{term}
- -- will be overridden in contrieved tables.
- -- ==> save it now, and clean the setting statement if unused
- if cfg.dots_replacement then cfg.dots_replacement = 'ambiguous'
- else cfg.dots_replacement = +{ select (-{`Number{len}}, unpack(-{term})) } end
- else -- Check arity
- acc_test (+{ #-{term} ~= -{`Number{len}} }, cfg)
- end
-end
-
--- Jumps to [cfg.on_faliure] if pattern_seq doesn't match
--- term_seq.
-function pattern_seq_builder (pattern_seq, term_seq, cfg)
- if #pattern_seq ~= #term_seq then error ("Bad seq arity") end
- cfg.locals = { } -- reset bound variables between alternatives
- for i=1, #pattern_seq do
- cfg.ntmp = 1 -- reset the tmp var generator
- pattern_element_builder(pattern_seq[i], term_seq[i], cfg)
- end
-end
-
---------------------------------------------------
--- for each case i:
--- pattern_seq_builder_i:
--- * on failure, go to on_failure_i
--- * on success, go to on_success
--- label on_success:
--- block
--- goto after_success
--- label on_failure_i
---------------------------------------------------
-function case_builder (case, term_seq, cfg)
- local patterns_group, guard, block = unpack(case)
- local on_success = mlp.gensym 'on_success' [1]
- for i = 1, #patterns_group do
- local pattern_seq = patterns_group[i]
- cfg.on_failure = mlp.gensym 'match_fail' [1]
- cfg.dots_replacement = false
- pattern_seq_builder (pattern_seq, term_seq, cfg)
- if i<#patterns_group then
- acc_stat (`Goto{on_success}, cfg)
- acc_stat (`Label{cfg.on_failure}, cfg)
- end
- end
- acc_stat (`Label{on_success}, cfg)
- if guard then acc_test (+{not -{guard}}, cfg) end
- if cfg.dots_replacement then
- replace_dots (block, cfg.dots_replacement)
- end
- block.tag = 'Do'
- acc_stat (block, cfg)
- acc_stat (`Goto{cfg.after_success}, cfg)
- acc_stat (`Label{cfg.on_failure}, cfg)
-end
-
-function match_builder (x)
- local term_seq, cases = unpack(x)
- local cfg = {
- code = `Do{ },
- after_success = mlp.gensym "_after_success" }
-
-
- -- Some sharing issues occur when modifying term_seq,
- -- so it's replaced by a copy new_term_seq.
- -- TODO: clean that up, and re-suppress the useless copies
- -- (cf. remarks about capture bug below).
- local new_term_seq = { }
-
- local match_locals
-
- -- Make sure that all tested terms are variables or literals
- for i=1, #term_seq do
- local t = term_seq[i]
- -- Capture problem: the following would compile wrongly:
- -- `match x with x -> end'
- -- Temporary workaround: suppress the condition, so that
- -- all external variables are copied into unique names.
- --if t.tag ~= 'Id' and not literal_tags[t.tag] then
- local v = mlp.gensym 'v'
- if not match_locals then match_locals = `Local{ {v}, {t} } else
- table.insert(match_locals[1], v)
- table.insert(match_locals[2], t)
- end
- new_term_seq[i] = v
- --end
- end
- term_seq = new_term_seq
-
- if match_locals then acc_stat(match_locals, cfg) end
-
- for i=1, #cases do
- local case_cfg = {
- after_success = cfg.after_success,
- code = `Do{ }
- -- locals = { } -- unnecessary, done by pattern_seq_builder
- }
- case_builder (cases[i], term_seq, case_cfg)
- if next (case_cfg.locals) then
- local case_locals = { }
- table.insert (case_cfg.code, 1, `Local{ case_locals, { } })
- for v in keys (case_cfg.locals) do
- table.insert (case_locals, `Id{ v })
- end
- end
- acc_stat(case_cfg.code, cfg)
- end
- acc_stat(+{error 'mismatch'}, cfg)
- acc_stat(`Label{cfg.after_success}, cfg)
- return cfg.code
-end
-
-----------------------------------------------------------------------
--- Syntactical front-end
-----------------------------------------------------------------------
-
-mlp.lexer:add{ "match", "with", "->" }
-mlp.block.terminators:add "|"
-
-match_cases_list_parser = gg.list{ name = "match cases list",
- gg.sequence{ name = "match case",
- gg.list{ name = "match case patterns list",
- primary = mlp.expr_list,
- separators = "|",
- terminators = { "->", "if" } },
- gg.onkeyword{ "if", mlp.expr, consume = true },
- "->",
- mlp.block },
- separators = "|",
- terminators = "end" }
-
-mlp.stat:add{ name = "match statement",
- "match",
- mlp.expr_list,
- "with", gg.optkeyword "|",
- match_cases_list_parser,
- "end",
- builder = |x| match_builder{ x[1], x[3] } }
-
+++ /dev/null
-local function b(x, suffix)
- local v, ontrue, onfalse = mlp.gensym "test", unpack (suffix)
- return `Stat{
- +{ block:
- local -{v}
- if -{x} then (-{v}) = -{ontrue} else (-{v}) = -{onfalse or `Nil} end },
- v }
-end
-
-mlp.expr.suffix:add{ "?", mlp.expr, gg.onkeyword{ ",", mlp.expr }, prec=5, builder=b }
+++ /dev/null
--{ extension 'match' }
-
---------------------------------------------------------------------------------
---
--- TODO:
---
--- * Hygienize calls to pcall()
---
---------------------------------------------------------------------------------
-
--{ extension 'H' }
--{ extension 'log' }
-
--- Get match parsers and builder, for catch cases handling:
-local match_alpha = require 'metalua.extension.match'
-local H = H:new{side='inside', alpha = match_alpha }
-
--- We'll need to track rogue return statements:
-require 'metalua.walk'
-
--- Put a block AST into a pcall():
-local mkpcall = |block| +{pcall(function() -{block} end)}
-
--- The statement builder:
-function trycatch_builder(x)
- --$log ("trycatch_builder", x, 'nohash', 60)
- local try_code, catch_cases, finally_code = unpack(x)
- local insert_return_catcher = false
-
- -- Can't be hygienize automatically by the current version of H, as
- -- it must bridge from inside user code (hacjed return statements)
- -- to outside macro code.
- local caught_return = !mlp.gensym 'caught_return'
- local saved_args
-
- !try_code; !(finally_code or { })
- -- FIXME: Am I sure there's no need to hygienize inside?
- --[[if catch_cases then
- for case in ivalues(catch_cases) do
- --$log(case,'nohash')
- local patterns, guard, block = unpack(case)
- ! block
- end
- end]]
-
-
- ----------------------------------------------------------------
- -- Returns in the try-block must be transformed:
- -- from the user's PoV, the code in the try-block isn't
- -- a function, therefore a return in it must not merely
- -- end the execution of the try block, but:
- -- * not cause any error to be caught;
- -- * let the finally-block be executed;
- -- * only then, let the enclosing function return with the
- -- appropraite values.
- -- The way to handle that is that any returned value is stored
- -- into the runtime variable caught_return, then a return with
- -- no value is sent, to stop the execution of the try-code.
- --
- -- Similarly, a return in a catch case code must not prevent
- -- the finally-code from being run.
- --
- -- This walker catches return statements and perform the relevant
- -- transformation into caught_return setting + empty return.
- --
- -- There is an insert_return_catcher compile-time flag, which
- -- allows to avoid inserting return-handling code in the result
- -- when not needed.
- ----------------------------------------------------------------
- local replace_returns_and_dots do
- local function f(x)
- match x with
- | `Return{...} ->
- insert_return_catcher = true
- -- Setvar's 'caught_return' code can't be hygienize by H currently.
- local setvar = `Set{ {caught_return}, { `Table{ unpack(x) } } }
- x <- { setvar; `Return }; x.tag = nil;
- --$log('transformed return stat:', x, 60)
- return 'break'
- | `Function{...} -> return 'break'
- -- inside this, returns would be the nested function's, not ours.
- | `Dots ->
- if not saved_args then saved_args = mlp.gensym 'args' end
- x <- `Call{ `Id 'unpack', saved_args }
- | _ -> -- pass
- end
- end
- local cfg = { stat = {down=f}, expr = {down=f} }
- replace_returns_and_dots = |x| walk.block(cfg, x)
- end
-
- -- parse returns in the try-block:
- replace_returns_and_dots (try_code)
-
- -- code handling the error catching process:
- local catch_result do
- if catch_cases and #catch_cases>0 then
- ----------------------------------------------------------
- -- Protect catch code against failures: they run in a pcall(), and
- -- the result is kept in catch_* vars so that it can be used to
- -- relaunch the error after the finally code has been executed.
- ----------------------------------------------------------
- for x in ivalues (catch_cases) do
- local case_code = x[3]
- -- handle rogue returns:
- replace_returns_and_dots (case_code)
- -- in case of error in the catch, we still need to run "finally":
- x[3] = +{block: catch_success, catch_error = -{mkpcall(case_code)}}
- end
- ----------------------------------------------------------
- -- Uncaught exceptions must not cause a mismatch,
- -- so we introduce a catch-all do-nothing last case:
- ----------------------------------------------------------
- table.insert (catch_cases, { { { `Id '_' } }, false, { } })
- catch_result = spmatch.match_builder{ {+{user_error}}, catch_cases }
- else
- catch_result = { }
- end
- end
-
- ----------------------------------------------------------------
- -- Build the bits of code that will handle return statements
- -- in the user code (try-block and catch-blocks).
- ----------------------------------------------------------------
- local caught_return_init, caught_return_rethrow do
- if insert_return_catcher then
- caught_return_init = `Local{{caught_return}}
- caught_return_rethrow =
- +{stat: if -{caught_return} then return unpack(-{caught_return}) end}
- else
- caught_return_init, caught_return_rethrow = { }, { }
- end
- end
-
- local saved_args_init =
- saved_args and `Local{ {saved_args}, { `Table{`Dots} } } or { }
-
- -- The finally code, to execute no matter what:
- local finally_result = finally_code or { }
-
- -- And the whole statement, gluing all taht together:
- local result = +{stat:
- do
- -{ saved_args_init }
- -{ caught_return_init }
- local user_success, user_error = -{mkpcall(try_code)}
- local catch_success, catch_error = false, user_error
- if not user_success then -{catch_result} end
- -{finally_result}
- if not user_success and not catch_success then error(catch_error) end
- -{ caught_return_rethrow }
- end }
-
- H(result)
-
- return result
-end
-
-function catch_case_builder(x)
- --$log ("catch_case_builder", x, 'nohash', 60)
- local patterns, guard, _, code = unpack(x)
- -- patterns ought to be a pattern_group, but each expression must
- -- be converted into a single-element pattern_seq.
- for i = 1, #patterns do patterns[i] = {patterns[i]} end
- return { patterns, guard, code }
-end
-
-mlp.lexer:add{ 'try', 'catch', 'finally', '->' }
-mlp.block.terminators:add{ 'catch', 'finally' }
-
-mlp.stat:add{
- 'try',
- mlp.block,
- gg.onkeyword{ 'catch',
- gg.list{
- gg.sequence{
- mlp.expr_list,
- gg.onkeyword{ 'if', mlp.expr },
- gg.optkeyword 'then',
- mlp.block,
- builder = catch_case_builder },
- separators = 'catch' } },
- gg.onkeyword{ 'finally', mlp.block },
- 'end',
- builder = trycatch_builder }
-
-return H.alpha
-
-
+++ /dev/null
---------------------------------------------------------------------------------
--- Initialize the types table. It has an __index metatable entry,
--- so that if a symbol is not found in it, it is looked for in the current
--- environment. It allows to write things like [ n=3; x :: vector(n) ].
---------------------------------------------------------------------------------
-types = { }
-setmetatable (types, { __index = getfenv(0)})
-
-function types.error (fmt, ...)
- error(string.format("Runtime type-checking failure: "..fmt, ...))
-end
-
---------------------------------------------------------------------------------
--- Add a prefix to an error message, if an error occurs.
--- Useful for type checkers that call sub-type-checkers.
---------------------------------------------------------------------------------
-local function nest_error (prefix, ...)
- local status, msg = pcall(...)
- if not status then types.error("%s:\n%s", prefix, msg) end
-end
-
---------------------------------------------------------------------------------
--- Built-in types
---------------------------------------------------------------------------------
-for typename in values{ "number", "string", "boolean", "function", "thread" } do
- types[typename] =
- function (val)
- if type(val) ~= typename then types.error ("%s expected", typename) end
- end
-end
-
-function types.integer(val)
- if type(val)~='number' or val%1~=0 then types.error 'integer expected' end
-end
-
---------------------------------------------------------------------------------
--- table(foo) checks
--- table(foo, bar) checks
--- table(i) where i is an integer checks
--- table(i, j) where i and j are integers checks
--- Integers and key/value types can be combined
---------------------------------------------------------------------------------
-function types.table (...)
-
- local key_type, val_type, range_from, range_to
- -- arguments parsing
- for x in values{...} do
- if type(x) == "number" then
- if range2 then types.error "Invalid type: too many numbers in table type"
- elseif range1 then range2 = x
- else range1 = x end
- else
- if type_key then types.error "Invalid type: too many types"
- elseif type_val then type_key, type_val = type_val, x
- else type_val = x end
- end
- end
- if not range2 then range2=range1 end
- if not type_key then type_key = types.integer end
- return function (val)
- if type(val) ~= "table" then types.error "table expected" end
- local s = #val
- if range2 and range2 > s then types.error "Not enough table elements" end
- if range1 and range1 < s then types.error "Too many elements table elements" end
- for k,v in pairs(val) do
- nest_error ("in table key", type_key, k)
- nest_error ("in table value", type_val, v)
- end
- end
-end
-
---------------------------------------------------------------------------------
--- [list (subtype)] checks that the term is a table, and all of its
--- integer-indexed elements are of type [subtype].
---------------------------------------------------------------------------------
-types.list = |...| types.table (types.integer, ...)
-
---------------------------------------------------------------------------------
--- Check that [x] is an integral number
---------------------------------------------------------------------------------
-function types.int (x)
- if type(x)~="number" or x%1~=0 then types.error "Integer number expected" end
-end
-
---------------------------------------------------------------------------------
--- [range(a,b)] checks that number [val] is between [a] and [b]. [a] and [b]
--- can be omitted.
---------------------------------------------------------------------------------
-function types.range (a,b)
- return function (val)
- if type(val)~="number" or a and val<a or b and val>b then
- types.error ("Number between %s and %s expected",
- a and tostring(a) or "-infty",
- b and tostring(b) or "+infty")
- end
- end
-end
-
---------------------------------------------------------------------------------
--- [inter (x, y)] checks that the term has both types [x] and [y].
---------------------------------------------------------------------------------
-function types.inter (...)
- local args={...}
- return function(val)
- for t in values(args) do nest_error ("in inter type", t, args) end
- end
-end
-
---------------------------------------------------------------------------------
--- [inter (x, y)] checks that the term has type either [x] or [y].
---------------------------------------------------------------------------------
-function types.union (...)
- local args={...}
- return function(val)
- for t in values(args) do if pcall(t, val) then return end end
- types.error "None of the types in the union fits"
- end
-end
-
---------------------------------------------------------------------------------
--- [optional(t)] accepts values of types [t] or [nil].
---------------------------------------------------------------------------------
-function types.optional(t)
- return function(val)
- if val~=nil then nest_error("In optional type", t, val) end
- end
-end
-
---------------------------------------------------------------------------------
--- A call to this is done on litteral tables passed as types, i.e.
--- type {1,2,3} is transformed into types.__table{1,2,3}.
---------------------------------------------------------------------------------
-function types.__table(s_type)
- return function (s_val)
- if type(s_val) ~= "table" then types.error "Struct table expected" end
- for k, field_type in pairs (s_type) do
- nest_error ("in struct field "..k, field_type, s_val[k])
- end
- end
-end
-
---------------------------------------------------------------------------------
--- Same as __table, except that it's called on literal strings.
---------------------------------------------------------------------------------
-function types.__string(s_type)
- return function (s_val)
- if s_val ~= s_type then
- types.error("String %q expected", s_type)
- end
- end
-end
-
---------------------------------------------------------------------------------
--- Top and Bottom:
---------------------------------------------------------------------------------
-function types.any() end
-function types.none() types.error "Empty type" end
-types.__or = types.union
-types.__and = types.inter
\ No newline at end of file
+++ /dev/null
--- This extension inserts type-checking code at approriate place in the code,
--- thanks to annotations based on "::" keyword:
---
--- * function declarations can be annotated with a returned type. When they
--- are, type-checking code is inserted in each of their return statements,
--- to make sure they return the expected type.
---
--- * function parameters can also be annotated. If they are, type-checking
--- code is inserted in the function body, which checks the arguments' types
--- and cause an explicit error upon incorrect calls. Moreover, if a new value
--- is assigned to the parameter in the function's body, the new value's type
--- is checked before the assignment is performed.
---
--- * Local variables can also be annotated. If they are, type-checking
--- code is inserted before any value assignment or re-assignment is
--- performed on them.
---
--- Type checking can be disabled with:
---
--- -{stat: types.enabled = false }
---
--- Code transformation is performed at the chunk level, i.e. file by
--- file. Therefore, it the value of compile-time variable
--- [types.enabled] changes in the file, the only value that counts is
--- its value once the file is entirely parsed.
---
--- Syntax
--- ======
---
--- Syntax annotations consist of "::" followed by a type
--- specifier. They can appear after a function parameter name, after
--- the closing parameter parenthese of a function, or after a local
--- variable name in the declaration. See example in samples.
---
--- Type specifiers are expressions, in which identifiers are taken
--- from table types. For instance, [number] is transformed into
--- [types.number]. These [types.xxx] fields must contain functions,
--- which generate an error when they receive an argument which doesn't
--- belong to the type they represent. It is perfectly acceptible for a
--- type-checking function to return another type-checking function,
--- thus defining parametric/generic types. Parameters can be
--- identifiers (they're then considered as indexes in table [types])
--- or literals.
---
--- Design hints
--- ============
---
--- This extension uses the code walking library [walk] to globally
--- transform the chunk AST. See [chunk_transformer()] for details
--- about the walker.
---
--- During parsing, type informations are stored in string-indexed
--- fields, in the AST nodes of tags `Local and `Function. They are
--- used by the walker to generate code only if [types.enabled] is
--- true.
---
--- TODO
--- ====
---
--- It's easy to add global vars type-checking, by declaring :: as an
--- assignment operator. It's easy to add arbitrary expr
--- type-checking, by declaring :: as an infix operator. How to make
--- both cohabit?
-
---------------------------------------------------------------------------------
---
--- Function chunk_transformer()
---
---------------------------------------------------------------------------------
---
--- Takes a block annotated with extra fields, describing typing
--- constraints, and returns a normal AST where these constraints have
--- been turned into type-checking instructions.
---
--- It relies on the following annotations:
---
--- * [`Local{ }] statements may have a [types] field, which contains a
--- id name ==> type name map.
---
--- * [Function{ }] expressions may have an [param_types] field, also a
--- id name ==> type name map. They may also have a [ret_type] field
--- containing the type of the returned value.
---
--- Design hints:
--- =============
---
--- It relies on the code walking library, and two states:
---
--- * [return_types] is a stack of the expected return values types for
--- the functions currently in scope, the most deeply nested one
--- having the biggest index.
---
--- * [scopes] is a stack of id name ==> type name scopes, one per
--- currently active variables scope.
---
--- What's performed by the walker:
---
--- * Assignments to a typed variable involve a type checking of the
--- new value;
---
--- * Local declarations are checked for additional type declarations.
---
--- * Blocks create and destroy variable scopes in [scopes]
---
--- * Functions create an additional scope (around its body block's scope)
--- which retains its argument type associations, and stacks another
--- return type (or [false] if no type constraint is given)
---
--- * Return statements get the additional type checking statement if
--- applicable.
---
---------------------------------------------------------------------------------
-
--- TODO: unify scopes handling with free variables detector
--- FIXME: scopes are currently incorrect anyway, only functions currently define a scope.
-
-require "metalua.walk"
-
--{ extension 'match' }
-
-module("types", package.seeall)
-
-enabled = true
-
-local function chunk_transformer (block)
- if not enabled then return end
- local return_types, scopes = { }, { }
- local cfg = { block = { }; stat = { }; expr = { } }
-
- function cfg.stat.down (x)
- match x with
- | `Local{ lhs, rhs, types = x_types } ->
- -- Add new types declared by lhs in current scope.
- local myscope = scopes [#scopes]
- for var, type in pairs (x_types) do
- myscope [var] = process_type (type)
- end
- -- Type-check each rhs value with the type of the
- -- corresponding lhs declaration, if any. Check backward, in
- -- case a local var name is used more than once.
- for i = 1, max (#lhs, #rhs) do
- local type, new_val = myscope[lhs[i][1]], rhs[i]
- if type and new_val then
- rhs[i] = checktype_builder (type, new_val, 'expr')
- end
- end
- | `Set{ lhs, rhs } ->
- for i=1, #lhs do
- match lhs[i] with
- | `Id{ v } ->
- -- Retrieve the type associated with the variable, if any:
- local j, type = #scopes, nil
- repeat j, type = j-1, scopes[j][v] until type or j==0
- -- If a type constraint is found, apply it:
- if type then rhs[i] = checktype_builder(type, rhs[i] or `Nil, 'expr') end
- | _ -> -- assignment to a non-variable, pass
- end
- end
- | `Return{ r_val } ->
- local r_type = return_types[#return_types]
- if r_type then
- x <- `Return{ checktype_builder (r_type, r_val, 'expr') }
- end
- | _ -> -- pass
- end
- end
-
- function cfg.expr.down (x)
- if x.tag ~= 'Function' then return end
- local new_scope = { }
- table.insert (scopes, new_scope)
- for var, type in pairs (x.param_types or { }) do
- new_scope[var] = process_type (type)
- end
- local r_type = x.ret_type and process_type (x.ret_type) or false
- table.insert (return_types, r_type)
- end
-
- -------------------------------------------------------------------
- -- Unregister the returned type and the variable scope in which
- -- arguments are registered;
- -- then, adds the parameters type checking instructions at the
- -- beginning of the function, if applicable.
- -------------------------------------------------------------------
- function cfg.expr.up (x)
- if x.tag ~= 'Function' then return end
- -- Unregister stuff going out of scope:
- table.remove (return_types)
- table.remove (scopes)
- -- Add initial type checking:
- for v, t in pairs(x.param_types or { }) do
- table.insert(x[2], 1, checktype_builder(t, `Id{v}, 'stat'))
- end
- end
-
- cfg.block.down = || table.insert (scopes, { })
- cfg.block.up = || table.remove (scopes)
-
- walk.block(cfg, block)
-end
-
---------------------------------------------------------------------------
--- Perform required transformations to change a raw type expression into
--- a callable function:
---
--- * identifiers are changed into indexes in [types], unless they're
--- allready indexed, or into parentheses;
---
--- * literal tables are embedded into a call to types.__table
---
--- This transformation is not performed when type checking is disabled:
--- types are stored under their raw form in the AST; the transformation is
--- only performed when they're put in the stacks (scopes and return_types)
--- of the main walker.
---------------------------------------------------------------------------
-function process_type (type_term)
- -- Transform the type:
- cfg = { expr = { } }
-
- function cfg.expr.down(x)
- match x with
- | `Index{...} | `Paren{...} -> return 'break'
- | _ -> -- pass
- end
- end
- function cfg.expr.up (x)
- match x with
- | `Id{i} -> x <- `Index{ `Id "types", `String{ i } }
- | `Table{...} | `String{...} | `Op{...} ->
- local xcopy, name = table.shallow_copy(x)
- match x.tag with
- | 'Table' -> name = '__table'
- | 'String' -> name = '__string'
- | 'Op' -> name = '__'..x[1]
- end
- x <- `Call{ `Index{ `Id "types", `String{ name } }, xcopy }
- | `Function{ params, { results } } if results.tag=='Return' ->
- results.tag = nil
- x <- `Call{ +{types.__function}, params, results }
- | `Function{...} -> error "malformed function type"
- | _ -> -- pass
- end
- end
- walk.expr(cfg, type_term)
- return type_term
-end
-
---------------------------------------------------------------------------
--- Insert a type-checking function call on [term] before returning
--- [term]'s value. Only legal in an expression context.
---------------------------------------------------------------------------
-local non_const_tags = table.transpose
- { 'Dots', 'Op', 'Index', 'Call', 'Invoke', 'Table' }
-function checktype_builder(type, term, kind)
- -- Shove type-checking code into the term to check:
- match kind with
- | 'expr' if non_const_tags [term.tag] ->
- local v = mlp.gensym()
- return `Stat{ { `Local{ {v}, {term} }; `Call{ type, v } }, v }
- | 'expr' ->
- return `Stat{ { `Call{ type, term } }, term }
- | 'stat' ->
- return `Call{ type, term }
- end
-end
-
---------------------------------------------------------------------------
--- Parse the typechecking tests in a function definition, and adds the
--- corresponding tests at the beginning of the function's body.
---------------------------------------------------------------------------
-local function func_val_builder (x)
- local typed_params, ret_type, body = unpack(x)
- local e = `Function{ { }, body; param_types = { }; ret_type = ret_type }
-
- -- Build [untyped_params] list, and [e.param_types] dictionary.
- for i, y in ipairs (typed_params) do
- if y.tag=="Dots" then
- assert(i==#typed_params, "`...' must be the last parameter")
- break
- end
- local param, type = unpack(y)
- e[1][i] = param
- if type then e.param_types[param[1]] = type end
- end
- return e
-end
-
---------------------------------------------------------------------------
--- Parse ":: type" annotation if next token is "::", or return false.
--- Called by function parameters parser
---------------------------------------------------------------------------
-local opt_type = gg.onkeyword{ "::", mlp.expr }
-
---------------------------------------------------------------------------
--- Updated function definition parser, which accepts typed vars as
--- parameters.
---------------------------------------------------------------------------
-
--- Parameters parsing:
-local id_or_dots = gg.multisequence{ { "...", builder = "Dots" }, default = mlp.id }
-
--- Function parsing:
-mlp.func_val = gg.sequence{
- "(", gg.list{
- gg.sequence{ id_or_dots, opt_type }, terminators = ")", separators = "," },
- ")", opt_type, mlp.block, "end",
- builder = func_val_builder }
-
-mlp.lexer:add { "::", "newtype" }
-mlp.chunk.transformers:add (chunk_transformer)
-
--- Local declarations parsing:
-local local_decl_parser = mlp.stat:get "local" [2].default
-
-local_decl_parser[1].primary = gg.sequence{ mlp.id, opt_type }
-
-function local_decl_parser.builder(x)
- local lhs, rhs = unpack(x)
- local s, stypes = `Local{ { }, rhs or { } }, { }
- for i = 1, #lhs do
- local id, type = unpack(lhs[i])
- s[1][i] = id
- if type then stypes[id[1]]=type end
- end
- if next(stypes) then s.types = stypes end
- return s
-end
-
-function newtype_builder(x)
- local lhs, rhs = unpack(x)
- match lhs with
- | `Id{ x } -> t = process_type (rhs)
- | `Call{ `Id{ x }, ... } ->
- t = `Function{ { }, rhs }
- for i = 2, #lhs do
- if lhs[i].tag ~= "Id" then error "Invalid newtype parameter" end
- t[1][i-1] = lhs[i]
- end
- | _ -> error "Invalid newtype definition"
- end
- return `Let{ { `Index{ `Id "types", `String{ x } } }, { t } }
-end
-
-mlp.stat:add{ "newtype", mlp.expr, "=", mlp.expr, builder = newtype_builder }
-
-
---------------------------------------------------------------------------
--- Register as an operator
---------------------------------------------------------------------------
---mlp.expr.infix:add{ "::", prec=100, builder = |a, _, b| insert_test(a,b) }
-
-return +{ require (-{ `String{ package.metalua_extension_prefix .. 'types-runtime' } }) }
+++ /dev/null
--- RAII in metalua.
---
--- Write:
--- with var_1, var_2... = val_1, val_2... do
--- ...
--- end
---
--- will assign val_n to var_n foreach n, and guaranty that var_n:close() will be called,
--- no matter what, even if the body causes an error, even if it returns, even
--- if another :close() call causes an error, etc. No. Matter. What.
-
-require 'metalua.extension.trycatch'
-
-function withdo_builder (x)
- local names, vals, body = unpack(x)
- for i = #names, 1, -1 do
- local name, val = names[i], vals[i]
- body = trycatch_builder{ { `Set{ {name}, {val} }, body }, -- try-block
- { }, -- catch-block
- { +{ print ("closing "..-{`String{name[1]}}) },
- `Invoke{ name, `String "close" } } }
- end
- table.insert(body, 1, `Local{ names })
- return body
-end
-
-mlp.lexer:add 'with'
-mlp.stat:add{
- 'with', mlp.id_list, '=', mlp.expr_list, 'do', mlp.block, 'end',
- builder = withdo_builder }
+++ /dev/null
--- Runtime for declared-globals checking: installs __index/__newindex
--- metamethods on the caller's global environment so that reading or
--- writing an undeclared global raises an error.
--- NOTE(review): getfenv() is Lua 5.1 only (removed in 5.2+) — confirm the
--- supported interpreter versions.
-local _G = getfenv()
-local _G_mt = getmetatable(_G)
-
-
--- Set the __globals metafield in the global environment's metatable,
--- if not already there.
-if _G_mt then
- if _G_mt.__globals then return else
- print( "Warning: _G already has a metatable,"..
- " which might interfere with xglobals")
- _G_mt.__globals = { }
- end
-else
- _G_mt = { __globals = { } }
- setmetatable(_G, _G_mt)
-end
-
--- add a series of variable names to the list of declared globals
--- NOTE(review): ivalues() is presumably metalua.base's list iterator —
--- confirm it is loaded before this file.
-function _G_mt.__newglobal(...)
- local g = _G_mt.__globals
- for v in ivalues{...} do g[v]=true end
-end
-
--- Try to set a global that's not in _G:
--- if it isn't declared, fail
-function _G_mt.__newindex(_G, var, val)
- if not _G_mt.__globals[var] then
- error ("Setting undeclared global variable "..var)
- end
--- rawset bypasses this metamethod, avoiding infinite recursion.
- rawset(_G, var, val)
-end
-
--- Try to read a global that's not in _G:
--- if it isn't declared, fail
-function _G_mt.__index(_G, var)
- if not _G_mt.__globals[var] then
- error ("Reading undeclared global variable "..var)
- end
- return nil
-end
-
+++ /dev/null
--- WARNING, this is undertested, especially in cases where multiple
--- modules have their own fenvs. Use at your own risks.
-
-require 'strict'
-
--- Builder for "global a, b [= v1, v2]": emits a call to the runtime's
--- __newglobal metamethod registering the names, followed (when an
--- initializer list is present) by the actual assignment.
-local function decl_builder(x)
- local ids, vals = unpack(x)
--- Turn each `Id{name} into a `String{name} argument for __newglobal.
- local ids_as_strings = table.imap(|x| `String{x[1]}, ids)
- local decl = `Call{ +{getmetatable(getfenv()).__newglobal},
- unpack(ids_as_strings) }
- if vals then return { decl, `Set{ ids, vals } }
- else return decl end
-end
-
--- Front-end: 'global' keyword with an optional '= expr_list' initializer.
-mlp.lexer:add 'global'
-mlp.stat:add{
- 'global', mlp.id_list, gg.onkeyword{ '=', mlp.expr_list },
- builder = decl_builder }
-
--- Splice a require of the runtime counterpart into the using file.
-return +{ require (-{ `String{ package.metalua_extension_prefix .. 'xglobal-runtime' } }) }
+++ /dev/null
--{ extension 'match' }
--{ extension 'log' }
-
-require 'metalua.walk'
-
-----------------------------------------------------------------------
--- Back-end:
-----------------------------------------------------------------------
-
--- Parse additional elements in a loop
--- Each extra header clause is normalized to one of three shapes:
--- `Until{cond} (exit loop), `If{cond} (filter body), or a for-header.
--- NOTE(review): loop_element is created as a global (no 'local') — it is
--- referenced by the front-end below; confirm the global is intentional.
-loop_element = gg.multisequence{
- { 'while', mlp.expr, builder = |x| `Until{ `Op{ 'not', x[1] } } },
- { 'until', mlp.expr, builder = |x| `Until{ x[1] } },
- { 'if', mlp.expr, builder = |x| `If{ x[1] } },
- { 'unless', mlp.expr, builder = |x| `If{ `Op{ 'not', x[1] } } },
- { 'for', mlp.for_header, builder = |x| x[1] } }
-
--- Recompose the loop
--- Recompose an extended loop: x = { first_header, extra_elements, body }.
--- Wraps the body in the compiled header clauses (innermost last) and
--- rewrites 'break' into a goto that escapes all of the nested loops.
-function xloop_builder(x)
- local first, elements, body = unpack(x)
-
- -------------------------------------------------------------------
- -- If it's a regular loop, don't bloat the code
- -------------------------------------------------------------------
- if not next(elements) then
- table.insert(first, body)
- return first
- end
-
- -------------------------------------------------------------------
- -- There's no reason to treat the first element in a special way
- -------------------------------------------------------------------
- table.insert(elements, 1, first)
-
- -------------------------------------------------------------------
- -- if a header or a break must be able to exit the loops, it will
- -- set exit_label and use it (a regular break wouldn't be enough,
- -- as it couldn't escape several nested loops.)
- -------------------------------------------------------------------
- local exit_label
--- Lazily allocate the shared exit label; every caller gets a goto to it.
- local function exit()
- if not exit_label then exit_label = mlp.gensym 'break' [1] end
- return `Goto{ exit_label }
- end
-
- -------------------------------------------------------------------
- -- Compile all headers elements, from last to first
- -------------------------------------------------------------------
- for i = #elements, 1, -1 do
- local e = elements[i]
- match e with
- | `If{ cond } ->
- body = `If{ cond, {body} }
- | `Until{ cond } ->
- body = +{stat: if -{cond} then -{exit()} else -{body} end }
- | `Forin{ ... } | `Fornum{ ... } ->
- table.insert (e, {body}); body=e
- end
- end
-
- -------------------------------------------------------------------
- -- Change breaks into gotos that escape all loops at once.
- -------------------------------------------------------------------
- local cfg = { stat = { }, expr = { } }
- function cfg.stat.down(x)
- match x with
- | `Break -> x <- exit()
--- Don't descend into nested loops: their breaks belong to them.
- | `Forin{ ... } | `Fornum{ ... } | `While{ ... } | `Repeat{ ... } ->
- return 'break'
- | _ -> -- pass
- end
- end
--- Likewise, don't rewrite breaks inside nested function bodies.
- function cfg.expr.down(x) if x.tag=='Function' then return 'break' end end
- walk.stat(cfg, body)
-
--- Only emit the label if some break/header actually targets it.
- if exit_label then body = { body, `Label{ exit_label } } end
- return body
-end
-
-----------------------------------------------------------------------
--- Front-end:
--- Replaces the stock 'for'/'while' statements with versions accepting
--- extra loop_element clauses, and adds an 'unless ... then' statement.
-----------------------------------------------------------------------
-
-mlp.lexer:add 'unless'
-mlp.stat:del 'for'
-mlp.stat:del 'while'
-
-loop_element_list = gg.list{ loop_element, terminators='do' }
-
-mlp.stat:add{
- 'for', mlp.for_header, loop_element_list, 'do', mlp.block, 'end',
- builder = xloop_builder }
-
-mlp.stat:add{
- 'while', mlp.expr, loop_element_list, 'do', mlp.block, 'end',
- builder = |x| xloop_builder{ `While{x[1]}, x[2], x[3] } }
-
-mlp.stat:add{
- 'unless', mlp.expr, 'then', mlp.block, 'end',
- builder = |x| +{stat: if not -{x[1]} then -{x[2]} end} }
+++ /dev/null
-
-require 'metalua.extension.match'
-
-module ('spmatch', package.seeall)
-
-require 'metalua.walk.id'
-
--{extension 'log'}
-
-----------------------------------------------------------------------
--- Back-end for statements
--- "match function ..." and "local match function...".
--- Tag must be either "Localrec" or "Set".
-----------------------------------------------------------------------
--- Build "[local] match function f | pat -> body ..." into a function whose
--- body is a match over gensym'd arguments. tag is 'Set' or 'Localrec'.
--- The arity is taken from the first case's first pattern sequence.
-named_match_function_builder = |tag| function (x)
- local func_name, _, cases = unpack(x)
- local arity = #cases[1][1][1]
--- NOTE(review): the condition tests arity==0 but the message talks about
--- the number of cases — confirm which invariant was actually intended.
- if arity==0 then
- error "There must be at least 1 case in match function"
- end
--- One gensym'd parameter per pattern position.
- local args = { }
- for i=1, arity do args[i] = mlp.gensym("arg."..i) end
- local body = match_builder{args, cases}
- return { tag=tag, {func_name}, { `Function{ args, {body} } } }
-end
-
--- Get rid of the former parser, it will be blended in a multiseq:
--- NOTE(review): match_builder and match_cases_list_parser are presumably
--- globals exported by metalua.extension.match (required above) — confirm.
-mlp.stat:del 'match'
-
-----------------------------------------------------------------------
--- "match function", "match ... with"
-----------------------------------------------------------------------
-mlp.stat:add{ 'match',
- gg.multisequence{
-
- ----------------------------------------------------------------
- -- Shortcut for declaration of functions containing only a match:
- -- "function f($1) match $1 with $2 end end" can be written:
- -- "match function f $2 end"
- ----------------------------------------------------------------
- { 'function', mlp.expr, gg.optkeyword '|',
- match_cases_list_parser, 'end',
- builder = named_match_function_builder 'Set' },
-
- ----------------------------------------------------------------
- -- Reintroduce the original match statement:
- ----------------------------------------------------------------
- default = gg.sequence{
- mlp.expr_list, 'with', gg.optkeyword '|',
- match_cases_list_parser, 'end',
- builder = |x| match_builder{ x[1], x[3] } } } }
-
-----------------------------------------------------------------------
--- Shortcut: "local match function f $cases end" translates to:
--- "local function f($args) match $args with $cases end end"
-----------------------------------------------------------------------
-mlp.stat:get'local'[2]:add{
- 'match', 'function', mlp.expr, gg.optkeyword '|',
- match_cases_list_parser, 'end',
- builder = named_match_function_builder 'Localrec' }
-
-----------------------------------------------------------------------
--- "match...with" expressions and "match function..."
--- Expression-position match: case bodies are expressions; each is
--- rewritten into an assignment to a gensym'd variable, and the whole
--- construct becomes a `Stat{} yielding that variable.
-----------------------------------------------------------------------
-mlp.expr:add{ 'match', builder = |x| x[1], gg.multisequence{
-
- ----------------------------------------------------------------
- -- Anonymous match functions:
- -- "function ($1) match $1 with $2 end end" can be written:
- -- "match function $2 end"
- ----------------------------------------------------------------
- { 'function', gg.optkeyword '|',
- match_cases_list_parser,
- 'end',
- builder = function(x)
- local _, cases = unpack(x)
- local v = mlp.gensym()
- local body = match_builder{v, cases}
- return `Function{ {v}, {body} }
- end },
-
- ----------------------------------------------------------------
- -- match expressions: you can put a match where an expression
- -- is expected. The case bodies are then expected to be
- -- expressions, not blocks.
- ----------------------------------------------------------------
- default = gg.sequence{
- mlp.expr_list, 'with', gg.optkeyword '|',
- gg.list{ name = "match cases list",
- gg.sequence{ name = "match expr case",
- gg.list{ name = "match expr case patterns list",
- primary = mlp.expr_list,
- separators = "|",
- terminators = { "->", "if" } },
- gg.onkeyword{ "if", mlp.expr, consume = true },
- "->",
- mlp.expr }, -- Notice: expression, not block!
- separators = "|" },
- -- Notice: no "end" keyword!
- builder = function (x)
- local tested_term_seq, _, cases = unpack(x)
- local v = mlp.gensym 'match_expr'
- -- Replace expressions with blocks
- for case in ivalues (cases) do
- local body = case[3]
- case[3] = { `Set{ {v}, {body} } }
- end
- local m = match_builder { tested_term_seq, cases }
- return `Stat{ { `Local{{v}}; m }, v }
- end } } }
-
--- Core of the "bind patterns = values" destructuring statement.
--- x = { patterns, values }. Returns two values:
--- * code: the pattern-matching block (errors with "bind error" on
--- mismatch);
--- * decl_list: the list of `Id{} nodes for the variables the pattern
--- binds, for the caller to declare (local_bind) or not (non_local_bind).
-function bind (x)
- local patterns, values = unpack(x)
-
- -------------------------------------------------------------------
- -- Generate pattern code: "bind vars = vals" translates to:
- -- do
- -- pattern matching code, goto 'fail' on mismatch
- -- goto 'success'
- -- label 'fail': error "..."
- -- label success
- -- end
- -- vars is the set of variables used by the pattern
- -------------------------------------------------------------------
- local code, vars do
- local match_cfg = {
- on_failure = mlp.gensym 'mismatch' [1],
- locals = { },
- code = { } }
--- NOTE(review): pattern_seq_builder is presumably a global from
--- metalua.extension.match — confirm it is in scope here.
- pattern_seq_builder(patterns, values, match_cfg)
- local on_success = mlp.gensym 'on_success' [1]
- code = {
- match_cfg.code;
- `Goto{ on_success };
- `Label{ match_cfg.on_failure };
- +{error "bind error"};
- `Label{ on_success } }
- vars = match_cfg.locals
- end
-
- -------------------------------------------------------------------
- -- variables that actually appear in the pattern:
- -------------------------------------------------------------------
- local vars_in_pattern do
- vars_in_pattern = { }
- local walk_cfg = { id = { } }
- function walk_cfg.id.free(v) vars_in_pattern[v[1]]=true end
- walk_id.expr_list(walk_cfg, patterns)
- end
-
- -------------------------------------------------------------------
- -- temp variables that are generated for destructuring,
- -- but aren't explicitly typed by the user. These must be made
- -- local.
- -------------------------------------------------------------------
- local vars_not_in_pattern do
- vars_not_in_pattern = { }
- for k in keys(vars) do
- if not vars_in_pattern[k] then
- vars_not_in_pattern[k] = true
- end
- end
- end
-
- -------------------------------------------------------------------
- -- Declare the temp variables as local to the statement.
- -------------------------------------------------------------------
- if next(vars_not_in_pattern) then
- local loc = { }
- for k in keys (vars_not_in_pattern) do
- table.insert (loc, `Id{k})
- end
- table.insert (code, 1, `Local{ loc, { } })
- end
-
- -------------------------------------------------------------------
- -- Transform the set of pattern variable names into a list of `Id{}
- -------------------------------------------------------------------
- local decl_list do
- decl_list = { }
- for k in keys (vars_in_pattern) do
- table.insert (decl_list, `Id{k})
- end
- end
-
- return code, decl_list
-end
-
--- "local bind ...": declare the pattern's variables local, then run the
--- matching code.
-function local_bind(x)
- local code, vars = bind (x)
- return { `Local{ vars, { } }; code }
-end
-
--- "bind ..." at statement level: variables are assumed to already exist;
--- the matching code is wrapped in a `Do{} block.
-function non_local_bind(x)
- local code, _ = bind (x)
- code.tag = 'Do'
- return code
-end
-
-----------------------------------------------------------------------
--- Syntax front-end
-----------------------------------------------------------------------
-mlp.lexer:add 'bind'
-
-----------------------------------------------------------------------
--- bind patterns = vars
-----------------------------------------------------------------------
-mlp.stat:add{ 'bind', mlp.expr_list, '=', mlp.expr_list,
- builder = non_local_bind }
-
-----------------------------------------------------------------------
--- local bind patterns = vars
--- Some monkey-patching of "local ..." must take place
--- (extends the existing 'local' statement's multisequence).
-----------------------------------------------------------------------
-mlp.stat:get'local'[2]:add{ 'bind', mlp.expr_list, '=', mlp.expr_list,
- builder = local_bind }
+++ /dev/null
-require 'metalua.compiler'
-
--- NOTE(review): module() is deprecated (removed in Lua 5.2) — the 5.1-era
--- module system is assumed here.
-module ('metaloop', package.seeall)
-
--- REPL configuration. NOTE(review): PRINT_AST is not read anywhere in this
--- file — confirm whether it is used by callers or is dead.
-PRINT_AST = true
-LINE_WIDTH = 60
-PROMPT = "M> "
-PROMPT2 = ">> "
-
-do -- set readline() to a line reader, either editline or a default
- local status, _ = pcall(require, 'editline')
- if status then
--- editline available: use its line editor with history.
- local rl_handle = editline.init 'metalua'
- readline = |p| rl_handle:read(p)
- else
--- Fallback: plain prompt on stdout, one line from stdin.
- function readline (p)
- io.write (p)
- io.flush ()
- return io.read '*l'
- end
- end
-end
-
--- True when the lexer stream lx is exhausted, or the error message msg
--- complains about an unexpected `Eof token — i.e. the input so far is an
--- incomplete statement and the REPL should prompt for more.
-function reached_eof(lx, msg)
- return lx:peek().tag=='Eof' or msg:find "token `Eof"
-end
-
--- Startup banner. NOTE(review): printf is presumably metalua.base's
--- print(string.format(...)) helper — confirm it is loaded.
-printf ("Metalua, interactive REPLoop.\n"..
- "(c) 2006-2008 <metalua@gmail.com>")
-
--- Main REPL loop: read (possibly multi-line) input, compile, execute,
--- and pretty-print the results. Never returns; exits on stdin EOF.
-function run()
- local lines = { }
- while true do
- local src, lx, ast, f, results, success
--- Accumulate lines until a non-empty one is entered; a leading '='
--- on a fresh input is sugar for 'return '.
- repeat
- local line = readline(next(lines) and PROMPT2 or PROMPT)
- if not line then print(); os.exit(0) end -- line==nil iff eof on stdin
- if not next(lines) then
- line = line:gsub('^%s*=', 'return ')
- end
- table.insert(lines, line)
- src = table.concat (lines, "\n")
- until #line>0
-
- lx = mlc.lexstream_of_luastring(src)
- success, ast = pcall(mlc.ast_of_lexstream, lx)
- if success then
- success, f = pcall(mlc.function_of_ast, ast, '=stdin')
- if success then
- results = { pcall(f) }
- success = table.remove (results, 1)
- if success then
- -- Success!
- table.iforeach(|x| table.print(x, LINE_WIDTH), results)
- lines = { }
- else
- print "Evaluation error:"
- print (results[1])
- lines = { }
- end
- else
- print "Can't compile into bytecode:"
- print (f)
- lines = { }
- end
- else
- -- If lx has been read entirely, try to read another
- -- line before failing.
--- (lines is deliberately NOT cleared in that case: the next
--- readline continues the same multi-line statement.)
- if not reached_eof(lx, ast) then
- print "Can't compile source into AST:"
- print (ast)
- lines = { }
- end
- end
- end
-end
\ No newline at end of file
+++ /dev/null
---------------------------------------------------------------------------------
--- Execute an `mlc.ast_of_*()' in a separate lua process.
--- Communication between processes goes through temporary files,
--- for the sake of portability.
---------------------------------------------------------------------------------
-
--- Module table; deliberately global so the child process spawned by the
--- client (lua -l metalua.mlc_xcall -e "mlc_xcall.server(...)") can see it.
-mlc_xcall = { }
-
---------------------------------------------------------------------------------
--- Number of lines to remove at the end of a traceback, should it be
--- dumped due to a compilation error in metabugs mode.
---------------------------------------------------------------------------------
-local STACK_LINES_TO_CUT = 7
-
---------------------------------------------------------------------------------
--- (Not intended to be called directly by users)
---
--- This is the back-end function, called in a separate lua process
--- by `mlc_xcall.client_*()' through `os.execute()'.
--- * inputs:
--- * the name of a lua source file to compile in a separate process
--- * the name of a writable file where the resulting ast is dumped
--- with `serialize()'.
--- * metabugs: if true and an error occurs during compilation,
--- the compiler's stacktrace is printed, allowing meta-programs
--- debugging.
--- * results:
--- * an exit status of 0 or -1, depending on whether compilation
--- succeeded;
--- * the ast file filled with either the serialized ast, or the
--- error message.
---------------------------------------------------------------------------------
--- Back-end run in the child process: compile luafilename to an AST,
--- serialize it into astfilename, and exit 0 on success / -1 on failure
--- (the file then holds the error message). In metabugs mode the error
--- is augmented with a traceback, trimmed of this harness's own frames.
-function mlc_xcall.server (luafilename, astfilename, metabugs)
-
- -- We don't want these to be loaded when people only do client-side business
- require 'metalua.compiler'
- require 'serialize'
-
- mlc.metabugs = metabugs
-
- -- compile the content of luafile name in an AST, serialized in astfilename
- --local status, ast = pcall (mlc.luafile_to_ast, luafilename)
- local status, ast
- local function compile() return mlc.luafile_to_ast (luafilename) end
- if mlc.metabugs then
- print 'mlc_xcall.server/metabugs'
- --status, ast = xpcall (compile, debug.traceback)
- --status, ast = xpcall (compile, debug.traceback)
--- Error handler: capture a traceback, then strip the last
--- STACK_LINES_TO_CUT lines (this harness's own frames).
- local function tb(msg)
- local r = debug.traceback(msg)
-
- -- Cut superfluous end lines
- local line_re = '\n[^\n]*'
- local re = "^(.-)" .. (line_re) :rep (STACK_LINES_TO_CUT) .. "$"
- return r :strmatch (re) or r
- end
- --status, ast = xpcall (compile, debug.traceback)
- status, ast = xpcall (compile, tb)
- else status, ast = pcall (compile) end
--- NOTE(review): io.open's result is unchecked — if astfilename cannot be
--- opened, out:write crashes with an unrelated error. Consider assert().
- local out = io.open (astfilename, 'w')
- if status then -- success
- out:write (serialize (ast))
- out:close ()
- os.exit (0)
- else -- failure, `ast' is actually the error message
- out:write (ast)
- out:close ()
- os.exit (-1)
- end
-end
-
---------------------------------------------------------------------------------
--- Compile the file whose name is passed as argument, in a separate process,
--- communicating through a temporary file.
--- returns:
--- * true or false, indicating whether the compilation succeeded
--- * the ast, or the error message.
---------------------------------------------------------------------------------
--- Compile luafile in a child lua process; communicate via a temp file.
--- Returns (true, ast) on success, (false, error_message) on failure.
-function mlc_xcall.client_file (luafile)
-
- -- printf("\n\nmlc_xcall.client_file(%q)\n\n", luafile)
-
- local tmpfilename = os.tmpname()
--- NOTE(review): filenames are spliced into a shell command inside [[..]]
--- long brackets — a name containing "]]" (or shell metacharacters)
--- breaks or injects into the command. Confirm inputs are trusted.
- local cmd = string.format (
- [=[lua -l metalua.mlc_xcall -e "mlc_xcall.server([[%s]], [[%s]], %s)"]=],
- luafile, tmpfilename, mlc.metabugs and "true" or "false")
-
- -- printf("os.execute [[%s]]\n\n", cmd)
-
--- NOTE(review): comparing os.execute() to 0 is the Lua 5.1 convention;
--- 5.2+ returns a boolean — confirm target interpreter version.
- local status = (0 == os.execute (cmd))
- local result -- ast or error msg
- if status then
--- The temp file holds a serialized AST chunk: load and run it.
- result = (lua_loadfile or loadfile) (tmpfilename) ()
- else
- local f = io.open (tmpfilename)
- result = f :read '*a'
- f :close()
- end
- os.remove(tmpfilename)
- return status, result
-end
-
---------------------------------------------------------------------------------
--- Compile a source string into an ast, by dumping it in a tmp
--- file then calling `mlc_xcall.client_file()'.
--- returns: the same as `mlc_xcall.client_file()'.
---------------------------------------------------------------------------------
--- Compile a source string: dump it to a temp file, then delegate to
--- mlc_xcall.client_file(). Same return convention.
-function mlc_xcall.client_literal (luasrc)
- local srcfilename = os.tmpname()
- local srcfile, msg = io.open (srcfilename, 'w')
--- NOTE(review): on open failure this only prints msg and then crashes on
--- the nil srcfile:write below — consider returning false, msg instead.
- if not srcfile then print(msg) end
- srcfile :write (luasrc)
- srcfile :close ()
- local status, ast = mlc_xcall.client_file (srcfilename)
- os.remove(srcfilename)
- return status, ast
-end
-
-return mlc_xcall
\ No newline at end of file
+++ /dev/null
--- Keep a local handle on the real package table (robust against fenv games).
-local package = package
-
-require 'metalua.mlc'
-
--- Prefix under which syntax extensions are require()d.
-package.metalua_extension_prefix = 'metalua.extension.'
-
--- Search path for .mlua sources, overridable via LUA_MPATH.
-package.mpath = os.getenv 'LUA_MPATH' or
- './?.mlua;/usr/local/share/lua/5.1/?.mlua;'..
- '/usr/local/share/lua/5.1/?/init.mlua;'..
- '/usr/local/lib/lua/5.1/?.mlua;'..
- '/usr/local/lib/lua/5.1/?/init.mlua'
-
-
-----------------------------------------------------------------------
--- resc(k) returns "%"..k if it's a special regular expression char,
--- or just k if it's normal.
--- (Lua pattern magic characters must be %-escaped for literal use.)
-----------------------------------------------------------------------
-local regexp_magic = table.transpose{
- "^", "$", "(", ")", "%", ".", "[", "]", "*", "+", "-", "?" }
-local function resc(k)
- return regexp_magic[k] and '%'..k or k
-end
-
-----------------------------------------------------------------------
--- Take a Lua module name, return the open file and its name,
--- or <false> and an error message.
-----------------------------------------------------------------------
--- Resolve a dotted module name against a ';'-separated template path
--- (same scheme as package.path). Returns the open file handle and its
--- name, or false plus a newline-joined list of attempted paths.
-function package.findfile(name, path_string)
--- Parse the first 5 single-character lines of package.config
--- (dir sep, path sep, substitution mark, exec dir, ignore mark).
- local config_regexp = ("([^\n])\n"):rep(5):sub(1, -2)
- local dir_sep, path_sep, path_mark, execdir, igmark =
- package.config:strmatch (config_regexp)
- name = name:gsub ('%.', dir_sep)
- local errors = { }
- local path_pattern = string.format('[^%s]+', resc(path_sep))
- for path in path_string:gmatch (path_pattern) do
- --printf('path = %s, rpath_mark=%s, name=%s', path, resc(path_mark), name)
--- NOTE(review): name is used as a gsub replacement string — a module
--- path containing '%' would be misinterpreted. Confirm acceptable.
- local filename = path:gsub (resc (path_mark), name)
- --printf('filename = %s', filename)
- local file = io.open (filename, 'r')
- if file then return file, filename end
- table.insert(errors, string.format("\tno lua file %q", filename))
- end
- return false, table.concat(errors, "\n")..'\n'
-end
-
-----------------------------------------------------------------------
--- Execute a metalua module sources compilation in a separate process
--- Sending back the bytecode directly is difficult, as some shells
--- (at least MS-Windows') interpret some characters. So rather than
--- base64-encoding the bytecode, AST is returned from the child
--- process, and converted to bytecode then function in the calling
--- process.
-----------------------------------------------------------------------
--- Compile filename in a child process via io.popen; the child prints a
--- serialized AST which is loaded and compiled to a function here.
--- NOTE(review): appears unused in this file (metalua_loader goes through
--- mlc_xcall instead) — confirm before keeping.
-local function spring_load(filename)
- -- FIXME: handle compilation errors
- local pattern =
- [=[lua -l metalua.compiler -l serialize -e ]=]..
- [=["print(serialize(mlc.ast_of_luafile [[%s]]))"]=]
- local cmd = string.format (pattern, filename)
- --print ("running command: ``" .. cmd .. "''")
- local fd = io.popen (cmd)
- local ast_src = fd:read '*a'
- fd:close()
- local ast = lua_loadstring (ast_src) () -- much faster than loadstring()
- return mlc.function_of_ast (ast, filename)
-end
-
-----------------------------------------------------------------------
--- Load a metalua source file.
-----------------------------------------------------------------------
--- package.loaders entry for .mlua sources: find the file on package.mpath
--- and compile it, either in-process (metalua_nopopen) or in a child
--- process via mlc_xcall. Returns an error string when the file is not
--- found (the loader protocol's "not mine" answer).
-function package.metalua_loader (name)
- local file, filename_or_msg = package.findfile (name, package.mpath)
- if not file then return filename_or_msg end
- if package.metalua_nopopen then
- local luastring = file:read '*a'
- file:close()
- return mlc.function_of_luastring (luastring, name)
- else
- file:close()
- require 'metalua.mlc_xcall'
--- NOTE(review): status is ignored — on a failed compile, ast holds an
--- error message, which function_of_ast will then choke on. Consider
--- propagating the error explicitly.
- local status, ast = mlc_xcall.client_file (filename_or_msg)
- return mlc.function_of_ast(ast)
- end
-end
-
-----------------------------------------------------------------------
--- Placed after lua/luac loader, so precompiled files have
--- higher precedence.
-----------------------------------------------------------------------
-table.insert(package.loaders, package.metalua_loader)
-
-----------------------------------------------------------------------
--- Load a syntax extension by short name (resolved against
--- package.metalua_extension_prefix). Returns nothing when the extension
--- module returned `true' (no runtime code to splice), or the AST it
--- returned otherwise; raises on any other return type.
--- Fix: error() takes a single message argument (plus an optional level);
--- the original passed the format string and type(x) as two separate
--- arguments, so "%s" was never substituted and type(x) was silently
--- treated as the error level. Build the message with string.format.
--- NOTE(review): the noruntime parameter is accepted but never read here;
--- kept for caller compatibility — confirm whether it is still needed.
-----------------------------------------------------------------------
-function extension (name, noruntime)
- local complete_name = package.metalua_extension_prefix..name
- local x = require (complete_name)
- if x==true then return
- elseif type(x) ~= 'table' then
- error (string.format(
- "extension returned %s instead of an AST", type(x)))
- else
- return x
- end
-end
-
-return package
+++ /dev/null
-require 'metalua.base'
-require 'metalua.table2'
-require 'metalua.string2'
+++ /dev/null
-
-----------------------------------------------------------------------
-----------------------------------------------------------------------
---
--- String module extension
---
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-
--- Courtesy of lua-users.org
-function string.split(str, pat)
- local t = {}
- local fpat = "(.-)" .. pat
- local last_end = 1
- local s, e, cap = string.find(str, fpat, 1)
- while s do
- if s ~= 1 or cap ~= "" then
- table.insert(t,cap)
- end
- last_end = e+1
- s, e, cap = string.find(str, fpat, last_end)
- end
- if last_end <= string.len(str) then
- cap = string.sub(str, last_end)
- table.insert(t, cap)
- end
- return t
-end
-
--- "match" is regularly used as a keyword for pattern matching,
--- so here is an always available substitute.
--- (Indexing with ["match"] keeps the word out of metalua's tokenizer.)
-string.strmatch = string["match"]
-
--- change a compiled string into a function
-function string.undump(str)
- if str:strmatch '^\027LuaQ' or str:strmatch '^#![^\n]+\n\027LuaQ' then
- local f = (lua_loadstring or loadstring)(str)
- return f
- else
- error "Not a chunk dump"
- end
-end
-
-return string
\ No newline at end of file
+++ /dev/null
----------------------------------------------------------------------
-----------------------------------------------------------------------
---
--- Table module extension
---
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-
--- todo: table.scan (scan1?) fold1? flip?
-
--- Invert a table: values become keys and keys become values.
--- NOTE(review): duplicate values collapse to one entry (last pairs()
--- visit wins, iteration order unspecified) — callers should pass
--- injective maps.
-function table.transpose(t)
- local tt = { }
- for a, b in pairs(t) do tt[b] = a end
- return tt
-end
-
--- Apply f to elements of one or more sequences, stopping at the first
--- truthy result (which is returned). Optional leading numeric args give
--- [first[, last]] index bounds; with several tables, f receives the i-th
--- element of each.
-function table.iforeach(f, ...)
- -- assert (type (f) == "function") [wouldn't allow metamethod __call]
- local nargs = select("#", ...)
- if nargs==1 then -- Quick iforeach (most common case), just one table arg
- local t = ...
- assert (type (t) == "table")
- for i = 1, #t do
- local result = f (t[i])
- -- If the function returns non-false, stop iteration
- if result then return result end
- end
- else -- advanced case: boundaries and/or multiple tables
-
- -- fargs: arguments for a single call to f
- -- first, last: indexes of the first & last elements mapped in each table
- -- arg1: index of the first table in args
-
- -- 1 - find boundaries if any
- local args, fargs, first, last, arg1 = {...}, { }
- if type(args[1]) ~= "number" then first, arg1 = 1, 1 -- no boundary
- elseif type(args[2]) ~= "number" then first, last, arg1 = 1, args[1], 2
- else first, last, arg1 = args[1], args[2], 3 end
- assert (nargs >= arg1) -- at least one table
- -- 2 - determine upper boundary if not given
--- NOTE(review): this calls a global max() with last==nil on the first
--- iteration; standard math.max would error on nil — presumably
--- metalua.base provides a nil-tolerant max. Confirm.
- if not last then for i = arg1, nargs do
- assert (type (args[i]) == "table")
- last = max (#args[i], last)
- end end
- -- 3 - remove non-table arguments from args, adjust nargs
- if arg1>1 then args = { select(arg1, unpack(args)) }; nargs = #args end
-
- -- 4 - perform the iteration
- for i = first, last do
- for j = 1, nargs do fargs[j] = args[j][i] end -- build args list
- local result = f (unpack (fargs)) -- here is the call
- -- If the function returns non-false, stop iteration
- if result then return result end
- end
- end
-end
-
--- Map f over sequence(s), same calling convention as table.iforeach;
--- collects the results in a fresh list.
--- NOTE(review): when f returns nil the slot is left as a hole but idx
--- still advances — confirm callers tolerate holes.
-function table.imap (f, ...)
- local result, idx = { }, 1
- local function g(...) result[idx] = f(...); idx=idx+1 end
- table.iforeach(g, ...)
- return result
-end
-
--- Left-fold over sequence(s): acc = f(acc, elems...) for each index.
--- (g always returns nil, so iforeach never stops early.)
-function table.ifold (f, acc, ...)
- local function g(...) acc = f (acc,...) end
- table.iforeach (g, ...)
- return acc
-end
-
--- function table.ifold1 (f, ...)
--- return table.ifold (f, acc, 2, false, ...)
--- end
-
--- Zip sequences: returns a list whose i-th element is the list of the
--- i-th elements of the input tables.
-function table.izip(...)
- local function g(...) return {...} end
- return table.imap(g, ...)
-end
-
--- Partition sequence t by predicate f: returns (passing, failing) lists.
-function table.ifilter(f, t)
- local yes, no = { }, { }
- for i=1,#t do table.insert (f(t[i]) and yes or no, t[i]) end
- return yes, no
-end
-
--- Concatenate the array parts of all argument tables into a new list.
--- NOTE(review): values() is presumably metalua.base's list iterator —
--- confirm it is loaded.
-function table.icat(...)
- local result = { }
- for t in values {...} do
- for x in values (t) do
- table.insert (result, x)
- end
- end
- return result
-end
-
--- Flatten one level: concatenate the element lists of x.
-function table.iflatten (x) return table.icat (unpack (x)) end
-
--- Return a new list with t's array part in reverse order.
-function table.irev (t)
- local result, nt = { }, #t
- for i=0, nt-1 do result[nt-i] = t[i+1] end
- return result
-end
-
--- Extract a sub-list from index ranges given as pairs: isub(t, a1,b1,
--- a2,b2, ...). A descending pair (a>b) appends that range reversed.
-function table.isub (t, ...)
- local ti, u = table.insert, { }
- local args, nargs = {...}, select("#", ...)
- for i=1, nargs/2 do
- local a, b = args[2*i-1], args[2*i]
- for i=a, b, a<=b and 1 or -1 do ti(u, t[i]) end
- end
- return u
-end
-
--- True iff predicate f is truthy for every element (iforeach calling
--- convention). g inverts f, so iforeach stops and returns truthy at the
--- first failing element; no early stop means all elements passed.
--- Fix: dropped the unused local `result' and the dead commented-out
--- return statement left over from an earlier implementation.
-function table.iall (f, ...)
- local function g(...) return not f(...) end
- return not table.iforeach(g, ...)
-end
-
--- True iff predicate f is truthy for at least one element
--- (De Morgan: any(f) == not all(not f)).
-function table.iany (f, ...)
- local function g(...) return not f(...) end
- return not table.iall(g, ...)
-end
-
--- One-level copy of x: key/value pairs are copied, nested tables are
--- shared; the metatable is NOT copied.
-function table.shallow_copy(x)
- local y={ }
- for k, v in pairs(x) do y[k]=v end
- return y
-end
-
--- Warning, this is implementation dependent: it relies on
--- the fact that [next()] enumerates the array-part before the hash-part.
--- Merge tables: array parts are concatenated in order, hash parts are
--- overlaid (later arguments win on key clashes).
-function table.cat(...)
- local y={ }
- for x in values{...} do
- -- cat array-part
- for _, v in ipairs(x) do table.insert(y,v) end
- -- cat hash-part
--- Resume next() just past the array part to enumerate only hash keys
--- (relies on next()'s array-before-hash visiting order, see above).
- local lx, k = #x
- if lx>0 then k=next(x,lx) else k=next(x) end
- while k do y[k]=x[k]; k=next(x,k) end
- end
- return y
-end
-
--- Recursive copy of x; handles shared/cyclic sub-tables via the tracker
--- memo (each source table maps to exactly one copy). Metatables are
--- shared with the original, not copied. Keys are deep-copied too.
-function table.deep_copy(x)
- local tracker = { }
- local function aux (x)
- if type(x) == "table" then
- local y=tracker[x]
- if y then return y end
--- Register the copy before recursing, so cycles terminate.
- y = { }; tracker[x] = y
- setmetatable (y, getmetatable (x))
- for k,v in pairs(x) do y[aux(k)] = aux(v) end
- return y
- else return x end
- end
- return aux(x)
-end
-
--- Copy every pair of src into dst (in place), then truncate dst's array
--- part down to src's length. Returns dst.
-function table.override(dst, src)
- for k, v in pairs(src) do dst[k] = v end
- for i = #src+1, #dst do dst[i] = nil end
- return dst
-end
-
-
--- Build the list {a, a+c, ..., b}. range(b) == range(1, b);
--- range(a, b) infers step +1 or -1 from the bounds' order.
-function table.range(a,b,c)
- if not b then assert(not(c)); b=a; a=1
- elseif not c then c = (b>=a) and 1 or -1 end
- local result = { }
- for i=a, b, c do table.insert(result, i) end
- return result
-end
-
--- FIXME: new_indent seems to be always nil?!
--- FIXME: accumulator function should be configurable,
--- so that print() doesn't need to bufferize the whole string
--- before starting to print.
-function table.tostring(t, ...)
- local PRINT_HASH, HANDLE_TAG, FIX_INDENT, LINE_MAX, INITIAL_INDENT = true, true
- for _, x in ipairs {...} do
- if type(x) == "number" then
- if not LINE_MAX then LINE_MAX = x
- else INITIAL_INDENT = x end
- elseif x=="nohash" then PRINT_HASH = false
- elseif x=="notag" then HANDLE_TAG = false
- else
- local n = string['match'](x, "^indent%s*(%d*)$")
- if n then FIX_INDENT = tonumber(n) or 3 end
- end
- end
- LINE_MAX = LINE_MAX or math.huge
- INITIAL_INDENT = INITIAL_INDENT or 1
-
- local current_offset = 0 -- indentation level
- local xlen_cache = { } -- cached results for xlen()
- local acc_list = { } -- Generated bits of string
- local function acc(...) -- Accumulate a bit of string
- local x = table.concat{...}
- current_offset = current_offset + #x
- table.insert(acc_list, x)
- end
- local function valid_id(x)
- -- FIXME: we should also reject keywords; but the list of
- -- current keywords is not fixed in metalua...
- return type(x) == "string"
- and string['match'](x, "^[a-zA-Z_][a-zA-Z0-9_]*$")
- end
-
- -- Compute the number of chars it would require to display the table
- -- on a single line. Helps to decide whether some carriage returns are
- -- required. Since the size of each sub-table is required many times,
- -- it's cached in [xlen_cache].
- local xlen_type = { }
- local function xlen(x, nested)
- nested = nested or { }
- if x==nil then return #"nil" end
- --if nested[x] then return #tostring(x) end -- already done in table
- local len = xlen_cache[x]
- if len then return len end
- local f = xlen_type[type(x)]
- if not f then return #tostring(x) end
- len = f (x, nested)
- xlen_cache[x] = len
- return len
- end
-
- -- optim: no need to compute lengths if I'm not going to use them
- -- anyway.
- if LINE_MAX == math.huge then xlen = function() return 0 end end
-
- xlen_type["nil"] = function () return 3 end
- function xlen_type.number (x) return #tostring(x) end
- function xlen_type.boolean (x) return x and 4 or 5 end
- function xlen_type.string (x) return #string.format("%q",x) end
- function xlen_type.table (adt, nested)
-
- -- Circular references detection
- if nested [adt] then return #tostring(adt) end
- nested [adt] = true
-
- local has_tag = HANDLE_TAG and valid_id(adt.tag)
- local alen = #adt
- local has_arr = alen>0
- local has_hash = false
- local x = 0
-
- if PRINT_HASH then
- -- first pass: count hash-part
- for k, v in pairs(adt) do
- if k=="tag" and has_tag then
- -- this is the tag -> do nothing!
- elseif type(k)=="number" and k<=alen and math.fmod(k,1)==0 then
- -- array-part pair -> do nothing!
- else
- has_hash = true
- if valid_id(k) then x=x+#k
- else x = x + xlen (k, nested) + 2 end -- count surrounding brackets
- x = x + xlen (v, nested) + 5 -- count " = " and ", "
- end
- end
- end
-
- for i = 1, alen do x = x + xlen (adt[i], nested) + 2 end -- count ", "
-
- nested[adt] = false -- No more nested calls
-
- if not (has_tag or has_arr or has_hash) then return 3 end
- if has_tag then x=x+#adt.tag+1 end
- if not (has_arr or has_hash) then return x end
- if not has_hash and alen==1 and type(adt[1])~="table" then
- return x-2 -- subtract extraneous ", "
- end
- return x+2 -- count "{ " and " }", subtract extraneous ", "
- end
-
- -- Recursively print a (sub) table at given indentation level.
- -- [newline] indicates whether newlines should be inserted.
- local function rec (adt, nested, indent)
- if not FIX_INDENT then indent = current_offset end
- local function acc_newline()
- acc ("\n"); acc (string.rep (" ", indent))
- current_offset = indent
- end
- local x = { }
- x["nil"] = function() acc "nil" end
- function x.number() acc (tostring (adt)) end
- --function x.string() acc (string.format ("%q", adt)) end
- function x.string() acc ((string.format ("%q", adt):gsub("\\\n", "\\n"))) end
- function x.boolean() acc (adt and "true" or "false") end
- function x.table()
- if nested[adt] then acc(tostring(adt)); return end
- nested[adt] = true
-
-
- local has_tag = HANDLE_TAG and valid_id(adt.tag)
- local alen = #adt
- local has_arr = alen>0
- local has_hash = false
-
- if has_tag then acc("`"); acc(adt.tag) end
-
- -- First pass: handle hash-part
- if PRINT_HASH then
- for k, v in pairs(adt) do
- -- pass if the key belongs to the array-part or is the "tag" field
- if not (k=="tag" and HANDLE_TAG) and
- not (type(k)=="number" and k<=alen and math.fmod(k,1)==0) then
-
- -- Is it the first time we parse a hash pair?
- if not has_hash then
- acc "{ "
- if not FIX_INDENT then indent = current_offset end
- else acc ", " end
-
- -- Determine whether a newline is required
- local is_id, expected_len = valid_id(k)
- if is_id then expected_len = #k + xlen (v, nested) + #" = , "
- else expected_len = xlen (k, nested) +
- xlen (v, nested) + #"[] = , " end
- if has_hash and expected_len + current_offset > LINE_MAX
- then acc_newline() end
-
- -- Print the key
- if is_id then acc(k); acc " = "
- else acc "["; rec (k, nested, indent+(FIX_INDENT or 0)); acc "] = " end
-
- -- Print the value
- rec (v, nested, indent+(FIX_INDENT or 0))
- has_hash = true
- end
- end
- end
-
- -- Now we know whether there's a hash-part, an array-part, and a tag.
- -- Tag and hash-part are already printed if they're present.
- if not has_tag and not has_hash and not has_arr then acc "{ }";
- elseif has_tag and not has_hash and not has_arr then -- nothing, tag already in acc
- else
- assert (has_hash or has_arr)
- local no_brace = false
- if has_hash and has_arr then acc ", "
- elseif has_tag and not has_hash and alen==1 and type(adt[1])~="table" then
- -- No brace required; don't print "{", remember not to print "}"
- acc (" "); rec (adt[1], nested, indent+(FIX_INDENT or 0))
- no_brace = true
- elseif not has_hash then
- -- Braces required, but not opened by hash-part handler yet
- acc "{ "
- if not FIX_INDENT then indent = current_offset end
- end
-
- -- 2nd pass: array-part
- if not no_brace and has_arr then
- rec (adt[1], nested, indent+(FIX_INDENT or 0))
- for i=2, alen do
- acc ", ";
- if current_offset + xlen (adt[i], { }) > LINE_MAX
- then acc_newline() end
- rec (adt[i], nested, indent+(FIX_INDENT or 0))
- end
- end
- if not no_brace then acc " }" end
- end
- nested[adt] = false -- No more nested calls
- end
- local y = x[type(adt)]
- if y then y() else acc(tostring(adt)) end
- end
- --printf("INITIAL_INDENT = %i", INITIAL_INDENT)
- current_offset = INITIAL_INDENT or 0
- rec(t, { }, 0)
- return table.concat (acc_list)
-end
-
--- Convenience wrapper: render the arguments with table.tostring()
--- and hand the result straight to print().
-table.print = function (...)
-   return print (table.tostring (...))
-end
-
-return table
\ No newline at end of file
+++ /dev/null
---------------------------------------------------------------------------------
--- Code walkers
--- "Make everything as simple as possible, but not simpler".
---
--- This library offers a generic way to write AST transforming
--- functions. Macros can take bits of AST as parameters and generate a
--- more complex AST with them; but modifying an AST a posteriori is
--- much more difficult; typical tasks requiring code walking are
--- transformation such as lazy evaluation or Continuation Passing
--- Style, but more mundane operations are required in more macros than
--- one would think, such as "transform all returns which aren't inside
--- a nested function into an error throwing".
---
--- AST walking is an intrinsically advanced operation, and the
--- interface of this library, although it tries to remain as simple as
--- possible, is not trivial. You'll probably need to write a couple of
--- walkers with it before feeling comfortable.
---
---
--- We deal here with 3 important kinds of AST: statements, expressions
--- and blocks. Code walkers for these three kinds for AST are called
--- [walk.stat (cfg, ast)], [walk.expr (cfg, ast)] and [walk.block
--- (cfg, ast)] respectively. the [cfg] parameter describes what shall
--- happen as the AST is traversed by the walker, and [ast] is the tree
--- itself.
---
--- An aparte to fellow functional programmers: although Lua has
--- got all the features that constitute a functional language, its
--- heart, and in particular its table data, is imperative. It's often
--- asking for trouble to work against the host language's nature, so
--- code walkers are imperative, cope with it. Or use table.deep_copy()
--- if you don't want issues with shared state.
---
--- Since walkers are imperative (i.e. they transform the tree in
--- place, rather than returning a fresh variant of it), you'll often
--- want to override a node, i.e. keep its "pointer identity", but
--- replace its content with a new one; this is done by
--- table.override(), and is conveniently abbreviated as
--- "target <- new_content".
---
--- So, [cfg] can contain a series of sub-tables fields 'expr', 'stat',
--- 'block'. each of them can contain a function up() and/or a function
--- down().
---
--- * down() is called when the walker starts visiting a node of the
--- matching kind, i.e. before any of its sub-nodes have been
--- visited. down() is allowed to return either the string "break",
--- which means "don't go further down this tree, don't try to walk
--- its children", or nil, i.e. "please process with the children
--- nodes".
---
--- There are two reasons why you might want down() to return
--- "break": either because you really weren't interested into the
--- children nodes,or because you wanted to walk through them in a
--- special way, and down() already performed this special walking.
---
--- * up() is called just before the node is left, i.e. after all of
--- its children nodes have been completely parsed, down and up. This
--- is a good place to put treatments which rely on sub-nodes being
--- already treated. Notice that if down() returned 'break', up() is
--- run immediately after.
---
--- In previous versions of this library, there were plenty of fancy
--- configurable ways to decide whether an up() or down() functions
--- would be triggered or not. Experience suggested that the best way
--- is to keep it simpler, as done by the current design: the functions
--- in sub-table expr are run on each expression node, and ditto for
--- stat and block; the user is expected to use the pattern matching
--- extension to decide whether to act or not on a given node.
---
--- Advanced features
--- =================
---
--- The version above is a strict subset of the truth: there are a
--- couple of other, more advanced features in the library.
---
--- Paths in visitor functions
--- --------------------------
--- First, up() and down() don't take only one node as a parameter, but
--- a series thereof: all the nested expr/stat/block nodes on the way
--- up to the ast's root. For instance, when a walker works on
--- +{ foo(bar*2+1) } and is on the node +{2}, up() and down() are called
--- with arguments (+{bar*2}, +{bar*2+1}, +{foo(bar*2+1)}).
---
--- `Call and `Invoke as statements
--- -------------------------------
--- `Call and `Invoke are normally expressions, but they can also
--- appear as statements. In this case, the cfg.expr.xxx() visitors
--- aren't called on them. Sometimes you want to consider them as
--- expressions, sometimes not, and it's much easier to add a special
--- case in cfg.stat.xxx() visitors than to determine whether we're in
--- a statement's context in cfg.expr.xxx().
---
--- Extra walkers
--- -------------
--- There are some second class walkers: walk.expr_list() and walk.guess().
---
--- * The first one walks through a list of expressions. Although used
--- internally by the other walkers, it remains a second class
--- citizen: the list it works on won't appear in the path of nested
--- ASTs that's passed to up() and down(). This design choice has
--- been made because there's no clear definition of what is or isn't
--- an expr list in an AST, and anyway such lists are probably not
--- part of metacoders' mental image of an AST, so it's been thought
--- best to let people pretend they don't exist.
---
--- * walk.guess() tries to guess the type of the AST it receives,
--- according to its tag, and runs the appropriate walker. Node which
--- can be both stats and exprs (`Call and `Invoke) are considered as
--- expr.
---
--- These three walkers, although used internally by the other walkers,
--- remain second class citizens: the lists they work on won't appear
--- in the path of nested ASTs that's passed to up() and down().
---
--- Tag dictionaries
--- ----------------
--- There are two public dictionaries, walk.tags.stat and
--- walk.tags.expr, which keep the set of all tags that can start a
--- statement or an expression AST. They're used by walk.guess, and
--- users sometimes need them as well, so they've been kept available.
---
--- Binder visitor
--- --------------
--- Finally, there's one last field in [cfg]: binder(). This function
--- is called on identifiers in a binder position, i.e. `Id{ } nodes
--- which create a scoped local variable, in `Function, `Fornum, `Local
--- etc. The main use case for that function is to keep track of
--- variables, captures, etc. and perform alpha conversions. In many
--- cases that work is best done through the library 'walk.id', which
--- understands the notions of scope, free variable, bound variable
--- etc.
---
--- Binder visitors are called just before the variable's scope starts,
--- e.g. they're called after the right-hand-side has been visited in a
--- `Local node, but before in a `Localrec node.
---
--- TODO: document scopes, relaxed cfg descriptions
--- -----------------------------------------------
---
--- Examples of cfg structures:
---
--- { Id = f1, Local = f2 }
--- f
--- { up = f1, down = f2 }
--- { scope = { up = f1, down = f2 }, up = f1, down = f2 }
--- { stat = f1, expr = { up = f1 } }
---
---
---------------------------------------------------------------------------------
-
--{ extension "match" }
-
--- Library namespace: [walk.traverse] holds the recursive traversal
--- helpers, [walk.tags] the statement/expression tag sets, and
--- [walk.debug] toggles verbose tracing of the traversal.
-walk = { traverse = { }; tags = { }; debug = false }
-
---------------------------------------------------------------------------------
--- Standard tags: can be used to guess the type of an AST, or to check
--- that the type of an AST is respected.
---------------------------------------------------------------------------------
-walk.tags.stat = table.transpose{
- 'Do', 'Set', 'While', 'Repeat', 'Local', 'Localrec', 'Return',
- 'Fornum', 'Forin', 'If', 'Break', 'Goto', 'Label',
- 'Call', 'Invoke' }
-walk.tags.expr = table.transpose{
- 'Paren', 'Call', 'Invoke', 'Index', 'Op', 'Function', 'Stat',
- 'Table', 'Nil', 'Dots', 'True', 'False', 'Number', 'String', 'Id' }
-
--- Run the user's scope visitor, if any. [dir] is 'down' when a new
--- identifier scope opens, 'up' when it closes.
-local function scope (cfg, dir)
- local h = cfg.scope and cfg.scope[dir]
- if h then h() end
-end
-
---------------------------------------------------------------------------------
--- These [walk.traverse.xxx()] functions are in charge of actually going through
--- ASTs. At each node, they make sure to call the appropriate walker.
---------------------------------------------------------------------------------
--- Traverse a statement node: dispatch on its tag and walk every
--- sub-block / sub-expression / binder list with the matching walker,
--- with [x] pushed onto the ancestor log passed to visitors.
-function walk.traverse.stat (cfg, x, ...)
- if walk.debug then printf("traverse stat %s", table.tostring(x)) end
- local log = {...}
- -- Child-walking shortcuts; each records [x] as the new parent.
- local B = |y| walk.block (cfg, y, x, unpack(log))
- local E = |y| walk.expr (cfg, y, x, unpack(log))
- local EL = |y| walk.expr_list (cfg, y, x, unpack(log))
- local I = |y| walk.binder_list (cfg, y, x, unpack(log))
- -- Walk a block which opens (and closes) its own scope.
- local function BS(y)
- scope (cfg, 'down'); B(y); scope (cfg, 'up')
- end
-
- match x with
- | {...} if x.tag == nil -> for y in ivalues(x) do walk.stat(cfg, y, ...) end
- -- no tag --> node not inserted in the history log
- | `Do{...} -> BS(x)
- | `Set{ lhs, rhs } -> EL(lhs); EL(rhs)
- | `While{ cond, body } -> E(cond); BS(body)
- | `Repeat{ body, cond } -> scope(cfg, 'down'); B(body); E(cond); scope(cfg, 'up')
- | `Local{ lhs } -> I(lhs)
- | `Local{ lhs, rhs } -> EL(rhs); I(lhs)
- | `Localrec{ lhs, rhs } -> I(lhs); EL(rhs)
- | `Fornum{ i, a, b, body } -> E(a); E(b); I{i}; BS(body)
- | `Fornum{ i, a, b, c, body } -> E(a); E(b); E(c); I{i}; BS(body)
- | `Forin{ i, rhs, body } -> EL(rhs); I(i); BS(body)
- | `If{...} -> for i=1, #x-1, 2 do E(x[i]); BS(x[i+1]) end
- if #x%2 == 1 then BS(x[#x]) end
- | `Call{...}|`Invoke{...}|`Return{...} -> EL(x)
- | `Break | `Goto{ _ } | `Label{ _ } -> -- nothing
- | { tag=tag, ...} if walk.tags.stat[tag]->
- walk.malformed (cfg, x, unpack (log))
- | _ ->
- -- FIX: was 'walk.unknonw', which is never defined; the walker
- -- registered from walk.traverse is named 'unknown'.
- walk.unknown (cfg, x, unpack (log))
- end
-end
-
--- Traverse an expression node: dispatch on its tag and walk its
--- children, with [x] pushed onto the ancestor log passed to visitors.
-function walk.traverse.expr (cfg, x, ...)
- if walk.debug then printf("traverse expr %s", table.tostring(x)) end
- local log = {...}
- -- Child-walking shortcuts; each records [x] as the new parent.
- local B = |y| walk.block (cfg, y, x, unpack(log))
- local E = |y| walk.expr (cfg, y, x, unpack(log))
- local EL = |y| walk.expr_list (cfg, y, x, unpack(log))
- local I = |y| walk.binder_list (cfg, y, x, unpack(log))
- match x with
- | `Paren{ e } -> E(e)
- | `Call{...} | `Invoke{...} -> EL(x)
- | `Index{ a, b } -> E(a); E(b)
- | `Op{ opid, ... } -> E(x[2]); if #x==3 then E(x[3]) end
- -- FIX: both arms below used scope(cfg,'in'), which matches no
- -- handler and left the scope opened by 'down' unclosed: use 'up'.
- | `Function{ params, body } -> I(params); scope(cfg, 'down'); B(body); scope (cfg, 'up')
- | `Stat{ b, e } -> scope(cfg, 'down'); B(b); E(e); scope (cfg, 'up')
- | `Table{ ... } ->
- for i = 1, #x do match x[i] with
- | `Pair{ k, v } -> E(k); E(v)
- | v -> E(v)
- end end
- |`Nil|`Dots|`True|`False|`Number{_}|`String{_}|`Id{_} -> -- nothing
- | { tag=tag, ...} if walk.tags.expr[tag]->
- walk.malformed (cfg, x, unpack (log))
- | _ ->
- -- FIX: was 'walk.unknonw'; the registered walker is 'unknown'.
- walk.unknown (cfg, x, unpack (log))
- end
-end
-
--- Traverse a block: walk each statement in [x], with the block
--- itself recorded as the statements' parent in the ancestor log.
-function walk.traverse.block (cfg, x, ...)
- assert(type(x)=='table', "traverse.block() expects a table")
- for y in ivalues(x) do walk.stat(cfg, y, x, ...) end
-end
-
--- Traverse a list of expressions. The list itself is deliberately
--- kept out of the ancestor log (it is not a real AST node).
-function walk.traverse.expr_list (cfg, x, ...)
- assert(type(x)=='table', "traverse.expr_list() expects a table")
- -- x doesn't appear in the log
- for y in ivalues(x) do walk.expr(cfg, y, ...) end
-end
-
-----------------------------------------------------------------------
--- Generic walker generator.
--- * if `cfg' has an entry matching the tree name, use this entry
--- * if not, try to use the entry whose name matched the ast kind
--- * if an entry is a table, look for 'up' and 'down' entries
--- * if it is a function, consider it as a `down' traverser.
-----------------------------------------------------------------------
--- Build the walker for AST kind [cfg_field]. The returned function
--- picks the visitor entry from [cfg] (per-tag entry first, then the
--- per-kind entry, then cfg itself), runs its down() handler, walks
--- the children with [traverse] unless down() returned 'break', then
--- runs its up() handler.
-local function walker_builder (cfg_field, traverse)
- return function (cfg, x, ...)
- local sub_cfg = type (x)=='table' and x.tag and cfg[x.tag]
- or cfg[cfg_field] or cfg
- local down, up
- if type(sub_cfg)=='table' then
- down, up = sub_cfg.down, sub_cfg.up
- elseif type(sub_cfg)=='function' or sub_cfg=='break' then
- down, up = sub_cfg, nil
- else error "Invalid walk config" end
-
- local broken = false
- if down then
- broken = down=='break' and 'break' or down (x, ...)
- assert(not broken or broken=='break',
- "Map functions must return 'break' or nil")
- end
- if not broken and traverse then traverse (cfg, x, ...) end
- if up then up (x, ...) end
- end
-end
-
-----------------------------------------------------------------------
--- Declare [walk.stat], [walk.expr], [walk.block] and [walk.expr_list]
-----------------------------------------------------------------------
--- Instantiate the public walkers. 'malformed' and 'unknown' have no
--- traversal helper: their walkers only run the user-supplied visitors.
-local kinds = { "stat", "expr", "block", "expr_list",
- "malformed", "unknown" }
-for _, w in ipairs (kinds) do
- walk[w] = walker_builder (w, walk.traverse[w])
-end
-
-----------------------------------------------------------------------
--- Walk a list of `Id{...} (mainly a helper function actually).
-----------------------------------------------------------------------
--- Run the cfg.binder() visitor (when present) on every binder of the
--- `Id{ } list [x] (mainly a helper for the traversal functions).
-function walk.binder_list (cfg, x, ...)
- local visit = cfg.binder
- if not visit then return end
- for _, id in ipairs (x) do visit (id, ...) end
-end
-
-----------------------------------------------------------------------
--- Guess the kind of the AST [x] from its tag, then dispatch to the
--- right walker. Tags valid as both (`Call, `Invoke) are treated as
--- expressions; an untagged table is walked as a block.
-----------------------------------------------------------------------
-function walk.guess (cfg, x, ...)
- assert(type(x)=='table', "arg #2 in a walker must be an AST")
- local tag = x.tag
- if walk.tags.expr[tag] then return walk.expr (cfg, x, ...)
- elseif walk.tags.stat[tag] then return walk.stat (cfg, x, ...)
- elseif not tag then return walk.block (cfg, x, ...)
- else error ("Can't guess the AST type from tag "..(x.tag or '<none>')) end
-end
+++ /dev/null
-require 'metalua.walk'
-require 'metalua.walk.scope'
-
--- Compute identifier-binding information for [ast].
--- Returns two tables:
--- * binders: binder-ast => name => list of bound `Id occurences;
--- * unbound: name => list of `Id occurences with no visible binder.
-function bindings(ast)
- -- binders :: ast => name => occurences
- -- unbound :: name => occurences
- -- scope :: name => ast
-
- -- NB: on the right-hand side, 'scope' still refers to the global
- -- scope module required above; the local of the same name only
- -- comes into scope after this statement.
- local binders, unbound, cfg, scope = { }, { }, { scope={ } }, scope:new()
-
- -- * id: identifier entering in scope
- -- * ast: statement or expr carrying this id, one of:
- -- Local, Localrec, Forin, Fornum, Function.
- function cfg.binder (id, ast)
- if id.tag ~= 'Id' then return end
- local id_name = id[1]
- -- Reference in scope, so that the binding statement can be retrieved:
- scope.current[id_name] = ast
- -- Init the occurences list for this identifier:
- if binders[ast] then binders[ast][id_name] = { }
- else binders[ast] = { [id_name] = { } } end
- end
-
- -- identifier occurence, not as a binder: reference this occurence
- function cfg.Id (id)
- local id_name = id[1]
- -- ast which binds this id, might be nil:
- local binder_ast = scope.current [id_name]
- -- dict id_name => occurences, might be the list of unbound occurences:
- local occur_dict = binder_ast and binders[binder_ast] or unbound
- -- add an occurence of `id' in the occurences list:
- local occurences = occur_dict [id_name]
- if occurences then table.insert (occurences, id)
- else occur_dict [id_name] = { id } end
- end
-
- -- Keep the scope object in sync with the walker's scope events.
- function cfg.scope.down() scope:push() end
- function cfg.scope.up() scope:pop() end
-
- walk.guess (cfg, ast)
- return binders, unbound
-end
-
+++ /dev/null
---------------------------------------------------------------------------------
---
--- This library walks AST to gather information about the identifiers
--- in it. It classifies them between free variables and bound
--- variables, and keeps track of which AST node created a given bound
--- variable occurence.
---
--- walk_id (kind, ast)
---
--- Input:
--- * an AST kind: 'expr', 'stat', 'block', 'expr_list', 'binder_list', 'guess'
--- * an AST of the corresponding kind.
---
--- > string, AST
---
--- Output: a table with two fields, 'bound' and 'free';
--- * free associates the name of each free variable with the list of
--- all its occurences in the AST. That list is never empty.
--- * bound associates each stat or expr binding a new variable with
--- the occurences of that/those new variable(s).
---
--- > { free = table (string, AST and `Id{ });
--- > bound = table (AST, table(AST and `Id{ })) }
---
--- How it works
--- ============
--- Walk the tree to:
--- * locate open variables, and keep pointers on them so that they can
--- be alpha converted.
--- * locate variable bindings, so that we can find bound variables
--- * locate bound variables, keep them in association with their
--- binder, again in order to alpha-convert them.
---
--- Special treatments:
--- * `Function `Local `Localrec `Fornum `Forin have binders;
--- `Local takes effect from the next statement,
--- `Localrec from the current statement,
--- `Function and other statments inside their bodies.
--- * `Repeat has a special scoping rule for its condition.
--- * blocks create temporary scopes
--- * `Splice must stop the walking, so that user code won't be
--- converted
---
---------------------------------------------------------------------------------
-
--{ extension 'match' }
--{ extension 'log' }
-
-require 'metalua.walk'
-require 'metalua.walk.scope'
-
--- variable lists auto-create empty list as values by default.
-local varlist_mt = { __index = function (self, key)
- local x={ }; self[key] = x; return x
- end }
-
--- Core of the id walker: wrap [supercfg]'s visitors with scope
--- bookkeeping so that every `Id occurence is classified as bound
--- (together with its binder statement) or free, then run walk[kind].
-local function _walk_id (kind, supercfg, ast, ...)
-
- -- FIX: error message said "Inbalid".
- assert(walk[kind], "Invalid AST kind selector")
- -- FIX: was assert(type(supercfg=='table'), ...), i.e. the type of a
- -- boolean ("boolean", always truthy) -- the check could never fire.
- assert(type(supercfg)=='table', "Config table expected")
- assert(type(ast)=='table', "AST expected")
-
- local cfg = { expr = { }; block = { }; stat = { } }
- local scope = scope:new()
-
- local visit_bound_var, visit_free_var
- if not supercfg.id then
- printf("Warning, you're using the id walker without id visitor. "..
- "If you know what you want to do, then you're probably doing "..
- "something else...")
- visit_bound_var = || nil
- visit_free_var = || nil
- else
- visit_free_var = supercfg.id.free or || nil
- visit_bound_var = supercfg.id.bound or || nil
- end
-
- -----------------------------------------------------------------------------
- -- Check identifiers; add functions parameters to scope
- -----------------------------------------------------------------------------
- function cfg.expr.down(x, ...)
- -- Execute the generic expression walker; if it breaks,
- -- don't do the id walking.
- if supercfg.expr and supercfg.expr.down then
- local r = supercfg.expr.down(x, ...)
- if r then return r end
- end
- local parents = {...}
- match x with
- | `Id{ name } ->
- local binder, r = scope.current[name] -- binder :: ast which bound var
- if binder then
- --$log( 'walk.id found a bound var:', x, binder)
- r = visit_bound_var(x, binder, unpack(parents))
- else
- --$log( 'walk.id found a free var:', x, scope.current)
- r = visit_free_var(x, unpack(parents))
- end
- if r then return r end
- | `Function{ params, _ } -> scope:push (params, x)
- | `Stat{ block, expr } ->
- -------------------------------------------------------------
- -- 'expr' is in the scope of 'block': create the scope and
- -- walk the block 'manually', then prevent automatic walk
- -- by returning 'break'.
- -------------------------------------------------------------
- scope:push()
- for stat in values (block) do walk.stat(cfg, stat, x, ...) end
- walk.expr(cfg, expr, x, unpack(parents))
- scope:pop()
- return 'break'
- | _ -> -- pass
- end
-
- end
-
- -----------------------------------------------------------------------------
- -- Close the function scope opened by 'down()'
- -----------------------------------------------------------------------------
- function cfg.expr.up(x, ...)
- match x with `Function{...} -> scope:pop() | _ -> end
- if supercfg.expr and supercfg.expr.up then supercfg.expr.up(x, ...) end
- end
-
- -----------------------------------------------------------------------------
- -- Create a new scope and register loop variable[s] in it
- -----------------------------------------------------------------------------
- function cfg.stat.down(x, ...)
- -- Execute the generic statement walker; if it breaks,
- -- don't do the id walking.
- if supercfg.stat and supercfg.stat.down then
- local r = supercfg.stat.down(x, ...)
- if r then return r end
- end
- match x with
- | `Forin{ vars, ... } -> scope:push (vars, x)
- | `Fornum{ var, ... } -> scope:push ({var}, x)
- | `Localrec{ vars, ... } -> scope:add (vars, x)
- | `Repeat{ block, expr } ->
- -------------------------------------------------------------
- -- 'expr' is in the scope of 'block': create the scope and
- -- walk the block 'manually', then prevent automatic walk
- -- by returning 'break'.
- -------------------------------------------------------------
- scope:push()
- for stat in values (block) do walk.stat(cfg, stat, x, ...) end
- walk.expr(cfg, expr, x, ...)
- scope:pop()
- return 'break'
- | _ -> -- pass
- end
- end
-
- -----------------------------------------------------------------------------
- -- Close the scopes opened by 'down()'
- -----------------------------------------------------------------------------
- function cfg.stat.up(x, ...)
- match x with
- | `Forin{ ... } | `Fornum{ ... } -> scope:pop()
- | `Local{ vars, ... } -> scope:add(vars, x)
- | _ -> -- pass
- -- `Repeat has no up(), because it 'break's.
- end
- if supercfg.stat and supercfg.stat.up then supercfg.stat.up(x, ...) end
- end
-
- -----------------------------------------------------------------------------
- -- Create a separate scope for each block
- -----------------------------------------------------------------------------
- function cfg.block.down(x, ...)
- if supercfg.block and supercfg.block.down then
- local r = supercfg.block.down(x, ...)
- if r then return r end
- end
- scope:push()
- end
- function cfg.block.up(x, ...)
- scope:pop()
- if supercfg.block and supercfg.block.up then supercfg.block.up(x, ...) end
- end
- cfg.binder = supercfg.binder
- walk[kind](cfg, ast, ...)
-end
-
--- Public entry point: indexing walk_id with any kind selector
--- ('expr', 'stat', 'block', 'guess', ...) yields a function that
--- forwards to _walk_id with that selector.
-local mt = { __index = |_,k| |...| _walk_id(k, ...) }
-walk_id = setmetatable({ }, mt)
+++ /dev/null
---------------------------------------------------------------------------------
---
--- Scopes: this library helps keeping track of identifier scopes,
--- typically in code walkers.
---
--- * scope:new() returns a new scope instance s
---
--- * s:push() bookmarks the current set of variables, so the it can be
--- retrieved next time a s:pop() is performed.
---
--- * s:pop() retrieves the last state saved by s:push(). Calls to
--- :push() and :pop() can be nested as deep as one wants.
---
--- * s:add(var_list, val) adds new variable names (string) into the
--- scope, as keys. val is the (optional) value associated with them:
--- it allows to attach arbitrary information to variables, e.g. the
--- statement or expression that created them.
---
--- * s:push(var_list, val) is a shortcut for
--- s:push(); s:add(var_list, val).
---
--- * s.current is the current scope, a table with variable names as
--- keys and their associated value val (or 'true') as value.
---
---------------------------------------------------------------------------------
-
--- Identifier-scope tracker: [current] maps in-scope names to an
--- attached value, [stack] holds the saved levels.
-scope = { }
-scope.__index = scope
-
--- Build a fresh scope object with a single (empty) innermost level.
-function scope:new()
- local instance = setmetatable ({ current = { } }, self)
- instance.stack = { instance.current }
- return instance
-end
-
--- Bookmark the current variable set so that s:pop() can restore it;
--- optional (vars, val) arguments are added right away, as in s:add().
-function scope:push(...)
- local saved = table.shallow_copy (self.current)
- table.insert (self.stack, saved)
- if ... then return self:add(...) end
-end
-
--- Restore the variable set saved by the matching s:push().
-function scope:pop()
- self.current = table.remove (self.stack)
-end
-
--- Register every `Id{name} of [vars] in the current level, mapped to
--- [val] (defaulting to true); a trailing `Dots binder is tolerated.
-function scope:add (vars, val)
- val = val or true
- for i = 1, #vars do
- local id = vars[i]
- assert (id.tag=='Id' or (id.tag=='Dots' and i==#vars))
- if id.tag=='Id' then self.current[id[1]] = val end
- end
-end
-
-return scope
\ No newline at end of file
+++ /dev/null
---------------------------------------------------------------------------------
--- Metalua
--- Summary: Table-to-source serializer
---------------------------------------------------------------------------------
---
--- Copyright (c) 2008-2009, Fabien Fleutot <metalua@gmail.com>.
---
--- This software is released under the MIT Licence, see licence.txt
--- for details.
---
---------------------------------------------------------------------------------
---
--- Serialize an object into a source code string. This string, when passed as
--- an argument to loadstring()(), returns an object structurally identical
--- to the original one.
---
--- The following are supported:
---
--- * strings, numbers, booleans, nil
---
--- * functions without upvalues
---
--- * tables thereof. There is no restriction on keys; recursive and shared
--- sub-tables are handled correctly.
---
--- Caveat: metatables and environments aren't saved; this might or might not
--- be what you want.
---------------------------------------------------------------------------------
-
-local no_identity = { number=1, boolean=1, string=1, ['nil']=1 }
-
-function serialize (x)
-
- local gensym_max = 0 -- index of the gensym() symbol generator
- local seen_once = { } -- element->true set of elements seen exactly once in the table
- local multiple = { } -- element->varname set of elements seen more than once
- local nested = { } -- transient, set of elements currently being traversed
- local nest_points = { }
- local nest_patches = { }
-
- -- Generate fresh indexes to store new sub-tables:
- local function gensym()
- gensym_max = gensym_max + 1 ; return gensym_max
- end
-
- -----------------------------------------------------------------------------
- -- `nest_points' are places where a (recursive) table appears within
- -- itself, directly or not. for instance, all of these chunks
- -- create nest points in table `x':
- --
- -- "x = { }; x[x] = 1"
- -- "x = { }; x[1] = x"
- -- "x = { }; x[1] = { y = { x } }".
- --
- -- To handle those, two tables are created by `mark_nest_point()':
- --
- -- * `nest_points [parent]' associates all keys and values in table
- -- parent which create a nest_point with boolean `true'
- --
- -- * `nest_patches' contains a list of `{ parent, key, value }'
- -- tuples creating a nest point. They're all dumped after all the
- -- other table operations have been performed.
- --
- -- `mark_nest_point (p, k, v)' fills tables `nest_points' and
- -- `nest_patches' with informations required to remember that
- -- key/value `(k,v)' creates a nest point in parent table `p'. It
- -- also marks `p' as occuring multiple times, since several
- -- references to it will be required in order to patch the nest
- -- points.
- -----------------------------------------------------------------------------
- local function mark_nest_point (parent, k, v)
- local nk, nv = nested[k], nested[v]
- assert (not nk or seen_once[k] or multiple[k])
- assert (not nv or seen_once[v] or multiple[v])
- local mode = (nk and nv and "kv") or (nk and "k") or ("v")
- local parent_np = nest_points [parent]
- local pair = { k, v }
- if not parent_np then parent_np = { }; nest_points [parent] = parent_np end
- parent_np [k], parent_np [v] = nk, nv
- table.insert (nest_patches, { parent, k, v })
- seen_once [parent], multiple [parent] = nil, true
- end
-
- -----------------------------------------------------------------------------
- -- 1st pass, list the tables and functions which appear more than once in `x'
- -----------------------------------------------------------------------------
- local function mark_multiple_occurences (x)
- if no_identity [type(x)] then return end
- if seen_once [x] then seen_once [x], multiple [x] = nil, true
- elseif multiple [x] then -- pass
- else seen_once [x] = true end
-
- if type (x) == 'table' then
- nested [x] = true
- for k, v in pairs (x) do
- if nested[k] or nested[v] then mark_nest_point (x, k, v) else
- mark_multiple_occurences (k)
- mark_multiple_occurences (v)
- end
- end
- nested [x] = nil
- end
- end
-
- local dumped = { } -- multiply occuring values already dumped in localdefs
- local localdefs = { } -- already dumped local definitions as source code lines
-
-
- -- mutually recursive functions:
- local dump_val, dump_or_ref_val
-
- ------------------------------------------------------------------------------
- -- if `x' occurs multiple times, dump the local var rather than the
- -- value. If it's the first time it's dumped, also dump the content
- -- in localdefs.
- ------------------------------------------------------------------------------
- function dump_or_ref_val (x)
- if nested[x] then return 'false' end -- placeholder for recursive reference
- if not multiple[x] then return dump_val (x) end
- local var = dumped [x]
- if var then return "_[" .. var .. "]" end -- already referenced
- local val = dump_val(x) -- first occurence, create and register reference
- var = gensym()
- table.insert(localdefs, "_["..var.."]="..val)
- dumped [x] = var
- return "_[" .. var .. "]"
- end
-
- -----------------------------------------------------------------------------
- -- 2nd pass, dump the object; subparts occuring multiple times are dumped
- -- in local variables, which can then be referenced multiple times;
- -- care is taken to dump local vars in an order which repect dependencies.
- -----------------------------------------------------------------------------
- function dump_val(x)
- local t = type(x)
- if x==nil then return 'nil'
- elseif t=="number" then return tostring(x)
- elseif t=="string" then return string.format("%q", x)
- elseif t=="boolean" then return x and "true" or "false"
- elseif t=="function" then
- return string.format ("loadstring(%q,'@serialized')", string.dump (x))
- elseif t=="table" then
-
- local acc = { }
- local idx_dumped = { }
- local np = nest_points [x]
- for i, v in ipairs(x) do
- if np and np[v] then
- table.insert (acc, 'false') -- placeholder
- else
- table.insert (acc, dump_or_ref_val(v))
- end
- idx_dumped[i] = true
- end
- for k, v in pairs(x) do
- if np and (np[k] or np[v]) then
- --check_multiple(k); check_multiple(v) -- force dumps in localdefs
- elseif not idx_dumped[k] then
- table.insert (acc, "[" .. dump_or_ref_val(k) .. "] = " .. dump_or_ref_val(v))
- end
- end
- return "{ "..table.concat(acc,", ").." }"
- else
- error ("Can't serialize data of type "..t)
- end
- end
-
- -- Patch the recursive table entries:
- local function dump_nest_patches()
- for _, entry in ipairs(nest_patches) do
- local p, k, v = unpack (entry)
- assert (multiple[p])
- local set = dump_or_ref_val (p) .. "[" .. dump_or_ref_val (k) .. "] = " ..
- dump_or_ref_val (v) .. " -- rec "
- table.insert (localdefs, set)
- end
- end
-
- mark_multiple_occurences (x)
- local toplevel = dump_or_ref_val (x)
- dump_nest_patches()
-
- if next (localdefs) then
- -- Dump local vars containing shared or recursive parts,
- -- then the main table using them.
- return "local _={ }\n" ..
- table.concat (localdefs, "\n") ..
- "\nreturn " .. toplevel
- else
- -- No shared part, straightforward dump:
- return "return " .. toplevel
- end
-end
+++ /dev/null
---
--- strict.lua
--- checks uses of undeclared global variables
--- All global variables must be 'declared' through a regular assignment
--- (even assigning nil will do) in a main chunk before being used
--- anywhere or assigned to inside a function.
---
-
-local getinfo, error, rawset, rawget = debug.getinfo, error, rawset, rawget
-
-local mt = getmetatable(_G)
-if mt == nil then
- mt = {}
- setmetatable(_G, mt)
-end
-
-__strict = true
-mt.__declared = {}
-
-local function what ()
- local d = getinfo(3, "S")
- return d and d.what or "C"
-end
-
-mt.__newindex = function (t, n, v)
- if __strict and not mt.__declared[n] then
- local w = what()
- if w ~= "main" and w ~= "C" then
- error("assign to undeclared variable '"..n.."'", 2)
- end
- mt.__declared[n] = true
- end
- rawset(t, n, v)
-end
-
-mt.__index = function (t, n)
- if __strict and not mt.__declared[n] and what() ~= "C" then
- error("variable '"..n.."' is not declared", 2)
- end
- return rawget(t, n)
-end
-
-function global(...)
- for _, v in ipairs{...} do mt.__declared[v] = true end
-end
+++ /dev/null
-do
- local xrequire, n, ind = require, 0, "| "
- function require (x)
- print(ind:rep(n).."/ require: "..x)
- n=n+1
- local y = xrequire(x)
- n=n-1
- print(ind:rep(n).."\\_");
- return y
- end
-end
+++ /dev/null
-@CLS
-
-@REM *** Settings ***
-
-@REM BASE = root directory of metalua sources
-@REM DISTRIB_BIN = metalua executables target directory
-@REM DISTRIB_LIB = metalua libraries target directory, can be an existing path referenced in LUA_PATH
-@REM LUA, LUAC = Lua executables, provided by metalua by default.
-
-@REM --- BEGINNING OF USER-EDITABLE PART ---
-
-@set BASE=%CD%
-@set DISTRIB=%BASE%\..\distrib
-@set DISTRIB_BIN=%DISTRIB%\bin
-@set DISTRIB_LIB=%DISTRIB%\lib
-@set LUA=%DISTRIB_BIN%\lua
-@set LUAC=%DISTRIB_BIN%\luac
-
-@REM --- END OF USER-EDITABLE PART ---
-
-
-@REM *** Create the distribution directories, populate them with lib sources ***
-
-@set LUA_PATH=?.luac;?.lua;%DISTRIB_LIB%\?.luac;%DISTRIB_LIB%\?.lua
-@set LUA_MPATH=?.mlua;%DISTRIB_LIB%\?.mlua
-
-mkdir %DISTRIB%
-mkdir %DISTRIB_BIN%
-mkdir %DISTRIB_LIB%
-xcopy /y /s lib %DISTRIB_LIB%
-xcopy /y /s bin %DISTRIB_BIN%
-
-@REM *** Generate a callable batch metalua.bat script ***
-
-echo @set LUA_PATH=?.luac;?.lua;%DISTRIB_LIB%\?.luac;%DISTRIB_LIB%\?.lua > %DISTRIB_BIN%\metalua.bat
-echo @set LUA_MPATH=?.mlua;%DISTRIB_LIB%\?.mlua >> %DISTRIB_BIN%\metalua.bat
-echo @%LUA% %DISTRIB_LIB%\metalua.luac %%* >> %DISTRIB_BIN%\metalua.bat
-
-
-@REM *** Compiling the parts of the compiler written in plain Lua ***
-
-cd compiler
-%LUAC% -o %DISTRIB_LIB%\metalua\bytecode.luac lopcodes.lua lcode.lua ldump.lua compile.lua
-%LUAC% -o %DISTRIB_LIB%\metalua\mlp.luac lexer.lua gg.lua mlp_lexer.lua mlp_misc.lua mlp_table.lua mlp_meta.lua mlp_expr.lua mlp_stat.lua mlp_ext.lua
-cd ..
-
-@REM *** Bootstrap the parts of the compiler written in metalua ***
-
-%LUA% %BASE%\build-utils\bootstrap.lua %BASE%\compiler\mlc.mlua output=%DISTRIB_LIB%\metalua\mlc.luac
-%LUA% %BASE%\build-utils\bootstrap.lua %BASE%\compiler\metalua.mlua output=%DISTRIB_LIB%\metalua.luac
-
-@REM *** Finish the bootstrap: recompile the metalua parts of the compiler with itself ***
-
-call %DISTRIB_BIN%\metalua -vb -f compiler\mlc.mlua -o %DISTRIB_LIB%\metalua\mlc.luac
-call %DISTRIB_BIN%\metalua -vb -f compiler\metalua.mlua -o %DISTRIB_LIB%\metalua.luac
-
-@REM *** Precompile metalua libraries ***
-%LUA% %BASE%\build-utils\precompile.lua directory=%DISTRIB_LIB% command=%DISTRIB_BIN%\metalua
+++ /dev/null
-#! /bin/sh
-
-# --- BEGINNING OF USER-EDITABLE PART ---
-
-# Metalua sources
-BASE=${PWD}
-
-# Temporary building location.
-# Upon installation, everything will be moved to ${INSTALL_LIB} and ${INSTALL_BIN}
-
-if [ -z "${BUILD}" ]; then
- BUILD=$(mkdir -p ../build; cd ../build; pwd)
-fi
-
-if [ -z "${BUILD_BIN}" ]; then
- BUILD_BIN=${BUILD}/bin
-fi
-
-if [ -z "${BUILD_LIB}" ]; then
- BUILD_LIB=${BUILD}/lib
-fi
-
-# Where to place the final results
-# DESTDIR=
-# INSTALL_BIN=/usr/local/bin
-# INSTALL_LIB=/usr/local/lib/lua/5.1
-if [ -z "${INSTALL_BIN}" ]; then
- INSTALL_BIN=~/local/bin
-fi
-
-if [ -z "${INSTALL_LIB}" ]; then
- INSTALL_LIB=~/local/lib/lua
-fi
-
-# Where to find Lua executables.
-# On many Debian-based systems, those can be installed with "sudo apt-get install lua5.1"
-LUA=$(which lua)
-LUAC=$(which luac)
-
-# --- END OF USER-EDITABLE PART ---
-
-if [ -z ${LUA} ] ; then echo "Error: no lua interpreter found"; fi
-if [ -z ${LUAC} ] ; then echo "Error: no lua compiler found"; fi
-
-if [ -f ~/.metaluabuildrc ] ; then . ~/.metaluabuildrc; fi
-
-if [ -z "$LINEREADER" ] && which -s rlwrap; then LINEREADER=rlwrap; fi
-
-echo '*** Lua paths setup ***'
-
-export LUA_PATH="?.luac;?.lua;${BUILD_LIB}/?.luac;${BUILD_LIB}/?.lua"
-export LUA_MPATH="?.mlua;${BUILD_LIB}/?.mlua"
-
-echo '*** Create the distribution directories, populate them with lib sources ***'
-
-mkdir -p ${BUILD_BIN}
-mkdir -p ${BUILD_LIB}
-cp -Rp lib/* ${BUILD_LIB}/
-# cp -Rp bin/* ${BUILD_BIN}/ # No binaries provided for unix (for now)
-
-echo '*** Generate a callable metalua shell script ***'
-
-cat > ${BUILD_BIN}/metalua <<EOF
-#!/bin/sh
-export LUA_PATH='?.luac;?.lua;${BUILD_LIB}/?.luac;${BUILD_LIB}/?.lua'
-export LUA_MPATH='?.mlua;${BUILD_LIB}/?.mlua'
-${LUA} ${BUILD_LIB}/metalua.luac \$*
-EOF
-chmod a+x ${BUILD_BIN}/metalua
-
-echo '*** Compiling the parts of the compiler written in plain Lua ***'
-
-cd compiler
-${LUAC} -o ${BUILD_LIB}/metalua/bytecode.luac lopcodes.lua lcode.lua ldump.lua compile.lua
-${LUAC} -o ${BUILD_LIB}/metalua/mlp.luac lexer.lua gg.lua mlp_lexer.lua mlp_misc.lua mlp_table.lua mlp_meta.lua mlp_expr.lua mlp_stat.lua mlp_ext.lua
-cd ..
-
-echo '*** Bootstrap the parts of the compiler written in metalua ***'
-
-${LUA} ${BASE}/build-utils/bootstrap.lua ${BASE}/compiler/mlc.mlua output=${BUILD_LIB}/metalua/mlc.luac
-${LUA} ${BASE}/build-utils/bootstrap.lua ${BASE}/compiler/metalua.mlua output=${BUILD_LIB}/metalua.luac
-
-echo '*** Finish the bootstrap: recompile the metalua parts of the compiler with itself ***'
-
-${BUILD_BIN}/metalua -vb -f compiler/mlc.mlua -o ${BUILD_LIB}/metalua/mlc.luac
-${BUILD_BIN}/metalua -vb -f compiler/metalua.mlua -o ${BUILD_LIB}/metalua.luac
-
-echo '*** Precompile metalua libraries ***'
-for SRC in $(find ${BUILD_LIB} -name '*.mlua'); do
- DST=$(dirname $SRC)/$(basename $SRC .mlua).luac
- if [ $DST -nt $SRC ]; then
- echo "+ $DST already up-to-date"
- else
- echo "- $DST generated from $SRC"
- ${BUILD_BIN}/metalua $SRC -o $DST
- fi
-done
-
-echo '*** Generate make-install.sh script ***'
-
-cat > make-install.sh <<EOF2
-#!/bin/sh
-mkdir -p ${INSTALL_BIN}
-mkdir -p ${INSTALL_LIB}
-if [ -n "${DESTDIR}" ]; then
- mkdir -p ${DESTDIR}${INSTALL_BIN}
- mkdir -p ${DESTDIR}${INSTALL_LIB}
-fi
-cat > ${DESTDIR}${INSTALL_BIN}/metalua <<EOF
-#!/bin/sh
-METALUA_LIB=${INSTALL_LIB}
-export LUA_PATH="?.luac;?.lua;\\\${METALUA_LIB}/?.luac;\\\${METALUA_LIB}/?.lua"
-export LUA_MPATH="?.mlua;\\\${METALUA_LIB}/?.mlua"
-exec ${LINEREADER} ${LUA} \\\${METALUA_LIB}/metalua.luac "\\\$@"
-EOF
-
-chmod a+x ${DESTDIR}${INSTALL_BIN}/metalua
-
-cp -pR ${BUILD_LIB}/* ${DESTDIR}${INSTALL_LIB}/
-
-echo "metalua libs installed in ${INSTALL_LIB};"
-echo "metalua executable in ${INSTALL_BIN}."
-EOF2
-chmod a+x make-install.sh
-
-echo
-echo "Build completed, proceed to installation with './make-install.sh' or 'sudo ./make-install.sh'"
-echo
+++ /dev/null
--{extension "clist"}
-
--- integers from 2 to 50, by steps of 2:
-x = { i for i = 2, 50, 2 }
-
--- the same, obtained by filtering over all integers <= 50:
-y = { i for i = 1, 50 if i%2==0 }
-
--- prime numbers, implemented in an inefficient way:
-local sieve, n = { i for i=2, 100 }, 1
-while n < #sieve do
- sieve = {
- i for i in values(sieve[1 ... n]);
- i for i in values(sieve[n+1 ... #sieve]) if i%sieve[n] ~= 0 }
- n += 1
-end
-
-print "Prime numbers < 100, computed with lists by comprehension:"
-table.print(sieve)
-
+++ /dev/null
--{ extension 'log' }
--{ extension 'H' }
-
-require 'metalua.compiler'
-
-TEST_CASES = {
-
- { "everything should be renamed",
- +{ block:
- local x = 3
- print(x) },
- { } },
-
- { "don't get confused between global and local x",
- +{ block:
- print(x)
- local x = 3
- print(x) },
- { alpha = { } } },
-
- { "don't rename keepme",
- +{ block:
- keepme()
- dont_keep_me() },
- { keep = 'keepme' , alpha = `Local{ { }, { } } } },
-
- { "print shouldn't be renamed the 2nd and 3rd time",
- +{ block:
- print(i)
- -{!`Call{`Id 'print', `String 'hi' } }
- -{!+{print 'hi'}} },
- { } },
-
- { "print shouldn't be renamed at all",
- +{ block:
- print(i)
- -{`Call{`Id 'print', `String 'hi' } }
- -{+{print 'hi'}} },
- { keep = 'print' } },
-
- { "Rename print with a pre-specified name, rename x freely, not y",
- +{ block:
- print (x, y) },
- { alpha = +{stat: local RENAMED_PRINT = print},
- keep = {y = true} } } }
-
-for case in ivalues(TEST_CASES) do
- local comment, ast, cfg = unpack(case)
- print ('\n'..'-':rep(70))
- print (comment)
- local H = H:new(cfg)
- print ("\nBEFORE PARSING:")
- $log (ast, H, 50)
- H(ast)
- print ("\nAFTER PARSING:")
- $log (ast, H, 50)
-end
-
-print ('\n'..'=':rep(70))
-$log(TEST_CASES,40)
\ No newline at end of file
+++ /dev/null
--{ block:
- -{ extension 'log' }
- -{ extension 'H' }
-
- require 'metalua.dollar'
-
- local H = H:new()
- print("initial H.alpha", H.alpha)
-
-
- function dollar.Q(cond, iftrue, iffalse)
- local b = +{ block:
- local v
- if -{!cond} then v = -{!iftrue}
- else v = -{!iffalse} end }
- local r = `Stat{ b, +{v} }
- H(r)
- return r
- end
-
- $log(H)
- return H.alpha }
-
-x=1 ; y=$Q(x==1, 'one', 'two') ; print(y)
-x=2 ; y=$Q(x==1, 'one', 'two') ; print(y)
\ No newline at end of file
+++ /dev/null
--- The message numbered (1) will display at compilation, i.e. before
--- (2) is displayed (supposing that compilation and execution are
--- both requested)
-
-print "(2) This is a runtime message: Hello world!"
--{ print "(1) This is a compile-time message: Hello world!" }
-
+++ /dev/null
-----------------------------------------------------------------------------------
--- This samples walks you through the writing of a simple extension.
---
--- Lua makes a difference between statements and expressions, and it's sometimes
--- cumbersome to put a statement where an expression is expected. Among others,
--- if-then-else constructs are statements, so you cannot write:
---
--- > local foo = if bar then 1 else 2
---
--- Indeed, an expression is expected at the right of the equal, and "if ..." is
--- a statement, which expects nested statements as "then" and "else" clauses.
--- The example above must therefore be written:
---
--- > local foo
--- > if bar then foo=1 else foo=2 end
---
---
--- Let's allow if-then-[elseif*]-[else] constructs to be used in an expression's
--- context. In such a context, 'then' and 'else' are expected to be followed by
--- expressions, not statement blocks.
---
--- Stuff you probably need to understand, at least summarily, to follow this
--- code:
--- * Lua syntax
--- * the fact that -{ ... } switches metalua into compile time mode
--- * mlp, the dynamically extensible metalua parser, which will be extended with
--- the new construct at compile time.
--- * gg, the grammar generator that allows to build and extend parsers, and with
--- which mlp is implemented.
--- * the fact that code can be interchangeably represented as abstract syntax
--- trees with `Foo{ bar } notations (easy to manipulate) or as quotes inside a
--- +{ ... } (easy to read).
---
-----------------------------------------------------------------------------------
-
-
-----------------------------------------------------------------------------------
--- How to turn this file in a proper syntax extension.
--- ===================================================
---
--- To turn this example's metalevel 0 code into a regular extension:
--- * Put everything inside the -{block: ... } in a separate .mlua file;
--- * save it in a directory called 'extension', which is itself
--- in your $LUA_MPATH. For instance, if your $LUA_MPATH contains
--- '~/local/metalua/?.mlua', you can save it as
--- '~/local/metalua/extension-compiler/ifexpr.mlua'
--- * Load the extension with "-{ extension 'ifexpr' }", whenever you want to
--- use it.
-----------------------------------------------------------------------------------
-
--{ block: -- Enter metalevel 0, where we'll start hacking the parser.
-
- -------------------------------------------------------------------------------
- -- Most extension implementations are cut in two parts: a front-end which
- -- parses the syntax into some custom tree, and a back-end which turns that
- -- tree into a compilable AST. Since the front-end calls the back-end, the
- -- later has to be declared first.
- -------------------------------------------------------------------------------
-
- -------------------------------------------------------------------------------
- -- Back-end:
- -- =========
- -- This is the builder that turns the parser's result into an expression AST.
- -- Local vars:
- -- -----------
- -- elseifthen_list : list of { condition, expression_if_true } pairs,
- -- opt_else: either the expression in the 'else' final clause if any,
- -- or false if there's no else clause.
- -- v: the variable in which the result will be stored.
- -- ifstat: the if-then-else statement that will be generated from
- -- then if-then-else expression, then embedded in a `Stat{}
- --
- -- The builder simply turns all expressions into blocks, so that they fit in
- -- a regular if-then-else statement. Then the resulting if-then-else is
- -- embedded in a `Stat{ } node, so that it can be placed where an expression
- -- is expected.
- --
- -- The variable in which the result is stored has its name generated by
- -- mlp.gensym(). This way we're sure there will be no variable capture.
- -- When macro hygiene problems are more complex, it's generally a good
- -- idea to give a look at the extension 'H'.
- -------------------------------------------------------------------------------
- local function builder (x)
- local elseifthen_list, opt_else = unpack (x)
-
- local v = mlp.gensym 'ife' -- the selected expr will be stored in this var.
- local ifstat = `If{ }
- for y in ivalues (elseifthen_list) do
- local cond, val = unpack (y)
- table.insert (ifstat, cond)
- table.insert (ifstat, { `Set{ {v}, {val} } }) -- change expr into stat.
- end
- if opt_else then -- the same for else clause, except that there's no cond.
- table.insert (ifstat, { `Set{ {v}, {opt_else} } })
- end
- return `Stat{ +{block: local -{v}; -{ifstat}}, v }
- end
-
- -------------------------------------------------------------------------------
- -- Front-end:
- -- ==========
- -- This is mostly the same as the regular if-then-else parser, except that:
- -- * it's added to the expression parser, not the statement parser;
- -- * blocks after conditions are replaced by exprs;
- --
- -- In Lua, 'end' traditionally terminates a block, not an
- -- expression. Should there be a 'end' to respect if-then-else
- -- usual syntax, or should there be none, to respect usual implicit
- -- expression ending? I chose not to put an 'end' here, but other people
- -- might have other tastes...
- -------------------------------------------------------------------------------
- mlp.expr:add{ name = 'if-expression',
- 'if',
- gg.list { gg.sequence{mlp.expr, "then", mlp.expr}, separators="elseif" },
- gg.onkeyword{ 'else', mlp.expr },
- builder = builder }
-
-} -- Back to metalevel 1, with the new feature enabled
-
-local foo, bar
-
-------------------------------------------------------------
--- The parser will read this as:
--- { { { `Id 'foo', `Number 1 },
--- { `Id 'bar', `Number 2 } },
--- `Number 3 },
--- then feed it to 'builder', which will turn it into an AST
-------------------------------------------------------------
-
-local x = if false then 1 elseif bar then 2 else 3
-
-------------------------------------------------------------
--- The result after builder will be:
--- `Stat{ +{block: local $v$
--- if foo then $v$ = 1
--- elseif bar then $v$ = 2
--- else $v$ = 3
--- end }, `Id "$v$" }
-------------------------------------------------------------
-
-assert (x == 3)
-print "It seems to work..."
\ No newline at end of file
+++ /dev/null
--- This is a simple and somewhat stupid example of how to switch
--- lexers dynamically. Behind a V, X and Y are the only reserved
--- keywords. In normal conditions, X and Y aren't keywords and can be
--- used as variables.
-
--{ block:
- require 'lexer'
- local my_lexer = lexer.lexer:clone() -- no keywords
- my_lexer:add{"X", "Y"}
- mlp.lexer:add "V"
-
- function num(lx)
- local a = lx:next()
- assert(a.tag=='Number')
- return a
- end
-
- my_parser = gg.list{
- gg.multisequence{
- { "X", num, builder = |x| `Table{ x[1], +{0} } },
- { "Y", num, builder = |y| `Table{ +{0}, y[1] } },
- default = gg.sequence{ mlp.id, builder = |x| `Pair{ `String{x[1][1]},`True } } },
- separators = { ',', ';' },
- builder = function(l) l.tag='Table'; return l end }
-
- mlp.expr:add{ "V", gg.with_lexer(my_lexer, my_parser), builder = unpack } }
-
--- Use the special lexer:
-foo = V X 1, Y 2, X 3,
- for, foo, in, tag, function -- check that these aren't keywords in my_lexer
-
--- Use X and Y as Id, in the unpolluted lexer:
-print "Vector:"
-X = table.tostring(foo, 60)
-print (X)
-
-print "Sum:" -- Ready for a functional one-liner? :)
-Y = |v| table.ifold (|a,b| table.imap (|c,d| c+d, a, b), {0,0}, v)
-table.print (Y(foo))
-
+++ /dev/null
--{extension 'match'}
-
-WIDTH = 50
-function p(msg)
- io.write(msg, ' ':rep(WIDTH-#msg))
- io.flush()
-end
-
-p "Basic match"
-match 1 with 1 -> print 'ok' end
-
-p "Sequence match"
-match 3, 4 with
-| 1, 2 -> print 'KO'
-| 3, 4 -> print 'ok'
-end
-
-p "Id binding"
-match 3, 4 with
-| 1, 2 -> print 'KO'
-| x, y -> print 'ok'
-end
-
-p "Table destructuring & non-litteral tested term"
-match {1, 2} with
-|{a, 2} -> assert(a==1); print 'ok'
-end
-
-p "Pattern group"
-match {'?'} with
-|1|2|3 -> print 'KO'
-|{...} -> print 'ok'
-end
-
-p "Multi-level destructuring"
-match {{1000}} with
-|{{2000}} -> print 'KO'
-|{{3000}} -> print 'KO'
-|{{1000}} -> print 'ok'
-end
-
-p "Guard"
-match 1 with
-| 1 if false -> print 'KO'
-| 1 -> print 'ok'
-end
-
-p "Guard with bound var"
-match 1 with
-| a if a ~= 1 -> print 'KO'
-| a if a == 1 -> print 'ok'
-end
-
-p "Non linear var & destructuring"
-match {1, {2}} with
-| {a, {a}} -> print 'KO'
-| {a, {b}} -> print 'ok'
-end
-
-p "Non-linear vars on a sequence"
-match 1, 2 with
-| a, a -> print 'KO'
-| a, b -> print 'ok'
-end
-
-p "Multiple _ wildcards"
-match 1, 2 with
-| _, _ -> print 'ok'
-| a, b -> print 'KO'
-end
-
-p "Regexp & non-linear vars"
-match 'toto' with
-| 't(.)t(.)' / { a, a } -> print (a..'k')
-end
-
-p "Nested match & ..."
-match { { 'o', 'k', '!' } } with
-| { t } -> match t with
- | { a, b } -> print 'KO'
- | { a, b, ... } -> print (a..b)
- | _ -> print 'KO'
- end
-| _ -> print 'KO'
-end
-
+++ /dev/null
-Metalint 0.1 - INSTALL.TXT
-==========================
-
-Metalint is a regular Metalua program, and relies on Metalua compilation
-libraries. You must therefore have a working Metalua installation on your
-system. You can run it with: "metalua metalint.mlua -- <metalint arguments>".
-For instance, to check metalint itself:
-
- ~/src/metalua/src/sandbox$ metalua metalint.mlua -- metalint.mlua
- File metalint.mlua checked successfully
- ~/src/metalua/src/sandbox$
-
-You can also precompile it:
-
- ~/src/metalua/src/sandbox$ metalua metalint.mlua -s '#!/usr/bin/env lua' -o metalint
- ~/src/metalua/src/sandbox$ ./metalint lint.mlua
- File lint.mlua checked successfully
- ~/src/metalua/src/sandbox$
-
-Beware that even when precompiled, it still requires the Metalua runtime libs in LUA_PATH.
-
-Don't forget to set the LUA_DPATH environment variable!
\ No newline at end of file
+++ /dev/null
-Metalint
-
-Copyright (c) 2006-2008 Fabien Fleutot <metalua@gmail.com>
-
-Metalint is available under the MIT licence.
-
-MIT License
-===========
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+++ /dev/null
-Metalint 0.2 - README.TXT
-=========================
-
-Metalint is a utility that checks Lua and Metalua source files for global
-variables usage. Beyond checking toplevel global variables, it also checks
-fields in modules: for instance, it will catch typos such as taable.insert(),
-both also table.iinsert().
-
-Metalint works with declaration files, which list which globals are declared,
-and what can be done with them. The syntax is:
-
-DECL ::= (DECL_ELEM ";"?) *
-DECL_ELEM ::= NAME | "module" NAME DECL "end" | "free" NAME | "private" DECL_ELEM
-NAME ::= <identifier> | <string>
-
-Identifiers and strings are the same as in Lua, except that the only reserved
-keywords are "free", "module", "end" and "private". A variable name can be
-equivalently specified as a string or as an identifier. Lua comments are allowed
-in declaration files, short and long. Check for *.dlua files in the distribution
-for examples.
-
-Meaning of declaration elements:
-
-- Standalone names declare the existence of a variable. This variable is not a
- module, i.e. people must not extract fields from it. For instance, the
- function ipairs() will simply be declared as: "ipairs". With this declaration,
- it's an error to write, for instance, "ipairs.some_field".
-
-- Names preceded with "free" can be used as you want, including arbitrary
- sub-indexing. This is useful for global tables not used as modules, and for
- modules you're too lazy to fully declare. For instance, the declaration "free
- _G" allows you to bypass all checkings, as long as you access stuff through _G
- rather than directly (i.e. "table.iinsert" will fail, but "_G.table.iinsert"
- will be accepted).
-
-- modules contain field declarations. For instance, the contents of the standard
- "os" module will be declared as "module os exit ; setlocale; date; [...]
- execute end".
-
-Declaration files are loaded:
-
-- manually, by passing "-f filename", "-l libname" or "-e
- decl_literal_expression" as options to the checking program. Options are
- processed in order, i.e. if you load a library after a file name to check,
- this library won't be accessible while checking the dource file.
-
-- automatically, when a call to "require()" is found in the code.
-
-- declaration library "base" is automatically loaded.
-
-Declaration library files are retrieved with the same algorithm as for Lua
-libraries, except that the pattern string is taken from environment variable
-LUA_DPATH rather than LUA_PATH or LUA_CPATH. For instance, if
-LUA_DPATH="./?.dlua" and a "require 'walk.id'" is found, the checker will
-attempt to load "./walk/id.dlua". It won't fail if it can't find it, but then,
-attempts to use globals declared by walk.id are likely to fail.
-
-The metalua base libraries, which include Lua base libraries, can be found in
-base.dlua. They're automatically loaded when you run metalint.
-
-Limitations: if you try to affect custom names to modules, e.g. "local
-wi=require 'walk.id'", the checker won't be able to check your usage of
-subfields of "wi". Similarly, if you redefine require() or module(), or create
-custom versions of these, metalint will be lost. Finally, computed access to
-modules are obviously not checked, i.e. "local x, y = 'iinsert', { };
-table[x](y, 1)" will be accepted.
-
-Future: Metalint is intended to support richer static type checkings, including
-function argument types. The idea is not to formally prove type soundness, but
-to accelerate the discovery of many silly bugs when using a (possibly third
-party) library. However, to perform interesting checks, the type declaration
-system must support a couple of non-trivial stuff like union types and higher
-order functions. Moreover, runtime checking code could optionally be inserted to
-check that a function API is respected when it's called (check the types
-extension in Metalua). Stay tuned.
-
-Notice that metalint can easily be turned into a smarter variable localizer,
-which would change references to module elements into local variables.
-For instance, it would add "local _table_insert = table.insert" at the beginning
-of the file, and change every instance of "table.insert" into a reference to the
-local variable. This would be much more efficient than simply adding a "local
-table=table".
-
-
-
-Finally, to accelerate the migration of existing codebases, a decl_dump()
-function is provided with metalint, which attempts to generate a declaration for
-a module currently loaded in RAM. The result is not always perfect, but remains
-a serious time saver:
-
-~/src/metalua/src/sandbox$ metalua
-Metalua, interactive REPLoop.
-(c) 2006-2008 <metalua@gmail.com>
-M> require "metalint"
-M> require "walk"
-M> decl_dump ("walk", "decl/walk.dlua")
-M> ^D
-~/src/metalua/src/sandbox$ cat decl/walk.dlua
-module walk
- debug;
- module tags
- module stat
- Forin;
- Do;
- Set;
- Fornum;
- Invoke;
- While;
- Break;
- Call;
- Label;
- Goto;
- Local;
- If;
- Repeat;
- Localrec;
- Return;
- end;
- module expr
- True;
- String;
- Index;
- Paren;
- Id;
- False;
- Invoke;
- Function;
- Op;
- Number;
- Table;
- Dots;
- Nil;
- Stat;
- Call;
- end;
- end;
- expr_list;
- binder_list;
- guess;
- expr;
- block;
- module traverse
- expr;
- block;
- stat;
- expr_list;
- end;
- stat;
-end;
-
-NEW SINCE 0.1:
-==============
-
-Feature-wise, option -a replaces all references to declared fields with locals
-and stores the compiled result in a .luac compiled file
-
-Architecture-wise, the system now remembers where (i.e. by which require()
-statement, if applicable) a given field has been declared. This is necessary for
-the autolocal feature to work correctly.
\ No newline at end of file
+++ /dev/null
-rawtype;
-gcinfo;
-module os
- exit;
- setlocale;
- date;
- getenv;
- difftime;
- remove;
- time;
- clock;
- tmpname;
- rename;
- execute;
-end;
-o;
-getfenv;
-const;
-pairs;
-max;
-tonumber;
-module io
- lines;
- write;
- close;
- flush;
- open;
- output;
- type;
- read;
- stderr;
- stdin;
- input;
- stdout;
- popen;
- tmpfile;
-end;
-load;
-"module";
-free _G;
-rawpairs;
-module coroutine
- resume;
- yield;
- status;
- wrap;
- create;
- running;
-end;
-rawipairs;
-loadstring;
-module string
- split;
- match;
- gmatch;
- upper;
- gsub;
- format;
- lower;
- sub;
- gfind;
- find;
- char;
- dump;
- undump;
- reverse;
- byte;
- strmatch;
- len;
- rep;
-end;
-module metalua
- version;
- ext_compiler_prefix;
- ext_runtime_prefix;
-end;
-module package
- path;
- metalua_loader;
- cpath;
- findfile;
- free preload;
- free loaders;
- config;
- free loaded;
- loadlib;
- mpath;
- seeall;
-end;
-module table
- shallow_copy;
- iforeach;
- tostring;
- getn;
- foreachi;
- foreach;
- sort;
- ifold;
- print;
- icat;
- isub;
- transpose;
- iany;
- override;
- imap;
- izip;
- range;
- deep_copy;
- cat;
- iall;
- maxn;
- remove;
- concat;
- iflatten;
- irev;
- ifilter;
- setn;
- insert;
-end
-min;
-printf;
-require;
-unpack;
-global;
-setmetatable;
-next;
-ipairs;
-parser;
-rawequal;
-collectgarbage;
-arg;
-newproxy;
-values;
-xpcall;
-rawset;
-keys;
-tostring;
-print;
-dostring;
-decl_builder;
-module math
- log;
- max;
- acos;
- huge;
- ldexp;
- pi;
- cos;
- tanh;
- pow;
- deg;
- tan;
- cosh;
- sinh;
- random;
- randomseed;
- frexp;
- ceil;
- floor;
- rad;
- abs;
- sqrt;
- modf;
- asin;
- min;
- mod;
- fmod;
- log10;
- atan2;
- exp;
- sin;
- atan;
-end
-lua_loadstring;
-pcall;
-assert;
-type;
-getmetatable;
-select;
-ivalues;
-rawget;
-id;
-setfenv;
-module debug
- getupvalue;
- debug;
- sethook;
- getmetatable;
- gethook;
- setmetatable;
- setlocal;
- traceback;
- setfenv;
- getinfo;
- setupvalue;
- getlocal;
- getregistry;
- getfenv;
-end
-module strict end
-dofile;
-error;
-loadfile;
+++ /dev/null
-free clopts;
\ No newline at end of file
+++ /dev/null
-module lexer
- module lexer
- save;
- newstream;
- extract_long_string;
- extract_word;
- extract_short_string;
- clone;
- free __index;
- is_keyword;
- peek;
- module sym
- end;
- extract;
- next;
- restore;
- module extractors
- end;
- module alpha
- end;
- sync;
- takeover;
- module patterns
- spaces;
- number_exponant;
- word;
- long_string;
- short_comment;
- long_comment;
- module number_mantissa
- end;
- final_short_comment;
- end;
- extract_symbol;
- skip_whitespaces_and_comments;
- check;
- extract_number;
- module token_metatable
- end;
- add;
- end;
- free _M;
- _NAME;
- _PACKAGE;
-end;
-
-module gg
- sequence;
- _PACKAGE;
- e;
- is_parser;
- with_lexer;
- optkeyword;
- onkeyword;
- make_parser;
- _NAME;
- list;
- expr;
- free _M;
- multisequence;
- parse_error;
-end;
-
-module bytecode
- MAXPARAMS;
- metalua_compile;
- dump_file;
- dump_string;
- VARARG_ISVARARG;
- indexupvalue;
- MAX_INT;
- free _M;
- module luaU
- LUA_TSTRING;
- DumpBlock;
- DumpByte;
- DumpProtos;
- DumpCode;
- LUA_TNIL;
- endianness;
- LUA_TBOOLEAN;
- DumpConstants;
- DumpInt;
- DumpDebug;
- DumpLiteral;
- DumpNumber;
- from_int;
- DumpSize;
- LUA_TNUMBER;
- DumpHeader;
- dump;
- DumpString;
- ttype;
- make_setS;
- DumpUpvalues;
- DumpLines;
- DumpLocals;
- make_setF;
- LUA_TNONE;
- DumpFunction;
- from_double;
- end;
- LUA_MAXPARSERLEVEL;
- VARARG_NEEDSARG;
- module luaK
- infix;
- codenot;
- NO_JUMP;
- indexed;
- checkstack;
- dischargejpc;
- fixline;
- concat;
- exp2reg;
- code;
- code_label;
- exp2val;
- sethvalue;
- jumponcond;
- prefix;
- jump;
- condjump;
- ttisnumber;
- exp2anyreg;
- exp2RK;
- setsvalue;
- setnilvalue;
- _nil;
- exp2nextreg;
- getjump;
- codeAsBx;
- addk;
- need_value;
- freeexp;
- posfix;
- nilK;
- discharge2reg;
- storevar;
- setmultret;
- setlist;
- codeABx;
- MAXSTACK;
- codeABC;
- freereg;
- reserveregs;
- codecomp;
- dischargevars;
- hasjumps;
- setnvalue;
- module arith_opc
- sub;
- mul;
- not;
- len;
- pow;
- div;
- mod;
- add;
- end;
- patchlist;
- constfolding;
- getlabel;
- module test_opc
- module ne
- cond;
- opc;
- end;
- module eq
- cond;
- opc;
- end;
- module ge
- cond;
- opc;
- end;
- module gt
- cond;
- opc;
- end;
- module le
- cond;
- opc;
- end;
- module lt
- cond;
- opc;
- end;
- end;
- getjumpcontrol;
- patchtohere;
- LUA_MULTRET;
- codearith;
- boolK;
- fixjump;
- ret;
- nvalue;
- goiffalse;
- isnumeral;
- patchlistaux;
- discharge2anyreg;
- setoneret;
- patchtestreg;
- removevalues;
- getcode;
- _self;
- goiftrue;
- numberK;
- setcallreturns;
- invertjump;
- setreturns;
- stringK;
- end;
- module luaP
- MAXARG_C;
- SETARG_C;
- MAXARG_A;
- SETARG_sBx;
- MAXARG_sBx;
- MAXARG_Bx;
- GETARG_A;
- GETARG_C;
- GETARG_sBx;
- OpModeT;
- POS_C;
- GET_OPCODE;
- SIZE_B;
- module OpCode
- OP_GETTABLE;
- OP_GETGLOBAL;
- OP_NOT;
- OP_MOD;
- OP_LOADK;
- OP_TAILCALL;
- OP_TEST;
- OP_TESTSET;
- OP_LE;
- OP_GETUPVAL;
- OP_CALL;
- OP_SETTABLE;
- OP_LT;
- OP_POW;
- OP_ADD;
- OP_EQ;
- OP_SETLIST;
- OP_CONCAT;
- OP_JMP;
- OP_SETGLOBAL;
- OP_CLOSE;
- OP_SETUPVAL;
- OP_NEWTABLE;
- OP_DIV;
- OP_LEN;
- OP_CLOSURE;
- OP_SELF;
- OP_TFORLOOP;
- OP_MUL;
- OP_FORPREP;
- OP_MOVE;
- OP_LOADBOOL;
- OP_FORLOOP;
- OP_SUB;
- OP_LOADNIL;
- OP_RETURN;
- OP_UNM;
- OP_VARARG;
- end;
- MAXARG_B;
- SETARG_A;
- testOpMode;
- SIZE_OP;
- OpModeK;
- module ROpCode
- end;
- SET_OPCODE;
- NO_REG;
- ISK;
- module opnames
- end;
- MAXINDEXRK;
- getOpMode;
- SIZE_C;
- RKASK;
- OpModesetA;
- SETARG_Bx;
- OpModeCrk;
- OpModeBrk;
- OpModeBreg;
- NUM_OPCODES;
- LFIELDS_PER_FLUSH;
- DecodeInst;
- Instruction;
- SETARG_B;
- CREATE_ABC;
- CREATE_ABx;
- GETARG_B;
- module OpMode
- end;
- POS_A;
- POS_B;
- POS_Bx;
- module opmodes
- end;
- SIZE_A;
- BITRK;
- SIZE_Bx;
- GETARG_Bx;
- end;
- _NAME;
- VARARG_HASARG;
- MAXUPVALUES;
- _PACKAGE;
- MAXVARS;
- module format
- number_size;
- instr_size;
- header;
- little_endian;
- int_size;
- size_t_size;
- integral;
- end;
-end;
-
-module mlc
-
- luastring_of_luafile;
- lexstream_of_luafile;
- ast_of_luafile;
- proto_of_luafile;
- luacstring_of_luafile;
- function_of_luafile;
-
- lexstream_of_luastring;
- ast_of_luastring;
- proto_of_luastring;
- luacstring_of_luastring;
- function_of_luastring;
-
- ast_of_lexstream;
- proto_of_lexstream;
- luacstring_of_lexstream;
- function_of_lexstream;
-
- proto_of_ast;
- luacstring_of_ast;
- function_of_ast;
-
- luacstring_of_proto;
- function_of_proto;
-
- function_of_luacstring;
-
- luafile_to_luastring;
- luafile_to_lexstream;
- luafile_to_ast;
- luafile_to_proto;
- luafile_to_luacstring;
- luafile_to_function;
-
- luastring_to_lexstream;
- luastring_to_ast;
- luastring_to_proto;
- luastring_to_luacstring;
- luastring_to_function;
-
- lexstream_to_ast;
- lexstream_to_proto;
- lexstream_to_luacstring;
- lexstream_to_function;
-
- ast_to_proto;
- ast_to_luacstring;
- ast_to_function;
-
- proto_to_luacstring;
- proto_to_function;
-
- luacstring_to_function;
-
- luacstring_of_function;
- function_to_luacstring;
-
- convert;
- module order
- function;
- luafile;
- luacstring;
- proto;
- lexstream;
- luastring;
- ast;
- end;
- __index;
-end;
-extension;
-module mlp
-end
-
-module metalua
- module compiler
- end
-end
\ No newline at end of file
+++ /dev/null
-module walk
- expr; block; stat; expr_list; guess
-end
\ No newline at end of file
+++ /dev/null
-module walk_id
- expr; block; stat; expr_list; guess
-end
\ No newline at end of file
+++ /dev/null
-free decl_lexer; -- I want to access its alpha symbols table
-decl_builder;
-free decl_parser;
-free decl_elem_parser;
-parse_decl_lib;
-parse_decl_expr;
-parse_decl_file;
-check_src_file;
-decl_dump;
-free clopts_cfg;
\ No newline at end of file
+++ /dev/null
--{ extension 'match' }
--{ extension 'log' }
-
-require 'strict'
-require 'metalua.compiler'
-
-local VERBOSE = false
-local PARSING_OWN_DECL = false
-local MY_GLOBALS = { }
-local LOAD_SOURCE = nil
-local DECLARATIONS = { }
-local AUTOLOCALS = { }
-
-
-local function debug_print(...)
- if VERBOSE then return printf(...) end
-end
-
--- Lexer --
-decl_lexer = lexer.lexer:clone()
-decl_lexer:add{ 'module', 'free', 'end', 'private' }
-
--- Parser --
-
--- Merge two decl together
-local function merge (x, y)
- --$log('merge', x, y)
- for k, v in pairs (y) do
- match x[k], v with
- | `Free, _ | `Atom{x}, `Atom{x} -> -- pass
- | _, `Free | nil, _ -> x[k] = v
- | `Module{ _, mod_x }, `Module{ _, mod_y } -> merge (mod_x, mod_y)
- | _, _ ->
- $log("Merge failure", x[k], v)
- error ("Can't merge type elements")
- end
- end
-end
-
--- break mutual dependency between decl_elem_parser and decl_parser
-local _decl_elem_parser = |...| decl_elem_parser(...)
-
--- Parse a name, presented as an `Id or a `String
-local function name(lx)
- local a = lx:next()
- if a.tag=='String' or a.tag=='Id' then return a[1]
- else error("Name expected, got "..table.tostring(a,'nohash')) end
-end
-
-function decl_builder(x)
- --$log('decl_builder', x)
- local r = { }
- for y in ivalues(x) do
- if y.tag ~= 'Private' then merge (r, {[y[1]]=y}) end
- end
- return r
-end
-
-decl_parser = gg.list{
- gg.sequence{ _decl_elem_parser, gg.optkeyword ';', builder = |x|x[1] },
- terminators = 'end', builder = decl_builder }
-
-decl_elem_parser = gg.multisequence{
- { 'module', name, decl_parser, 'end', builder = |x| `Module{x[1], x[2]} },
- { 'free', name, builder = |x| `Free{x[1]} },
- { 'private', _decl_elem_parser, builder = |x| PARSING_OWN_DECL and x[1] or `Private },
- default = gg.sequence{ name, builder = |x| `Atom{x[1]} } }
-
-decl_elem_parser.transformers:add (function(x) x.loader = LOAD_SOURCE end)
-
-function parse_decl_lib (libname)
- debug_print ("Loading decl lib "..libname)
- local fd, msg = package.findfile (libname, os.getenv 'LUA_DPATH' or "?.dlua")
- if not fd then error ("Can't find declaration file for "..libname) end
- local src = fd:read '*a'
- fd:close()
- return parse_decl_expr (src)
-end
-
-function parse_decl_expr (src)
- local lx = decl_lexer:newstream (src)
- local r = decl_parser (lx)
- --$log('result of parse_decl', r)
- merge(DECLARATIONS, r)
- return r
-end
-
-function parse_decl_file (filename)
- debug_print ("Loading decl file "..filename)
- local src = mlc.luastring_of_luafile (filename)
- return parse_decl_expr (src)
-end
-
--- AST checker --
-require 'walk.id'
-
-local function index_autolocal (e, loader)
- --$log('index_autolocals', loader)
- local is_mine = false
- local function get_name(x)
- match x with
- | `Index{ y, `String{key} } -> return get_name(y)..'~'..key
- | `Invoke{ y, `String{key}, _ } ->
- error('autolocals for invocation not implemented '..table.tostring(x))
- | `Id{ name } -> is_mine = MY_GLOBALS[name]; return '~'..name
- | _ -> error(table.tostring(x)..'\n')
- end
- end
- local name = get_name(e)
- if is_mine then return end -- Don't index my own global vars
- local x = AUTOLOCALS[name]
- if not x then x={ }; AUTOLOCALS[name] = x end
- table.insert(x, { e, loader })
-end
-
-local walk_cfg = { id = { }, stat = { }, expr = { } }
-
-function walk_cfg.id.free(x, ...)
- --$log('in free id walker', x)
- local parents = {...}
- local dic = DECLARATIONS
- local name = x[1]
- for p in ivalues (parents) do
- local decl = dic[name]
- if not decl then error("Not declared: "..name) end
- match p with
- | `Index{ _x, `String{n} } | `Invoke{ _x, `String{n}, ...} if _x==x ->
- match decl with
- | `Free{...} -> break
- | `Atom{...} -> error (name.." is not a module")
- | `Module{ _, dic2 } -> dic, name, x = dic2, n, p
- end
- | _ -> -- x == last checked variable
- debug_print("Checked "..table.tostring(x, 'nohash')..
- ", found in "..table.tostring(decl.loader, 'nohash'))
- index_autolocal (x, decl.loader)
- break
- end
- end
-end
-
-local function try_load_decl (kind, mod_name)
- local success, r = pcall(_G['parse_decl_'..kind], mod_name)
- if not success then
- debug_print("Warning, error when trying to load %s:\n%s", mod_name, r)
- else
- return r
- end
-end
-
-local function call_walker(x)
- --$log('in call walker', x)
- match x with
- | `Call{ `Id 'require', `String{ mod_name } } ->
- if not DECLARATIONS[mod_name] then
- LOAD_SOURCE = `Require{x}
- try_load_decl('lib', mod_name)
- end
- | `Module{ `Id 'module', _ } -> -- no package.seeall
- DECLARATIONS = { } -- reset declarations
- | _ -> -- pass
- end
-end
-
-walk_cfg.expr.down = call_walker
-walk_cfg.stat.down = call_walker
-
-local CHECKED_AST, CHECKED_NAME
-
-function check_src_file(name)
- debug_print ("Checking file "..name)
- CHECKED_NAME = name
- CHECKED_AST = mlc.ast_of_luafile (name)
- --$log(ast,'nohash')
- PARSING_OWN_DECL = true
- local x = try_load_decl('lib', name:gsub("%.m?lua$", ""))
- for name in keys(x) do MY_GLOBALS[name] = true end
- PARSING_OWN_DECL = false
- walk_id.block (walk_cfg, CHECKED_AST)
- printf("File %s checked successfully", name)
-end
-
-local function replace_autolocals ()
- local top_defs, req_defs = { }, { }
- for k, v in pairs (AUTOLOCALS) do
- local original = table.shallow_copy(v[1][1])
- local loader = v[1][2]
- match loader with
- | `Require{ r } ->
- local defs = req_defs[r]
- if not defs then defs={ }; req_defs[r]=defs end
- defs[k] = original
- | `Base | `Directive ->
- top_defs[k] = original
- end
- for exlo in ivalues (v) do
- local expr, this_loader = unpack(exlo)
- assert (this_loader[1]==loader[1] and this_loader.tag==loader.tag,
- "Autolocal lost by homonymous declarations")
- expr <- `Id{k}
- end
- end
-
- -- Insert beginning-of-file local declarations
- local top_locals = `Local{ { }, { } }
- for k, v in pairs(top_defs) do
- table.insert(top_locals[1], `Id{k})
- table.insert(top_locals[2], v)
- end
- table.insert (CHECKED_AST, 1, top_locals)
-
- -- Insert declarations after require() statements
- for req_stat, renamings in pairs (req_defs) do
- local req_locals = `Local{ { }, { } }
- local r2 = table.shallow_copy(req_stat)
- req_stat <- { r2, req_locals }; req_stat.tag = nil
- for k, v in pairs (renamings) do
- table.insert(req_locals[1], `Id{k})
- table.insert(req_locals[2], v)
- end
- end
-
- if clopts_cfg.debug then table.print(CHECKED_AST, 'nohash', 60) end
- local chunk = mlc.luacstring_of_ast (CHECKED_AST)
- local f = io.open (CHECKED_NAME:gsub('%.m?lua', '')..'.luac', 'w')
- f:write(chunk)
- f:close()
-end
-
--- RAM dumper --
-
-function decl_dump(name, f)
- match type(f) with
- | 'nil' -> f=io.stdout
- | 'string' -> f=io.open(f, 'w') or error ("Can't open file "..f)
- | 'userdata' -> -- pass
- | t -> error ("Invalid target file type "..t)
- end
- local indentation, acc, seen = 0, { }, { }
- local function esc(n)
- if n:gmatch "[%a_][%w_]*" and not decl_lexer.alpha[n] then return n else return '"'..n..'"' end
- end
- local function add_line(...) table.insert(acc, table.concat{' ':rep(indentation), ...}) end
- local function rec(n, v)
- if seen[v] then add_line ('free ', esc(n), ";")
- elseif type(v)=='table' then
- seen[v] = true
- add_line ('module ', esc(n))
- indentation += 1
- for n2, v2 in pairs(v) do
- if type(n2)=='string' then rec (n2, v2) end
- end
- indentation -= 1
- add_line 'end;'
- else
- add_line (esc(n), ';')
- end
- end
- rec(name, _G[name])
- for line in ivalues (acc) do
- f:write(line, '\n')
- end
- if f~=io.stdout then f:close() end
-end
-
-
--- options handling --
-require 'clopts'
-
-local cl_parser = clopts {
- check_src_file,
-
- { short = 'd', long = 'debug', type = 'boolean',
- usage = 'print debug traces', action = function(x) VERBOSE=x end },
-
- { short = 'l', long = 'decl_lib', type = 'string*', usage = 'load decl lib',
- action = function (x) LOAD_SOURCE=`Directive; return parse_decl_lib(x) end },
-
- { short = 'f', long = 'decl_file', type = 'string*', usage = 'load decl file',
- action = function (x) LOAD_SOURCE=`Directive; return parse_decl_file(x) end },
-
- { short = 'x', long = 'decl_expr', type = 'string*',
- usage = 'decl expression to eval',
- action = function (x) LOAD_SOURCE=`Directive; return parse_decl_expr(x) end },
-
- { short = 'a', long = 'autolocals', type = 'boolean',
- usage = 'compiles the program with autolocals' } }
-
-LOAD_SOURCE = `Base
-try_load_decl('lib', 'base')
-clopts_cfg = cl_parser (...)
-if clopts_cfg.autolocals then
- replace_autolocals()
-end
\ No newline at end of file
+++ /dev/null
-require 'strict'
-
--{ extension 'match' }
-
-synth = { }
-synth.__index = synth
-
---------------------------------------------------------------------------------
--- Instanciate a new AST->source synthetizer
---------------------------------------------------------------------------------
-function synth.new ()
- local self = {
- _acc = { }, -- Accumulates pieces of source as strings
- current_indent = 0, -- Current level of line indentation
- indent_step = " " -- Indentation symbol, normally spaces or '\t'
- }
- return setmetatable (self, synth)
-end
-
---------------------------------------------------------------------------------
--- Run a synthetizer on the `ast' arg and return the source as a string.
--- Can also be used as a static method `synth.run (ast)'; in this case,
--- a temporary synthetizer is instanciated on the fly.
---------------------------------------------------------------------------------
-function synth:run (ast)
- if not ast then
- self, ast = synth.new(), self
- end
- self._acc = { }
- self:node (ast)
- return table.concat (self._acc)
-end
-
---------------------------------------------------------------------------------
--- Accumulate a piece of source file in the synthetizer.
---------------------------------------------------------------------------------
-function synth:acc (x)
- if x then table.insert (self._acc, x) end
-end
-
---------------------------------------------------------------------------------
--- Accumulate an indented newline.
--- Jumps an extra line if indentation is 0, so that
--- toplevel definitions are separated by an extra empty line.
---------------------------------------------------------------------------------
-function synth:nl ()
- if self.current_indent == 0 then self:acc "\n" end
- self:acc ("\n" .. self.indent_step:rep (self.current_indent))
-end
-
---------------------------------------------------------------------------------
--- Increase indentation and accumulate a new line.
---------------------------------------------------------------------------------
-function synth:nlindent ()
- self.current_indent = self.current_indent + 1
- self:nl ()
-end
-
---------------------------------------------------------------------------------
--- Decrease indentation and accumulate a new line.
---------------------------------------------------------------------------------
-function synth:nldedent ()
- self.current_indent = self.current_indent - 1
- self:acc ("\n" .. self.indent_step:rep (self.current_indent))
-end
-
---------------------------------------------------------------------------------
--- Keywords, which are illegal as identifiers.
---------------------------------------------------------------------------------
-local keywords = table.transpose {
- "and", "break", "do", "else", "elseif",
- "end", "false", "for", "function", "if",
- "in", "local", "nil", "not", "or",
- "repeat", "return", "then", "true", "until",
- "while" }
-
---------------------------------------------------------------------------------
--- Return true iff string `id' is a legal identifier name.
---------------------------------------------------------------------------------
-local function is_ident (id)
- return id:strmatch "^[%a_][%w_]*$" and not keywords[id]
-end
-
---------------------------------------------------------------------------------
--- Return true iff ast represents a legal function name for
--- syntax sugar ``function foo.bar.gnat() ... end'':
--- a series of nested string indexes, with an identifier as
--- the innermost node.
---------------------------------------------------------------------------------
-local function is_idx_stack (ast)
- match ast with
- | `Id{ _ } -> return true
- | `Index{ left, `String{ _ } } -> return is_idx_stack (left)
- | _ -> return false
- end
-end
-
---------------------------------------------------------------------------------
--- Operator precedences, in increasing order.
--- This is not directly used, it's used to generate op_prec below.
---------------------------------------------------------------------------------
-local op_preprec = {
- { "or", "and" },
- { "lt", "le", "eq", "ne" },
- { "concat" },
- { "add", "sub" },
- { "mul", "div", "mod" },
- { "unary", "not", "len" },
- { "pow" },
- { "index" } }
-
---------------------------------------------------------------------------------
--- operator --> precedence table, generated from op_preprec.
---------------------------------------------------------------------------------
-local op_prec = { }
-
-for prec, ops in ipairs (op_preprec) do
- for op in ivalues (ops) do
- op_prec[op] = prec
- end
-end
-
---------------------------------------------------------------------------------
--- operator --> source representation.
---------------------------------------------------------------------------------
-local op_symbol = {
- add = " + ", sub = " - ", mul = " * ",
- div = " / ", mod = " % ", pow = " ^ ",
- concat = " .. ", eq = " == ", ne = " ~= ",
- lt = " < ", le = " <= ", ["and"] = " and ",
- ["or"] = " or ", ["not"] = "not ", len = "# " }
-
---------------------------------------------------------------------------------
--- Accumulate the source representation of AST `node' in
--- the synthetizer. Most of the work is done by delegating to
--- the method having the name of the AST tag.
--- If something can't be converted to normal sources, it's
--- instead dumped as a `-{ ... }' splice in the source accumulator.
---------------------------------------------------------------------------------
-function synth:node (node)
- assert (self~=synth and self._acc)
- if not node.tag then -- tagless block.
- self:list (node, self.nl)
- else
- local f = synth[node.tag]
- if type (f) == "function" then -- Delegate to tag method.
- f (self, node, unpack (node))
- elseif type (f) == "string" then -- tag string.
- self:acc (f)
- else -- No appropriate method, fall back to splice dumping.
- -- This cannot happen in a plain Lua AST.
- self:acc " -{ "
- self:acc (table.tostring (node, "nohash"), 80)
- self:acc " }"
- end
- end
-end
-
---------------------------------------------------------------------------------
--- Convert every node in the AST list `list' passed as 1st arg.
--- `sep' is an optional separator to be accumulated between each list element,
--- it can be a string or a synth method.
--- `start' is an optional number (default == 1), indicating which is the
--- first element of list to be converted, so that we can skip the begining
--- of a list.
---------------------------------------------------------------------------------
-function synth:list (list, sep, start)
- for i = start or 1, # list do
- self:node (list[i])
- if list[i + 1] then
- if not sep then
- elseif type (sep) == "function" then sep (self)
- elseif type (sep) == "string" then self:acc (sep)
- else error "Invalid list separator" end
- end
- end
-end
-
---------------------------------------------------------------------------------
---
--- Tag methods.
--- ------------
---
--- Specific AST node dumping methods, associated to their node kinds
--- by their name, which is the corresponding AST tag.
--- synth:node() is in charge of delegating a node's treatment to the
--- appropriate tag method.
---
--- Such tag methods are called with the AST node as 1st arg.
--- As a convenience, the n node's children are passed as args #2 ... n+1.
---
--- There are several things that could be refactored into common subroutines
--- here: statement blocks dumping, function dumping...
--- However, given their small size and linear execution
--- (they basically perform series of :acc(), :node(), :list(),
--- :nl(), :nlindent() and :nldedent() calls), it seems more readable
--- to avoid multiplication of such tiny functions.
---
--- To make sense out of these, you need to know metalua's AST syntax, as
--- found in the reference manual or in metalua/doc/ast.txt.
---
---------------------------------------------------------------------------------
-
-function synth:Do (node)
- self:acc "do"
- self:nlindent ()
- self:list (node, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function synth:Set (node)
- match node with
- | `Set{ { `Index{ lhs, `String{ method } } },
- { `Function{ { `Id "self", ... } == params, body } } }
- if is_idx_stack (lhs) and is_ident (method) ->
- -- ``function foo:bar(...) ... end'' --
- self:acc "function "
- self:node (lhs)
- self:acc ":"
- self:acc (method)
- self:acc " ("
- self:list (params, ", ", 2)
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-
- | `Set{ { lhs }, { `Function{ params, body } } } if is_idx_stack (lhs) ->
- -- ``function foo(...) ... end'' --
- self:acc "function "
- self:node (lhs)
- self:acc " ("
- self:list (params, ", ")
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-
- | `Set{ { `Id{ lhs1name } == lhs1, ... } == lhs, rhs }
- if not is_ident (lhs1name) ->
- -- ``foo, ... = ...'' when foo is *not* a valid identifier.
- -- In that case, the spliced 1st variable must get parentheses,
- -- to be distinguished from a statement splice.
- -- This cannot happen in a plain Lua AST.
- self:acc "("
- self:node (lhs1)
- self:acc ")"
- if lhs[2] then -- more than one lhs variable
- self:acc ", "
- self:list (lhs, ", ", 2)
- end
- self:acc " = "
- self:list (rhs, ", ")
-
- | `Set{ lhs, rhs } ->
- -- ``... = ...'', no syntax sugar --
- self:list (lhs, ", ")
- self:acc " = "
- self:list (rhs, ", ")
- end
-end
-
-function synth:While (node, cond, body)
- self:acc "while "
- self:node (cond)
- self:acc " do"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function synth:Repeat (node, body, cond)
- self:acc "repeat"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "until "
- self:node (cond)
-end
-
-function synth:If (node)
- for i = 1, #node-1, 2 do
- -- for each ``if/then'' and ``elseif/then'' pair --
- local cond, body = node[i], node[i+1]
- self:acc (i==1 and "if " or "elseif ")
- self:node (cond)
- self:acc " then"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- end
- -- odd number of children --> last one is an `else' clause --
- if #node%2 == 1 then
- self:acc "else"
- self:nlindent ()
- self:list (node[#node], self.nl)
- self:nldedent ()
- end
- self:acc "end"
-end
-
-function synth:Fornum (node, var, first, last)
- local body = node[#node]
- self:acc "for "
- self:node (var)
- self:acc " = "
- self:node (first)
- self:acc ", "
- self:node (last)
- if #node==5 then -- 5 children --> child #4 is a step increment.
- self:acc ", "
- self:node (node[4])
- end
- self:acc " do"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function synth:Forin (node, vars, generators, body)
- self:acc "for "
- self:list (vars, ", ")
- self:acc " in "
- self:list (generators, ", ")
- self:acc " do"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function synth:Local (node, lhs, rhs)
- self:acc "local "
- self:list (lhs, ", ")
- if rhs[1] then
- self:acc " = "
- self:list (rhs, ", ")
- end
-end
-
-function synth:Localrec (node, lhs, rhs)
- match node with
- | `Localrec{ { `Id{name} }, { `Function{ params, body } } }
- if is_ident (name) ->
- -- ``local function name() ... end'' --
- self:acc "local function "
- self:acc (name)
- self:acc " ("
- self:list (params, ", ")
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-
- | _ ->
- -- Other localrec are unprintable ==> splice them --
- -- This cannot happen in a plain Lua AST. --
- self:acc "-{ "
- self:acc (table.tostring (node, 'nohash', 80))
- self:acc " }"
- end
-end
-
-function synth:Call (node, f)
- -- single string or table literal arg ==> no need for parentheses. --
- local parens
- match node with
- | `Call{ _, `String{_} }
- | `Call{ _, `Table{...}} -> parens = false
- | _ -> parens = true
- end
- self:node (f)
- self:acc (parens and " (" or " ")
- self:list (node, ", ", 2) -- skip `f'.
- self:acc (parens and ")")
-end
-
-function synth:Invoke (node, f, method)
- -- single string or table literal arg ==> no need for parentheses. --
- local parens
- match node with
- | `Invoke{ _, _, `String{_} }
- | `Invoke{ _, _, `Table{...}} -> parens = false
- | _ -> parens = true
- end
- self:node (f)
- self:acc ":"
- self:acc (method[1])
- self:acc (parens and " (" or " ")
- self:list (node, ", ", 3) -- Skip args #1 and #2, object and method name.
- self:acc (parens and ")")
-end
-
-function synth:Return (node)
- self:acc "return "
- self:list (node, ", ")
-end
-
-synth.Break = "break"
-synth.Nil = "nil"
-synth.False = "false"
-synth.True = "true"
-synth.Dots = "..."
-
-function synth:Number (node, n)
- self:acc (tostring (n))
-end
-
-function synth:String (node, str)
- -- format "%q" prints '\n' in an umpractical way IMO,
- -- so this is fixed with the :gsub( ) call.
- self:acc (string.format ("%q", str):gsub ("\\\n", "\\n"))
-end
-
-function synth:Function (node, params, body)
- self:acc "function "
- self:acc " ("
- self:list (params, ", ")
- self:acc ")"
- self:nlindent ()
- self:list (body, self.nl)
- self:nldedent ()
- self:acc "end"
-end
-
-function synth:Table (node)
- if not node[1] then self:acc "{ }" else
- self:acc "{"
- self:nlindent ()
- for i, elem in ipairs (node) do
- match elem with
- | `Pair{ `String{ key }, value } if is_ident (key) ->
- -- ``key = value''. --
- self:acc (key)
- self:acc " = "
- self:node (value)
-
- | `Pair{ key, value } ->
- -- ``[key] = value''. --
- self:acc "["
- self:node (key)
- self:acc "] = "
- self:node (value)
-
- | _ ->
- -- ``value''. --
- self:node (elem)
- end
- if node [i+1] then
- self:acc ","
- self:nl ()
- end
- end
- self:nldedent ()
- self:acc "}"
- end
-end
-
-function synth:Op (node, op, a, b)
- -- Transform ``not (a == b)'' into ``a ~= b''. --
- match node with
- | `Op{ "not", `Op{ "eq", _a, _b } }
- | `Op{ "not", `Paren{ `Op{ "eq", _a, _b } } } ->
- op, a, b = "ne", _a, _b
- | _ ->
- end
-
- if b then -- binary operator.
- local left_paren, right_paren
- match a with
- | `Op{ op_a, ...} if op_prec[op] >= op_prec[op_a] -> left_paren = true
- | _ -> left_paren = false
- end
-
- match b with -- FIXME: might not work with right assoc operators ^ and ..
- | `Op{ op_b, ...} if op_prec[op] >= op_prec[op_b] -> right_paren = true
- | _ -> right_paren = false
- end
-
- self:acc (left_paren and "(")
- self:node (a)
- self:acc (left_paren and ")")
-
- self:acc (op_symbol [op])
-
- self:acc (right_paren and "(")
- self:node (b)
- self:acc (right_paren and ")")
-
- else -- unary operator.
- local paren
- match a with
- | `Op{ op_a, ... } if op_prec[op] >= op_prec[op_a] -> paren = true
- | _ -> paren = false
- end
- self:acc (op_symbol[op])
- self:acc (paren and "(")
- self:node (a)
- self:acc (paren and ")")
- end
-end
-
-function synth:Paren (node, content)
- self:acc "("
- self:node (content)
- self:acc ")"
-end
-
-function synth:Index (node, table, key)
- local paren_table
- -- Check precedence, see if parens are needed around the table --
- match table with
- | `Op{ op, ... } if op_prec[op] < op_prec.index -> paren_table = true
- | _ -> paren_table = false
- end
-
- self:acc (paren_table and "(")
- self:node (table)
- self:acc (paren_table and ")")
-
- match key with
- | `String{ field } if is_ident (field) ->
- -- ``table.key''. --
- self:acc "."
- self:acc (field)
- | _ ->
- -- ``table [key]''. --
- self:acc "["
- self:node (key)
- self:acc "]"
- end
-end
-
-function synth:Id (node, name)
- if is_ident (name) then
- self:acc (name)
- else -- Unprintable identifier, fall back to splice representation.
- -- This cannot happen in a plain Lua AST.
- self:acc "-{`Id "
- self:String (node, name)
- self:acc "}"
- end
-end
-
-
---------------------------------------------------------------------------------
--- Read a file, get its AST, use synth to regenerate sources
--- from that AST
---------------------------------------------------------------------------------
-require 'metalua.compiler'
-local filename = (arg[2] or arg[1]) or arg[0]
-local ast = mlc.luafile_to_ast (filename)
-
-print(synth.run(ast))
+++ /dev/null
--{ extension 'trycatch' }
-
-
-----------------------------------------------------------------------
-print "1) no error"
-try
- print(" Hi")
-end
-
-
-----------------------------------------------------------------------
-print "2) caught error"
-try
- error "some_error"
-catch x then
- printf(" Successfully caught %q", x)
-end
-
-
--- [[
-----------------------------------------------------------------------
-print "3) no error, with a finally"
-try
- print " Hi"
-finally
- print " Finally OK"
-end
-
-
-----------------------------------------------------------------------
-print "4) error, with a finally"
-try
- print " Hi"
- error "bang"
-catch "bang"/{_} then
- print " Bang caught"
-finally
- print " Finally OK"
-end
-
-
-----------------------------------------------------------------------
-print "5) nested catchers"
-try
- try
- error "some_error"
- catch "some_other_error" then
- assert (false, "mismatch, this must not happen")
- end
- catch "some_error"/{x} then
- printf(" Successfully caught %q across a try that didn't catch", x)
-catch x then
- assert (false, "We shouldn't reach this catch-all")
-end
-
-
-----------------------------------------------------------------------
-print "6) nested catchers, with a 'finally in the inner one"
-try
- try
- error "some_error"
- catch "some_other_error" then
- assert (false, "mismatch, this must not happen")
- finally
- print " Leaving the inner try-catch"
- end
-catch "some_error"/{x} then
- printf(" Successfully caught %q across a try that didn't catch", x)
-catch x then
- assert (false, "We shouldn't reach this catch-all")
-end
-
-
-----------------------------------------------------------------------
-print "7) 'finally' intercepts a return from a function"
-function f()
- try
- print " into f:"
- return "F_RESULT"
- assert (false, "I'll never go there")
- catch _ then
- assert (false, "No exception should be thrown")
- finally
- print " I do the finally before leaving f()"
- end
-end
-local fr = f()
-printf(" f returned %q", fr)
-
-
-----------------------------------------------------------------------
-print "8) don't be fooled by nested functions"
-function f()
- try
- local function g() return "from g" end
- printf(" g() returns %q", g())
- return "from f"
- catch _ then
- assert (false, "No exception should be thrown")
- end
-end
-local fr = f()
-printf(" f returned %q", fr)
-
-----------------------------------------------------------------------
-print "*) done."
-
+++ /dev/null
--{ extension "types" }
--{ extension "clist" }
-
--- Uncomment this to turn typechecking code generation off:
--- -{stat: types.enabled=false}
-
-function sum (x :: table(number)) :: number
- local acc :: number = 0
- for i=1, #x do
- acc = acc + x[i] -- .. 'x' -- converts to string
- end
- --acc='bug' -- put a string in a number variable
- return acc
-end
-
-x = { i for i=1,100 }
---x[23] = 'toto' -- string in a number list, sum() will complain
-y = sum (x)
-printf ("sum 1 .. %i = %i", #x, y)
\ No newline at end of file
+++ /dev/null
--{ extension 'match' }
-
-require 'metalua.walk.id'
-
-ast = +{ block:
- y = type(1)
- function foo(x)
- local type = 'number'
- assert(x==type or not x)
- end
- foo(x) }
-
-disp = |msg,ast| printf("\n%s:\n%s", msg, table.tostring(ast, 80, 'nohash'))
-disp('initial term', ast)
-
-do -- Make globals explicit:
- local ast = table.deep_copy(ast)
- local cfg = { id = { } }
- function cfg.id.free(i)
- i <- `Index{ `Id '_G', `String{i[1]} }
- return 'break'
- end
- walk_id.block(cfg, ast)
- disp('Globals made explicit', ast)
-end
-
+++ /dev/null
-require 'metalua.mlc'
-require 'metalua.walk'
-
-function weave_ast (src, ast, name)
-
- -------------------------------------------------------------------
- -- translation: associate an AST node to its recomposed source
- -- ast_children: associate an AST node to the list of its children
- -- ast_parent: associate an AST node to the list of its parent
- -- weavable: whether an AST node supports weaving of its children
- -- node: common walker config for exprs, stats & blocks
- -------------------------------------------------------------------
- local translation, ast_children, ast_parent, weaveable, node =
- { }, { }, { }, { }, { }
-
- -------------------------------------------------------------------
- -- Build up the parent/children relationships. This is not the same
- -- as inclusion between tables: the relation we're building only
- -- relates blocks, expressions and statements; in the AST, some
- -- tables don't represent any of these node kinds.
- -- For instance in `Local{ { `Id "x" }, { } }, `Id"x" is a child of
- -- the `Local{ } node, although it's not directly included in it.
- -------------------------------------------------------------------
- function node.down(ast, parent)
- ----------------------------------------------------
- -- `Do{ } blocks are processed twice:
- -- * once as a statement
- -- * once as a block, child of itself
- -- This prevents them from becoming their own child.
- ----------------------------------------------------
- if ast==parent then return end
-
- if not ast.lineinfo then
- weaveable [ast] = false, false
- if parent then weaveable [parent] = false end
- else
- weaveable [ast] = true
-
- -- normalize lineinfo
- -- TODO: FIXME
- if ast.lineinfo.first[3] > ast.lineinfo.last[3] then
- ast.lineinfo.first, ast.lineinfo.last = ast.lineinfo.last, ast.lineinfo.first
- end
- end
- ast_children [ast] = { }
- ast_parent [ast] = parent
- if parent then table.insert (ast_children [parent], ast) end
- end
-
- -------------------------------------------------------------------
- -- Visit up, from leaves to upper-level nodes, and weave leaves
- -- back into the text of their parent node, recursively. Since the
- -- visitor is imperative, we can't easily make it return a value
- -- (the resulting recomposed source, here). Therefore we
- -- imperatively store results in the association table
- -- `translation'.
- -------------------------------------------------------------------
- function node.up(ast)
- local _acc = { }
- local function acc(x) table.insert (_acc, x) end
-
- if not next(ast) then -- shadow node, remove from ast_children
- local x = ast_children[ast_parent[ast]]
- for i,a in ipairs (x) do if a==ast then table.remove (x, i); break end end
- return "" -- no need to continue, we know that the node is empty!
- end
-
- -- ast Can't be weaved normally, try something else --
- local function synthetize (ast)
- acc "-{expr: "
- acc (table.tostring (ast, 'nohash', 80, 8))
- acc " }"
- end
-
- -- regular weaving of chidren in the parent's sources --
- local function weave (ast)
- -- sort children in appearence order
- local comp = |a,b| a.lineinfo.first[3] < b.lineinfo.first[3]
- table.sort (ast_children [ast], comp)
-
- local li = ast.lineinfo
- if not li then return synthetize (ast) end
- local a, d = li.first[3], li.last[3]
- for child in ivalues (ast_children [ast]) do
- local li = child.lineinfo
- local b, c = li.first[3], li.last[3]
- acc (src:sub (a, b - 1))
- acc (translation [child])
- a = c + 1
- end
- acc (src:sub (a, d))
- end
-
- -- compute the translation from the children's ones --
- if not translation [ast] then
- if weaveable [ast] then weave (ast) else synthetize (ast) end
- translation [ast] = table.concat (_acc)
- end
- end
-
- local cfg = { expr=node; stat=node; block=node }
- walk.block (cfg, ast)
-
- return translation [ast]
-end
-
--- Get the source. If none is given, use itself as an example. --
-local filename = arg[2] or arg[1] or arg[0]
-local f = assert (io.open (filename, 'r'))
-local src = f:read '*a'
-f:close()
-
-local ast = mlc.luastring_to_ast (src, name)
-if not next(ast) then
- io.write (src) -- Empty ast, probably empty file, or comments only
-else
- local before = src:sub (1, ast.lineinfo.first[3]-1)
- local after = src:sub (ast.lineinfo.last[3]+1, -1)
- io.write (before .. weave_ast (src, ast) .. after)
-end
+++ /dev/null
--{ extension 'withdo' }
-
-local original_close = io.close
-
-function x()
- with f1, f2 = io.open 'withdo_test.mlua', io.open 'trycatch_test.mlua' do
- local t1 = f1:read '*a'
- local t2 = f2:read '*a'
- return #t1, #t2
- end
-end
-
-print(x())
\ No newline at end of file
+++ /dev/null
--{ extension 'xglobal' }
-
-----------------------------------------------------------------------
-print "1) declare unassigned globals"
-global a, b
-
-----------------------------------------------------------------------
-print "2) declare-and-assign global"
-global c = 3
-
-----------------------------------------------------------------------
-print "3) assign to pre-declared globals"
-a, b = 1, 2
-
-----------------------------------------------------------------------
-print "4) fail when setting an undeclared global"
-local st1, msg1 = pcall(function()
- a = 4
- d = 5 -- failure, assignment to undeclared global
-end)
-assert(not st1)
-printf (" -> This error was expected: %s", msg1)
-
-----------------------------------------------------------------------
-print "5) fail when reading an undeclared global"
-local st2, msg2 = pcall(function()
- b = c -- OK
- local _ = d -- failure, try to read undeclared global
-end)
-assert(not st2)
-printf (" -> This error was expected: %s", msg2)
-
-----------------------------------------------------------------------
-print "6) check the globals' values"
-assert(a==4)
-assert(b==3)
-assert(c==3)
-
-----------------------------------------------------------------------
-print "*) done."
+++ /dev/null
--{ extension 'xloop' }
-for i=1,9 for j=10,90,10 if i~=3 while i<8 do
- io.write(i+j, ' ')
-end
\ No newline at end of file
+++ /dev/null
--{ extension 'xmatch' }
-
-WIDTH=60
-function p(msg) io.write(msg..' ':rep(WIDTH-#msg)) end
-
-----------------------------------------------------------------------
-p "match as an expression"
-print(match 1 with 1 -> 'ok' | 2 -> 'KO')
-
-----------------------------------------------------------------------
-p "global match function"
-match function g
-| x if x<10 -> return 'o'
-| _ -> return 'k'
-end
-print(g(1)..g(11))
-
-----------------------------------------------------------------------
-p "global match function, multi-args"
-match function cmp
-| x, y if x<y -> return 'increasing'
-| _, _ -> return 'decreasing'
- end
-
-if cmp(1,2)=='increasing' and cmp(2,1)=='decreasing' then
- print "ok" else print "KO"
-end
-
-----------------------------------------------------------------------
-p "local match function"
-do
- local match function x
- | 1 -> print 'ok'
- end
- x(1)
-end
-assert(not x)
-
-----------------------------------------------------------------------
-p "global bind assignment"
-bind {a, b} = {'o', 'k'}
-print(a..b)
-
-----------------------------------------------------------------------
-p "local bind assignment"
-c, d = 'k', 'o'
-do
- local bind {c, {d}} = {'o', {'k'}}
- print(c..d)
-end
-
-----------------------------------------------------------------------
-p "local bind assignment scope"
-print(d..c)
+++ /dev/null
-local foo
-
-x = -{ `Stat{ { `Local{ { `Id "B" },
- { `Stat{ { `Local{ { `Id "A" },
- { `Number 4 } },
- `Set{ { `Id "y" },
- { `Id "A" } } },
- `Id "A" } } },
- `Set{ { `Id "x" },
- { `Id "B" } } },
- `Id "B" } }
-
-assert(x==4)
-print "Test passed."
-
+++ /dev/null
--{ extension 'xloop' }
-
-ls = io.popen ( (os.getenv("OS") or "") :match "^Windows" and "dir /b reweave" or "ls reweave")
-this_script = arg[1]
-
-local errors = {}
-
-for filename in ls :lines() if filename :strmatch "%.m?lua$" do
- printf ("--- weaver check %s ---", filename)
- local ret = os.execute ("metalua ../samples/weaver.mlua reweave/"..filename.." | diff -q reweave/"..filename.." -")
- if ret ~= 0 then
- print("================================================================================")
- print("Reweaved source does not match original:")
- print("================================================================================")
- os.execute ("metalua ../samples/weaver.mlua reweave/"..filename.." | diff reweave/"..filename.." -")
- errors[#errors + 1] = "Reweaving of "..filename.." failed, returned "..ret
- end
-end
-
-ls :close()
-
-if #errors > 0 then
- print("================================================================================")
- error("REWEAVING ERRORS DETECTED:\n * " .. table.concat(errors, "\n * "))
-end
+++ /dev/null
---[[
-comment
---]]
+++ /dev/null
---[[comment]]
-local code = 5
+++ /dev/null
-if true then
- -- comment
-end
-
-if true then
- -- comment
- print("something else after")
-end
+++ /dev/null
-# it eats
---[[ all ]]
---[===[ my ]===]
-comments() -- foo
---[[ bar
-baz ]] qqq()
--- even
-one() -- liners
+++ /dev/null
-f(a > b)
-f(c >= d)
+++ /dev/null
-t = {}
-
-t = { }
-
-t {}
-
-t { }
-
-assert(count(function () end) == 1)
-
-for k,v,w in a do end
-
-repeat until 1; repeat until true;
-while false do end; while nil do end;
-
-foo(1) { };
+++ /dev/null
-function a.b.c.f1 (x) return x+1 end
-function a.b.c:f2 (x,y) self[x] = y end
+++ /dev/null
-function a.b.c (x) end
+++ /dev/null
-local print, verb, dbg, errr, print_table, printt = make_module_loggers("schema", "SCM")
-
-local CT, GMF,
- game_const
- = import 'game/const.lua'
- {
- 'chipTypes',
- 'gameModeFlags'
- }
-
-local MTF,
- cast_type
- = import (game_const.abilities)
- {
- 'manualTargetFlags',
- 'castType'
- }
-
-local AP, abiprob_mapping = import (game_const.abilities.property)
- {
- 'mappingInv', -- Note order (inverted goes first)
- 'mapping'
- }
-
-local PO, CM, CST, SO,
- abie_const
- = import 'abie/const.lua'
- {
- 'propObjects',
- 'customMessages',
- 'clientStat',
- 'storeObjects'
- }
-
-local non_empty_list,
- no_check,
- not_implemented,
- get_children,
- get_children_concat_newline,
- get_children_concat_str,
- get_children_concat_table,
- get_value,
- get_value_quoted,
- get_value_tonumber,
- check_mapping_tonumber,
- get_value_mapped_tonumber_quoted,
- node_children_placeholders_filler,
- check_tonumber
- = import 'jsle/schema/util.lua'
- {
- 'non_empty_list',
- 'no_check',
- 'not_implemented',
- 'get_children',
- 'get_children_concat_newline',
- 'get_children_concat_str',
- 'get_children_concat_table',
- 'get_value',
- 'get_value_quoted',
- 'get_value_tonumber',
- 'check_mapping_tonumber',
- 'get_value_mapped_tonumber_quoted',
- 'node_children_placeholders_filler',
- 'check_tonumber'
- }
-
-local declare_common = import 'jsle/schema/common.lua' { 'declare_common' }
-
--- Optional TODOs:
-
--- TODO: Must be able to fetch back data from lang file to this schema.
--- TODO: Write effect validation with human readable answers. Make it available via jobman's job.
--- TODO: Write auto-conversion function for old abilities (v.1.01->current)
--- TODO: Embed limitations on number of simultanious identical active OT effects
--- TODO: Write checkers for numeric fields
--- TODO: Adapt game/ctrl.lua to abie
-
-local define_schema = function(jsle)
- assert_is_table(jsle)
-
--- WARNING: Return nil on error from handlers, do not return false -- it is a legitimate value.
--- WARNING: Reordering of schema elements would result in INCOMPATIBLE format change!
-
- local propwrite_values =
- {
- { ["health"] = [[жизнь]] };
- { ["health_max"] = [[здоровье]] };
- { ["mana1"] = [[красную ману]] };
- { ["mana2"] = [[зелёную ману]] };
- { ["mana3"] = [[синюю ману]] };
- -- Note mana4 is reserved for health
- { ["mana5"] = [[ману 5]] };
- { ["mana6"] = [[ману 6]] };
- { ["mana7"] = [[ману 7]] };
- { ["mana8"] = [[ману 8]] };
- { ["armor"] = [[броню]] };
- { ["fury"] = [[ярость]] };
- { ["block"] = [[блок]] };
- { ["fortune"] = [[удачу]] };
- { ["stun"] = [[оглушение]] };
- { ["armour_piercing"] = [[бронебойность]] };
- { ["agility"] = [[ловкость]] };
- { ["counterattack"] = [[контрудар]] };
- { ["damage"] = [[базовый урон]] };
- { ["damage_min"] = [[минимальный урон]] };
- { ["damage_max"] = [[максимальный урон]] };
- { ["damage_mult"] = [[множитель урона]] };
- { ["vampiric"] = [[вампиризм]] };
- { ["stun_count"] = [[оглушённость]] };
- }
-
- local propread_values = tiappend(
- tclone(propwrite_values),
- {
- { ["race_id"] = [[расу]] },
- { ["level"] = [[уровень]] },
- { ["grade"] = [[степень]] }, -- TODO: clan_rank?!
- { ["rank"] = [[ранг]] },
- { ["glory"] = [[доблесть]] },
- { ["scalps"] = [[скальпы]] },
- { ["kills"] = [[убийства]] },
- }
- )
-
- -- TODO: Be more specific. Should be at least "abie-1.03".
- jsle:version("1.03") -- WARNING: Do an ordering cleanup when this changes
-
- jsle:record "ROOT"
- {
- children =
- {
- [1] = "TARGET_LIST";
- [2] = "IMMEDIATE_EFFECT_LIST";
- [3] = "OVERTIME_EFFECT";
- [4] = { "BOOLEAN", default = 0 }; -- Warning! Do not use BOOLOP_VARIANT, nothing of it would work at this point.
- [5] = { "CUSTOM_OVERTIME_EFFECTS", default = empty_table };
- };
- html = [[<h2>Цели</h2>%C(1)%<h2>Мгновенные эффекты</h2><b>Игнорировать активацию в статистике:</b>%C(4)%<br><br><b>Действия:</b>%C(2)%<h2>Овертайм-эффекты</h2>%C(3)%<hr>%C(5)%]];
- checker = no_check;
- handler = function(self, node)
- return self:effect_from_string(
- node.value[1], -- Target list
- node.value[4], -- Ignore usage stats flag
- self:fill_placeholders(
- node.value,
-[[
-function(self)
- self:set_custom_ot_effects($(5))
-
- do
- $(2)
- end
-
- do
- $(3)
- end
-end
-]]
- )
- )
- end;
- }
-
- jsle:list "TARGET_LIST"
- {
- type = "TARGET_VALUE";
- html = [[%LIST(", ")%]];
- checker = non_empty_list;
- handler = function(self, node)
- local result = 0
- for i, v in ipairs(node.value) do
- result = result + v
- end
- return result
- end;
- }
-
- jsle:enum "TARGET_VALUE"
- {
- values =
- {
- { [MTF.AUTO_ONLY] = [[неинтерактивно]] };
- { [MTF.SELF_HUMAN] = [[на себя]] };
- { [MTF.SELF_TEAM_HUMAN] = [[на человека в своей команде]] };
- { [MTF.OPP_HUMAN] = [[на противника]] };
- { [MTF.OPP_TEAM_HUMAN] = [[на человека в команде противника]] };
- { [MTF.FIELD_CHIP] = [[на фишку]] };
- };
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_tonumber;
- numeric_keys = true;
- }
-
- jsle:list "IMMEDIATE_EFFECT_LIST"
- {
- type = "ACTION_VARIANT";
- html = [[%LE("<i>Нет</i>")%%LNE("<ol><li>")%%LIST("<li>")%%LNE("</ol>")%]];
- checker = no_check;
- handler = get_children_concat_newline;
- }
-
- jsle:record "OVERTIME_EFFECT"
- {
- children =
- {
- [1] = "OT_EFFECT_TARGET";
- [2] = "NUMOP_VARIANT";
- [3] = "NUMOP_VARIANT";
- [4] = "BOOLOP_VARIANT";
- [5] = "OVERTIME_EFFECT_LIST";
- [6] = "OVERTIME_EFFECT_LIST";
- [7] = "OVERTIME_EFFECT_LIST";
- [8] = "OT_MODIFIER_LIST";
- [9] = "NUMOP_VARIANT"; -- TODO: Must be higher in the list. Straighten numbers on next version change (do not forget to fix texts)
- [10] = "NUMOP_VARIANT"; -- TODO: Must be higher in the list. Straighten numbers on next version change (do not forget to fix texts)
- [11] = { "GAME_MODES", default = GMF.ALL }; -- TODO: Must be higher in the list. Straighten numbers on next version change (do not forget to fix texts)
- [12] = { "BOOLEAN", default = 0 };
- };
- html = [[<br><b>Цель:</b> %C(1)%<br><b>Время жизни:</b> %C(2)% <i>(≥255 — бессрочно)</i><br><b>Период:</b> %C(3)%<br><b>Изначальный кулдаун:</b> %C(10)%<br><b>Сброс в конце боя:</b> %C(4)%<br><b>Остается при снятии всех эффектов вручную:</b> %C(12)%<br><b>Максимальное число одновременно активных эффектов:</b> %C(9)% <i>(0 — не ограничено)</i><br><b>Игровые режимы:</b> %C(11)%<h3>При изменении набора характеристик</h3>%C(5)%<h3>В конце хода цели</h3>%C(7)%<h3>Временные модификаторы <i>(кроме жизни)</i></h3>%C(8)%]];
- checker = no_check;
- handler = function(self, node)
- if
- node.value[5] ~= "" or
- node.value[6] ~= "" or
- node.value[7] ~= "" or
- node.value[8] ~= "{}"
- then
- -- Spawning OT effect only if have any actions in it.
- return node_children_placeholders_filler
- [[
- self:spawn_overtime_effect(
- $(1),
- $(2),
- $(3),
- $(10),
- $(4),
- $(9),
- function(self)
- $(5)
- end,
- function(self)
- $(6)
- end,
- function(self)
- $(7)
- end,
- $(8),
- $(11),
- $(12)
- )
- ]] (self, node)
- else
- return [[-- No OT effects]]
- end
- end;
- }
-
- jsle:list "OT_MODIFIER_LIST"
- {
- type = "OT_MODIFIER_VARIANT";
- html = [[%LE("<i>Нет</i>")%%LNE("<ol><li>")%%LIST("<li>")%%LNE("</ol>")%]];
- checker = no_check;
- handler = get_children_concat_table;
- }
-
- jsle:variant "OT_MODIFIER_VARIANT"
- {
- values =
- {
- { ["MOD_SET"] = [[Установить]] };
- { ["MOD_INC"] = [[Увеличить]] };
- { ["MOD_DEC"] = [[Уменьшить]] };
- { ["MOD_MULT"] = [[Умножить]] };
- };
- label = [["<i title=\"Модификатор\">M</i>"]];
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value;
- }
-
- jsle:record "MOD_SET"
- {
- children =
- {
- [1] = "PROPWRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Установить %C(1)% цели в %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return ($(2)) end; }]];
- }
-
- jsle:record "MOD_INC"
- {
- children =
- {
- [1] = "PROPWRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Увеличить %C(1)% цели на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return value + ($(2)) end; }]];
- }
-
- jsle:record "MOD_DEC"
- {
- children =
- {
- [1] = "PROPWRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Уменьшить %C(1)% цели на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return value - ($(2)) end; }]];
- }
-
- jsle:record "MOD_MULT"
- {
- children =
- {
- [1] = "PROPWRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Умножить %C(1)% цели на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[{ name = $(1), fn = function(self, value) return value * ($(2)) end; }]];
- }
-
- jsle:list "OVERTIME_EFFECT_LIST"
- {
- type = "ACTION_VARIANT";
- html = [[%LE("<i>Нет</i>")%%LNE("<ol><li>")%%LIST("<li>")%%LNE("</ol>")%]];
- checker = no_check;
- handler = get_children_concat_newline;
- }
-
- jsle:list "ACTION_LIST"
- {
- type = "ACTION_VARIANT";
- html = [[<ol><li>%LIST("<li>")%</ol>]];
- checker = non_empty_list;
- handler = get_children_concat_newline;
- }
-
- jsle:variant "ACTION_VARIANT"
- {
- values =
- {
- { ["ACT_SET"] = [[Установить]] };
- { ["ACT_INC"] = [[Увеличить]] };
- { ["ACT_DEC"] = [[Уменьшить]] };
- { ["ACT_MULT"] = [[Умножить]] };
- { ["ACT_DIRECTSET"] = [[Установить напрямую]] };
- { ["ACT_DIRECTINC"] = [[Увеличить напрямую]] };
- { ["ACT_DIRECTDEC"] = [[Уменьшить напрямую]] };
- { ["ACT_DIRECTMULT"] = [[Умножить напрямую]] };
- { ["ACT_FLDEXPLODE"] = [[Взорвать фишки]] };
- { ["ACT_FLDLEVELDELTA"] = [[Поднять уровень фишек]] };
- { ["ACT_FLDCOLLECT_COORDS"] = [[Собрать фишки по координатам]] };
- { ["ACT_FLDREPLACE_COORDS"] = [[Заменить фишки по координатам]] };
- { ["ACT_ONEMOREACTION"] = [[Дать ещё одно действие]] };
- { ["ACT_KEEPTIMEOUT"] = [[Не сбрасывать таймер]] };
- { ["ACT_SETVAR"] = [[Запомнить]] };
- { ["ACT_SETOBJVAR_LOCAL"] = [[Запомнить в объекте локально]] };
- { ["ACT_SETOBJVAR_GLOBAL"] = [[Запомнить в объекте глобально]] };
- { ["ACT_SETOBJVAR_OT"] = [[Запомнить в текущем овертайме]] };
- { ["ACT_DOIF"] = [[Если]] };
- { ["ACT_DOIFELSE"] = [[Если ... иначе]] };
- { ["ACT_PLAYABIANIM"] = [[Играть эффект абилки]] };
- { ["ACT_SENDCUSTOMMSG"] = [[Отправить данные клиентам]] };
- { ["ACT_INCSTAT"] = [[Увеличить статистику клиента]] };
- { ["ACT_ACTIVATEOT"] = [[Активировать ОТ-эффект]] };
- { ["ACT_REMOVE_OVERTIMES"] = [[Снять ОТ-эффекты]] };
- -- Keep these below --
- { ["ACT_FLDREPLACE"] = [[Заменить фишки <b><i>(устарело)</i></b>]] };
- { ["ACT_CRASH_GAME"] = [[УРОНИТЬ игру <b><i>(только для тестов)</i></b>]] };
- -- { ["PLAINLUA"] = [[Lua]] };
- };
- label = [["<i title=\"Действие\">A</i>"]];
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value;
- }
-
- declare_common(jsle, "ACT_DOIF", "ACT_DOIFELSE")
-
- jsle:record "ACT_SET"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Установить %C(1)% в %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propset($(1), $(2))]];
- }
-
- jsle:record "ACT_INC"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Увеличить %C(1)% на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propinc($(1), $(2))]];
- }
-
- jsle:record "ACT_DEC"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Уменьшить %C(1)% на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propdec($(1), $(2))]];
- }
-
- jsle:record "ACT_MULT"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Умножить %C(1)% на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propmult($(1), $(2))]];
- }
-
- jsle:record "ACT_DIRECTSET"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Установить напрямую %C(1)% в %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propset_direct($(1), $(2))]];
- }
-
- jsle:record "ACT_DIRECTINC"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Увеличить напрямую %C(1)% на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propinc_direct($(1), $(2))]];
- }
-
- jsle:record "ACT_DIRECTDEC"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Уменьшить напрямую %C(1)% на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propdec_direct($(1), $(2))]];
- }
-
- jsle:record "ACT_DIRECTMULT"
- {
- children =
- {
- [1] = "PROPPATH_WRITE";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Умножить напрямую %C(1)% на %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propmult_direct($(1), $(2))]];
- }
-
- jsle:record "ACT_FLDEXPLODE"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- [2] = "CHIPCOORD";
- };
- html = [[Взорвать бомбу радиусом %C(1)% в координатах %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_explode($(1), $(2))]];
- }
-
- jsle:record "ACT_FLDREPLACE"
- {
- children =
- {
- [1] = "CHIPTYPE";
- [2] = "NUMOP_VARIANT";
- [3] = "CHIPTYPE";
- [4] = "NUMOP_VARIANT";
- };
- html = [[Заменить %C(1)% уровня %C(2)% на %C(3)% уровня %C(4)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_replace($(1), $(2), $(3), $(4))]];
- doc = [[Deprecated, use other replace actions]];
- }
-
- jsle:record "ACT_FLDLEVELDELTA"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- [2] = "CHIPTYPE";
- [3] = "NUMOP_VARIANT";
- [4] = "NUMOP_VARIANT";
- };
- html = [[Поднять уровень %C(2)% на %C(1)% в диапазоне от %C(3)% до %C(4)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_level_delta($(1), $(2), $(3), $(4))]];
- }
-
- jsle:record "ACT_FLDCOLLECT_COORDS"
- {
- children =
- {
- [1] = "COORDLISTOP_VARIANT";
- };
- html = [[Собрать %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_collect_coords($(1))]];
- }
-
- jsle:record "ACT_FLDREPLACE_COORDS"
- {
- children =
- {
- [1] = "COORDLISTOP_VARIANT";
- [2] = "CHIPTYPE_LIST";
- [3] = "NUMOP_VARIANT";
- };
- html = [[Заменить %C(1)% на %C(2)% уровня %C(3)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_replace_coords($(1),$(2),$(3))]];
- }
-
- jsle:literal "ACT_ONEMOREACTION"
- {
- html = [[Дать ещё одно действие <i>(только мгновенный эффект)</i>]];
- checker = no_check;
- handler = invariant [[self:one_more_action()]];
- }
-
- jsle:literal "ACT_KEEPTIMEOUT"
- {
- html = [[Не сбрасывать таймер <i>(только мгновенный эффект)</i>]];
- checker = no_check;
- handler = invariant [[self:keep_timeout()]];
- }
-
- jsle:record "ACT_SETVAR"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Запомнить в №%C(1)% значение %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:setvar($(1), $(2))]];
- }
-
- jsle:enum "OT_EFFECT_TARGET"
- {
- values =
- {
- { [PO.SELF] = [[на себя]] };
- { [PO.OPP] = [[на противника]] };
- { [PO.TARGET] = [[на цель]] };
- };
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_quoted;
- }
-
- jsle:variant "BOOLOP_VARIANT"
- {
- values =
- {
- { ["BOOLEAN"] = [[Логическое значение]] };
- { ["BOOLOP_LT"] = [[<]] };
- { ["BOOLOP_LTE"] = [[≤]] };
- { ["BOOLOP_GT"] = [[>]] };
- { ["BOOLOP_GTE"] = [[≥]] };
- { ["BOOLOP_EQ"] = [[==]] };
- { ["BOOLOP_NEQ"] = [[!=]] };
- { ["BOOLOP_AND_MANY"] = [[И (Список)]] };
- { ["BOOLOP_OR_MANY"] = [[ИЛИ (Список)]] };
- { ["BOOLOP_NOT"] = [[НЕ]] };
- { ["BOOLOP_HAVEMEDAL"] = [[МЕДАЛЬ]] };
- { ["BOOLOP_ISACTIVE"] = [[Изменения инициированы целью овертайм-эффекта]] };
- { ["BOOLOP_IS_GAME_IN_MODE"] = [[Текущий игровой режим]] };
- -- Deprecated, keep below --
- { ["BOOLOP_AND"] = [[И]] };
- { ["BOOLOP_OR"] = [[ИЛИ]] };
- --{ ["PLAINLUA"] = [[Lua]] };
- };
- label = [["<i title=\"Логическая операция\">B</i>"]];
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value;
- }
-
- jsle:record "BOOLOP_HAVEMEDAL"
- {
- children =
- {
- [1] = "PROPOBJECT";
- [2] = "NUMOP_VARIANT";
- };
- html = [[есть медаль №%C(2)% %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:have_medal($(1), $(2))]];
- }
-
- jsle:literal "BOOLOP_ISACTIVE"
- {
- html = [[изменения инициированы целью овертайм-эффекта]];
- checker = no_check; -- Only for on_changeset event.
- handler = invariant [[self:is_overtime_target_active()]];
- }
-
- declare_common(
- jsle,
- "BOOLOP_LT",
- "BOOLOP_LTE",
- "BOOLOP_GT",
- "BOOLOP_GTE",
- "BOOLOP_EQ",
- "BOOLOP_NEQ",
- "BOOLOP_AND",
- "BOOLOP_OR",
- "BOOLOP_NOT"
- )
-
- jsle:variant "NUMOP_VARIANT"
- {
- values =
- {
- { ["NUMBER"] = [[Число]] };
- { ["NUMOP_ADD_MANY"] = [[+ (Список)]] };
- { ["NUMOP_DEC_MANY"] = [[- (Список)]] };
- { ["NUMOP_MUL_MANY"] = [[* (Список)]] };
- { ["NUMOP_DIV_MANY"] = [[/ (Список)]] };
- { ["NUMOP_POV"] = [[POW]] }; -- TODO: POW, not POV! Fix by search and replace
- { ["NUMOP_MOD"] = [[MOD]] };
- { ["NUMOP_MIN"] = [[MIN]] };
- { ["NUMOP_MAX"] = [[MAX]] };
- { ["NUMOP_UNM"] = [[Знак]] };
- { ["NUMOP_GET"] = [[Характеристика]] };
- { ["NUMOP_GET_RAW"] = [[Базовое значение характеристики]] };
- { ["NUMOP_GET_ABIPROP"] = [[Характеристика абилки]] };
- { ["NUMOP_PERCENT_ROLL"] = [[Cлучайный процент]] };
- { ["NUMOP_TEAMSIZE"] = [[Размер команды]] };
- { ["NUMOP_GETVAR"] = [[Вспомнить]] };
- { ["NUMOP_GETOBJVAR_LOCAL"] = [[Вспомнить из объекта локально]] };
- { ["NUMOP_GETOBJVAR_GLOBAL"] = [[Вспомнить из объекта глобально]] };
- { ["NUMOP_GETOBJVAR_OT"] = [[Вспомнить из текущего овертайма]] };
- { ["NUMOP_OTLIFETIMELEFT"] = [[Оставшееся время жизни]] };
- { ["NUMOP_OTLIFETIMETOTAL"] = [[Общее время жизни]] };
- { ["NUMOP_FLDGETQUANTITYOFCHIPS"] = [[Число фишек по цвету и уровню]] };
- { ["NUMOP_TARGETX"] = [[Координата X выбранной фишки]] };
- { ["NUMOP_TARGETY"] = [[Координата Y выбранной фишки]] };
- { ["NUMOP_OTEFFECTCOUNT"] = [[Число активных овертайм-эффектов]] };
- { ["NUMOP_IFF"] = [[Если]] };
- { ["NUMOP_GETUID"] = [[Идентификатор игрока]] };
- -- Keep these below --
- { ["NUMOP_FLDCOUNTCHIPS"] = [[Число фишек на поле <b><i>(устарело)</i></b>]] };
- { ["NUMOP_ADD"] = [[+]] };
- { ["NUMOP_DEC"] = [[-]] };
- { ["NUMOP_MUL"] = [[*]] };
- { ["NUMOP_DIV"] = [[/]] };
- { ["NUMOP_CRASH_GAME"] = [[УРОНИТЬ игру <b><i>(только для тестов)</i></b>]] };
- --{ ["PLAINLUA"] = [[Lua]] };
- };
- label = [["<i title=\"Численная операция\">I</i>"]];
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value;
- }
-
- declare_common(
- jsle,
- "NUMOP_ADD",
- "NUMOP_DEC",
- "NUMOP_MUL",
- "NUMOP_DIV",
- "NUMOP_POV",
- "NUMOP_MOD",
- "NUMOP_MIN",
- "NUMOP_MAX",
- "NUMOP_UNM"
- )
-
- jsle:record "NUMOP_GET"
- {
- children =
- {
- [1] = "PROPPATH_READ";
- };
- html = [[%C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propget($(1), false)]];
- }
-
- declare_common(jsle, "NUMOP_PERCENT_ROLL")
-
- jsle:record "NUMOP_FLDCOUNTCHIPS"
- {
- children =
- {
- [1] = "CHIPTYPE";
- [2] = "BOOLOP_VARIANT";
- };
- html = [[число %C(1)% на поле (учитывая уровни: %C(2)%)]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_count_chips($(1), $(2))]];
- doc = [[Deprecated, use other chip count operations]];
- }
-
- jsle:record "NUMOP_TEAMSIZE"
- {
- children =
- {
- [1] = "PROPOBJECT";
- };
- html = [[размер команды %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:team_size($(1))]];
- }
-
- jsle:record "NUMOP_GETVAR"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- };
- html = [[вспомнить из №%C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:getvar($(1))]];
- }
-
- jsle:literal "NUMOP_OTLIFETIMELEFT"
- {
- html = [[оставшееся время жизни]];
- checker = no_check;
- handler = invariant [[self:ot_lifetime_left()]];
- }
-
- jsle:literal "NUMOP_OTLIFETIMETOTAL"
- {
- html = [[общее время жизни]];
- checker = no_check;
- handler = invariant [[self:ot_lifetime_total()]];
- }
-
- jsle:literal "NUMOP_TARGETX"
- {
- html = [[X выбранной фишки]];
- checker = no_check;
- handler = invariant [[self:target_x()]];
- }
-
- jsle:literal "NUMOP_TARGETY"
- {
- html = [[Y выбранной фишки]];
- checker = no_check;
- handler = invariant [[self:target_y()]];
- }
-
- jsle:record "PROPPATH_WRITE"
- {
- children =
- {
- [1] = "PROPOBJECT";
- [2] = "PROPWRITE";
- };
- html = [[%C(2)% %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:make_proppath($(1), $(2))]];
- }
-
- jsle:record "PROPPATH_READ"
- {
- children =
- {
- [1] = "PROPOBJECT";
- [2] = "PROPREAD";
- };
- html = [[%C(2)% %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:make_proppath($(1), $(2))]];
- }
-
- jsle:enum "PROPOBJECT"
- {
- values =
- {
- { [PO.SELF] = [[у себя]] };
- { [PO.OPP] = [[у противника]] };
- { [PO.TARGET] = [[у цели]] };
- { [PO.OWN_CHANGESET] = [[в своём наборе изменений]] };
- { [PO.OPP_CHANGESET] = [[в наборе изменений противника]] };
- };
- html = [[%VALUE()%]];
- checker = no_check; -- Check value is valid for current action list subtype
- handler = get_value_quoted;
- }
-
- jsle:enum "PROPWRITE"
- {
- values = propwrite_values;
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_quoted;
- }
-
- jsle:enum "PROPREAD"
- {
- values = propread_values;
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_quoted;
- }
-
- jsle:enum "CHIPTYPE"
- {
- values =
- {
- { [CT.EMERALD] = [[зелёных фишек]] };
- { [CT.RUBY] = [[красных фишек]] };
- { [CT.AQUA] = [[синих фишек]] };
- { [CT.DMG] = [[черепов]] };
- { [CT.CHIP5] = [[фишек-5]] };
- { [CT.CHIP6] = [[фишек-6]] };
- { [CT.CHIP7] = [[фишек-7]] };
- { [CT.CHIP8] = [[фишек-8]] };
- { [CT.EMPTY] = [[пустых фишек]] };
- };
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_tonumber;
- numeric_keys = true;
- }
-
- jsle:edit "NUMBER"
- {
- size = 4;
- numeric = true;
- checker = check_tonumber;
- handler = get_value_tonumber;
- }
-
- declare_common(
- jsle,
- "BOOLEAN",
- "PLAINLUA"
- )
-
- jsle:list "COORDLISTOP_STD"
- {
- type = "CHIPCOORD";
- html = [[фишки с координатами %LIST(", ")%]];
- checker = non_empty_list;
- handler = get_children_concat_table;
- }
-
- jsle:record "CHIPCOORD"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- [2] = "NUMOP_VARIANT";
- };
- html = [[(x: %C(1)%, y: %C(2)%)]];
- checker = no_check;
- handler = node_children_placeholders_filler [[{x=$(1), y=$(2)}]];
- }
-
- -- TODO: UNUSED. Remove or use.
- jsle:record "BOOLOP_SELECTEDTARGET"
- {
- children =
- {
- [1] = "TARGET_VALUE";
- };
- html = [[выбрана цель %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:is_target_selected($(1))]];
- doc = [[Currently not used]];
- }
-
- jsle:record "NUMOP_OTEFFECTCOUNT"
- {
- children =
- {
- [1] = "PROPOBJECT";
- [2] = "NUMOP_VARIANT";
- [3] = "NUMOP_VARIANT";
- };
- html = [[число овертайм-эффектов абилки ID %C(2)% <i>(0 — этот эффект)</i> № эффекта %C(3)% <i>(0 — по умолчанию)</i>, активных %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:active_ot_effect_count($(1), $(2), $(3))]];
- }
-
- declare_common(jsle, "NUMOP_IFF")
-
- jsle:record "NUMOP_GET_RAW"
- {
- children =
- {
- [1] = "PROPPATH_READ";
- };
- html = [[базовое значение %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:propget($(1), true)]];
- }
-
- -- TODO: Get rid of non-list versions!
-
- declare_common(
- jsle,
- "NUMOP_ADD_MANY",
- "NUMOP_DEC_MANY",
- "NUMOP_MUL_MANY",
- "NUMOP_DIV_MANY"
- )
-
- declare_common(
- jsle,
- "BOOLOP_AND_MANY",
- "BOOLOP_OR_MANY"
- )
-
- jsle:list "CHIPTYPE_LIST"
- {
- type = "CHIPTYPE";
- html = [[%LIST(", ")%]];
- checker = non_empty_list;
- handler = get_children_concat_table;
- }
-
- jsle:record "NUMOP_GET_ABIPROP"
- {
- children =
- {
- [1] = "ABIPROP_NAME";
- };
- html = [[%C(1)% абилки]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:abipropget($(1))]];
- }
-
- jsle:enum "ABIPROP_NAME"
- {
- values =
- {
- { [AP.prob] = [[вероятность активации]] };
- };
- html = [[%VALUE()%]];
- checker = check_mapping_tonumber;
- handler = get_value_mapped_tonumber_quoted(abiprob_mapping);
- }
-
- jsle:record "ACT_SENDCUSTOMMSG"
- {
- children =
- {
- [1] = "NUMOP_LIST";
- };
- html = [[Отправить участникам боя данные: %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:send_custom_msg($(1))]];
- }
-
- declare_common(jsle, "NUMOP_LIST")
-
- jsle:record "ACT_PLAYABIANIM"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- };
- html = [[Играть эффект абилки ID: %C(1)%]];
- checker = no_check;
- -- Hack. Should format be hardcoded here or below?
- handler = node_children_placeholders_filler(
- [[self:send_custom_msg({]]..assert_is_number(CM.PLAYABIANIM)
- ..[[, $(1), self:get_uid("]]..PO.SELF..[[")})]]
- );
- }
-
- jsle:variant "COORDLISTOP_VARIANT"
- {
- values =
- {
- { ["COORDLISTOP_STD"] = [[Обычный список координат]] };
- { ["COORDLISTOP_GETLEVEL"] = [[Фишки цвета <i>цв1</i> с уровнями от <i>ур1</i> до <i>ур2</i>]] };
- };
- label = [["<i title=\"Список координат\">C</i>"]];
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value;
- }
-
- jsle:record "COORDLISTOP_GETLEVEL"
- {
- children =
- {
- [1] = "CHIPTYPE";
- [2] = "NUMOP_VARIANT";
- [3] = "NUMOP_VARIANT";
- };
- html = [[%C(1)% с уровнями от %C(2)% до %C(3)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_get_coordlist_from_levels_and_type($(1), $(2), $(3))]];
- }
-
- jsle:record "NUMOP_FLDGETQUANTITYOFCHIPS"
- {
- children =
- {
- [1] = "CHIPTYPE";
- [2] = "NUMOP_VARIANT";
- [3] = "NUMOP_VARIANT";
- [4] = "BOOLOP_VARIANT";
- };
- html = [[число %C(1)% на поле уровней с %C(2)% до %C(3)% (учитывая уровень в счетчике: %C(4)%)]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:fld_get_quantity_of_chips($(1), $(2), $(3), $(4))]];
- }
-
- jsle:enum "CLIENTSTAT"
- {
- values =
- {
- -- TODO: Support commented out variants?
- { [CST.SPELL_USE] = [[исп. спеллов]] };
- --{ [CST.SPELL_FRAG] = [[фраги от спеллов]] };
- { [CST.CONSUMABLE_USE] = [[исп. расходников]] };
- --{ [CST.CONSUMABLE_FRAG] = [[фраги от расходников]] };
- { [CST.AUTOABILITY_USE] = [[исп. автоабилок]] };
- --{ [CST.AUTOABILITY_FRAG] = [[фраги от автоабилок]] };
- --{ [CST.RATING] = [[рейтинг]] };
- --{ [CST.CUSTOM] = [[пользовательская]] };
- };
- html = [[%VALUE()%]];
- checker = check_mapping_tonumber;
- handler = get_value_tonumber;
- }
-
- jsle:record "ACT_INCSTAT"
- {
- children =
- {
- [1] = "PROPOBJECT";
- [2] = "CLIENTSTAT";
- [3] = "NUMOP_VARIANT";
- [4] = "NUMOP_VARIANT";
- };
- html = [[Увеличить %C(1)% статистику «%C(2)%» эффекта №%C(3)% <i>(0 — текущий)</i> на %C(4)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:inc_client_stat($(1), $(2), $(3), $(4))]];
- }
-
- jsle:record "ACT_ACTIVATEOT"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- [2] = { "KEYVALUE_LIST", default = empty_table };
- };
- html = [[Активировать ОТ-эффект №%C(1)%, передав %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:activate_custom_ot_effect($(1),$(2))]];
- }
-
- jsle:list "CUSTOM_OVERTIME_EFFECTS"
- {
- type = "OVERTIME_EFFECT";
- html = [[%LE("<i>(Нет дополнительных ОТ-эффектов)</i>")%%LNE("<ol><li><h2>Дополнительный OT-эффект</h2>")%%LIST("<hr><li><h2>Дополнительный OT-эффект</h2>")%%LNE("</ol>")%]];
- checker = no_check;
- handler = function(self, node)
- local buf = {[[{]]}
- local _ = function(v) buf[#buf + 1] = tostring(v) end
- for i, child in ipairs(node.value) do
- _ [[
-[]] _(i) _[[] = function(self)
-]] _(child) _ [[
-end;
-]]
- end
- _ [[}]]
- return table.concat(buf)
- end;
- }
-
- jsle:record "NUMOP_GETUID"
- {
- children =
- {
- [1] = "PROPOBJECT";
- };
- html = [[идентификатор игрока %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:get_uid($(1))]];
- }
-
- jsle:enum "STORE_OBJ"
- {
- values =
- {
- { [SO.CLIENT_SELF] = [[на себе]] };
- { [SO.CLIENT_OPP] = [[на противнике]] };
- { [SO.CLIENT_TARGET] = [[на цели]] };
- { [SO.FIGHT] = [[на бою]] };
- { [SO.GAME] = [[на игре]] };
- };
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_tonumber;
- }
-
- jsle:record "ACT_SETOBJVAR_LOCAL"
- {
- children =
- {
- [1] = "STORE_OBJ";
- [2] = "NUMOP_VARIANT";
- [3] = "NUMOP_VARIANT";
- };
- html = [[Запомнить в объекте «%C(1)%» в слот №%C(2)% <b>приватное</b> значение %C(3)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:setobjvar_local($(1), $(2), $(3))]];
- }
-
- jsle:record "NUMOP_GETOBJVAR_LOCAL"
- {
- children =
- {
- [1] = "STORE_OBJ";
- [2] = "NUMOP_VARIANT";
- };
- html = [[вспомнить из объекта «%C(1)%» из слота №%C(2)% <b>приватное</b> значение]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:getobjvar_local($(1), $(2))]];
- }
-
- jsle:record "ACT_SETOBJVAR_GLOBAL"
- {
- children =
- {
- [1] = "STORE_OBJ";
- [2] = "NUMOP_VARIANT";
- [3] = "NUMOP_VARIANT";
- };
- html = [[Запомнить в объекте %C(1)% в слот №%C(2)% <b>публичное</b> значение %C(3)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:setobjvar_global($(1), $(2), $(3))]];
- }
-
- jsle:record "NUMOP_GETOBJVAR_GLOBAL"
- {
- children =
- {
- [1] = "STORE_OBJ";
- [2] = "NUMOP_VARIANT";
- };
- html = [[вспомнить из объекта %C(1)% из слота №%C(2)% <b>публичное</b> значение]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:getobjvar_global($(1), $(2))]];
- }
-
- jsle:record "ACT_REMOVE_OVERTIMES"
- {
- children =
- {
- [1] = "OT_EFFECT_TARGET";
- };
- html = [[Снять все эффекты, наложенные %C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:remove_overtime_effects($(1))]];
- }
-
- jsle:enum "GAME_MODES"
- {
- values =
- {
- { [GMF.ALL] = [[любой]] };
- { [GMF.DUEL] = [[дуэль]] };
- { [GMF.SINGLE] = [[одиночная игра]] };
- };
- html = [[%VALUE()%]];
- checker = no_check;
- handler = get_value_tonumber;
- }
-
- jsle:record "BOOLOP_IS_GAME_IN_MODE"
- {
- children =
- {
- [1] = "GAME_MODES";
- };
- html = [[игровой режим «%C(1)%» включён]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:is_game_in_mode($(1))]];
- }
-
- jsle:record "ACT_SETOBJVAR_OT"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- [2] = "NUMOP_VARIANT";
- };
- html = [[Запомнить в текущем овертайме в слот №%C(1)% значение %C(2)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:setobjvar_ot($(1), $(2))]];
- }
-
- jsle:record "NUMOP_GETOBJVAR_OT"
- {
- children =
- {
- [1] = "NUMOP_VARIANT";
- };
- html = [[Вспомнить из текущего овертайма из слота №%C(1)%]];
- checker = no_check;
- handler = node_children_placeholders_filler [[self:getobjvar_ot($(1))]];
- }
-
- declare_common(
- jsle,
- "KEYVALUE_LIST",
- "KEYVALUE"
- )
-
- jsle:literal "ACT_CRASH_GAME"
- {
- html = [[<span style="color:red"><b>УРОНИТЬ</b> игру (только для теста)</span>]];
- checker = function(self, node)
- if common_get_config().crashers_enabled == true then
- errr("WARNING: ACT_CRASH_GAME CRASHER IS ON")
- return true
- end
-
- errr("DETECTED ATTEMPT TO UPLOAD CRASHERS (SCHEMA)")
- return false, "crashers are disabled in config"
- end;
- handler = invariant [[self:crash_game()]];
- }
-
- jsle:literal "NUMOP_CRASH_GAME"
- {
- html = [[<span style="color:red"><b>УРОНИТЬ</b> игру (только для теста)</span>]];
- checker = function(self, node)
- if common_get_config().crashers_enabled == true then
- errr("WARNING: NUMOP_CRASH_GAME CRASHER IS ON")
- return true
- end
-
- errr("DETECTED ATTEMPT TO UPLOAD CRASHERS (SCHEMA)")
- return false, "crashers are disabled in config"
- end;
- handler = invariant [[(self:crash_game() or 0)]];
- }
-
- return jsle
-end
-
-return
-{
- define_schema = define_schema;
-}
+++ /dev/null
-do
- print("scope")
-end
+++ /dev/null
-sample=[==========[perl -e 'print "<IMG SRC=javascript:alert(\"XSS\")>";' > out]==========]
-sample=[==========[perl -e 'print "<IMG SRC=javascript:alert(\"XSS\")>";' > out]==========]
+++ /dev/null
-repeat until true
+++ /dev/null
--- Run all *.lua and *.mlua files in this directory.
--- This makes it easy to run all tests in the directory.
-
--{ extension 'xloop' }
-
-LS_COMMANDS = { "ls", "dir /b" }
-for i, cmd in ipairs(LS_COMMANDS) do
- local f = io.popen (cmd)
- ls = f :read '*a'
- f :close()
- if ls ~= '' then
- break
- elseif i == #LS_COMMANDS then
- error "Can't figure out how to list files on your OS"
- end
-end
-
-this_script = arg[1]
-
-local errors = {}
-
-for filename in ls :gmatch "[^\n]+" if filename ~= this_script and filename :strmatch "%.m?lua$" do
- printf ("*** running %s ***", filename)
- local ret = os.execute ("metalua "..filename)
- if ret ~= 0 then
- errors[#errors + 1] = "Test "..filename.." failed, returned "..ret
- end
-end
-
-if #errors > 0 then
- print("\n\n================================================================================")
- error(
- "TEST FAILURES DETECTED:\n" ..
- "-----------------------\n" ..
- " * " .. table.concat(errors, "\n * ")
- )
-end